| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses 1) | license (stringclasses 15) | size (int32 2-1.05M) |
|---|---|---|---|---|---|
import pytest
from django.test import RequestFactory
from iati.factory import iati_factory
from api.sector import serializers
class TestSectorSerializers:
request_dummy = RequestFactory().get('/')
def test_SectorSerializer(self):
sector = iati_factory.SectorFactory.build(
code=10,
name='Sector A',
description='Description A'
)
serializer = serializers.SectorSerializer(
sector,
context={'request': self.request_dummy}
)
assert serializer.data['code'] == sector.code, \
"""
the data in sector.code should be serialized to a field named code
inside the serialized object
"""
assert serializer.data['name'] == sector.name, \
"""
the data in sector.name should be serialized to a field named name
inside the serialized object
"""
assert serializer.data['description'] == sector.description, \
"""
the data in sector.description should be serialized to a field named description
inside the serialized object
"""
required_fields = (
'url',
'activities',
'category'
)
assertion_msg = "the field '{0}' should be in the serialized sector"
for field in required_fields:
assert field in serializer.data, assertion_msg.format(field)
def test_SectorCategorySerializer(self):
sector_category = iati_factory.SectorCategoryFactory.build(
code=2,
)
serializer = serializers.SectorCategorySerializer(sector_category)
assert serializer.data['code'] == sector_category.code,\
"""
'sector_category.code' should be serialized to a field called 'code'
"""
| tokatikato/OIPA | OIPA/api/sector/tests/test_serializers.py | Python | agpl-3.0 | 1,865 |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from shuup.admin.utils.picotable import Column, TextFilter
from shuup.admin.utils.views import PicotableListView
from shuup.core.models import ProductType
class ProductTypeListView(PicotableListView):
model = ProductType
default_columns = [
Column(
"name",
_("Name"),
sort_field="translations__name",
display="name",
filter_config=TextFilter(filter_field="translations__name", placeholder=_("Filter by name...")),
),
Column("n_attributes", _("Number of Attributes")),
]
toolbar_buttons_provider_key = "product_type_list_toolbar_provider"
mass_actions_provider_key = "product_type_list_mass_actions_provider"
def get_queryset(self):
return ProductType.objects.all().annotate(n_attributes=Count("attributes"))
| shoopio/shoop | shuup/admin/modules/product_types/views/list.py | Python | agpl-3.0 | 1,214 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from udata.api import api, fields, API
from udata.auth import current_user
from .actions import get_notifications
notifs = api.namespace('notifications', 'Notifications API')
notifications_fields = api.model('Notification', {
'type': fields.String(description='The notification type', readonly=True),
'created_on': fields.ISODateTime(
description='The notification creation datetime', readonly=True),
'details': fields.Raw(
description='Key-Value details depending on notification type',
readonly=True)
})
@notifs.route('/', endpoint='notifications')
class NotificationsAPI(API):
@api.secure
@api.doc('get_notifications')
@api.marshal_list_with(notifications_fields)
def get(self):
'''List all current user pending notifications'''
user = current_user._get_current_object()
return get_notifications(user)
| jphnoel/udata | udata/features/notifications/api.py | Python | agpl-3.0 | 951 |
# Copyright 2015-2016 Matthieu Dietrich (Camptocamp SA)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
import datetime
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models
_logger = logging.getLogger(__name__)
class FetchmailServer(models.Model):
"""Incoming POP/IMAP mail server account."""
_inherit = 'fetchmail.server'
cleanup_days = fields.Integer(
string='Expiration days',
help="Number of days before marking an e-mail as read",
)
cleanup_folder = fields.Char(
string='Expiration folder',
help="Folder where an e-mail marked as read will be moved.",
)
purge_days = fields.Integer(
string='Deletion days',
help="Number of days before removing an e-mail",
)
@property
def _server_env_fields(self):
base_fields = super()._server_env_fields
mail_cleanup_fields = {
'cleanup_days': {
'getter': 'getint',
},
'purge_days': {
'getter': 'getint',
},
'cleanup_folder': {},
}
mail_cleanup_fields.update(base_fields)
return mail_cleanup_fields
def _cleanup_fetchmail_server(self, server, imap_server):
count, failed = 0, 0
expiration_date = datetime.date.today()
expiration_date -= relativedelta(days=server.cleanup_days)
search_text = expiration_date.strftime('(UNSEEN BEFORE %d-%b-%Y)')
imap_server.select()
result, data = imap_server.search(None, search_text)
for num in data[0].split():
try:
# Mark message as read
imap_server.store(num, '+FLAGS', '\\Seen')
if server.cleanup_folder:
# To move a message, you have to COPY
# then DELETE the message
result = imap_server.copy(num, server.cleanup_folder)
if result[0] == 'OK':
imap_server.store(num, '+FLAGS', '\\Deleted')
except Exception:
_logger.exception(
'Failed to cleanup mail from %s server %s.',
server.type, server.name)
failed += 1
count += 1
_logger.info(
'Marked %d email(s) as read on %s server %s;'
' %d succeeded, %d failed.', count, server.type,
server.name, (count - failed), failed)
def _purge_fetchmail_server(self, server, imap_server):
# Purging e-mails older than the purge date, if available
count, failed = 0, 0
purge_date = datetime.date.today()
purge_date -= relativedelta(days=server.purge_days)
search_text = purge_date.strftime('(BEFORE %d-%b-%Y)')
imap_server.select()
result, data = imap_server.search(None, search_text)
for num in data[0].split():
try:
# Delete message
imap_server.store(num, '+FLAGS', '\\Deleted')
except Exception:
_logger.exception(
'Failed to remove mail from %s server %s.',
server.type, server.name)
failed += 1
count += 1
_logger.info(
'Removed %d email(s) on %s server %s;'
' %d succeeded, %d failed.', count, server.type,
server.name, (count - failed), failed)
@api.multi
def fetch_mail(self):
# Called before the fetch, in order to clean up right before
# retrieving emails.
for server in self:
_logger.info('start cleaning up emails on %s server %s',
server.type, server.name)
imap_server = False
if server.type == 'imap':
try:
imap_server = server.connect()
if server.cleanup_days > 0:
self._cleanup_fetchmail_server(server, imap_server)
if server.purge_days > 0:
self._purge_fetchmail_server(server, imap_server)
# Do the final cleanup: delete all messages
# flagged as deleted
imap_server.expunge()
except Exception:
_logger.exception(
'General failure when trying to cleanup'
' mail from %s server %s.',
server.type, server.name)
finally:
if imap_server:
imap_server.close()
imap_server.logout()
return super().fetch_mail()
| brain-tec/server-tools | mail_cleanup/models/fetchmail_server.py | Python | agpl-3.0 | 4,725 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# NoC TBM simulation support - Utilities
# This module declares additional helper functions
#
# Author: Oscar Diaz
# Version: 0.2
# Date: 17-03-2011
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330,
# Boston, MA 02111-1307 USA
#
#
# Changelog:
#
# 03-03-2011 : (OD) initial release
#
"""
=============================
NoCmodel TBM simulation utils
=============================
This module declares additional helper functions.
* Function 'add_tbm_basic_support'
"""
from noc_tbm_base import *
from nocmodel.basicmodels import *
# helper functions
def add_tbm_basic_support(instance, **kwargs):
"""
This function will add for every object in noc_instance a noc_tbm object
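Typical usage (a sketch; "my_noc" is assumed to be an existing noc model built
with nocmodel, and the logging keyword is illustrative):
    add_tbm_basic_support(my_noc, log_file="tbm_sim.log")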
"""
if isinstance(instance, noc):
# add simulation object
instance.tbmsim = noc_tbm_simulation(instance, **kwargs)
# and add tbm objects recursively
for obj in instance.all_list():
# copy kwargs so the simulation-only logging options are stripped
# only for the sub-objects, not for the caller's dict
altkwargs = dict(kwargs)
altkwargs.pop("log_file", None)
altkwargs.pop("log_level", None)
add_tbm_basic_support(obj, **altkwargs)
elif isinstance(instance, ipcore):
instance.tbm = basic_ipcore_tbm(instance, **kwargs)
# don't forget internal channel
instance.channel_ref.tbm = basic_channel_tbm(instance.channel_ref, **kwargs)
elif isinstance(instance, router):
instance.tbm = basic_router_tbm(instance, **kwargs)
elif isinstance(instance, channel):
instance.tbm = basic_channel_tbm(instance, **kwargs)
else:
raise TypeError("Unsupported object: type %s" % type(instance))
| dargor0/nocmodel | nocmodel/noc_tbm_utils.py | Python | lgpl-2.1 | 2,287 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
This module implements the concept of Dictionary -- a mapping between words and
their integer ids.
Dictionaries can be created from a corpus and can later be pruned according to
document frequency (removing (un)common words via the :func:`Dictionary.filter_extremes` method),
save/loaded from disk (via :func:`Dictionary.save` and :func:`Dictionary.load` methods), merged
with other dictionary (:func:`Dictionary.merge_with`) etc.
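A minimal usage sketch (token ids are assigned in insertion order, so they are
shown only symbolically here):
>>> texts = [["human", "machine", "interface"], ["machine", "learning"]]
>>> dct = Dictionary(texts)                      # build the word <-> id mapping
>>> bow = dct.doc2bow(["machine", "learning", "machine"])  # [(token_id, count), ...]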
"""
from __future__ import with_statement
from collections import Mapping
import sys
import logging
import itertools
from gensim import utils
if sys.version_info[0] >= 3:
unicode = str
from six import PY3, iteritems, iterkeys, itervalues, string_types
from six.moves import xrange
from six.moves import zip as izip
logger = logging.getLogger('gensim.corpora.dictionary')
class Dictionary(utils.SaveLoad, Mapping):
"""
Dictionary encapsulates the mapping between normalized words and their integer ids.
The main function is `doc2bow`, which converts a collection of words to its
bag-of-words representation: a list of (word_id, word_frequency) 2-tuples.
"""
def __init__(self, documents=None, prune_at=2000000):
"""
If `documents` are given, use them to initialize Dictionary (see `add_documents()`).
"""
self.token2id = {} # token -> tokenId
self.id2token = {} # reverse mapping for token2id; only formed on request, to save memory
self.dfs = {} # document frequencies: tokenId -> in how many documents this token appeared
self.num_docs = 0 # number of documents processed
self.num_pos = 0 # total number of corpus positions
self.num_nnz = 0 # total number of non-zeroes in the BOW matrix
if documents is not None:
self.add_documents(documents, prune_at=prune_at)
def __getitem__(self, tokenid):
if len(self.id2token) != len(self.token2id):
# the word->id mapping has changed (presumably via add_documents);
# recompute id->word accordingly
self.id2token = dict((v, k) for k, v in iteritems(self.token2id))
return self.id2token[tokenid] # will throw for non-existent ids
def __iter__(self):
return iter(self.keys())
if PY3:
# restore Py2-style dict API
iterkeys = __iter__
def iteritems(self):
return self.items()
def itervalues(self):
return self.values()
def keys(self):
"""Return a list of all token ids."""
return list(self.token2id.values())
def __len__(self):
"""
Return the number of token->id mappings in the dictionary.
"""
return len(self.token2id)
def __str__(self):
some_keys = list(itertools.islice(iterkeys(self.token2id), 5))
return "Dictionary(%i unique tokens: %s%s)" % (len(self), some_keys, '...' if len(self) > 5 else '')
@staticmethod
def from_documents(documents):
return Dictionary(documents=documents)
def add_documents(self, documents, prune_at=2000000):
"""
Update dictionary from a collection of documents. Each document is a list
of tokens = **tokenized and normalized** strings (either utf8 or unicode).
This is a convenience wrapper for calling `doc2bow` on each document
with `allow_update=True`, which also prunes infrequent words, keeping the
total number of unique words <= `prune_at`. This is to save memory on very
large inputs. To disable this pruning, set `prune_at=None`.
>>> print(Dictionary(["máma mele maso".split(), "ema má máma".split()]))
Dictionary(5 unique tokens)
"""
for docno, document in enumerate(documents):
# log progress & run a regular check for pruning, once every 10k docs
if docno % 10000 == 0:
if prune_at is not None and len(self) > prune_at:
self.filter_extremes(no_below=0, no_above=1.0, keep_n=prune_at)
logger.info("adding document #%i to %s", docno, self)
# update Dictionary with the document
_ = self.doc2bow(document, allow_update=True) # ignore the result, here we only care about updating token ids
logger.info("built %s from %i documents (total %i corpus positions)",
self, self.num_docs, self.num_pos)
def doc2bow(self, document, allow_update=False, return_missing=False):
"""
Convert `document` (a list of words) into the bag-of-words format = list
of `(token_id, token_count)` 2-tuples. Each word is assumed to be a
**tokenized and normalized** string (either unicode or utf8-encoded). No further preprocessing
is done on the words in `document`; apply tokenization, stemming etc. before
calling this method.
If `allow_update` is set, then also update dictionary in the process: create
ids for new words. At the same time, update document frequencies -- for
each word appearing in this document, increase its document frequency (`self.dfs`)
by one.
If `allow_update` is **not** set, this function is `const`, aka read-only.
"""
result = {}
missing = {}
if isinstance(document, string_types):
raise TypeError("doc2bow expects an array of unicode tokens on input, not a single string")
document = sorted(utils.to_unicode(token) for token in document)
# construct (word, frequency) mapping. in python3 this is done simply
# using Counter(), but here i use itertools.groupby() for the job
for word_norm, group in itertools.groupby(document):
frequency = len(list(group)) # how many times does this word appear in the input document
tokenid = self.token2id.get(word_norm, None)
if tokenid is None:
# first time we see this token (~normalized form)
if return_missing:
missing[word_norm] = frequency
if not allow_update: # if we aren't allowed to create new tokens, continue with the next unique token
continue
tokenid = len(self.token2id)
self.token2id[word_norm] = tokenid # new id = number of ids made so far; NOTE this assumes there are no gaps in the id sequence!
# update how many times a token appeared in the document
result[tokenid] = frequency
if allow_update:
self.num_docs += 1
self.num_pos += len(document)
self.num_nnz += len(result)
# increase document count for each unique token that appeared in the document
for tokenid in iterkeys(result):
self.dfs[tokenid] = self.dfs.get(tokenid, 0) + 1
# return tokenids, in ascending id order
result = sorted(iteritems(result))
if return_missing:
return result, missing
else:
return result
def filter_extremes(self, no_below=5, no_above=0.5, keep_n=100000):
"""
Filter out tokens that appear in
1. less than `no_below` documents (absolute number) or
2. more than `no_above` documents (fraction of total corpus size, *not*
absolute number).
3. after (1) and (2), keep only the first `keep_n` most frequent tokens (or
keep all if `None`).
After the pruning, shrink resulting gaps in word ids.
**Note**: Due to the gap shrinking, the same word may have a different
word id before and after the call to this function!
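For example, to keep only tokens that appear in at least 20 documents and in at
most half of all documents (a sketch; prunes the dictionary in place):
>>> dct.filter_extremes(no_below=20, no_above=0.5, keep_n=100000)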
"""
no_above_abs = int(no_above * self.num_docs) # convert fractional threshold to absolute threshold
# determine which tokens to keep
good_ids = (v for v in itervalues(self.token2id)
if no_below <= self.dfs.get(v, 0) <= no_above_abs)
good_ids = sorted(good_ids, key=self.dfs.get, reverse=True)
if keep_n is not None:
good_ids = good_ids[:keep_n]
bad_words = [(self[id], self.dfs.get(id, 0)) for id in set(self).difference(good_ids)]
logger.info("discarding %i tokens: %s...", len(self) - len(good_ids), bad_words[:10])
logger.info("keeping %i tokens which were in no less than %i and no more than %i (=%.1f%%) documents",
len(good_ids), no_below, no_above_abs, 100.0 * no_above)
# do the actual filtering, then rebuild dictionary to remove gaps in ids
self.filter_tokens(good_ids=good_ids)
logger.info("resulting dictionary: %s" % self)
def filter_tokens(self, bad_ids=None, good_ids=None):
"""
Remove the selected `bad_ids` tokens from all dictionary mappings, or, keep
selected `good_ids` in the mapping and remove the rest.
`bad_ids` and `good_ids` are collections of word ids to be removed.
"""
if bad_ids is not None:
bad_ids = set(bad_ids)
self.token2id = dict((token, tokenid)
for token, tokenid in iteritems(self.token2id)
if tokenid not in bad_ids)
self.dfs = dict((tokenid, freq)
for tokenid, freq in iteritems(self.dfs)
if tokenid not in bad_ids)
if good_ids is not None:
good_ids = set(good_ids)
self.token2id = dict((token, tokenid)
for token, tokenid in iteritems(self.token2id)
if tokenid in good_ids)
self.dfs = dict((tokenid, freq)
for tokenid, freq in iteritems(self.dfs)
if tokenid in good_ids)
self.compactify()
def compactify(self):
"""
Assign new word ids to all words.
This is done to make the ids more compact, e.g. after some tokens have
been removed via :func:`filter_tokens` and there are gaps in the id series.
Calling this method will remove the gaps.
"""
logger.debug("rebuilding dictionary, shrinking gaps")
# build mapping from old id -> new id
idmap = dict(izip(itervalues(self.token2id), xrange(len(self.token2id))))
# reassign mappings to new ids
self.token2id = dict((token, idmap[tokenid]) for token, tokenid in iteritems(self.token2id))
self.id2token = {}
self.dfs = dict((idmap[tokenid], freq) for tokenid, freq in iteritems(self.dfs))
def save_as_text(self, fname, sort_by_word=True):
"""
Save this Dictionary to a text file, in format:
`id[TAB]word_utf8[TAB]document frequency[NEWLINE]`. Sorted by word,
or by decreasing word frequency.
Note: text format should be used for corpus inspection. Use `save`/`load`
to store in binary format (pickle) for improved performance.
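A round-trip sketch (assumes the given path is writable):
>>> dct.save_as_text('/tmp/mydict.txt')              # one id<TAB>word<TAB>docfreq line per token
>>> dct2 = Dictionary.load_from_text('/tmp/mydict.txt')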
"""
logger.info("saving dictionary mapping to %s" % fname)
with utils.smart_open(fname, 'wb') as fout:
if sort_by_word:
for token, tokenid in sorted(iteritems(self.token2id)):
line = "%i\t%s\t%i\n" % (tokenid, token, self.dfs.get(tokenid, 0))
fout.write(utils.to_utf8(line))
else:
for tokenid, freq in sorted(iteritems(self.dfs), key=lambda item: -item[1]):
line = "%i\t%s\t%i\n" % (tokenid, self[tokenid], freq)
fout.write(utils.to_utf8(line))
def merge_with(self, other):
"""
Merge another dictionary into this dictionary, mapping same tokens to the
same ids and new tokens to new ids. The purpose is to merge two corpora
created using two different dictionaries, one from `self` and one from `other`.
`other` can be any id=>word mapping (a dict, a Dictionary object, ...).
Return a transformation object which, when accessed as `result[doc_from_other_corpus]`,
will convert documents from a corpus built using the `other` dictionary
into a document using the new, merged dictionary (see :class:`gensim.interfaces.TransformationABC`).
Example:
>>> dict1 = Dictionary(some_documents)
>>> dict2 = Dictionary(other_documents) # ids not compatible with dict1!
>>> dict2_to_dict1 = dict1.merge_with(dict2)
>>> # now we can merge corpora from the two incompatible dictionaries into one
>>> merged_corpus = itertools.chain(some_corpus_from_dict1, dict2_to_dict1[some_corpus_from_dict2])
"""
old2new = {}
for other_id, other_token in iteritems(other):
if other_token in self.token2id:
new_id = self.token2id[other_token]
else:
new_id = len(self.token2id)
self.token2id[other_token] = new_id
self.dfs[new_id] = 0
old2new[other_id] = new_id
try:
self.dfs[new_id] += other.dfs[other_id]
except:
# `other` isn't a Dictionary (probably just a dict) => ignore dfs, keep going
pass
try:
self.num_docs += other.num_docs
self.num_nnz += other.num_nnz
self.num_pos += other.num_pos
except:
pass
import gensim.models
return gensim.models.VocabTransform(old2new)
@staticmethod
def load_from_text(fname):
"""
Load a previously stored Dictionary from a text file.
Mirror function to `save_as_text`.
"""
result = Dictionary()
with utils.smart_open(fname) as f:
for lineno, line in enumerate(f):
line = utils.to_unicode(line)
try:
wordid, word, docfreq = line[:-1].split('\t')
except Exception:
raise ValueError("invalid line in dictionary file %s: %s"
% (fname, line.strip()))
wordid = int(wordid)
if word in result.token2id:
raise KeyError('token %s is defined as ID %d and as ID %d' % (word, wordid, result.token2id[word]))
result.token2id[word] = wordid
result.dfs[wordid] = int(docfreq)
return result
@staticmethod
def from_corpus(corpus, id2word=None):
"""
Create Dictionary from an existing corpus. This can be useful if you only
have a term-document BOW matrix (represented by `corpus`), but not the
original text corpus.
This will scan the term-document count matrix for all word ids that
appear in it, then construct and return Dictionary which maps each
`word_id -> id2word[word_id]`.
`id2word` is an optional dictionary that maps the `word_id` to a token. In
case `id2word` isn't specified the mapping `id2word[word_id] = str(word_id)`
will be used.
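A small sketch with a purely numeric BOW corpus (no real tokens are known, so
ids fall back to their string form):
>>> corpus = [[(0, 2), (1, 1)], [(1, 1), (2, 3)]]
>>> dct = Dictionary.from_corpus(corpus)
>>> len(dct)
3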
"""
result = Dictionary()
max_id = -1
for docno, document in enumerate(corpus):
if docno % 10000 == 0:
logger.info("adding document #%i to %s" % (docno, result))
result.num_docs += 1
result.num_nnz += len(document)
for wordid, word_freq in document:
max_id = max(wordid, max_id)
result.num_pos += word_freq
result.dfs[wordid] = result.dfs.get(wordid, 0) + 1
if id2word is None:
# make sure length(result) == get_max_id(corpus) + 1
result.token2id = dict((unicode(i), i) for i in xrange(max_id + 1))
else:
# id=>word mapping given: simply copy it
result.token2id = dict((utils.to_unicode(token), id) for id, token in iteritems(id2word))
for id in itervalues(result.token2id):
# make sure all token ids have a valid `dfs` entry
result.dfs[id] = result.dfs.get(id, 0)
logger.info("built %s from %i documents (total %i corpus positions)" %
(result, result.num_docs, result.num_pos))
return result
#endclass Dictionary
| nvoron23/gensim | gensim/corpora/dictionary.py | Python | lgpl-3.0 | 16,434 |
#!/usr/bin/env python
from pymt import *
class DoubleTapIndicator(MTWidget):
def __init__(self, **kwargs):
self.red = True
w = getWindow()
self.diameter = max(min(*w.size)/8., 20)
kwargs["size"] = (self.diameter, ) * 2
super(DoubleTapIndicator, self).__init__(**kwargs)
def on_touch_down(self, touch):
if touch.is_double_tap and self.collide_point(*touch.pos):
self.red = not self.red
def draw(self):
if self.red:
set_color(1, 0, 0)
else:
set_color(0, 0, 1)
drawRectangle(self.pos, self.size)
class DoubleTapSettingsAdjuster(MTWidget):
"""Explanation:
This tool can be used to adjust the doubletap settings to your liking.
You can adjust the maximum distance that the second touch of a doubletap may
travel before the two touches are no longer considered a doubletap.
Additionally, you can adjust the maximum time that may pass between two
touches for them to still be considered a doubletap.
"""
def __init__(self, **kwargs):
super(DoubleTapSettingsAdjuster, self).__init__(**kwargs)
self.module = m = pymt_postproc_modules["doubletap"]
self.orig_distance = m.double_tap_distance
self.orig_time = m.double_tap_time
self.distance_slider = MTSlider(min=0, max=1000, value=self.orig_distance * 1000,
value_show=True, orientation="horizontal")
self.time_slider = MTSlider(min=0, max=2000, value=self.orig_time * 1000,
value_show=True, orientation="horizontal")
self.distance_slider.connect("on_value_change", self.distance_callback)
self.time_slider.connect("on_value_change", self.time_callback)
distlabel = MTLabel(anchor_x='left', anchor_y='bottom',
autosize=True, label="Maximum Distance:")
timelabel = MTLabel(anchor_x='left', anchor_y='bottom',
autosize=True, label="Maximum Time:")
touchlabel = MTLabel(anchor_x='center', anchor_y='center',
autosize=True, label="Test settings:")
explanation = MTLabel(pos=(10, 10), anchor_x='left', anchor_y='top',
autosize=True, label=self.__doc__)
dti = DoubleTapIndicator()
save = MTButton(label="Save current settings", autoheight=True)
save.connect("on_release", self.save_settings)
reset = MTButton(label="Reset to original settings", autoheight=True)
reset.connect("on_release", self.reset_settings)
save.width = reset.width = dti.width = self.distance_slider.width
self.box = MTBoxLayout(orientation="vertical", spacing=20)
self.box.add_widget(touchlabel)
self.box.add_widget(dti)
self.box.add_widget(distlabel)
self.box.add_widget(self.distance_slider)
self.box.add_widget(timelabel)
self.box.add_widget(self.time_slider)
self.box.add_widget(save)
self.box.add_widget(reset)
w = getWindow()
x, y = w.center
x -= self.box.width / 2
y -= self.box.height / 2
self.box.pos = (x, y)
self.add_widget(self.box)
self.add_widget(explanation)
def distance_callback(self, v):
self.module.double_tap_distance = v / 1000.0
self.module.touches = {}
def time_callback(self, v):
self.module.double_tap_time = v / 1000.0
self.module.touches = {}
def set_values(self, time, dist):
pymt_config.set('pymt', 'double_tap_time', int(time * 1000))
pymt_config.set('pymt', 'double_tap_distance', int(dist * 1000))
pymt_config.write()
def save_settings(self, touch):
self.set_values(self.module.double_tap_time, self.module.double_tap_distance)
def reset_settings(self, touch):
self.set_values(self.orig_time, self.orig_distance)
if __name__ == "__main__":
dtsa = DoubleTapSettingsAdjuster()
runTouchApp(dtsa)
| nuigroup/pymt-widgets | pymt/tools/calibration/doubletap.py | Python | lgpl-3.0 | 4,053 |
import os
from setuptools import setup, find_packages
base_dir = os.path.dirname(__file__)
about = {}
with open(os.path.join(base_dir, "flake8_import_order", "__about__.py")) as f:
exec(f.read(), about)
with open(os.path.join(base_dir, "README.rst")) as f:
long_description = f.read()
setup(
name=about["__title__"],
version=about["__version__"],
description=about["__summary__"],
long_description=long_description,
license=about["__license__"],
url=about["__uri__"],
author=about["__author__"],
author_email=about["__email__"],
maintainer=about['__maintainer__'],
maintainer_email=about['__maintainer_email__'],
packages=find_packages(exclude=["tests", "tests.*"]),
zip_safe=False,
install_requires=[
"enum34 ; python_version <= '2.7'",
"pycodestyle",
"setuptools",
],
tests_require=[
"pytest",
"flake8",
"pycodestyle",
"pylama"
],
py_modules=['flake8_import_order'],
entry_points={
'flake8_import_order.styles': [
'cryptography = flake8_import_order.styles:Cryptography',
'google = flake8_import_order.styles:Google',
'pep8 = flake8_import_order.styles:PEP8',
'smarkets = flake8_import_order.styles:Smarkets',
'appnexus = flake8_import_order.styles:AppNexus',
'edited = flake8_import_order.styles:Edited',
'pycharm = flake8_import_order.styles:PyCharm',
],
'flake8.extension': [
'I = flake8_import_order.flake8_linter:Linter',
],
'pylama.linter': [
'import_order = flake8_import_order.pylama_linter:Linter'
]
},
classifiers=[
"Framework :: Flake8",
"Intended Audience :: Developers",
"Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
(
"License :: OSI Approved :: "
"GNU Lesser General Public License v3 (LGPLv3)"
),
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
"Operating System :: OS Independent"
]
)
| PyCQA/flake8-import-order | setup.py | Python | lgpl-3.0 | 2,353 |
###############################################################
# Copyright 2020 Lawrence Livermore National Security, LLC
# (c.f. AUTHORS, NOTICE.LLNS, COPYING)
#
# This file is part of the Flux resource manager framework.
# For details, see https://github.com/flux-framework.
#
# SPDX-License-Identifier: LGPL-3.0
###############################################################
import json
import errno
from flux.future import Future
from flux.job._wrapper import _RAW as RAW
from _flux._core import ffi
# Names of events that may appear in the main eventlog (i.e. ``eventlog="eventlog"``)
# See Flux RFC 21 for documentation on each event.
MAIN_EVENTS = frozenset(
{
"submit",
"depend",
"priority",
"flux-restart",
"urgency",
"alloc",
"free",
"start",
"release",
"finish",
"clean",
"debug",
"exception",
}
)
class EventLogEvent:
"""
wrapper class for a single KVS EventLog entry
"""
def __init__(self, event):
"""
"Initialize from a string or dict eventlog event
"""
if isinstance(event, str):
event = json.loads(event)
self._name = event["name"]
self._timestamp = event["timestamp"]
self._context = {}
if "context" in event:
self._context = event["context"]
def __str__(self):
return "{0.timestamp:<0.5f}: {0.name} {0.context}".format(self)
@property
def name(self):
return self._name
@property
def timestamp(self):
return self._timestamp
@property
def context(self):
return self._context
class JobEventWatchFuture(Future):
"""
A future returned from job.event_watch_async().
Adds get_event() method to return an EventLogEntry event
"""
def __del__(self):
if self.needs_cancel is not False:
self.cancel()
try:
super().__del__()
except AttributeError:
pass
def __init__(self, future_handle):
super().__init__(future_handle)
self.needs_cancel = True
def get_event(self, autoreset=True):
"""
Return the next event from a JobEventWatchFuture, or None
if the event stream has terminated.
The future is auto-reset unless autoreset=False, so a subsequent
call to get_event() will try to fetch the next event and thus
may block.
"""
result = ffi.new("char *[1]")
try:
# Block until Future is ready:
self.wait_for()
RAW.event_watch_get(self.pimpl, result)
except OSError as exc:
if exc.errno == errno.ENODATA:
self.needs_cancel = False
return None
# re-raise all other exceptions
raise
event = EventLogEvent(ffi.string(result[0]).decode("utf-8"))
if autoreset is True:
self.reset()
return event
def cancel(self):
"""Cancel a streaming job.event_watch_async() future"""
RAW.event_watch_cancel(self.pimpl)
self.needs_cancel = False
def event_watch_async(flux_handle, jobid, eventlog="eventlog"):
"""Asynchronously get eventlog updates for a job
Asynchronously watch the events of a job eventlog.
Returns a JobEventWatchFuture. Call .get_event() from the then
callback to get the currently returned event from the Future object.
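A minimal sketch of asynchronous use (assumes ``handle = flux.Flux()``, a valid
``jobid``, and that the reactor is run so the callback can fire):
>>> def on_event(future):
...     print(future.get_event())
>>> event_watch_async(handle, jobid).then(on_event)
>>> handle.reactor_run()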
.. seealso::
:doc:`rfc:spec_21`
Documentation for the events in the main eventlog
:param flux_handle: handle for Flux broker from flux.Flux()
:type flux_handle: Flux
:param jobid: the job ID on which to watch events
:param eventlog: eventlog path in job kvs directory (default: eventlog)
:returns: a JobEventWatchFuture object
:rtype: JobEventWatchFuture
"""
future = RAW.event_watch(flux_handle, int(jobid), eventlog, 0)
return JobEventWatchFuture(future)
def event_watch(flux_handle, jobid, eventlog="eventlog"):
"""Python generator to watch all events for a job
Synchronously watch the events of a job eventlog via a simple generator.
Example:
>>> for event in job.event_watch(flux_handle, jobid):
... # do something with event
.. seealso::
:doc:`rfc:spec_21`
Documentation for the events in the main eventlog
:param flux_handle: handle for Flux broker from flux.Flux()
:type flux_handle: Flux
:param jobid: the job ID on which to watch events
:param eventlog: eventlog path in job kvs directory (default: eventlog)
"""
watcher = event_watch_async(flux_handle, jobid, eventlog)
event = watcher.get_event()
while event is not None:
yield event
event = watcher.get_event()
class JobException(Exception):
"""Represents an 'exception' event occurring to a job.
Instances expose a few public attributes.
:var timestamp: the timestamp of the 'exception' event.
:var type: A string identifying the type of job exception.
:var note: Brief human-readable explanation of the exception.
:var severity: the severity of the exception. Exceptions with a severity
of 0 are fatal to the job; any other severity is non-fatal.
"""
def __init__(self, event):
self.timestamp = event.timestamp
self.type = event.context["type"]
self.note = event.context.get("note", "no explanation given")
self.severity = event.context["severity"]
super().__init__(self)
def __str__(self):
return f"job.exception: type={self.type}: {self.note}"
def event_wait(flux_handle, jobid, name, eventlog="eventlog", raiseJobException=True):
"""Wait for a job eventlog entry 'name'
Wait synchronously for an eventlog entry named "name" and return the
entry to the caller. Raises OSError with ENODATA if the event never
occurs.
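A minimal sketch (assumes ``handle = flux.Flux()`` and a valid ``jobid``):
>>> event = event_wait(handle, jobid, "start")
>>> print(event.timestamp, event.name)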
.. seealso::
:doc:`rfc:spec_21`
Documentation for the events in the main eventlog
:param flux_handle: handle for Flux broker from flux.Flux()
:type flux_handle: Flux
:param jobid: the job ID on which to wait for eventlog events
:param name: The event name for which to wait
:param eventlog: eventlog path in job kvs directory (default: eventlog)
:param raiseJobException: if True, watch for job exception events and
raise a JobException if one is seen before event 'name' (default=True)
:returns: an EventLogEntry object, or raises OSError if eventlog
ended before matching event was found
:rtype: EventLogEntry
"""
for event in event_watch(flux_handle, jobid, eventlog):
if event.name == name:
return event
if (
raiseJobException
and event.name == "exception"
and event.context["severity"] == 0
):
raise JobException(event)
raise OSError(errno.ENODATA, f"eventlog ended before event='{name}'")
| grondo/flux-core | src/bindings/python/flux/job/event.py | Python | lgpl-3.0 | 7,000 |
__author__ = 'sunnyday'
| synnyday/test_ss | test_ss/management/commands/__init__.py | Python | unlicense | 24 |
"""myproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| thinkAmi-sandbox/Django_separate_model_file-sample | myproject/urls.py | Python | unlicense | 765 |
# a head script for InMoov
inMoov = Runtime.createAndStart("inMoov", "InMoov")
# variables to adjust
leftSerialPort = "COM7"
cameraIndex = 1
# attach an arduino to InMoov
# possible board types include uno atmega168 atmega328p atmega2560 atmega1280 atmega32u4
# the MRLComm.ino sketch must be loaded into the Arduino for MyRobotLab control !
# set COM number according to the com of your Arduino board
inMoov.attachArduino("left","uno", leftSerialPort)
inMoov.attachHead("left")
# system check
inMoov.systemCheck()
# if you have a laptop with a camera the one in InMoov is likely to be index #1
inMoov.setCameraIndex(cameraIndex)
# listen for these key words
# to get voice to work you must be connected to the internet,
# at least the first time
ear = inMoov.getEar()
ear.addCommand("rest", inMoov.getName(), "rest")
ear.addCommand("track", inMoov.getName(), "track")
ear.addCommand("freeze track", inMoov.getName(), "clearTrackingPoints")
ear.addCommand("capture gesture", inMoov.getName(), "captureGesture")
ear.addCommand("manual", ear.getName(), "lockOutAllGrammarExcept", "voice control")
ear.addCommand("voice control", ear.getName(), "clearLock")
ear.addCommand("camera on", inMoov.getName(), "cameraOn")
# ear.addCommand("off camera", inMoov.getName(), "cameraOff") FIXME
ear.addComfirmations("yes","correct","yeah","ya")
ear.addNegations("no","wrong","nope","nah")
ear.startListening()
| sujitbehera27/MyRoboticsProjects-Arduino | src/resource/Python/examples/InMoov.head.py | Python | apache-2.0 | 1,449 |
# Copyright 2017 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from oslo_utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers import base
from ironic.drivers.modules.bmc_redfish import utils as redfish_utils
LOG = log.getLogger(__name__)
sushy = importutils.try_import('hwsushy')
if sushy:
GET_POWER_STATE_MAP = {
sushy.SYSTEM_POWER_STATE_ON: states.POWER_ON,
sushy.SYSTEM_POWER_STATE_POWERING_ON: states.POWER_ON,
sushy.SYSTEM_POWER_STATE_OFF: states.POWER_OFF,
sushy.SYSTEM_POWER_STATE_POWERING_OFF: states.POWER_OFF
}
SET_POWER_STATE_MAP = {
states.POWER_ON: sushy.RESET_ON,
states.POWER_OFF: sushy.RESET_FORCE_OFF,
states.REBOOT: sushy.RESET_FORCE_RESTART,
states.SOFT_REBOOT: sushy.RESET_GRACEFUL_RESTART,
states.SOFT_POWER_OFF: sushy.RESET_GRACEFUL_SHUTDOWN
}
class BMCRedfishPower(base.PowerInterface):
def __init__(self):
"""Initialize the Redfish power interface.
:raises: DriverLoadError if the driver can't be loaded due to
missing dependencies
"""
super(BMCRedfishPower, self).__init__()
if not sushy:
raise exception.DriverLoadError(
driver='redfish',
reason=_('Unable to import the sushy library'))
def get_properties(self):
"""Return the properties of the interface.
:returns: dictionary of <property name>:<property description> entries.
"""
return redfish_utils.COMMON_PROPERTIES.copy()
def validate(self, task):
"""Validates the driver information needed by the redfish driver.
:param task: a TaskManager instance containing the node to act on.
:raises: InvalidParameterValue on malformed parameter(s)
:raises: MissingParameterValue on missing parameter(s)
"""
redfish_utils.parse_driver_info(task.node)
def get_power_state(self, task):
"""Get the current power state of the task's node.
:param task: a TaskManager instance containing the node to act on.
:returns: a power state. One of :mod:`ironic.common.states`.
:raises: InvalidParameterValue on malformed parameter(s)
:raises: MissingParameterValue on missing parameter(s)
:raises: RedfishConnectionError when it fails to connect to Redfish
:raises: RedfishError on an error from the Sushy library
"""
system = redfish_utils.get_system(task.node)
return GET_POWER_STATE_MAP.get(system.power_state)
@task_manager.require_exclusive_lock
def set_power_state(self, task, power_state, timeout=None):
"""Set the power state of the task's node.
:param task: a TaskManager instance containing the node to act on.
:param power_state: Any power state from :mod:`ironic.common.states`.
:param timeout: Not used by this driver.
:raises: MissingParameterValue if a required parameter is missing.
:raises: RedfishConnectionError when it fails to connect to Redfish
:raises: RedfishError on an error from the Sushy library
"""
system = redfish_utils.get_system(task.node)
try:
system.reset_system(SET_POWER_STATE_MAP.get(power_state))
except sushy.exceptions.SushyError as e:
error_msg = (_('Redfish set power state failed for node '
'%(node)s. Error: %(error)s') %
{'node': task.node.uuid, 'error': e})
LOG.error(error_msg)
raise exception.RedfishError(error=error_msg)
@task_manager.require_exclusive_lock
def reboot(self, task, timeout=None):
"""Perform a hard reboot of the task's node.
:param task: a TaskManager instance containing the node to act on.
:param timeout: Not used by this driver.
:raises: MissingParameterValue if a required parameter is missing.
:raises: RedfishConnectionError when it fails to connect to Redfish
:raises: RedfishError on an error from the Sushy library
"""
system = redfish_utils.get_system(task.node)
current_power_state = GET_POWER_STATE_MAP.get(system.power_state)
try:
if current_power_state == states.POWER_ON:
system.reset_system(SET_POWER_STATE_MAP.get(states.REBOOT))
else:
system.reset_system(SET_POWER_STATE_MAP.get(states.POWER_ON))
except sushy.exceptions.SushyError as e:
error_msg = (_('Redfish reboot failed for node %(node)s. '
'Error: %(error)s') % {'node': task.node.uuid,
'error': e})
LOG.error(error_msg)
raise exception.RedfishError(error=error_msg)
def get_supported_power_states(self, task):
"""Get a list of the supported power states.
:param task: A TaskManager instance containing the node to act on.
Not used by this driver at the moment.
:returns: A list with the supported power states defined
in :mod:`ironic.common.states`.
"""
return list(SET_POWER_STATE_MAP)
| jiazichenzhan/Server_Manage_Plugin | ironic-plugin-pike/ironic/drivers/modules/bmc_redfish/power.py | Python | apache-2.0 | 5,932 |
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.serialization import jsonutils
from oslo.utils import timeutils
import six
from six.moves.urllib import parse
from keystone.common import dependency
from keystone import config
from keystone.contrib import federation
from keystone import exception
from keystone.i18n import _, _LE
from keystone.openstack.common import log
from keystone import token
from keystone.token import provider
LOG = log.getLogger(__name__)
CONF = config.CONF
class V2TokenDataHelper(object):
"""Creates V2 token data."""
@classmethod
def format_token(cls, token_ref, roles_ref=None, catalog_ref=None,
trust_ref=None):
audit_info = None
user_ref = token_ref['user']
metadata_ref = token_ref['metadata']
if roles_ref is None:
roles_ref = []
expires = token_ref.get('expires', provider.default_expire_time())
if expires is not None:
if not isinstance(expires, six.text_type):
expires = timeutils.isotime(expires)
token_data = token_ref.get('token_data')
if token_data:
token_audit = token_data.get(
'access', token_data).get('token', {}).get('audit_ids')
audit_info = token_audit
if audit_info is None:
audit_info = provider.audit_info(token_ref.get('parent_audit_id'))
o = {'access': {'token': {'id': token_ref['id'],
'expires': expires,
'issued_at': timeutils.strtime(),
'audit_ids': audit_info
},
'user': {'id': user_ref['id'],
'name': user_ref['name'],
'username': user_ref['name'],
'roles': roles_ref,
'roles_links': metadata_ref.get('roles_links',
[])
}
}
}
if 'bind' in token_ref:
o['access']['token']['bind'] = token_ref['bind']
if 'tenant' in token_ref and token_ref['tenant']:
token_ref['tenant']['enabled'] = True
o['access']['token']['tenant'] = token_ref['tenant']
if catalog_ref is not None:
o['access']['serviceCatalog'] = V2TokenDataHelper.format_catalog(
catalog_ref)
if metadata_ref:
if 'is_admin' in metadata_ref:
o['access']['metadata'] = {'is_admin':
metadata_ref['is_admin']}
else:
o['access']['metadata'] = {'is_admin': 0}
if 'roles' in metadata_ref:
o['access']['metadata']['roles'] = metadata_ref['roles']
if CONF.trust.enabled and trust_ref:
o['access']['trust'] = {'trustee_user_id':
trust_ref['trustee_user_id'],
'id': trust_ref['id'],
'trustor_user_id':
trust_ref['trustor_user_id'],
'impersonation':
trust_ref['impersonation']
}
return o
@classmethod
def format_catalog(cls, catalog_ref):
"""Munge catalogs from internal to output format
Internal catalogs look like::
{$REGION: {
{$SERVICE: {
$key1: $value1,
...
}
}
}
The legacy api wants them to look like::
[{'name': $SERVICE[name],
'type': $SERVICE,
'endpoints': [{
'tenantId': $tenant_id,
...
'region': $REGION,
}],
'endpoints_links': [],
}]
"""
if not catalog_ref:
return []
services = {}
for region, region_ref in six.iteritems(catalog_ref):
for service, service_ref in six.iteritems(region_ref):
new_service_ref = services.get(service, {})
new_service_ref['name'] = service_ref.pop('name')
new_service_ref['type'] = service
new_service_ref['endpoints_links'] = []
service_ref['region'] = region
endpoints_ref = new_service_ref.get('endpoints', [])
endpoints_ref.append(service_ref)
new_service_ref['endpoints'] = endpoints_ref
services[service] = new_service_ref
return services.values()
@dependency.requires('assignment_api', 'catalog_api', 'identity_api',
'role_api', 'trust_api')
class V3TokenDataHelper(object):
"""Token data helper."""
def __init__(self):
# Keep __init__ around to ensure dependency injection works.
super(V3TokenDataHelper, self).__init__()
def _get_filtered_domain(self, domain_id):
domain_ref = self.assignment_api.get_domain(domain_id)
return {'id': domain_ref['id'], 'name': domain_ref['name']}
def _get_filtered_project(self, project_id):
project_ref = self.assignment_api.get_project(project_id)
filtered_project = {
'id': project_ref['id'],
'name': project_ref['name']}
filtered_project['domain'] = self._get_filtered_domain(
project_ref['domain_id'])
return filtered_project
def _populate_scope(self, token_data, domain_id, project_id):
if 'domain' in token_data or 'project' in token_data:
# scope already exist, no need to populate it again
return
if domain_id:
token_data['domain'] = self._get_filtered_domain(domain_id)
if project_id:
token_data['project'] = self._get_filtered_project(project_id)
def _get_roles_for_user(self, user_id, domain_id, project_id):
roles = []
if domain_id:
roles = self.assignment_api.get_roles_for_user_and_domain(
user_id, domain_id)
if project_id:
roles = self.assignment_api.get_roles_for_user_and_project(
user_id, project_id)
return [self.role_api.get_role(role_id) for role_id in roles]
def _populate_roles_for_groups(self, group_ids,
project_id=None, domain_id=None,
user_id=None):
def _check_roles(roles, user_id, project_id, domain_id):
# User was granted roles so simply exit this function.
if roles:
return
if project_id:
msg = _('User %(user_id)s has no access '
'to project %(project_id)s') % {
'user_id': user_id,
'project_id': project_id}
elif domain_id:
msg = _('User %(user_id)s has no access '
'to domain %(domain_id)s') % {
'user_id': user_id,
'domain_id': domain_id}
# Since no roles were found a user is not authorized to
# perform any operations. Raise an exception with
# appropriate error message.
raise exception.Unauthorized(msg)
roles = self.assignment_api.get_roles_for_groups(group_ids,
project_id,
domain_id)
_check_roles(roles, user_id, project_id, domain_id)
return roles
def _populate_user(self, token_data, user_id, trust):
if 'user' in token_data:
# no need to repopulate user if it already exists
return
user_ref = self.identity_api.get_user(user_id)
if CONF.trust.enabled and trust and 'OS-TRUST:trust' not in token_data:
trustor_user_ref = (self.identity_api.get_user(
trust['trustor_user_id']))
try:
self.identity_api.assert_user_enabled(trust['trustor_user_id'])
except AssertionError:
raise exception.Forbidden(_('Trustor is disabled.'))
if trust['impersonation']:
user_ref = trustor_user_ref
token_data['OS-TRUST:trust'] = (
{
'id': trust['id'],
'trustor_user': {'id': trust['trustor_user_id']},
'trustee_user': {'id': trust['trustee_user_id']},
'impersonation': trust['impersonation']
})
filtered_user = {
'id': user_ref['id'],
'name': user_ref['name'],
'domain': self._get_filtered_domain(user_ref['domain_id'])}
token_data['user'] = filtered_user
def _populate_oauth_section(self, token_data, access_token):
if access_token:
access_token_id = access_token['id']
consumer_id = access_token['consumer_id']
token_data['OS-OAUTH1'] = ({'access_token_id': access_token_id,
'consumer_id': consumer_id})
def _populate_roles(self, token_data, user_id, domain_id, project_id,
trust, access_token):
if 'roles' in token_data:
# no need to repopulate roles
return
if access_token:
filtered_roles = []
authed_role_ids = jsonutils.loads(access_token['role_ids'])
all_roles = self.role_api.list_roles()
for role in all_roles:
for authed_role in authed_role_ids:
if authed_role == role['id']:
filtered_roles.append({'id': role['id'],
'name': role['name']})
token_data['roles'] = filtered_roles
return
if CONF.trust.enabled and trust:
token_user_id = trust['trustor_user_id']
token_project_id = trust['project_id']
# trusts do not support domains yet
token_domain_id = None
else:
token_user_id = user_id
token_project_id = project_id
token_domain_id = domain_id
if token_domain_id or token_project_id:
roles = self._get_roles_for_user(token_user_id,
token_domain_id,
token_project_id)
filtered_roles = []
if CONF.trust.enabled and trust:
for trust_role in trust['roles']:
match_roles = [x for x in roles
if x['id'] == trust_role['id']]
if match_roles:
filtered_roles.append(match_roles[0])
else:
raise exception.Forbidden(
_('Trustee has no delegated roles.'))
else:
for role in roles:
filtered_roles.append({'id': role['id'],
'name': role['name']})
# user has no project or domain roles, therefore access denied
if not filtered_roles:
if token_project_id:
msg = _('User %(user_id)s has no access '
'to project %(project_id)s') % {
'user_id': user_id,
'project_id': token_project_id}
else:
msg = _('User %(user_id)s has no access '
'to domain %(domain_id)s') % {
'user_id': user_id,
'domain_id': token_domain_id}
LOG.debug(msg)
raise exception.Unauthorized(msg)
token_data['roles'] = filtered_roles
def _populate_service_catalog(self, token_data, user_id,
domain_id, project_id, trust):
if 'catalog' in token_data:
# no need to repopulate service catalog
return
if CONF.trust.enabled and trust:
user_id = trust['trustor_user_id']
if project_id or domain_id:
service_catalog = self.catalog_api.get_v3_catalog(
user_id, project_id)
# TODO(ayoung): Enforce Endpoints for trust
token_data['catalog'] = service_catalog
def _populate_token_dates(self, token_data, expires=None, trust=None,
issued_at=None):
if not expires:
expires = provider.default_expire_time()
if not isinstance(expires, six.string_types):
expires = timeutils.isotime(expires, subsecond=True)
token_data['expires_at'] = expires
token_data['issued_at'] = (issued_at or
timeutils.isotime(subsecond=True))
def _populate_audit_info(self, token_data, audit_info=None):
if audit_info is None or isinstance(audit_info, six.string_types):
token_data['audit_ids'] = provider.audit_info(audit_info)
elif isinstance(audit_info, list):
token_data['audit_ids'] = audit_info
else:
msg = (_('Invalid audit info data type: %(data)s (%(type)s)') %
{'data': audit_info, 'type': type(audit_info)})
LOG.error(msg)
raise exception.UnexpectedError(msg)
def get_token_data(self, user_id, method_names, extras,
domain_id=None, project_id=None, expires=None,
trust=None, token=None, include_catalog=True,
bind=None, access_token=None, issued_at=None,
audit_info=None):
token_data = {'methods': method_names,
'extras': extras}
# We've probably already written these to the token
if token:
for x in ('roles', 'user', 'catalog', 'project', 'domain'):
if x in token:
token_data[x] = token[x]
if CONF.trust.enabled and trust:
if user_id != trust['trustee_user_id']:
raise exception.Forbidden(_('User is not a trustee.'))
if bind:
token_data['bind'] = bind
self._populate_scope(token_data, domain_id, project_id)
self._populate_user(token_data, user_id, trust)
self._populate_roles(token_data, user_id, domain_id, project_id, trust,
access_token)
self._populate_audit_info(token_data, audit_info)
if include_catalog:
self._populate_service_catalog(token_data, user_id, domain_id,
project_id, trust)
self._populate_token_dates(token_data, expires=expires, trust=trust,
issued_at=issued_at)
self._populate_oauth_section(token_data, access_token)
return {'token': token_data}
@dependency.optional('oauth_api')
@dependency.requires('assignment_api', 'catalog_api', 'identity_api',
'role_api', 'trust_api')
class BaseProvider(provider.Provider):
def __init__(self, *args, **kwargs):
super(BaseProvider, self).__init__(*args, **kwargs)
self.v3_token_data_helper = V3TokenDataHelper()
self.v2_token_data_helper = V2TokenDataHelper()
def get_token_version(self, token_data):
if token_data and isinstance(token_data, dict):
if 'token_version' in token_data:
if token_data['token_version'] in token.provider.VERSIONS:
return token_data['token_version']
# FIXME(morganfainberg): deprecate the following logic in future
# revisions. It is better to just specify the token_version in
# the token_data itself. This way we can support future versions
# that might have the same fields.
if 'access' in token_data:
return token.provider.V2
if 'token' in token_data and 'methods' in token_data['token']:
return token.provider.V3
raise exception.UnsupportedTokenVersionException()
def issue_v2_token(self, token_ref, roles_ref=None,
catalog_ref=None):
metadata_ref = token_ref['metadata']
trust_ref = None
if CONF.trust.enabled and metadata_ref and 'trust_id' in metadata_ref:
trust_ref = self.trust_api.get_trust(metadata_ref['trust_id'])
token_data = self.v2_token_data_helper.format_token(
token_ref, roles_ref, catalog_ref, trust_ref)
token_id = self._get_token_id(token_data)
token_data['access']['token']['id'] = token_id
return token_id, token_data
def _is_mapped_token(self, auth_context):
return (federation.IDENTITY_PROVIDER in auth_context and
federation.PROTOCOL in auth_context)
def issue_v3_token(self, user_id, method_names, expires_at=None,
project_id=None, domain_id=None, auth_context=None,
trust=None, metadata_ref=None, include_catalog=True,
parent_audit_id=None):
# for V2, trust is stashed in metadata_ref
if (CONF.trust.enabled and not trust and metadata_ref and
'trust_id' in metadata_ref):
trust = self.trust_api.get_trust(metadata_ref['trust_id'])
token_ref = None
if auth_context and self._is_mapped_token(auth_context):
token_ref = self._handle_mapped_tokens(
auth_context, project_id, domain_id)
access_token = None
if 'oauth1' in method_names:
if self.oauth_api:
access_token_id = auth_context['access_token_id']
access_token = self.oauth_api.get_access_token(access_token_id)
else:
raise exception.Forbidden(_('Oauth is disabled.'))
token_data = self.v3_token_data_helper.get_token_data(
user_id,
method_names,
auth_context.get('extras') if auth_context else None,
domain_id=domain_id,
project_id=project_id,
expires=expires_at,
trust=trust,
bind=auth_context.get('bind') if auth_context else None,
token=token_ref,
include_catalog=include_catalog,
access_token=access_token,
audit_info=parent_audit_id)
token_id = self._get_token_id(token_data)
return token_id, token_data
def _handle_mapped_tokens(self, auth_context, project_id, domain_id):
user_id = auth_context['user_id']
group_ids = auth_context['group_ids']
idp = auth_context[federation.IDENTITY_PROVIDER]
protocol = auth_context[federation.PROTOCOL]
token_data = {
'user': {
'id': user_id,
'name': parse.unquote(user_id),
federation.FEDERATION: {
'identity_provider': {'id': idp},
'protocol': {'id': protocol}
}
}
}
if project_id or domain_id:
roles = self.v3_token_data_helper._populate_roles_for_groups(
group_ids, project_id, domain_id, user_id)
token_data.update({'roles': roles})
else:
token_data['user'][federation.FEDERATION].update({
'groups': [{'id': x} for x in group_ids]
})
return token_data
def _verify_token_ref(self, token_ref):
"""Verify and return the given token_ref."""
if not token_ref:
raise exception.Unauthorized()
return token_ref
def _assert_default_domain(self, token_ref):
"""Make sure we are operating on default domain only."""
if (token_ref.get('token_data') and
self.get_token_version(token_ref.get('token_data')) ==
token.provider.V3):
# this is a V3 token
msg = _('Non-default domain is not supported')
# user in a non-default is prohibited
if (token_ref['token_data']['token']['user']['domain']['id'] !=
CONF.identity.default_domain_id):
raise exception.Unauthorized(msg)
# domain scoping is prohibited
if token_ref['token_data']['token'].get('domain'):
raise exception.Unauthorized(
_('Domain scoped token is not supported'))
# project in non-default domain is prohibited
if token_ref['token_data']['token'].get('project'):
project = token_ref['token_data']['token']['project']
project_domain_id = project['domain']['id']
# scoped to project in non-default domain is prohibited
if project_domain_id != CONF.identity.default_domain_id:
raise exception.Unauthorized(msg)
# if token is scoped to trust, both trustor and trustee must
# be in the default domain. Furthermore, the delegated project
# must also be in the default domain
metadata_ref = token_ref['metadata']
if CONF.trust.enabled and 'trust_id' in metadata_ref:
trust_ref = self.trust_api.get_trust(metadata_ref['trust_id'])
trustee_user_ref = self.identity_api.get_user(
trust_ref['trustee_user_id'])
if (trustee_user_ref['domain_id'] !=
CONF.identity.default_domain_id):
raise exception.Unauthorized(msg)
trustor_user_ref = self.identity_api.get_user(
trust_ref['trustor_user_id'])
if (trustor_user_ref['domain_id'] !=
CONF.identity.default_domain_id):
raise exception.Unauthorized(msg)
project_ref = self.assignment_api.get_project(
trust_ref['project_id'])
if (project_ref['domain_id'] !=
CONF.identity.default_domain_id):
raise exception.Unauthorized(msg)
def validate_v2_token(self, token_ref):
try:
self._assert_default_domain(token_ref)
# FIXME(gyee): performance or correctness? Should we return the
# cached token or reconstruct it? Obviously if we are going with
# the cached token, any role, project, or domain name changes
# will not be reflected. One may argue that with PKI tokens,
# we are essentially doing cached token validation anyway.
            # Let's go with the cached token strategy. Since the token
            # management layer is now pluggable, one can always provide
# their own implementation to suit their needs.
token_data = token_ref.get('token_data')
if (not token_data or
self.get_token_version(token_data) !=
token.provider.V2):
# token is created by old v2 logic
metadata_ref = token_ref['metadata']
roles_ref = []
for role_id in metadata_ref.get('roles', []):
roles_ref.append(self.role_api.get_role(role_id))
# Get a service catalog if possible
# This is needed for on-behalf-of requests
catalog_ref = None
if token_ref.get('tenant'):
catalog_ref = self.catalog_api.get_catalog(
token_ref['user']['id'],
token_ref['tenant']['id'],
metadata_ref)
trust_ref = None
if CONF.trust.enabled and 'trust_id' in metadata_ref:
trust_ref = self.trust_api.get_trust(
metadata_ref['trust_id'])
token_data = self.v2_token_data_helper.format_token(
token_ref, roles_ref, catalog_ref, trust_ref)
trust_id = token_data['access'].get('trust', {}).get('id')
if trust_id:
# token trust validation
self.trust_api.get_trust(trust_id)
return token_data
except exception.ValidationError as e:
LOG.exception(_LE('Failed to validate token'))
raise exception.TokenNotFound(e)
def validate_v3_token(self, token_ref):
# FIXME(gyee): performance or correctness? Should we return the
# cached token or reconstruct it? Obviously if we are going with
# the cached token, any role, project, or domain name changes
# will not be reflected. One may argue that with PKI tokens,
# we are essentially doing cached token validation anyway.
        # Let's go with the cached token strategy. Since the token
        # management layer is now pluggable, one can always provide
# their own implementation to suit their needs.
trust_id = token_ref.get('trust_id')
if trust_id:
# token trust validation
self.trust_api.get_trust(trust_id)
token_data = token_ref.get('token_data')
if not token_data or 'token' not in token_data:
# token ref is created by V2 API
project_id = None
project_ref = token_ref.get('tenant')
if project_ref:
project_id = project_ref['id']
issued_at = token_ref['token_data']['access']['token']['issued_at']
audit = token_ref['token_data']['access']['token'].get('audit_ids')
token_data = self.v3_token_data_helper.get_token_data(
token_ref['user']['id'],
['password', 'token'],
{},
project_id=project_id,
bind=token_ref.get('bind'),
expires=token_ref['expires'],
issued_at=issued_at,
audit_info=audit)
return token_data
|
blueboxgroup/keystone
|
keystone/token/providers/common.py
|
Python
|
apache-2.0
| 26,928
|
# ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import time
from threading import Thread
from pyface.confirmation_dialog import confirm
from pyface.constant import YES
from traits.api import Instance, Int, Property, String, Bool
from traitsui.api import Controller, UItem, TabularEditor, VGroup, UReadonly
# ============= standard library imports ========================
# ============= local library imports ==========================
from traitsui.tabular_adapter import TabularAdapter
from pychron.core.helpers.formatting import floatfmt
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.ui.gui import invoke_in_main_thread
from pychron.envisage.resources import icon
from pychron.pychron_constants import LIGHT_GREEN, LIGHT_RED, LIGHT_YELLOW
from pychron.startup_test.tester import TestResult
COLOR_MAP = {
"Passed": LIGHT_GREEN,
"Skipped": "lightblue",
"Failed": LIGHT_RED,
"Invalid": LIGHT_YELLOW,
}
ICON_MAP = {
"Passed": "green_ball",
"Skipped": "gray_ball",
"Failed": "red_ball",
"Invalid": "yellow_ball",
}
class ResultsAdapter(TabularAdapter):
columns = [
("", "result_image"),
("Plugin", "plugin"),
("Name", "name"),
("Duration (s)", "duration"),
("Result", "result"),
]
plugin_width = Int(200)
name_width = Int(190)
duration_width = Int(80)
duration_text = Property
result_image_image = Property
result_image_text = Property
def _get_result_image_text(self):
return ""
def _get_result_image_image(self):
return icon(ICON_MAP[self.item.result])
def get_bg_color(self, obj, trait, row, column=0):
return COLOR_MAP[self.item.result]
def _get_duration_text(self):
return floatfmt(self.item.duration) # '{:0.5f}'.format(self.item.duration)
class ResultsView(Controller):
model = Instance("pychron.startup_test.tester.StartupTester")
auto_close = 5
selected = Instance(TestResult, ())
cancel_auto_close = Bool(False)
base_help_str = "Select any row to cancel auto close. Auto close in {}"
help_str = String
_auto_closed = False
can_cancel = Bool(True)
def _selected_changed(self, new):
self.cancel_auto_close = bool(new)
def _timer_func(self):
delay = self.auto_close
st = time.time()
while 1:
time.sleep(0.25)
ct = time.time() - st
if ct > delay or self.cancel_auto_close:
break
self.help_str = self.base_help_str.format(delay - int(ct))
if self.cancel_auto_close:
self.help_str = "Auto close canceled"
else:
invoke_in_main_thread(self._do_auto_close)
def init(self, info):
if self.auto_close and self.model.all_passed:
t = Thread(target=self._timer_func)
t.start()
# do_after(self.auto_close * 1000, self._do_auto_close)
else:
self.help_str = ""
def closed(self, info, is_ok):
import sys
if not self._auto_closed and not is_ok:
if confirm(info.ui.control, "Are you sure you want to Quit?") == YES:
self.model.info("User quit because of Startup fail")
sys.exit()
else:
if not self.model.ok_close():
if (
confirm(
info.ui.control,
"Pychron is not communicating with a Spectrometer.\n"
"Are you sure you want to enter "
"Spectrometer Simulation mode?",
)
!= YES
):
sys.exit()
def _do_auto_close(self):
if not self.cancel_auto_close:
self._auto_closed = True
try:
self.info.ui.dispose()
except AttributeError:
pass
def traits_view(self):
if self.can_cancel:
buttons = ["OK", "Cancel"]
else:
buttons = ["OK"]
v = okcancel_view(
VGroup(
UItem(
"results",
editor=TabularEditor(
adapter=ResultsAdapter(),
editable=False,
selected="controller.selected",
),
),
VGroup(
UReadonly("controller.selected.description"),
show_border=True,
label="Description",
),
VGroup(
UReadonly("controller.selected.error"),
show_border=True,
visible_when="controller.selected.error",
label="Error",
),
VGroup(
UReadonly("controller.help_str"),
show_border=True,
visible_when="controller.help_str",
),
),
title="Test Results",
buttons=buttons,
height=500,
width=650,
)
return v
# ============= EOF =============================================
|
USGSDenverPychron/pychron
|
pychron/startup_test/results_view.py
|
Python
|
apache-2.0
| 6,045
|
import os
import shutil
import json
import tempfile
import logging
from avatar.util.ostools import get_random_free_port
from collections import OrderedDict
log = logging.getLogger(__name__)
class S2EConfiguration():
def __init__(self, config):
assert("s2e" in config) #S2E configuration must be present
assert("machine_configuration" in config) #Configurable machine configuration must be present
self._s2e_configuration = config["s2e"]
self._cm_configuration = config["machine_configuration"]
self._output_directory = config["output_directory"]
self._config_directory = config["configuration_directory"]
self._avatar_configuration = "avatar_configuration" in config and config["avatar_configuration"] or {}
self._qemu_configuration = ("qemu_configuration" in config) and config["qemu_configuration"] or {}
mem_addr = "127.0.0.1"
mem_port = get_random_free_port()
if not isinstance(self._s2e_configuration["plugins"],OrderedDict):
log.warn("plugins dictionnary should be ordered (use OrderedDict), s2e should take care of ordering plugins one day !")
if "RemoteMemory" in self._s2e_configuration["plugins"] \
and "listen_address" in self._s2e_configuration["plugins"]["RemoteMemory"] \
and self._s2e_configuration["plugins"]["RemoteMemory"]["listen_address"]:
listen_addr = self._s2e_configuration["plugins"]["RemoteMemory"]["listen_address"]
mem_addr = str(listen_addr[:listen_addr.rfind(":")])
mem_port = int(listen_addr[listen_addr.rfind(":") + 1:])
self._s2e_remote_memory_plugin_sockaddr = (mem_addr, mem_port)
#TODO: Test if this is specified in configuration, and use values from config if so
self._s2e_gdb_sockaddr = ("127.0.0.1", get_random_free_port())
def get_klee_cmdline(self):
cmdline = []
if "klee" in self._s2e_configuration:
klee_conf = self._s2e_configuration["klee"]
cmdline.append("--use-batching-search=%s" % (("use-batching-search" in klee_conf and klee_conf["use-batching-search"]) and "true" or "false"))
if "batch-time" in klee_conf:
cmdline.append("--batch-time=%f" % klee_conf["batch-time"])
if "use-random-path" in klee_conf and klee_conf["use-random-path"]:
cmdline.append("--use-random-path")
cmdline.append("--use-cex-cache=%s" % (("use-cex-cache" in klee_conf and klee_conf["use-cex-cache"]) and "true" or "false"))
cmdline.append("--use-cache=%s" % (("use-cache" in klee_conf and klee_conf["use-cache"]) and "true" or "false"))
cmdline.append("--use-fast-cex-solver=%s" % (("use-fast-cex-solver" in klee_conf and klee_conf["use-fast-cex-solver"]) and "true" or "false"))
if "max-stp-time" in klee_conf:
cmdline.append("--max-stp-time=%f" % klee_conf["max-stp-time"])
cmdline.append("--use-expr-simplifier=%s" % (("use-expr-simplifier" in klee_conf and klee_conf["use-expr-simplifier"]) and "true" or "false"))
cmdline.append("--use-concolic-execution=%s" % (("use-concolic-execution" in klee_conf and klee_conf["use-concolic-execution"]) and "true" or "false"))
cmdline.append("--print-mode-switch=true")
cmdline.append("--concretize-io-address=false")
cmdline.append("--concretize-io-writes=true")
cmdline.append("--allow-external-sym-calls=false")
cmdline.append("--verbose-fork-info=true")
return cmdline
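    # Example (hypothetical configuration, shown only to illustrate the
    # mapping performed above):
    #   {"klee": {"batch-time": 1.0, "use-random-path": True}}
    # contributes, among the default flags,
    #   --use-batching-search=false --batch-time=1.000000 --use-random-path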
def get_s2e_lua(self):
lua = []
lua.append("-- Automatically generated Lua script configuration for S2E\n")
lua.append("-- Do not edit!\n")
lua.append("\n")
lua.append("AVATAR_SRC_ROOT_PATH = \"%s\"\n" % self._config_directory)
lua.append("s2e = {\n")
lua.append("generate_testcase_on_kill = %s," % (("generate_testcase_on_kill" not in self._s2e_configuration \
or self._s2e_configuration["generate_testcase_on_kill"]) and "true" or "false"))
# First klee configuration
lua.append("\tkleeArgs = {\n\t\t")
lua.append(",\n\t\t".join(["\"%s\"" % x for x in self.get_klee_cmdline()]))
lua.append("\n\t}")
lua.append("\n}")
#Then list of enabled plugins
if "plugins" in self._s2e_configuration and self._s2e_configuration["plugins"]:
lua.append("\n\nplugins = {\n\t")
lua.append(",\n\t".join(["\"%s\"" % x for x in self._s2e_configuration["plugins"]]))
lua.append("\n}\n\n")
#Then configuration for each plugin
plugin_configs = [(plugin, self.get_plugin_lua(plugin)) for plugin in self._s2e_configuration["plugins"]]
lua.append("pluginsConfig = {\n\t")
lua.append(",\n\t".join(["%s = {\n\t\t%s\n\t}" % (plg_name, "\n\t\t".join(plg_conf.split("\n"))) for (plg_name, plg_conf) in plugin_configs]))
lua.append("\n}\n")
#Then include raw external files (eg. annotation functions)
if "include" in self._s2e_configuration and self._s2e_configuration["include"]:
for fname in self._s2e_configuration["include"]:
f = open(os.path.join(self._config_directory, fname), 'r')
lua.append("\n\n--Including content of file %s\n" % fname)
for line in f.readlines():
lua.append(line)
lua.append("--End of file %s\n" % fname)
f.close()
return "".join(lua)
def get_plugin_lua(self, plugin):
if plugin in ["BaseInstructions", "Initializer", "FunctionMonitor"]:
return "" #Plugins not supposed to have options
elif plugin == "RemoteMemory":
plug_conf = self._s2e_configuration["plugins"]["RemoteMemory"]
lua = []
lua.append("verbose = %s," % (("verbose" in plug_conf and plug_conf["verbose"]) and "true" or "false"))
if "listen" in plug_conf:
# using the listen config from the main python config file
host, port = plug_conf["listen"].split(':')
self._s2e_remote_memory_plugin_sockaddr = (host, int(port))
lua.append("listen = \"%s:%d\"," % self._s2e_remote_memory_plugin_sockaddr)
lua.append("ranges = {")
ranges = []
for (range_name, mem_range) in plug_conf["ranges"].items():
ranges.append(
"""
\t%s = {
\t\taddress = 0x%x,
\t\tsize = 0x%x,
\t\taccess = {%s}
\t}
""" % (range_name,
mem_range["address"],
mem_range["size"],
", ".join(["\"%s\"" % x for x in mem_range["access"]])))
lua.append(",\n".join(ranges))
lua.append("}")
return "\n".join(lua)
elif plugin == "MemoryInterceptorMediator":
plug_conf = self._s2e_configuration["plugins"]["MemoryInterceptorMediator"]
lua = []
lua.append("verbose = %s,\n" % (("verbose" in plug_conf and plug_conf["verbose"]) and "true" or "false"))
interceptors = []
for interceptor in ("interceptors" in plug_conf and plug_conf["interceptors"] or {}):
mem_regions = []
for mem_region_name in plug_conf["interceptors"][interceptor]:
mem_region = plug_conf["interceptors"][interceptor][mem_region_name]
mem_regions.append(
("\t\t%s = {\n" % mem_region_name) + \
("\t\t\trange_start = 0x%08x,\n" % mem_region["range_start"]) + \
("\t\t\trange_end = 0x%08x,\n" % mem_region["range_end"]) + \
("\t\t\tpriority = %d,\n" % mem_region["priority"]) + \
"\t\t\taccess_type = {\n" + \
",\n".join(["\t\t\t\t\"%s\"" % x for x in mem_region["access_type"]]) + \
"\n\t\t\t}" + \
"\n\t\t}")
interceptors.append("\t%s = {\n" % interceptor + \
",\n".join(mem_regions) + \
"\n\t}")
lua.append("interceptors = {\n" + \
",\n".join(interceptors) + \
"\n}")
return "".join(lua)
else:
log.warn("Unknown plugin '%s' in configuration - including raw config", plugin)
return self._s2e_configuration["plugins"][plugin]
def get_s2e_executable(self, arch, endianness='little'):
"""
        This method returns the absolute path to the S2E binary.
"""
# explicit binary path in config
if "QEMU_S2E" in os.environ:
return os.environ["QEMU_S2E"]
elif "s2e_binary" in self._s2e_configuration and self._s2e_configuration["s2e_binary"]:
return self._s2e_configuration["s2e_binary"]
# fallback, architecture specific
elif arch == "arm" and "s2e_debug" in self._avatar_configuration and self._avatar_configuration["s2e_debug"]:
return "/home/zaddach/projects/eurecom-s2e/build/qemu-debug/arm-s2e-softmmu/qemu-system-arm"
elif arch == "arm" and endianness == 'little':
return "~/projects/eurecom-s2e/build/qemu-release/arm-s2e-softmmu/qemu-system-arm"
elif arch == "arm" and endianness == 'big':
return "/home/lucian/eurecom/s2e-build-release/qemu-release/armeb-s2e-softmmu/qemu-system-armeb"
else:
assert(False) #Architecture not yet implemented
def get_command_line(self):
cmdline = []
# Check if debugging/tracing facilities are to be employed.
# See http://wiki.qemu.org/Documentation/Debugging for details.
if "gdbserver" in self._qemu_configuration and self._qemu_configuration["gdbserver"]:
cmdline.append("gdbserver")
# TODO: make this a configurable IP:port tuple
cmdline.append("localhost:1222")
elif "valgrind" in self._qemu_configuration and self._qemu_configuration["valgrind"]:
cmdline.append("valgrind")
cmdline.append("--smc-check=all")
cmdline.append("--leak-check=full")
# S2E parameters
cmdline.append(self.get_s2e_executable(self._cm_configuration["architecture"], "endianness" in self._cm_configuration and self._cm_configuration["endianness"] or "little"))
cmdline.append("-s2e-config-file")
cmdline.append(os.path.join(self._output_directory, "s2e_conf.lua"))
if "verbose" in self._s2e_configuration and self._s2e_configuration["verbose"]:
cmdline.append("-s2e-verbose")
if "max-process" in self._s2e_configuration :
cmdline.append("-s2e-max-processes")
cmdline.append(" %d"% self._s2e_configuration["verbose"])
cmdline.append("-nographic")
# QEMU parameters
cmdline.append("-M")
cmdline.append("configurable")
cmdline.append("-kernel")
cmdline.append(os.path.join(self._output_directory, "configurable_machine.json"))
if "halt_processor_on_startup" in self._qemu_configuration and self._qemu_configuration["halt_processor_on_startup"]:
cmdline.append("-S")
self._qemu_configuration["gdb"] = "tcp::%d,server" % get_random_free_port()
cmdline.append("-gdb")
cmdline.append("tcp:127.0.0.1:%d,server" % self._s2e_gdb_sockaddr[1])
if "append" in self._qemu_configuration:
for val in self._qemu_configuration["append"]:
cmdline.append(val)
TRACE_OPTIONS = {"trace_instructions": "in_asm", "trace_microops": "op"}
trace_opts = []
for (config_trace_opt, qemu_trace_opt) in TRACE_OPTIONS.items():
if config_trace_opt in self._qemu_configuration and self._qemu_configuration[config_trace_opt]:
trace_opts.append(qemu_trace_opt)
if trace_opts:
cmdline.append("-D")
cmdline.append(os.path.join(self._output_directory, "qemu_trace.log"))
cmdline.append("-d")
cmdline.append(",".join(trace_opts))
if "extra_opts" in self._qemu_configuration:
for o in self._qemu_configuration["extra_opts"]:
cmdline.append(o)
return cmdline
def write_configurable_machine_configuration_file(self):
cm_conf = {}
conf_dir = self._config_directory
output_dir = self._output_directory
assert("architecture" in self._cm_configuration) #Architecture must be specified
assert("cpu_model" in self._cm_configuration) #CPU must be specified
assert("entry_address" in self._cm_configuration or "elf_executable" in self._cm_configuration) #Entry address must be specified
assert("memory_map" in self._cm_configuration and self._cm_configuration["memory_map"]) #Memory map must be specified
cm_conf["architecture"] = self._cm_configuration["architecture"]
cm_conf["cpu_model"] = self._cm_configuration["cpu_model"]
if "entry_address" in self._cm_configuration:
cm_conf["entry_address"] = self._cm_configuration["entry_address"]
if "elf_executable" in self._cm_configuration:
cm_conf["elf_executable"] = self._cm_configuration["elf_executable"]
if "init_state" in self._cm_configuration: #Initial state is optional
cm_conf["init_state"] = self._cm_configuration["init_state"]
cm_conf["memory_map"] = []
for region in self._cm_configuration["memory_map"]:
new_region = {"size": region["size"], "name": region["name"]}
if "is_rom" in region:
new_region["is_rom"] = region["is_rom"]
assert(not ("file" in region and "data" in region)) #Cannot have both file and data attribute
if "file" in region:
#Copy from source directory to output directory
shutil.copy(os.path.join(conf_dir, region["file"]), os.path.join(output_dir, os.path.basename(region["file"])))
new_region["file"] = os.path.join(output_dir, os.path.basename(region["file"]))
if "data" in region:
#Output data to file
(f, dest_file) = tempfile.mkstemp(suffix = '.bin', dir = output_dir, text = False)
os.write(f, region["data"])
os.close(f)
new_region["file"] = dest_file
new_region["map"] = []
for mapping in region["map"]:
new_region["map"].append({"address": mapping["address"],
"type": mapping["type"],
"permissions": mapping["permissions"]})
cm_conf["memory_map"].append(new_region)
cm_conf["devices"] = []
devices = "devices" in self._cm_configuration and self._cm_configuration["devices"] or []
for dev in devices:
cm_conf["devices"].append(dev)
f = open(os.path.join(output_dir, "configurable_machine.json"), 'w')
json.dump(cm_conf, f, indent = 4)
f.write("\n\n")
f.close()
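    # Minimal sketch of the expected "machine_configuration" input (field
    # names are taken from the asserts and lookups above; the values are
    # made up):
    #   {"architecture": "arm",
    #    "cpu_model": "arm926",
    #    "entry_address": 0x1000,
    #    "memory_map": [{"name": "rom", "size": 0x10000, "file": "rom.bin",
    #                    "map": [{"address": 0x0, "type": "code",
    #                             "permissions": "rwx"}]}]}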
def write_configuration_files(self, output_dir):
f = open(os.path.join(output_dir, "s2e_conf.lua"), 'w')
f.write(self.get_s2e_lua())
f.close()
self.write_configurable_machine_configuration_file()
def get_output_directory(self):
return self._output_directory
def get_s2e_gdb_port(self):
return self._s2e_gdb_sockaddr[1]
def get_remote_memory_listen_address(self):
return self._s2e_remote_memory_plugin_sockaddr
|
jmatthed/avatar-python
|
avatar/emulators/s2e/configuration.py
|
Python
|
apache-2.0
| 16,144
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import binascii
import os
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
from nova.virt.libvirt import utils
LOG = logging.getLogger(__name__)
_dmcrypt_suffix = '-dmcrypt'
def volume_name(base):
"""Returns the suffixed dmcrypt volume name.
This is to avoid collisions with similarly named device mapper names for
LVM volumes
"""
return base + _dmcrypt_suffix
def is_encrypted(path):
"""Returns true if the path corresponds to an encrypted disk."""
if path.startswith('/dev/mapper'):
return path.rpartition('/')[2].endswith(_dmcrypt_suffix)
else:
return False
def create_volume(target, device, cipher, key_size, key):
"""Sets up a dmcrypt mapping
:param target: device mapper logical device name
:param device: underlying block device
:param cipher: encryption cipher string digestible by cryptsetup
:param key_size: encryption key size
:param key: encoded encryption key bytestring
"""
cmd = ('cryptsetup',
'create',
target,
device,
'--cipher=' + cipher,
'--key-size=' + str(key_size),
'--key-file=-')
key = binascii.hexlify(key).decode('utf-8')
try:
utils.execute(*cmd, process_input=key, run_as_root=True)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error("Could not start encryption for disk %(device)s: "
"%(exception)s", {'device': device, 'exception': e})
def delete_volume(target):
"""Deletes a dmcrypt mapping
:param target: name of the mapped logical device
"""
try:
utils.execute('cryptsetup', 'remove', target, run_as_root=True)
except processutils.ProcessExecutionError as e:
# cryptsetup returns 4 when attempting to destroy a non-existent
        # dm-crypt device. It indicates that the device is invalid, i.e. it
        # has already been destroyed, so this error can safely be ignored.
if e.exit_code == 4:
LOG.debug("Ignoring exit code 4, volume already destroyed")
else:
with excutils.save_and_reraise_exception():
LOG.error("Could not disconnect encrypted volume "
"%(volume)s. If dm-crypt device is still active "
"it will have to be destroyed manually for "
"cleanup to succeed.", {'volume': target})
def list_volumes():
"""Function enumerates encrypted volumes."""
return [dmdev for dmdev in os.listdir('/dev/mapper')
if dmdev.endswith('-dmcrypt')]
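# Illustrative usage only (the device path, cipher and key size below are
# assumptions, not part of this module):
#   key = os.urandom(64)
#   target = volume_name('instance-0001-disk')
#   create_volume(target, '/dev/nova-vg/instance-0001-disk',
#                 'aes-xts-plain64', 512, key)
#   ...
#   delete_volume(target)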
|
Juniper/nova
|
nova/virt/libvirt/storage/dmcrypt.py
|
Python
|
apache-2.0
| 3,393
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import LanguageServiceClient
from .async_client import LanguageServiceAsyncClient
__all__ = (
"LanguageServiceClient",
"LanguageServiceAsyncClient",
)
|
googleapis/python-language
|
google/cloud/language_v1beta2/services/language_service/__init__.py
|
Python
|
apache-2.0
| 773
|
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Publish manager helper module.
Classes for handling publish requests on the low level (database, filesystem).
"""
import copy
import datetime
import json
import logging
import operator
import os
import re
import shutil
import subprocess
import tempfile
import urlparse
from common import exceptions
from common import utils
import psycopg2
from serve import basic_types
from serve import constants
from serve import http_io
from serve import serve_utils
from serve import stream_manager
from serve.push.search.core import search_manager
logger = logging.getLogger("ge_stream_publisher")
HTACCESS_REWRITE_BASE = "\nRewriteBase /\n"
# Minimum portable globe size in MB.
GLOBE_SIZE_THRESHOLD = 1.0
LINE0_TARGETDESCR = "\n# target: %s\n"
# Rewrite rule template for adding trailing slash.
LINE1_TRAILING_SLASH_REWRITERULE = "RewriteRule '^%s$' '%s/' [NC,R]\n"
# Rewrite rule template for POISearch serving.
LINE2_POISEARCH_REWRITERULE = "RewriteRule '^%s/%s(.*)' %s$1 [NC,PT]\n"
# Rewrite rule templates for WMS serving.
WMS_LINE0_REWRITERULE_R404 = "RewriteRule '^%s/wms' - [NC,R=404]\n"
WMS_LINE0_REWRITECOND = "RewriteCond %{QUERY_STRING} ^(.*)$\n"
WMS_LINE1_REWRITERULE = (
"RewriteRule '^%s/wms' 'wms?%%1&TargetPath=%s' [NC,PT]\n")
# Rewrite rules templates for GE database requests serving.
GE_LINE0_REWRITERULE = "RewriteRule '^%s/+$' earth/earth_local.html [NC,PT]\n"
GE_LINE1_REWRITECOND = "RewriteCond %{QUERY_STRING} ^(.*)$\n"
GE_LINE2_REWRITERULE = (
"RewriteRule '^%s/(.*)' '%s%s/db/$1?%%1&db_type=%s' [NC]\n")
# Rewrite rules templates for Map database requests serving.
MAP_LINE0_LOCAL_REWRITERULE = (
"RewriteRule '^%s/+$' maps/maps_local.html [NC,PT]\n")
MAP_LINE0_GOOGLE_REWRITERULE = (
"RewriteRule '^%s/+$' maps/maps_google.html [NC,PT]\n")
MAP_LINE1_REWRITERULE = (
"RewriteRule '^%s/+maps/+mapfiles/(.*)$' maps/mapfiles/$1 [NC,PT]\n")
MAP_LINE2_REWRITECOND = "RewriteCond %{QUERY_STRING} ^(.*)$\n"
MAP_LINE3_REWRITERULE = (
"RewriteRule '^%s/(.*)' '%s%s/db/$1?%%1&db_type=%s' [NC]\n")
# Rewrite rules templates for portable globes requests serving.
# GLB or 3d GLC
GLX_LINE0_REWRITERULE = (
"RewriteRule '^%s/+$' portable/preview.html?%s [NC,PT]\n")
GLX_LINE1_REWRITECOND = "RewriteCond %{QUERY_STRING} ^(.*)$\n"
GLX_LINE2_REWRITERULE = (
"RewriteRule '^%s/(.*)' '%s%s/db/$1?%%1&db_type=%s' [NC]\n")
class PublishManagerHelper(stream_manager.StreamManager):
"""Class for handling publish requests."""
VS_CONFIG_PATH = "/opt/google/gehttpd/conf.d/virtual_servers"
HTACCESS_PATH = "/opt/google/gehttpd/htdocs/.htaccess"
HTACCESS_TMP_PREFIX = "gepublish_htacces_"
HTACCESS_GE_PUBLISH_BEGIN = "### GE_PUBLISH BEGIN\n"
HTACCESS_GE_PUBLISH_END = "### GE_PUBLISH END\n"
PUBLISH_PATH_TEMPL = "{}{}"
TARGET_PATH_TEMPL = "{}/targets{}"
def __init__(self):
"""Inits publish manager helper."""
super(PublishManagerHelper, self).__init__()
self._search_manager = search_manager.SearchManager()
def BuildDbPublishPath(self, fusion_hostname, db_name):
"""Builds publish path for Fusion database.
Args:
fusion_hostname: Fusion hostname.
db_name: database name (assetroot path).
Returns:
The complete publish path of specified Fusion database.
"""
# Fusion hostname should be always defined for Fusion database.
assert fusion_hostname
return os.path.normpath(
PublishManagerHelper.PUBLISH_PATH_TEMPL.format(
self.GetFusionDbPublishPathPrefix(fusion_hostname), db_name))
def BuildTargetPublishPath(self, db_publish_path, target_path):
"""Builds complete publish path for target point of Fusion database.
Args:
db_publish_path: publish path of database.
target_path: target path.
Returns:
The complete publish path of specified target.
"""
return os.path.normpath(
PublishManagerHelper.TARGET_PATH_TEMPL.format(
db_publish_path, target_path))
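  # Illustrative composition of the two helpers above (host, database and
  # target names are made up): BuildDbPublishPath("fusion.host",
  # "/gevol/assets/db.kdatabase") followed by BuildTargetPublishPath(result,
  # "/mymap") yields a path of the form
  #   <publish-prefix>/gevol/assets/db.kdatabase/targets/mymap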
def HandleQueryRequest(self, request, response):
"""Handles query requests.
Args:
request: request object.
response: response object.
Raises:
psycopg2.Error/Warning, PublishServeException.
"""
query_cmd = request.GetParameter(constants.QUERY_CMD)
if not query_cmd:
raise exceptions.PublishServeException("Missing Query Command.")
# List all DBs registered on server.
if query_cmd == constants.QUERY_CMD_LIST_DBS:
self._GetDbsList(response)
# TODO: Convert _GetAllAssets to _GetDbsList once
# the front end is ready to receive the new response.
elif query_cmd == constants.QUERY_CMD_LIST_ASSETS:
self._GetAllAssets(response)
# List all Virtual Hosts registered on server.
elif query_cmd == constants.QUERY_CMD_LIST_VSS:
results = self.QueryVhList()
for vh_name, vh_url, vh_ssl in results:
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_VS_NAME, vh_name)
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_VS_URL,
self._GetVhCompleteUrl(vh_url, vh_ssl))
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
# Get Virtual Host details.
elif query_cmd == constants.QUERY_CMD_VS_DETAILS:
vh_name = request.GetParameter(constants.VS_NAME)
if not vh_name:
raise exceptions.PublishServeException("Missing virtual host name.")
vh_url, vh_ssl = self.QueryVh(vh_name)
vh_complete_url = self._GetVhCompleteUrl(vh_url, vh_ssl)
if vh_complete_url:
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_VS_URL, vh_complete_url)
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
# List all target paths serving published databases.
elif query_cmd == constants.QUERY_CMD_LIST_TGS:
query_string = (
"SELECT target_path FROM target_table WHERE target_id IN ("
"SELECT target_id FROM target_db_table)")
results = self.DbQuery(query_string)
for line in results:
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_TARGET_PATH, line)
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
# Get target details.
# TODO: consider to remove unnecessary details from response.
# It might be QUERY_CMD_DB_DETAILS which return only DB info that we have
# in postgres for specified target.
# Note: in ListDbs we return all this information.
elif query_cmd == constants.QUERY_CMD_PUBLISHED_DB_DETAILS:
target_path = request.GetParameter(constants.TARGET_PATH)
norm_target_path = serve_utils.NormalizeTargetPath(target_path)
if not norm_target_path:
raise exceptions.PublishServeException("Missing target path.")
query_string = ("""
SELECT db_table.host_name, db_table.db_name, db_table.db_pretty_name,
db_table.db_timestamp AT TIME ZONE 'UTC', db_table.db_size,
virtual_host_table.virtual_host_name,
virtual_host_table.virtual_host_url,
virtual_host_table.virtual_host_ssl,
target_table.target_path, target_table.serve_wms
FROM target_table, target_db_table, db_table, virtual_host_table
WHERE target_table.target_path = %s AND
target_table.target_id = target_db_table.target_id AND
target_db_table.db_id = db_table.db_id AND
target_db_table.virtual_host_id = virtual_host_table.virtual_host_id
""")
results = self.DbQuery(query_string, (norm_target_path,))
if results:
assert isinstance(results, list) and len(results) == 1
(r_host_name, r_db_path, r_db_name, r_db_timestamp, r_db_size,
r_virtual_host_name, r_virtual_host_url, r_virtual_host_ssl,
r_target_path, r_serve_wms) = results[0]
db_info = basic_types.DbInfo()
# TODO: make re-factoring - implement some Set function
# to use it where it is needed. Maybe build an aux. dictionary and
# pass as a parameter to that function.
db_info.host = r_host_name # db_table.host_name
db_info.path = r_db_path # db_table.db_name
db_info.name = r_db_name # db_table.db_pretty_name
timestamp = r_db_timestamp # db_table.db_timestamp
if timestamp:
assert isinstance(timestamp, datetime.datetime)
db_info.timestamp = serve_utils.DatetimeNoTzToIsoFormatUtc(timestamp)
db_info.size = r_db_size # db_table.db_size
db_info.description = r_db_name # db_table.db_pretty_name
db_info.virtual_host_name = r_virtual_host_name
db_info.target_base_url = self.GetVhBaseUrl(r_virtual_host_url,
r_virtual_host_ssl)
db_info.target_path = r_target_path
db_info.serve_wms = r_serve_wms
db_info.registered = True
# Calculate database attributes.
serve_utils.CalcDatabaseAttributes(db_info)
# Check whether the Fusion database has been pushed from remote host
# and set corresponding flag in DbInfo.
if serve_utils.IsFusionDb(db_info.type):
db_info.remote = self._IsFusionDbRemote(db_info)
# Set whether database has POI search data.
search_db_id = self._search_manager.QueryDbId(
db_info.host, db_info.path)
db_info.has_poi = search_db_id != 0
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_DATA,
json.dumps(db_info, cls=basic_types.DbInfoJsonEncoder))
else:
raise exceptions.PublishServeException(
"Target path %s does not exist." % target_path)
# Gets published DB path by target path.
elif query_cmd == constants.QUERY_CMD_GEDB_PATH:
query_target_path = request.GetParameter(constants.TARGET_PATH)
norm_target_path = serve_utils.NormalizeTargetPath(query_target_path)
if not norm_target_path:
raise exceptions.PublishServeException("Missing target path.")
query_string = ("""
SELECT db_table.host_name, db_table.db_name, target_table.target_path
FROM target_table, target_db_table, db_table
WHERE target_table.target_path = %s AND
target_table.target_id = target_db_table.target_id AND
target_db_table.db_id = db_table.db_id
""")
results = self.DbQuery(query_string, (norm_target_path,))
if results:
assert isinstance(results, list) and len(results) == 1
(client_host_name, db_path, target_path) = results[0]
gedb_path = self.BuildDbPublishPath(client_host_name, db_path)
target_gedb_path = self.BuildTargetPublishPath(
gedb_path, target_path)
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_DATA, target_gedb_path)
else:
raise exceptions.PublishServeException(
"Target path '%s' does not exist." % query_target_path)
elif query_cmd == constants.QUERY_CMD_TARGET_DETAILS:
target_path_in = request.GetParameter(constants.TARGET_PATH)
if not target_path_in:
raise exceptions.PublishServeException(
"Missing target path in the request.")
target_path = serve_utils.NormalizeTargetPath(target_path_in)
if not target_path:
raise exceptions.PublishServeException(
"Not a valid target path %s "
"(path format is /sub_path1[/sub_path2]." % target_path)
self.HandleTargetDetailsRequest(target_path, response)
else:
raise exceptions.PublishServeException(
"Invalid Query Command: %s." % query_cmd)
def HandlePublishRequest(self, db_id, publish_def):
"""Handles publish database request.
Args:
db_id: database ID.
publish_def: The PublishDef object encapsulating
set of the publish parameters.
Raises:
psycopg2.Error/Warning, PublishServeException.
"""
target_path = publish_def.target_path
virtual_host_name = publish_def.virtual_host_name
db_type = publish_def.db_type
client_host_name = publish_def.client_host_name
serve_wms = publish_def.serve_wms
snippets_set_name = publish_def.snippets_set_name
search_defs = publish_def.search_tabs
sup_search_defs = publish_def.sup_search_tabs
poifederated = publish_def.poi_federated
assert target_path and target_path[0] == "/" and target_path[-1] != "/"
# Check if the VS template exists.
virtual_host_id = self._QueryVirtualHostId(virtual_host_name)
if virtual_host_id == -1:
raise exceptions.PublishServeException(
"Virtual host %s does not exist." % virtual_host_name)
transfer_file_paths = self.SynchronizeDb(db_id, db_type, client_host_name)
if not transfer_file_paths:
# Add target point into target_table.
target_id = self._AddTarget(target_path, serve_wms)
# Insert publish context into 'publish_context_table' table.
query_string = ("INSERT INTO publish_context_table"
" (snippets_set_name, search_def_names,"
" supplemental_search_def_names, poifederated)"
" VALUES(%s, %s, %s, %s) RETURNING"
" publish_context_id")
result = self.DbModify(
query_string,
(snippets_set_name, search_defs, sup_search_defs, poifederated),
returning=True)
publish_context_id = 0
if result:
publish_context_id = result[0]
# Note: target is not removed from target_table in case of
# any exception below.
# Link target point with VS template, database and publish context.
query_string = ("INSERT INTO target_db_table"
" (target_id, virtual_host_id, db_id, publish_context_id)"
" VALUES(%s, %s, %s, %s)")
self.DbModify(query_string,
(target_id, virtual_host_id, db_id, publish_context_id))
else:
raise exceptions.PublishServeException("Database is not pushed.")
def HandleUnpublishRequest(self, target_path):
"""Handles un-publish database request.
Deletes the entry in target_db_table, target_search_id_table,
publish_context_table, updates .htaccess file,
deletes target's publish directory.
Note: target is not removed from target_table
Args:
target_path: target path to un-publish.
Raises:
psycopg2.Error/Warning, PublishServeException.
"""
unused_host_name, db_name = self.DoUnpublish(target_path)
if not db_name:
raise exceptions.PublishServeException(
"There is no database associated with target path %s." % (
target_path))
def DoUnpublish(self, target_path):
"""Do unpublish specified target path.
Args:
target_path: target path to un-publish.
Raises:
psycopg2.Error/Warning.
Returns:
(fusion_host_name, db_name): unpublished database info.
"""
# Check if target exists.
# Note: Here we have case-sensitive query for target from target_table.
# It allows to keep target path as user have entered it. Client gets target
# path to unpublish from Server.
target_id = self._QueryTargetIdByPath(target_path)
if target_id == -1:
logger.warning(
"HandleUnpublishRequest: The target path %s does not exist.",
target_path)
return None, None
publish_context_id = self._QueryPublishContextId(target_id)
# Only try delete from the publish_context_table for
# a valid non zero publish_context_id.
if publish_context_id != 0:
# Delete the entry in 'publish_context_table' table.
query_string = ("DELETE FROM publish_context_table "
"WHERE publish_context_id = %s")
num_rows = self.DbModify(query_string, (publish_context_id,))
# Get db_name before deleting a corresponding entry in the
# target_db_table.
(unused_virtual_host_url, db_name, fusion_host_name,
unused_db_flags) = self._QueryTargetDetailsById(target_id)
# Delete the entry in target_db_table.
query_string = "DELETE FROM target_db_table WHERE target_id = %s"
num_rows = self.DbModify(query_string, (target_id,))
if num_rows:
# Remove un-published target from .htaccess by updating .htaccess file.
self.UpdateHtaccessFile()
if db_name:
# Delete target's publish directory.
self.DeleteTargetPublishDir(target_path, fusion_host_name, db_name)
return fusion_host_name, db_name
def IsTargetPathUsed(self, target_path):
"""Checks whether specific target path is in use.
    Note: The check is case-insensitive, since we make target path (URL-path)
case insensitive. We do not allow to have two of the same published points,
while keeping a target path in database as user have entered it.
Args:
target_path: target path.
Returns:
whether target path is in use.
Raises:
psycopg2.Error/Warning.
"""
query_string = ("""
SELECT 1 FROM target_table, target_db_table
WHERE lower(target_table.target_path) = %s AND
target_table.target_id = target_db_table.target_id
LIMIT 1""")
result = self.DbQuery(query_string, (target_path.lower(),))
if result:
return True
return False
def DeleteTargetPublishDir(self, target_path, client_host_name, db_name):
"""Deletes target's publish directory.
Args:
target_path: target path.
client_host_name: client host name.
db_name: database name (assetroot path).
Raises:
PublishServeException.
"""
(norm_db_path, db_type) = serve_utils.IdentifyPublishedDb(db_name)
if serve_utils.IsFusionDb(db_type):
if not client_host_name:
raise exceptions.PublishServeException(
"Internal error - undefined host name for Fusion database %s." %
db_name)
gedb_path = self.BuildDbPublishPath(client_host_name, norm_db_path)
target_gedb_path = self.BuildTargetPublishPath(gedb_path, target_path)
try:
logger.debug("Delete DB publish directory: %s", target_gedb_path)
# Remove all the files/dirs under the publish db path (included).
shutil.rmtree(target_gedb_path)
except OSError as e:
logger.warning(
"HandleUnpublishRequest: Could not delete DB publish directory: %s"
", Error: %s", target_gedb_path, e)
try:
# Remove '..gedb/targets'- directory if it is empty.
os.rmdir(os.path.dirname(target_gedb_path))
except OSError:
pass
def Cleanup(self):
"""Cleans up publisher (publish info tables).
Un-publishes Fusion DBs, portable globes that do not exist on filesystem.
Returns:
list of unpublished Fusion DBs/portables [{host:, path:},..]
"""
# Get information about published DBs/globes.
query_string = (
"""SELECT db_table.host_name, db_table.db_name, db_table.db_pretty_name,
target_table.target_path
FROM target_db_table, db_table, target_table
WHERE target_table.target_id = target_db_table.target_id AND
db_table.db_id = target_db_table.db_id
""")
results = self.DbQuery(query_string)
unpublished_dbs = []
# Flag for whether globes directory is mounted and at least one portable
# globe exists. If not, don't remove Portables from postgres db.
is_globes_mounted = (
os.path.exists(constants.CUTTER_GLOBES_PATH) and
serve_utils.ExistsPortableInDir(
constants.CUTTER_GLOBES_PATH))
if not is_globes_mounted:
logger.warning(
"HandleCleanupRequest: No portable files in directory %s."
" Volume may not be mounted.",
constants.CUTTER_GLOBES_PATH)
logger.warning("Portable globe publish records have not been cleaned.")
for line in results:
# Get database type.
(db_path, db_type) = serve_utils.IdentifyPublishedDb(line[1])
do_clean_up = False
if serve_utils.IsFusionDb(db_type):
db_host = line[0]
publish_db_path = self.BuildDbPublishPath(db_host, db_path)
publish_db_path = "{}/header.xml".format(publish_db_path)
db_name = serve_utils.GetFusionDbInfoName(line[2], db_type)
do_clean_up = True
else:
assert serve_utils.IsPortable(db_type)
if is_globes_mounted:
publish_db_path = "{}{}".format(
constants.CUTTER_GLOBES_PATH, db_path)
db_name = line[1]
db_host = ""
do_clean_up = True
else:
logger.warning("%s does not exist. Volume may not be mounted.",
                         constants.CUTTER_GLOBES_PATH)
target_path = line[3]
if do_clean_up and not os.path.exists(publish_db_path):
self.DoUnpublish(target_path)
unpublished_dbs.append({"host": db_host, "path": db_path})
logger.warning(
"The database/portable globe '{}' could not be found."
" The path '{}' serving it has been un-published.".format(
db_name, target_path))
logger.info("Publish info cleanup is complete.")
return unpublished_dbs
def _QueryTargetDbDetailsByPath(self, target_path):
"""Queries target details by target path.
Args:
target_path: target path.
Raises:
psycopg2.Error/Warning.
Returns:
      target details as a dict with keys "fusion_host", "dbname", "vhname"
      and "servewms". An empty dict is returned in case no DB is published
      to this target.
"""
assert target_path and target_path[0] == "/" and target_path[-1] != "/"
target_details = {}
query_string = ("""SELECT db_table.host_name, db_table.db_name,
virtual_host_table.virtual_host_name, target_table.serve_wms
FROM target_table, target_db_table, db_table, virtual_host_table
WHERE target_table.target_path = %s AND
target_table.target_id = target_db_table.target_id AND
target_db_table.db_id = db_table.db_id AND
target_db_table.virtual_host_id =
virtual_host_table.virtual_host_id""")
result = self.DbQuery(query_string, (target_path,))
if result:
assert isinstance(result[0], tuple)
(db_host_name, db_name, virtual_host_name, servewms) = result[0]
target_details.update({
"servewms": servewms,
"fusion_host": db_host_name,
"dbname": db_name,
"vhname": virtual_host_name,
})
return target_details
def _QueryPublishContextByTargetPath(self, target_path):
"""Queries gestream database to get publish_context for target path.
Args:
target_path : target path.
Raises:
psycopg2.Error/Warning.
Returns:
      publish_context as dict with fields {snippetsetname:string,
searchdefs:[string,], supsearchdefs:[string,], poifederated:bool}.
"""
publish_context = {}
query_string = ("""SELECT publish_context_table.snippets_set_name,
publish_context_table.search_def_names,
publish_context_table.supplemental_search_def_names,
publish_context_table.poifederated
FROM target_table, target_db_table, publish_context_table
WHERE target_table.target_path = %s AND
target_table.target_id = target_db_table.target_id AND
target_db_table.publish_context_id =
publish_context_table.publish_context_id""")
result = self.DbQuery(query_string, (target_path,))
if result:
assert isinstance(result[0], tuple)
(snippets_set_name, search_def_names, sup_search_def_names,
poifederated) = result[0]
publish_context.update({
"snippetsetname": snippets_set_name,
"searchdefs": search_def_names,
"supsearchdefs": sup_search_def_names,
})
if "POISearch" in search_def_names:
publish_context["poifederated"] = poifederated
return publish_context
def _QueryPublishContextId(self, target_id):
"""Queries publish_context_id from target_db_table.
Args:
target_id: target path Id.
Raises:
psycopg2.Error/Warning.
Returns:
Publish context id.
"""
publish_context_id = 0
query_string = ("SELECT publish_context_id FROM target_db_table "
"WHERE target_id = %s")
result = self.DbQuery(query_string, (target_id,))
if result:
publish_context_id = int(result[0])
return publish_context_id
def _QueryTargetDetailsById(self, target_id):
"""Queries target details by target ID.
Args:
target_id: target ID.
Raises:
psycopg2.Error/Warning.
Returns:
      target details as a tuple (virtual_host_url, db_name, host_name,
      db_flags). A (None, None, None, None) tuple is returned in case no
      DB is published to this target.
"""
virtual_host_url = None
db_name = None
host_name = None
db_flags = None
query_string = ("""
SELECT virtual_host_table.virtual_host_url, db_table.db_name,
db_table.host_name, db_table.db_flags
FROM target_db_table, virtual_host_table, db_table
WHERE target_db_table.target_id = %s AND
virtual_host_table.virtual_host_id =
target_db_table.virtual_host_id AND
db_table.db_id = target_db_table.db_id""")
result = self.DbQuery(query_string, (target_id,))
if result:
assert isinstance(result[0], tuple)
(virtual_host_url, db_name, host_name, db_flags) = result[0]
return (virtual_host_url, db_name, host_name, db_flags)
def HandleAddVsRequest(self,
vs_name, vs_url, vs_ssl, vs_cache_level,
response):
"""Handles add virtual server request.
Args:
vs_name: the virtual server name.
vs_url: the virtual server URL.
vs_ssl: whether it is SSL virtual server.
vs_cache_level: the virtual server cache level.
response: the response object.
Raises:
psycopg2.Error/Warning, PublishServeException
"""
# Check if virtual host already exists.
if self._QueryVirtualHostId(vs_name) != -1:
raise exceptions.PublishServeException(
"HandleAddVsRequest: Virtual host %s already exists." % vs_name)
    # We do not check if the corresponding config file exists. This is
    # because we don't know how our users might want to name that file.
# Add the virtual host entry.
query_string = (
"INSERT INTO virtual_host_table (virtual_host_name,"
" virtual_host_url, virtual_host_ssl, virtual_host_cache_level)"
" VALUES(%s, %s, %s, %s)")
self.DbModify(query_string, (vs_name, vs_url, vs_ssl, vs_cache_level))
# Create virtual server config file.
vs_url_complete = self._GetVhCompleteUrl(vs_url, vs_ssl)
self._CreateVsConfig(vs_name, vs_url_complete)
self._RestartServers()
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
def HandleDeleteVsRequest(self, vs_name, response):
"""Handles delete virtual server request.
Args:
vs_name: virtual host name.
response: response object.
Raises:
psycopg2.Error/Warning, PublishServeException
"""
# Check if virtual server exists and is disabled. There is no database
# published on this virtual server.
if self._QueryVsUsed(vs_name):
raise exceptions.PublishServeException(
"HandleDeleteVsRequest: Make sure the virtual host %s"
" exists and is currently not being used." % vs_name)
# Delete the entry in virtual_host_table.
query_string = "DELETE FROM virtual_host_table WHERE virtual_host_name = %s"
self.DbModify(query_string, [vs_name])
self._RestartServers()
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_STATUS_CODE, constants.STATUS_SUCCESS)
def GetPublishInfoList(self):
"""Gets publish info list.
Returns:
list of tuples (target_path, host_name, db_name).
"""
query_string = """
SELECT target_path, host_name, db_name
FROM target_table, db_table, target_db_table
WHERE
target_table.target_id = target_db_table.target_id AND
db_table.db_id = target_db_table.db_id"""
return self.DbQuery(query_string)
def GetSearchDefDetails(self, search_def_name):
return self._search_manager.GetSearchDefDetails(search_def_name)
def GetSearchDbId(self, client_host_name, db_name):
return self._search_manager.QueryDbId(client_host_name, db_name)
def GetVsUrlPathList(self):
query_string = (
"SELECT virtual_host_url FROM virtual_host_table")
results = self.DbQuery(query_string)
vh_list = []
for vh_url in results:
url_parse_res = urlparse.urlparse(vh_url)
vh_list.append(url_parse_res.path)
return vh_list
def GetCutSpecs(self):
"""Gets cut specifications.
Returns:
list of cut specifications.
"""
results = self.DbQuery("""
SELECT name, qtnodes, exclusion_qtnodes,
min_level, default_level, max_level
FROM cut_spec_table""")
return results
def UpdateHtaccessFile(self):
"""Updates .htaccess file."""
# Get a list of (target_path, target_id) pairs from target_table.
target_paths_list = self._ListTargetPaths()
if not target_paths_list:
return
# Sort by target path in descending order.
# Note: The order in which these rules are defined is
# important - this is the order in which they will be applied at run-time.
# Sorting in descending order is necessary to prevent usurping by shorter
# paths that would match first.
get_key = operator.itemgetter(0)
target_paths_list.sort(key=lambda elem: get_key(elem).lower(), reverse=True)
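    # For example, with targets "/map" and "/map/usa", the rules for
    # "/map/usa" must be written first; otherwise a request to "/map/usa"
    # would already be consumed by the shorter "/map" rules.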
# Write publish content into .htaccess.
out_file = tempfile.NamedTemporaryFile(
mode="w+",
prefix=PublishManagerHelper.HTACCESS_TMP_PREFIX,
delete=False)
try:
if os.path.exists(PublishManagerHelper.HTACCESS_PATH):
is_publish_content_added = False
with open(PublishManagerHelper.HTACCESS_PATH, mode="r") as in_file:
in_publish_section = False
for line in in_file:
if line == PublishManagerHelper.HTACCESS_GE_PUBLISH_BEGIN:
in_publish_section = True
self._WritePublishContentToHtaccessFile(
out_file, target_paths_list)
is_publish_content_added = True
elif line == PublishManagerHelper.HTACCESS_GE_PUBLISH_END:
in_publish_section = False
continue
if not in_publish_section:
out_file.write(line)
if not is_publish_content_added:
self._WritePublishContentToHtaccessFile(out_file, target_paths_list)
else:
self._WritePublishContentToHtaccessFile(out_file, target_paths_list)
except Exception:
out_file.close()
os.unlink(out_file.name)
raise
else:
# Copy temp htaccess file into htdocs rewriting existing .htaccess.
out_file.close()
shutil.copyfile(out_file.name, PublishManagerHelper.HTACCESS_PATH)
os.unlink(out_file.name)
def _AddTarget(self, target_path, serve_wms):
"""Adds target path into target_table and sets serve_wms flag.
Args:
target_path: target path.
serve_wms: whether target point is servable through WMS.
Raises:
psycopg2.Error/Warning, PublishServeException.
PublishServeException is raised in case of this target path is already
in use.
Returns:
target_id: ID of added/existed target point.
"""
assert target_path and target_path[0] == "/" and target_path[-1] != "/"
# Check if the target point already exists.
# Note: Here we have case-sensitive query for target from target_table.
# It allows to keep target path as user have entered it.
target_id = self._QueryTargetIdByPath(target_path)
if target_id != -1:
# Check if the target point is currently used.
if self._QueryIsTargetUsed(target_id):
# Note: might be an assert since we check it before.
raise exceptions.PublishServeException(
"Target path %s is already in use. Note that paths are "
"case insensitve. Input another path"
" or un-publish database using this path." % target_path)
# Sets serve_wms flag for existing path.
query_string = ("UPDATE target_table SET serve_wms = %s"
" WHERE target_path = %s")
self.DbModify(query_string, (serve_wms, target_path))
return target_id
# Add the target point entry.
query_string = (
"INSERT INTO target_table (target_path, serve_wms) VALUES(%s, %s)")
self.DbModify(query_string, (target_path, serve_wms))
target_id = self._QueryTargetIdByPath(target_path)
return target_id
def _QueryVirtualHostId(self, virtual_host_name):
"""Queries Virtual Host ID by name.
Args:
virtual_host_name: name of Virtual Host.
Raises:
psycopg2.Error/Warning.
Returns:
ID of Virtual Host in case of it exists, otherwise -1.
"""
query_string = ("SELECT virtual_host_id FROM virtual_host_table"
" WHERE virtual_host_name = %s")
result = self.DbQuery(query_string, (virtual_host_name,))
virtual_host_id = -1
if result:
virtual_host_id = int(result[0])
return virtual_host_id
def _QueryVirtualHostIdAndDbId(self, target_id):
"""Queries Virtual Host ID and Db ID by target ID.
Args:
target_id: target ID.
Raises:
psycopg2.Error/Warning.
Returns:
tuple (virtual_host_id, db_id). If there is no DB published on
specified target then it returns tuple (None, None).
"""
query_string = ("SELECT virtual_host_id, db_id FROM target_db_table"
" WHERE target_id = %s")
result = self.DbQuery(query_string, (target_id,))
virtual_host_id = None
db_id = None
if result:
assert isinstance(result[0], tuple)
virtual_host_id = int(result[0][0])
db_id = int(result[0][1])
return (virtual_host_id, db_id)
def _QueryTargetIdByPath(self, target_path):
"""Queries target point ID by its path.
Note: query is case-sensitive since we keep target path in database as user
have entered it.
Args:
target_path: target point path.
Raises:
psycopg2.Error/Warning.
Returns:
ID of target point in case of it exists otherwise -1.
"""
query_string = "SELECT target_id FROM target_table WHERE target_path = %s"
result = self.DbQuery(query_string, (target_path,))
target_id = -1
if result:
target_id = int(result[0])
return target_id
def _QueryIsTargetUsed(self, target_id):
"""Queries whether target point is taken.
Args:
target_id: target point ID.
Raises:
psycopg2.Error/Warning.
Returns:
whether target point with specified target_id is used.
"""
is_target_used = False
query_string = "SELECT db_id FROM target_db_table WHERE target_id = %s"
result = self.DbQuery(query_string, (target_id,))
if result:
is_target_used = True
return is_target_used
def _QueryDbAndHostName(self, db_id):
"""Queries database name and host name by database ID.
Args:
db_id: database ID.
Raises:
psycopg2.Error/Warning.
Returns:
      (db_name, host_name) tuple. Values are None in case the database with
      the specified db_id does not exist.
"""
host_name = None
db_name = None
if db_id == 0:
return (db_name, host_name)
query_string = "SELECT db_name, host_name FROM db_table WHERE db_id = %s"
result = self.DbQuery(query_string, (db_id,))
if result:
assert isinstance(result[0], tuple)
db_name = result[0][0]
host_name = result[0][1]
return (db_name, host_name)
def _QueryVsUsed(self, vs_name):
"""Queries whether virtual server is used.
Virtual server is used - there is a database served with it.
Args:
vs_name: virtual server name.
Returns:
whether virtual server is used.
Raises:
psycopg2.Error/Warning
"""
query_string = (
"SELECT db_id FROM target_db_table WHERE virtual_host_id IN ("
"SELECT virtual_host_id FROM virtual_host_table"
" WHERE virtual_host_name = %s)")
results = self.DbQuery(query_string, (vs_name,))
if results:
return True
return False
def _GetDbsList(self, response):
"""Gets list of available databases.
Args:
response: response object.
Raises:
psycopg2.Error/Warning.
Returns:
list of DbInfo objects serialized into json response object.
"""
# TODO: try-except here is a temporary solution.
# Move to DoGet() when all responses are formatted in json.
try:
database_list, unused_set = self._GetDatabaseList()
http_io.ResponseWriter.AddJsonBody(
response, constants.STATUS_SUCCESS, database_list)
except exceptions.PublishServeException as e:
logger.error(e)
http_io.ResponseWriter.AddJsonFailureBody(response, str(e))
except psycopg2.Warning as w:
logger.error(w)
      http_io.ResponseWriter.AddJsonFailureBody(response, str(w))
except psycopg2.Error as e:
logger.error(e)
      http_io.ResponseWriter.AddJsonFailureBody(response, str(e))
except Exception as e:
logger.error(e)
http_io.ResponseWriter.AddJsonFailureBody(
response, "Server-side Internal Error: {}".format(e))
def _GetAllAssets(self, response):
"""Gets list of available fusion databases and portables.
Args:
response: response object.
Raises:
psycopg2.Error/Warning.
Returns:
list of databases and portables serialized into json response object.
"""
try:
results, registered_portable_set = self._GetDatabaseList()
results.extend(self._GetPortableGlobesList(registered_portable_set))
http_io.ResponseWriter.AddJsonBody(
response, constants.STATUS_SUCCESS, results)
except exceptions.PublishServeException as e:
logger.error(e)
http_io.ResponseWriter.AddJsonFailureBody(response, str(e))
except psycopg2.Warning as w:
logger.error(w)
      http_io.ResponseWriter.AddJsonFailureBody(response, str(w))
except psycopg2.Error as e:
logger.error(e)
      http_io.ResponseWriter.AddJsonFailureBody(response, str(e))
except Exception as e:
logger.error(e)
http_io.ResponseWriter.AddJsonFailureBody(
response, "Server-side Internal Error: {}".format(e))
# TODO: add database description in Fusion and handle it here.
def _GetDatabaseList(self):
"""Gets list of Fusion databases.
Raises:
psycopg2.Error/Warning.
Returns:
tuple (list of DbInfo objects, list of registered portable names).
"""
# Get database information from db_table.
query_string = ("SELECT db_id, host_name, db_name, db_pretty_name,"
" db_timestamp AT TIME ZONE 'UTC', db_size FROM db_table")
results = self.DbQuery(query_string)
# Holder for registered portable names.
registered_portable_set = set()
# Parsing results into DbInfo list.
db_info_list = []
db_id_list = []
for line in results:
db_id_list.append(int(line[0])) # db_table.db_id
db_info = basic_types.DbInfo()
db_info.host = line[1] # db_table.host_name
db_info.path = line[2] # db_table.db_name
db_info.name = line[3] # db_table.db_pretty_name
timestamp = line[4] # db_table.db_timestamp
if timestamp:
assert isinstance(timestamp, datetime.datetime)
db_info.timestamp = serve_utils.DatetimeNoTzToIsoFormatUtc(timestamp)
db_info.size = line[5] # db_table.db_size
db_info.description = line[3] # db_table.db_pretty_name
db_info.registered = True
# Determine database features.
serve_utils.CalcDatabaseAttributes(db_info)
# Check whether the Fusion database has been pushed from remote host and
# set corresponding flag in DbInfo.
if serve_utils.IsFusionDb(db_info.type):
db_info.remote = self._IsFusionDbRemote(db_info)
# Store name of registered portables to avoid list duplicates.
if serve_utils.IsPortable(db_info.type):
storage_name = (db_info.name[1:] if db_info.name[0] == "/" else
db_info.name)
registered_portable_set.add(storage_name)
db_info_list.append(db_info)
# Set whether database has POI search data.
search_dbs_set = set(self._search_manager.QueryListDbs())
if search_dbs_set:
for db_info in db_info_list:
db_info.has_poi = (db_info.host, db_info.path) in search_dbs_set
# Get auxiliary dictionary mapping a database ID to a publish info list.
db_to_publish_info = self._GetDbIdToPublishInfoDict()
# Get auxiliary dictionary mapping a Virtual Host name to a base URL.
vhname_to_baseurl = self._GetVhNameToBaseUrlDict()
# For published databases in DbInfo list, set publish info:
# virtual host name, target base URL, target path, serve_wms.
# Note: we get additional db_info-s in case we have
    # databases that are published to more than one target.
add_db_info_list = []
for db_id, db_info in zip(db_id_list, db_info_list):
if db_id in db_to_publish_info:
publish_info_list = db_to_publish_info[db_id]
publish_info = publish_info_list[0]
db_info.virtual_host_name = publish_info[0]
db_info.target_base_url = vhname_to_baseurl[db_info.virtual_host_name]
db_info.target_path = publish_info[1]
db_info.serve_wms = publish_info[2]
if len(publish_info_list) > 1:
for vh_name, target_path, serve_wms in publish_info_list[1:]:
add_db_info = copy.copy(db_info)
add_db_info.virtual_host_name = vh_name
add_db_info.target_base_url = vhname_to_baseurl[
add_db_info.virtual_host_name]
add_db_info.target_path = target_path
add_db_info.serve_wms = serve_wms
add_db_info_list.append(add_db_info)
db_info_list.extend(add_db_info_list)
return (db_info_list, registered_portable_set)
def _GetPortableGlobesList(self, registered_portable_set):
"""Gets portable globes list.
Scans cutter/globes directory and sub-directories for all the glx files
located there. First checks for registered portables and ignores them;
They are already added in _GetDatabaseList().
Args:
registered_portable_set: set of registered portable names.
Returns:
list of (unregistered) Portable globes.
"""
# Build a list of portable globes.
globes_list = []
root = constants.CUTTER_GLOBES_PATH
for name in os.listdir(root):
# Ignore globes that are registered.
if name not in registered_portable_set:
db_info = basic_types.DbInfo()
db_info.name = name
db_info.type = db_info.name[-3:]
# Ignore files that are not Portables, eg .README
if serve_utils.IsPortable(db_info.type):
serve_utils.GlxDetails(db_info)
if db_info.size > GLOBE_SIZE_THRESHOLD:
globes_list.append(db_info)
return globes_list
def _CreateVsConfig(self, vs_name, vs_url):
"""Writes virtual server config for specified virtual host.
Args:
vs_name: virtual server name.
vs_url: virtual server URL.
"""
logger.debug("_CreateVsConfig...")
url_parse_res = urlparse.urlparse(vs_url)
if url_parse_res.scheme == "https":
vs_config_file_path = os.path.normpath(
os.path.join(
PublishManagerHelper.VS_CONFIG_PATH,
(vs_name + "_host.location_ssl")))
self._WriteSslVsConfig(
vs_config_file_path, vs_name, url_parse_res.path)
else:
vs_config_file_path = os.path.normpath(
os.path.join(
PublishManagerHelper.VS_CONFIG_PATH,
(vs_name + "_host.location")))
self._WriteVsConfig(
vs_config_file_path, vs_name, url_parse_res.path)
def _WriteVsConfig(self, vs_config_file_path, vs_name, vs_path):
"""Write default content to VS config file.
Args:
vs_config_file_path: config file path.
vs_name: virtual server name.
vs_path: virtual server path (location).
"""
with open(vs_config_file_path, "w") as f:
f.write("# The virtual host %s.\n" % vs_name)
f.write("RewriteEngine on\n\n")
f.write("<Location %s/>\n" % vs_path)
f.write(" SetHandler fdb-handler\n")
f.write("</Location>\n")
def _WriteSslVsConfig(self, vs_config_file_path, vs_name, vs_path):
"""Write default content to SSL VS config.
Args:
vs_config_file_path: config file path.
vs_name: virtual server name.
vs_path: virtual server path (location).
"""
with open(vs_config_file_path, "w") as f:
f.write("# The SSL virtual host %s.\n" % vs_name)
f.write("RewriteEngine on\n\n")
f.write("<Location %s/>\n" % vs_path)
f.write(" SetHandler fdb-handler\n")
f.write(" SSLRequireSSL\n")
f.write(" SSLVerifyClient none\n")
f.write("</Location>\n")
def _RestartServers(self):
"""Restart servers.
Raises:
PublishServeException
"""
logger.debug("_RestartServers...")
try:
# Reload Apache configs
cmd_reload = "/opt/google/bin/gerestartapache"
logger.info("Earth Server restarting...")
subprocess.Popen([cmd_reload, ""])
except Exception as e:
raise exceptions.PublishServeException(e)
def _ListTargetPaths(self):
"""Gets target paths serving published databases.
Raises:
psycopg2.Error/Warning.
Returns:
list of tuples (target_path, target_id, serve_wms).
"""
query_string = (
"SELECT target_path, target_id, serve_wms FROM target_table"
" WHERE target_id IN (SELECT target_id FROM target_db_table)")
return self.DbQuery(query_string)
def _WritePublishContentToHtaccessFile(self, htaccess_file,
target_paths_list):
"""Writes publish content into htaccess-file.
Args:
htaccess_file: file descriptor for writing to.
target_paths_list: target paths list.
Raises:
psycopg2.Error/Warning, PublishServeException.
"""
# Write publish header to file.
htaccess_file.write("%s" % PublishManagerHelper.HTACCESS_GE_PUBLISH_BEGIN)
# Write RewriteBase to file.
htaccess_file.write("%s" % HTACCESS_REWRITE_BASE)
# Collects all the needed information for all the target paths based on
    # target ID and adds corresponding rewrite rules into the htaccess file.
for (target_path, target_id, serve_wms) in target_paths_list:
(virtual_host_url,
db_name, host_name, db_flags) = self._QueryTargetDetailsById(target_id)
if (not virtual_host_url) or (not db_name):
continue # no DB published on this target path.
# Identify type of published DB.
(unused_norm_db_path, db_type) = serve_utils.IdentifyPublishedDb(db_name)
if serve_utils.IsFusionDb(db_type):
if not host_name:
raise exceptions.PublishServeException(
"Internal Error - undefined host name for Fusion database %s." %
db_name)
else:
assert serve_utils.IsPortable(db_type)
if host_name:
raise exceptions.PublishServeException(
"Internal Error - host name is not empty for portable %s." %
db_name)
      # Put the rules into the htaccess file for the current target.
url_parse_res = urlparse.urlparse(virtual_host_url)
virtual_host_path = url_parse_res.path
relative_target_path = target_path[1:]
# Common lines for all the databases, globes.
htaccess_file.write(LINE0_TARGETDESCR % target_path)
htaccess_file.write(LINE1_TRAILING_SLASH_REWRITERULE % (
relative_target_path, relative_target_path))
htaccess_file.write(LINE2_POISEARCH_REWRITERULE % (
relative_target_path,
constants.POI_SEARCH_SERVICE_NAME,
constants.POI_SEARCH_SERVICE_NAME))
if serve_wms:
htaccess_file.write(WMS_LINE0_REWRITECOND)
htaccess_file.write(WMS_LINE1_REWRITERULE % (
relative_target_path, target_path))
else:
htaccess_file.write(WMS_LINE0_REWRITERULE_R404 % (
relative_target_path))
# Content for Fusion earth (GE database).
if db_type == basic_types.DbType.TYPE_GE:
htaccess_file.write(GE_LINE0_REWRITERULE % relative_target_path)
htaccess_file.write(GE_LINE1_REWRITECOND)
htaccess_file.write(GE_LINE2_REWRITERULE % (
relative_target_path, virtual_host_path, target_path, db_type))
# Content for Fusion map (map database).
elif db_type == basic_types.DbType.TYPE_MAP:
assert isinstance(db_flags, int)
if db_flags & basic_types.DbFlags.USE_GOOGLE_BASEMAP == 0:
htaccess_file.write(MAP_LINE0_LOCAL_REWRITERULE %
relative_target_path)
else:
htaccess_file.write(MAP_LINE0_GOOGLE_REWRITERULE %
relative_target_path)
htaccess_file.write(MAP_LINE1_REWRITERULE % relative_target_path)
htaccess_file.write(MAP_LINE2_REWRITECOND)
htaccess_file.write(MAP_LINE3_REWRITERULE % (
relative_target_path, virtual_host_path, target_path, db_type))
# Content for portable globes.
elif serve_utils.IsPortable(db_type):
htaccess_file.write(GLX_LINE0_REWRITERULE % (
relative_target_path, target_path))
htaccess_file.write(GLX_LINE1_REWRITECOND)
htaccess_file.write(GLX_LINE2_REWRITERULE % (
relative_target_path, virtual_host_path, target_path, db_type))
else:
raise exceptions.PublishServeException(
"Unsupported DB type %s.", db_type)
    # Write publish footer to file.
    htaccess_file.write("\n%s" % PublishManagerHelper.HTACCESS_GE_PUBLISH_END)
def _GetDbIdToPublishInfoDict(self):
"""Builds a dictionary mapping a database ID to a publish info list.
Returns:
dictionary mapping a database ID to a publish info list -
{db_id: [(vh_name, target_path, serve_wms),]}
"""
# Get (db_id, target paths, virtual_host) tuples for all published dbs.
query_db_target = (
"SELECT target_db_table.db_id,"
" virtual_host_table.virtual_host_name,"
" target_table.target_path, target_table.serve_wms"
" FROM target_db_table, target_table, virtual_host_table"
" WHERE target_table.target_id = target_db_table.target_id AND"
" virtual_host_table.virtual_host_id = target_db_table.virtual_host_id")
results = self.DbQuery(query_db_target)
# Build a dictionary.
db_to_publish_info_dct = dict(
(db_id, []) for (db_id, unused_vh_name, unused_target_path,
unused_serve_wms) in results)
for db_id, vh_name, target_path, serve_wms in results:
db_to_publish_info_dct[db_id].append(
(vh_name, target_path, serve_wms))
return db_to_publish_info_dct
def _GetVhNameToBaseUrlDict(self):
"""Builds a dictionary mapping a virtual host name to a base URL.
Returns:
dictionary mapping a virtual host name to a base URL
(scheme://host[:port]) - {vh_name: vh_base_url}
"""
vh_list = self.QueryVhList()
vhname_to_baseurl_dct = {
vh_name: self.GetVhBaseUrl(vh_url, vh_ssl) for (
vh_name, vh_url, vh_ssl) in vh_list}
return vhname_to_baseurl_dct
def GetVhBaseUrl(self, vh_url, vh_ssl):
"""Builds a Virtual Host base URL.
If the vh_url is scheme://host:port/path, then it extracts
scheme://host:port to build a base URL, otherwise (vh_url is a path,
e.g. /public) it builds a base URL based on information in Apache config
and FQDN.
Args:
vh_url: virtual host URL - /path or scheme://host:[port]/path.
vh_ssl: whether virtual host is SSL.
Raises:
PublishServeException.
Returns:
virtual host base URL - scheme://host[:port]
"""
url_parse_res = urlparse.urlparse(vh_url)
if url_parse_res.scheme and url_parse_res.netloc:
return "{}://{}".format(url_parse_res.scheme, url_parse_res.netloc)
else:
# VH URL is specified as absolute path, then build VH base URL based on
# information in Apache config.
scheme_host_port = utils.GetApacheSchemeHostPort()
if not scheme_host_port:
raise exceptions.PublishServeException(
"Unable to build Server URL based on Apache config.")
else:
assert len(scheme_host_port) == 3
(scheme, host, port) = scheme_host_port
assert scheme
assert host
assert port
      # Override the scheme in accordance with the VH properties.
scheme = "https" if vh_ssl else "http"
host = "localhost" if vh_url == "/local_host" else host
vh_base_url = "{}://{}".format(scheme, host)
# Note: Do not pick up port from Apache config for SSL virtual host,
# use default port if SSL virtual host specified with absolute path.
if (not vh_ssl) and port and port != "80":
# Get port number for not SSL virtual host from Apache config and
# put it into URL if it is not default.
vh_base_url += ":{}".format(port)
return vh_base_url
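  # Illustrative results, assuming a hypothetical Apache config where
  # utils.GetApacheSchemeHostPort() returns ("http", "gee.example.com", "8080"):
  #   GetVhBaseUrl("/public", vh_ssl=False) -> "http://gee.example.com:8080"
  #   GetVhBaseUrl("/secure", vh_ssl=True)  -> "https://gee.example.com"
  #   GetVhBaseUrl("https://other.example.com/public", vh_ssl=True)
  #       -> "https://other.example.com"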
def _GetVhCompleteUrl(self, vh_url, vh_ssl):
"""Builds a Virtual Host complete URL.
If the vh_url is scheme://host:port/path, then just return vh_url,
otherwise (vh_url is an absolute path, e.g. /public) it builds a VH URL
based on information in Apache config and FQDN.
Args:
vh_url: virtual host URL - /path or scheme://host:[port]/path.
vh_ssl: whether virtual host is SSL.
Raises:
PublishServeException.
Returns:
virtual host complete URL - scheme://host[:port]/path
"""
vh_base_url = self.GetVhBaseUrl(vh_url, vh_ssl)
return urlparse.urljoin(vh_base_url, vh_url)
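  # Note on the urljoin call above: if vh_url is an absolute path such as
  # "/public", the result is base URL + path (e.g.
  # "http://gee.example.com:8080/public" for the hypothetical config above);
  # if vh_url is already a full URL, urljoin simply returns vh_url unchanged.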
def _IsFusionDbRemote(self, db_info):
"""Determines whether a Fusion database has been pushed from remote host.
Args:
db_info: database info.
Returns:
whether a Fusion database has been pushed from remote Fusion host.
"""
assert serve_utils.IsFusionDb(db_info.type)
return db_info.host != self.server_hostname
def GetTargetDetails(self, target_path):
"""gets target details by target path.
Args:
target_path: target path.
Raises:
PublishServeException
Returns:
target details of a target.
"""
target_details = {}
target_db_details = {}
publish_context = {}
target_db_details = self._QueryTargetDbDetailsByPath(target_path)
if not target_db_details:
error_msg = (
"GetTargetDetails: No target details found for target path %s. "
"Make sure target path exists." % target_path)
raise exceptions.PublishServeException(error_msg)
target_details.update({
"targetpath": target_path,
"servewms": target_db_details["servewms"],
"fusion_host": target_db_details["fusion_host"],
"dbname": target_db_details["dbname"],
"vhname": target_db_details["vhname"],
})
publish_context = self._QueryPublishContextByTargetPath(target_path)
if publish_context:
target_details.update({"publishcontext": publish_context,})
return target_details
def HandleTargetDetailsRequest(self, target_path, response):
"""Handles 'targetdetails' request.
Args:
target_path: target path.
response: response object.
Raises:
PublishServeException
"""
target_details = self.GetTargetDetails(target_path)
if not target_details:
raise exceptions.PublishServeException(
"HandleTargetDetailsRequest: The publish target %s does not exist." %
target_path)
http_io.ResponseWriter.AddBodyElement(response, constants.HDR_STATUS_CODE,
constants.STATUS_SUCCESS)
for key, value in target_details.iteritems():
if key == "publishcontext":
for publish_context_key, publish_context_value in value.iteritems():
http_io.ResponseWriter.AddBodyElement(
response, constants.HDR_DATA,
"%s : %s" % (publish_context_key, publish_context_value))
else:
http_io.ResponseWriter.AddBodyElement(response, constants.HDR_DATA,
"%s : %s" % (key, value))
def IsDatabasePushed(self, client_host_name, db_name):
"""Check if the database is pushed.
Args:
client_host_name: Request originating host name.
db_name: Fusion database name.
Raises:
PublishServeException
Returns:
"True" if the database is pushed.
"""
(unused_db_path, db_type) = serve_utils.IdentifyPublishedDb(db_name)
# Check if the DB exists.
db_id = self.QueryDbId(client_host_name, db_name)
if db_id == 0:
raise exceptions.PublishServeException(
"Database %s does not exist on server.\n"
"It needs to be registered/pushed before publishing." % db_name)
# Check if the DB is pushed.
if self.SynchronizeDb(db_id, db_type, client_host_name):
error_msg = ("Database %s does not exist on server. It needs to be "
"registered/pushed before publishing." % db_name)
logger.error(error_msg)
raise exceptions.PublishServeException(error_msg)
return True
def SwapTargets(self, target_path_a, target_path_b):
"""Check if the targets can be swapped.
Args:
target_path_a: First target path.
target_path_b: Second target path.
Raises:
PublishServeException
Returns:
Publish Context of both targets.
"""
# Check if the target paths are the same.
if target_path_a == target_path_b:
raise exceptions.PublishServeException(
"HandleSwapTargetsRequest:target paths %s and %s are same." %
(target_path_a, target_path_b))
# Get target details for target_path_a.
target_details_a = self.GetTargetDetails(target_path_a)
if not target_details_a:
raise exceptions.PublishServeException(
"HandleSwapTargetsRequest: Make sure the target path %s "
"exists and is currently published." % target_path_a)
if "publishcontext" not in target_details_a.keys():
error_msg = ("SwapTargets: publish context does not exist "
"for target path %s. This command is not supported "
"for databases published with GEE 5.1.2 or earlier." %
target_path_a)
raise exceptions.PublishServeException(error_msg)
# Get target details for target_path_b.
target_details_b = self.GetTargetDetails(target_path_b)
if not target_details_b:
raise exceptions.PublishServeException(
"HandleSwapTargetsRequest: Make sure the target path '%s' "
"exists and is currently published." % target_path_b)
if "publishcontext" not in target_details_b.keys():
error_msg = (
"SwapTargets: publish context does not exist "
"for target path %s. This command is not supported for databases "
"pubished using older version of fusion." % target_path_b)
raise exceptions.PublishServeException(error_msg)
# Swap target paths.
t_path = target_details_a["targetpath"]
target_details_a["targetpath"] = target_details_b["targetpath"]
target_details_b["targetpath"] = t_path
return (target_details_a, target_details_b)
def AreDatabasesComparable(self, db_name1, host_name1, db_name2, host_name2):
"""Check if the databases are same.
Args:
db_name1: First database.
host_name1 : GEE host where db_name1 is published.
db_name2: Second database.
host_name2 : GEE host where db_name2 is published.
Returns:
boolean value depending on whether databases are comparable or not.
"""
if host_name1 != host_name2:
return False
p = re.compile(r".*/(.*)/.*\.kda/.*")
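    # The capture group is the path component directly above the "*.kda"
    # segment; e.g. for a hypothetical db_name "/gevol/assets/MyDb/db.kda/gedb"
    # the captured group is "MyDb".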
match = p.search(db_name1)
if match:
dbname_1 = match.groups()[0]
match = p.search(db_name2)
if match:
dbname_2 = match.groups()[0]
return dbname_1 == dbname_2
return False
def main():
pass
if __name__ == "__main__":
main()
|
daydayuplo/gee
|
earth_enterprise/src/server/wsgi/serve/publish/publish_manager_helper.py
|
Python
|
apache-2.0
| 61,989
|
"""Certbot constants."""
import os
import logging
from acme import challenges
SETUPTOOLS_PLUGINS_ENTRY_POINT = "certbot.plugins"
"""Setuptools entry point group name for plugins."""
OLD_SETUPTOOLS_PLUGINS_ENTRY_POINT = "letsencrypt.plugins"
"""Plugins Setuptools entry point before rename."""
CLI_DEFAULTS = dict(
config_files=[
"/etc/letsencrypt/cli.ini",
# http://freedesktop.org/wiki/Software/xdg-user-dirs/
os.path.join(os.environ.get("XDG_CONFIG_HOME", "~/.config"),
"letsencrypt", "cli.ini"),
],
verbose_count=-int(logging.INFO / 10),
server="https://acme-v01.api.letsencrypt.org/directory",
rsa_key_size=2048,
rollback_checkpoints=1,
config_dir="/etc/letsencrypt",
work_dir="/var/lib/letsencrypt",
logs_dir="/var/log/letsencrypt",
no_verify_ssl=False,
http01_port=challenges.HTTP01Response.PORT,
tls_sni_01_port=challenges.TLSSNI01Response.PORT,
auth_cert_path="./cert.pem",
auth_chain_path="./chain.pem",
strict_permissions=False,
)
STAGING_URI = "https://acme-staging.api.letsencrypt.org/directory"
"""Defaults for CLI flags and `.IConfig` attributes."""
QUIET_LOGGING_LEVEL = logging.WARNING
"""Logging level to use in quiet mode."""
RENEWER_DEFAULTS = dict(
renewer_enabled="yes",
renew_before_expiry="30 days",
# This value should ensure that there is never a deployment delay by
# default.
deploy_before_expiry="99 years",
)
"""Defaults for renewer script."""
ENHANCEMENTS = ["redirect", "http-header", "ocsp-stapling", "spdy"]
"""List of possible :class:`certbot.interfaces.IInstaller`
enhancements.
List of expected options parameters:
- redirect: None
- http-header: TODO
- ocsp-stapling: certificate chain file path
- spdy: TODO
"""
ARCHIVE_DIR = "archive"
"""Archive directory, relative to `IConfig.config_dir`."""
CONFIG_DIRS_MODE = 0o755
"""Directory mode for ``.IConfig.config_dir`` et al."""
ACCOUNTS_DIR = "accounts"
"""Directory where all accounts are saved."""
BACKUP_DIR = "backups"
"""Directory (relative to `IConfig.work_dir`) where backups are kept."""
CSR_DIR = "csr"
"""See `.IConfig.csr_dir`."""
IN_PROGRESS_DIR = "IN_PROGRESS"
"""Directory used before a permanent checkpoint is finalized (relative to
`IConfig.work_dir`)."""
KEY_DIR = "keys"
"""Directory (relative to `IConfig.config_dir`) where keys are saved."""
LIVE_DIR = "live"
"""Live directory, relative to `IConfig.config_dir`."""
TEMP_CHECKPOINT_DIR = "temp_checkpoint"
"""Temporary checkpoint directory (relative to `IConfig.work_dir`)."""
RENEWAL_CONFIGS_DIR = "renewal"
"""Renewal configs directory, relative to `IConfig.config_dir`."""
RENEWER_CONFIG_FILENAME = "renewer.conf"
"""Renewer config file name (relative to `IConfig.config_dir`)."""
|
bsmr-misc-forks/letsencrypt
|
certbot/constants.py
|
Python
|
apache-2.0
| 2,789
|
"""
Support for AlarmDecoder-based alarm control panels (Honeywell/DSC).
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/alarm_control_panel.alarmdecoder/
"""
import asyncio
import logging
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.components.alarm_control_panel as alarm
from homeassistant.components.alarmdecoder import (DATA_AD,
SIGNAL_PANEL_MESSAGE)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED,
STATE_UNKNOWN, STATE_ALARM_TRIGGERED)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['alarmdecoder']
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Perform the setup for AlarmDecoder alarm panels."""
_LOGGER.debug("AlarmDecoderAlarmPanel: setup")
device = AlarmDecoderAlarmPanel("Alarm Panel", hass)
async_add_devices([device])
return True
class AlarmDecoderAlarmPanel(alarm.AlarmControlPanel):
"""Representation of an AlarmDecoder-based alarm panel."""
def __init__(self, name, hass):
"""Initialize the alarm panel."""
self._display = ""
self._name = name
self._state = STATE_UNKNOWN
_LOGGER.debug("Setting up panel")
@asyncio.coroutine
def async_added_to_hass(self):
"""Register callbacks."""
async_dispatcher_connect(
self.hass, SIGNAL_PANEL_MESSAGE, self._message_callback)
@callback
def _message_callback(self, message):
if message.alarm_sounding or message.fire_alarm:
if self._state != STATE_ALARM_TRIGGERED:
self._state = STATE_ALARM_TRIGGERED
self.hass.async_add_job(self.async_update_ha_state())
elif message.armed_away:
if self._state != STATE_ALARM_ARMED_AWAY:
self._state = STATE_ALARM_ARMED_AWAY
self.hass.async_add_job(self.async_update_ha_state())
elif message.armed_home:
if self._state != STATE_ALARM_ARMED_HOME:
self._state = STATE_ALARM_ARMED_HOME
self.hass.async_add_job(self.async_update_ha_state())
else:
if self._state != STATE_ALARM_DISARMED:
self._state = STATE_ALARM_DISARMED
self.hass.async_add_job(self.async_update_ha_state())
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def code_format(self):
"""Regex for code format or None if no code is required."""
return '^\\d{4,6}$'
@property
def state(self):
"""Return the state of the device."""
return self._state
@asyncio.coroutine
def async_alarm_disarm(self, code=None):
"""Send disarm command."""
_LOGGER.debug("alarm_disarm: %s", code)
if code:
_LOGGER.debug("alarm_disarm: sending %s1", str(code))
self.hass.data[DATA_AD].send("{!s}1".format(code))
@asyncio.coroutine
def async_alarm_arm_away(self, code=None):
"""Send arm away command."""
_LOGGER.debug("alarm_arm_away: %s", code)
if code:
_LOGGER.debug("alarm_arm_away: sending %s2", str(code))
self.hass.data[DATA_AD].send("{!s}2".format(code))
@asyncio.coroutine
def async_alarm_arm_home(self, code=None):
"""Send arm home command."""
_LOGGER.debug("alarm_arm_home: %s", code)
if code:
_LOGGER.debug("alarm_arm_home: sending %s3", str(code))
self.hass.data[DATA_AD].send("{!s}3".format(code))
|
JshWright/home-assistant
|
homeassistant/components/alarm_control_panel/alarmdecoder.py
|
Python
|
apache-2.0
| 3,884
|
# -*- coding: utf-8 -*-
import json
import time
import copy
import math
import subprocess
import re
from requests_toolbelt import MultipartEncoder
from . import config
def getVideoInfo(filename):
res = {}
try:
terminalResult = subprocess.Popen(["ffprobe", filename],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
for x in terminalResult.stdout.readlines():
# Duration: 00:00:59.51, start: 0.000000, bitrate: 435 kb/s
m = re.search('duration: (\d\d:\d\d:\d\d\.\d\d),', str(x), flags=re.IGNORECASE)
if m is not None:
res['duration'] = m.group(1)
# Video: h264 (Constrained Baseline) (avc1 / 0x31637661), yuv420p, 480x268
m = re.search('video:\s.*\s(\d+)x(\d+)\s', str(x), flags=re.IGNORECASE)
if m is not None:
res['width'] = m.group(1)
res['height'] = m.group(2)
finally:
if 'width' not in res:
print("ERROR: 'ffprobe' not found, pls install 'ffprobe' with one of following methods")
print(" sudo apt-get install ffmpeg")
print("or sudo apt-get install -y libav-tools")
return res
def uploadVideo(self, video, thumbnail, caption=None, upload_id=None):
if upload_id is None:
upload_id = str(int(time.time() * 1000))
data = {
'upload_id': upload_id,
'_csrftoken': self.token,
'media_type': '2',
'_uuid': self.uuid,
}
m = MultipartEncoder(data, boundary=self.uuid)
self.session.headers.update({'X-IG-Capabilities': '3Q4=',
'X-IG-Connection-Type': 'WIFI',
'Host': 'i.instagram.com',
'Cookie2': '$Version=1',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Content-type': m.content_type,
'Connection': 'keep-alive',
'User-Agent': config.USER_AGENT})
response = self.session.post(config.API_URL + "upload/video/", data=m.to_string())
if response.status_code == 200:
body = json.loads(response.text)
upload_url = body['video_upload_urls'][3]['url']
upload_job = body['video_upload_urls'][3]['job']
videoData = open(video, 'rb').read()
# solve issue #85 TypeError: slice indices must be integers or None or have an __index__ method
request_size = int(math.floor(len(videoData) / 4))
lastRequestExtra = (len(videoData) - (request_size * 3))
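        # e.g. for len(videoData) == 10: request_size == 2, lastRequestExtra == 4,
        # so the four uploaded chunks are 2, 2, 2 and 4 bytes long.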
headers = copy.deepcopy(self.session.headers)
self.session.headers.update({'X-IG-Capabilities': '3Q4=',
'X-IG-Connection-Type': 'WIFI',
'Cookie2': '$Version=1',
'Accept-Language': 'en-US',
'Accept-Encoding': 'gzip, deflate',
'Content-type': 'application/octet-stream',
'Session-ID': upload_id,
'Connection': 'keep-alive',
'Content-Disposition': 'attachment; filename="video.mov"',
'job': upload_job,
'Host': 'upload.instagram.com',
'User-Agent': config.USER_AGENT})
for i in range(0, 4):
start = i * request_size
if i == 3:
end = i * request_size + lastRequestExtra
else:
end = (i + 1) * request_size
length = lastRequestExtra if i == 3 else request_size
content_range = "bytes {start}-{end}/{lenVideo}".format(start=start, end=(end - 1),
lenVideo=len(videoData)).encode('utf-8')
self.session.headers.update({'Content-Length': str(end - start), 'Content-Range': content_range, })
response = self.session.post(upload_url, data=videoData[start:start + length])
self.session.headers = headers
if response.status_code == 200:
if self.configureVideo(upload_id, video, thumbnail, caption):
self.expose()
return True
return False
def configureVideo(self, upload_id, video, thumbnail, caption=''):
clipInfo = getVideoInfo(video)
self.uploadPhoto(photo=thumbnail, caption=caption, upload_id=upload_id)
data = json.dumps({
'upload_id': upload_id,
'source_type': 3,
'poster_frame_index': 0,
'length': 0.00,
'audio_muted': False,
'filter_type': 0,
'video_result': 'deprecated',
'clips': {
'length': clipInfo['duration'],
'source_type': '3',
'camera_position': 'back',
},
'extra': {
'source_width': clipInfo['width'],
'source_height': clipInfo['height'],
},
'device': config.DEVICE_SETTINTS,
'_csrftoken': self.token,
'_uuid': self.uuid,
'_uid': self.user_id,
'caption': caption,
})
return self.SendRequest('media/configure/?video=1', self.generateSignature(data))
|
Diapostrofo/instabot
|
instabot/api/api_video.py
|
Python
|
apache-2.0
| 5,431
|
from past.builtins import basestring
import os
import itertools
import builtins
import json
import logging
import warnings
from math import ceil
from contextlib import contextmanager
from django.apps import apps
from django.db import connection
from django.db.migrations.operations.base import Operation
from osf.models.base import generate_object_id
from osf.utils.sanitize import strip_html, unescape_entities
from website import settings
from website.project.metadata.schemas import get_osf_meta_schemas
logger = logging.getLogger(__file__)
increment = 100000
# Dict to map original schema formats to schema block types
FORMAT_TYPE_TO_TYPE_MAP = {
('multiselect', 'choose'): 'multi-select-input',
(None, 'multiselect'): 'multi-select-input',
(None, 'choose'): 'single-select-input',
('osf-upload-open', 'osf-upload'): 'file-input',
('osf-upload-toggle', 'osf-upload'): 'file-input',
('singleselect', 'choose'): 'single-select-input',
('text', 'string'): 'short-text-input',
('textarea', 'osf-author-import'): 'contributors-input',
('textarea', None): 'long-text-input',
('textarea', 'string'): 'long-text-input',
('textarea-lg', None): 'long-text-input',
('textarea-lg', 'string'): 'long-text-input',
('textarea-xl', 'string'): 'long-text-input',
}
def get_osf_models():
"""
Helper function to retrieve all osf related models.
Example usage:
with disable_auto_now_fields(models=get_osf_models()):
...
"""
return list(itertools.chain(*[app.get_models() for app in apps.get_app_configs() if app.label.startswith('addons_') or app.label.startswith('osf')]))
@contextmanager
def disable_auto_now_fields(models=None):
"""
Context manager to disable auto_now field updates.
If models=None, updates for all auto_now fields on *all* models will be disabled.
:param list models: Optional list of models for which auto_now field updates should be disabled.
"""
if not models:
models = apps.get_models()
changed = []
for model in models:
for field in model._meta.get_fields():
if hasattr(field, 'auto_now') and field.auto_now:
field.auto_now = False
changed.append(field)
try:
yield
finally:
for field in changed:
if hasattr(field, 'auto_now') and not field.auto_now:
field.auto_now = True
@contextmanager
def disable_auto_now_add_fields(models=None):
"""
Context manager to disable auto_now_add field updates.
If models=None, updates for all auto_now_add fields on *all* models will be disabled.
:param list models: Optional list of models for which auto_now_add field updates should be disabled.
"""
if not models:
models = apps.get_models()
changed = []
for model in models:
for field in model._meta.get_fields():
if hasattr(field, 'auto_now_add') and field.auto_now_add:
field.auto_now_add = False
changed.append(field)
try:
yield
finally:
for field in changed:
if hasattr(field, 'auto_now_add') and not field.auto_now_add:
field.auto_now_add = True
def ensure_licenses(*args, **kwargs):
"""Upsert the licenses in our database based on a JSON file.
:return tuple: (number inserted, number updated)
Moved from website/project/licenses/__init__.py
"""
ninserted = 0
nupdated = 0
try:
NodeLicense = args[0].get_model('osf', 'nodelicense')
except Exception:
# Working outside a migration
from osf.models import NodeLicense
with builtins.open(
os.path.join(
settings.APP_PATH,
'node_modules', '@centerforopenscience', 'list-of-licenses', 'dist', 'list-of-licenses.json'
)
) as fp:
licenses = json.loads(fp.read())
for id, info in licenses.items():
name = info['name']
text = info['text']
properties = info.get('properties', [])
url = info.get('url', '')
node_license, created = NodeLicense.objects.get_or_create(license_id=id)
node_license.name = name
node_license.text = text
node_license.properties = properties
node_license.url = url
node_license.save()
if created:
ninserted += 1
else:
nupdated += 1
logger.info('License {name} ({id}) added to the database.'.format(name=name, id=id))
logger.info('{} licenses inserted into the database, {} licenses updated in the database.'.format(
ninserted, nupdated
))
return ninserted, nupdated
def remove_licenses(*args):
from osf.models import NodeLicense
pre_count = NodeLicense.objects.all().count()
NodeLicense.objects.all().delete()
logger.info('{} licenses removed from the database.'.format(pre_count))
def ensure_schemas(*args):
"""Import meta-data schemas from JSON to database if not already loaded
"""
state = args[0] if args else apps
schema_count = 0
try:
schema_model = state.get_model('osf', 'registrationschema')
except LookupError:
# Use MetaSchema model if migrating from a version before RegistrationSchema existed
schema_model = state.get_model('osf', 'metaschema')
for schema in get_osf_meta_schemas():
schema_obj, created = schema_model.objects.update_or_create(
name=schema['name'],
schema_version=schema.get('version', 1),
defaults={
'schema': schema,
}
)
schema_count += 1
if created:
logger.info('Added schema {} to the database'.format(schema['name']))
logger.info('Ensured {} schemas are in the database'.format(schema_count))
def remove_schemas(*args):
from osf.models import RegistrationSchema
pre_count = RegistrationSchema.objects.all().count()
RegistrationSchema.objects.all().delete()
logger.info('Removed {} schemas from the database'.format(pre_count))
def create_schema_block(state, schema_id, block_type, display_text='', required=False, help_text='',
registration_response_key=None, schema_block_group_key='', example_text=''):
"""
For mapping schemas to schema blocks: creates a given block from the specified parameters
"""
state = state or apps
schema_block_model = state.get_model('osf', 'registrationschemablock')
return schema_block_model.objects.create(
schema_id=schema_id,
block_type=block_type,
required=required,
display_text=unescape_entities(
display_text,
safe={
'<': '<',
'>': '>'
}
),
help_text=unescape_entities(
help_text,
safe={
'<': '<',
'>': '>'
}
),
registration_response_key=registration_response_key,
schema_block_group_key=schema_block_group_key,
example_text=unescape_entities(
example_text,
safe={
'<': '<',
'>': '>'
}
)
)
# Split question multiple choice options into their own blocks
def split_options_into_blocks(state, rs, question, schema_block_group_key):
"""
For mapping schemas to schema blocks: splits individual multiple choice
options into their own schema blocks
"""
for option in question.get('options', []):
answer_text = option if isinstance(option, basestring) else option.get('text')
help_text = '' if isinstance(option, basestring) else option.get('tooltip', '')
create_schema_block(
state,
rs.id,
'select-input-option',
display_text=answer_text,
help_text=help_text,
schema_block_group_key=schema_block_group_key,
)
def get_registration_response_key(question):
"""
For mapping schemas to schema blocks:
Answer ids will map to the user's response
"""
return question.get('qid', '') or question.get('id', '')
def find_title_description_help_example(rs, question):
"""
For mapping schemas to schema blocks:
Schemas are inconsistent with regards to the information going into "title",
"description", and "help" blocks.
:returns tuple, title, description, help, example strings
"""
title = question.get('title', '')
description = strip_html(question.get('description', ''))
help = strip_html(question.get('help', ''))
example = strip_html(question.get('example', ''))
schema_name = rs.schema.get('name', '')
# Descriptions that contain any of these keywords
# are turned into help text instead.
help_text_keywords = [
'please',
'choose',
'provide',
'format',
'describe',
'who',
'what',
'when',
'where',
'use',
'you',
'your',
'skip',
'enter',
]
if title:
if schema_name in ['OSF Preregistration', 'Prereg Challenge', 'Secondary Data Preregistration']:
# These two schemas have clear "example" text in the "help" section
example = help
help = description
description = ''
else:
for keyword in help_text_keywords:
if keyword in description.lower():
help = description
description = ''
break
else:
# if no title, description text is moved to title.
title = description
description = ''
return title, description, help, example
def get_subquestion_qid(question, subquestion):
"""
For mapping schemas to schema blocks:
Return a qid in the format "parent-id.current-id", to reflect its nested nature and ensure uniqueness
"""
return '{}.{}'.format(get_registration_response_key(question) or '', subquestion.get('id', ''))
def create_schema_blocks_for_question(state, rs, question, sub=False):
"""
For mapping schemas to schema blocks:
Split the original question from the schema into multiple schema blocks, all of
which have the same schema_block_group_key, to link them.
"""
# If there are subquestions, recurse and format subquestions
properties = question.get('properties')
if properties:
first_subquestion = properties[0]
first_subq_text = first_subquestion.get('title') or first_subquestion.get('description', '')
if first_subq_text:
# the first subquestion has text, so this seems like an actual [sub]section
create_schema_block(
state,
rs.id,
block_type='subsection-heading' if sub else 'section-heading',
display_text=question.get('title', '') or question.get('description', ''),
)
else:
# the first subquestion has no text, so the "section" heading is better interpreted as a question label
first_subquestion['title'] = question.get('title', '')
first_subquestion['description'] = question.get('description', '')
if not first_subquestion.get('help'):
first_subquestion['help'] = question.get('help', '')
for subquestion in properties:
subquestion['qid'] = get_subquestion_qid(question, subquestion)
create_schema_blocks_for_question(state, rs, subquestion, sub=True)
else:
# All schema blocks related to a particular question share the same schema_block_group_key.
schema_block_group_key = generate_object_id()
title, description, help, example = find_title_description_help_example(rs, question)
# Creates question title block
create_schema_block(
state,
rs.id,
block_type='question-label',
display_text=title,
help_text='' if description else help,
example_text=example,
schema_block_group_key=schema_block_group_key
)
# Creates paragraph block (question description)
if description:
create_schema_block(
state,
rs.id,
block_type='paragraph',
display_text=description,
help_text=help,
schema_block_group_key=schema_block_group_key,
)
if question.get('format') or question.get('type'):
# Creates question input block - this block will correspond to an answer
# Map the original schema section format to the new block_type, and create a schema block
block_type = FORMAT_TYPE_TO_TYPE_MAP[(question.get('format'), question.get('type'))]
create_schema_block(
state,
rs.id,
block_type,
required=question.get('required', False),
schema_block_group_key=schema_block_group_key,
registration_response_key=get_registration_response_key(question)
)
# If there are multiple choice answers, create blocks for these as well.
split_options_into_blocks(state, rs, question, schema_block_group_key)
def create_schema_blocks_for_atomic_schema(schema):
"""
    Atomic schemas are a shortcut around making a typical metaschema by being
    totally explicit about the schema blocks being created.
"""
from osf.models import RegistrationSchemaBlock
current_group_key = None
for index, block in enumerate(schema.schema['blocks']):
# registration_response_key and schema_block_group_key are unused
# for most block types and can/should be empty.
# registration_response_key gets explicitly filtered by isnull :/
block['registration_response_key'] = None
block['schema_block_group_key'] = ''
block_type = block['block_type']
if block_type == 'question-label':
# This key will be used by input and option fields for this question
current_group_key = generate_object_id()
block['schema_block_group_key'] = current_group_key
elif block_type in RegistrationSchemaBlock.INPUT_BLOCK_TYPES:
block['registration_response_key'] = f'{schema.id}-{index}'
block['schema_block_group_key'] = current_group_key
elif block_type in ['select-input-option', 'select-input-other']:
block['schema_block_group_key'] = current_group_key
RegistrationSchemaBlock.objects.create(
schema_id=schema.id,
**block
)
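# Illustrative (hypothetical) shape of the schema.schema dict the function
# above consumes; each block dict is passed to
# RegistrationSchemaBlock.objects.create(**block) after the function fills in
# registration_response_key and schema_block_group_key, so the remaining keys
# must be valid RegistrationSchemaBlock fields:
# {
#     'atomicSchema': True,
#     'blocks': [
#         {'block_type': 'question-label', 'display_text': 'Data description'},
#         {'block_type': 'long-text-input', 'required': True},
#     ],
# }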
def map_schemas_to_schemablocks(*args):
"""Map schemas to schema blocks
WARNING: Deletes existing schema blocks
"""
state = args[0] if args else apps
try:
schema_model = state.get_model('osf', 'registrationschema')
except LookupError:
# Use MetaSchema model if migrating from a version before RegistrationSchema existed
schema_model = state.get_model('osf', 'metaschema')
# Delete all existing schema blocks (avoid creating duplicates)
unmap_schemablocks(*args)
for rs in schema_model.objects.all():
logger.info('Migrating schema {}, version {} to schema blocks.'.format(rs.name, rs.schema_version))
if rs.schema.get('atomicSchema'):
create_schema_blocks_for_atomic_schema(rs)
continue
for page in rs.schema['pages']:
# Create page heading block
create_schema_block(
state,
rs.id,
'page-heading',
display_text=strip_html(page.get('title', '')),
help_text=strip_html(page.get('description', ''))
)
for question in page['questions']:
create_schema_blocks_for_question(state, rs, question)
def unmap_schemablocks(*args):
state = args[0] if args else apps
schema_block_model = state.get_model('osf', 'registrationschemablock')
schema_block_model.objects.all().delete()
class UpdateRegistrationSchemas(Operation):
"""Custom migration operation to update registration schemas
"""
reversible = True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
ensure_schemas(to_state.apps)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
warnings.warn('Reversing UpdateRegistrationSchemas is a noop')
def describe(self):
return 'Updated registration schemas'
class UpdateRegistrationSchemasAndSchemaBlocks(Operation):
"""Custom migration operation to update registration schemas
"""
reversible = True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
ensure_schemas(to_state.apps)
map_schemas_to_schemablocks(to_state.apps)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
RegistrationSchemaBlock = to_state.apps.get_model('osf', 'registrationschemablock')
RegistrationSchemaBlock.objects.all().delete()
def describe(self):
return 'Updated registration schemas and its schema blocks'
class AddWaffleFlags(Operation):
"""Custom migration operation to add waffle flags
Params:
- flag_names: iterable of strings, flag names to create
- on_for_everyone: boolean (default False), whether to activate the newly created flags
"""
reversible = True
def __init__(self, flag_names, on_for_everyone=False):
self.flag_names = flag_names
self.on_for_everyone = on_for_everyone
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
Flag = to_state.apps.get_model('waffle', 'flag')
for flag_name in self.flag_names:
Flag.objects.get_or_create(name=flag_name, defaults={'everyone': self.on_for_everyone})
def database_backwards(self, app_label, schema_editor, from_state, to_state):
Flag = to_state.apps.get_model('waffle', 'flag')
Flag.objects.filter(name__in=self.flag_names).delete()
def describe(self):
return 'Adds waffle flags: {}'.format(', '.join(self.flag_names))
class DeleteWaffleFlags(Operation):
"""Custom migration operation to delete waffle flags
Params:
- flag_names: iterable of strings, flag names to delete
"""
reversible = True
def __init__(self, flag_names):
self.flag_names = flag_names
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
Flag = to_state.apps.get_model('waffle', 'flag')
Flag.objects.filter(name__in=self.flag_names).delete()
def database_backwards(self, app_label, schema_editor, from_state, to_state):
Flag = to_state.apps.get_model('waffle', 'flag')
for flag_name in self.flag_names:
Flag.objects.get_or_create(name=flag_name)
def describe(self):
return 'Removes waffle flags: {}'.format(', '.join(self.flag_names))
class AddWaffleSwitches(Operation):
"""Custom migration operation to add waffle switches
Params:
- switch_names: iterable of strings, the names of the switches to create
- active: boolean (default False), whether the switches should be active
"""
reversible = True
def __init__(self, switch_names, active=False):
self.switch_names = switch_names
self.active = active
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
Switch = to_state.apps.get_model('waffle', 'switch')
for switch in self.switch_names:
Switch.objects.get_or_create(name=switch, defaults={'active': self.active})
def database_backwards(self, app_label, schema_editor, from_state, to_state):
Switch = to_state.apps.get_model('waffle', 'switch')
Switch.objects.filter(name__in=self.switch_names).delete()
def describe(self):
return 'Adds waffle switches: {}'.format(', '.join(self.switch_names))
class DeleteWaffleSwitches(Operation):
"""Custom migration operation to delete waffle switches
Params:
- switch_names: iterable of strings, switch names to delete
"""
reversible = True
def __init__(self, switch_names):
self.switch_names = switch_names
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
Switch = to_state.apps.get_model('waffle', 'switch')
Switch.objects.filter(name__in=self.switch_names).delete()
def database_backwards(self, app_label, schema_editor, from_state, to_state):
Switch = to_state.apps.get_model('waffle', 'switch')
for switch in self.switch_names:
Switch.objects.get_or_create(name=switch)
def describe(self):
return 'Removes waffle switches: {}'.format(', '.join(self.switch_names))
def batch_node_migrations(state, migrations):
AbstractNode = state.get_model('osf', 'abstractnode')
max_nid = getattr(AbstractNode.objects.last(), 'id', 0)
for migration in migrations:
total_pages = int(ceil(max_nid / float(increment)))
page_start = 0
page_end = 0
page = 0
logger.info('{}'.format(migration['description']))
while page_end <= (max_nid):
page += 1
page_end += increment
if page <= total_pages:
logger.info('Updating page {} / {}'.format(page_end / increment, total_pages))
with connection.cursor() as cursor:
cursor.execute(migration['sql'].format(
start=page_start,
end=page_end
))
page_start = page_end
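# Minimal usage sketch for batch_node_migrations (the SQL, column and table
# names are hypothetical); each 'sql' entry must leave {start}/{end}
# placeholders for the AbstractNode id range substituted per batch:
#
# batch_node_migrations(state, [
#     {
#         'description': 'Backfill example column in batches',
#         'sql': 'UPDATE osf_abstractnode SET example_column = 0 '
#                'WHERE id > {start} AND id <= {end}',
#     },
# ])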
|
adlius/osf.io
|
osf/utils/migrations.py
|
Python
|
apache-2.0
| 22,296
|
from pydruid.db.api import connect
from pydruid.db.exceptions import (
DataError,
DatabaseError,
Error,
IntegrityError,
InterfaceError,
InternalError,
NotSupportedError,
OperationalError,
ProgrammingError,
Warning,
)
__all__ = [
"connect",
"apilevel",
"threadsafety",
"paramstyle",
"DataError",
"DatabaseError",
"Error",
"IntegrityError",
"InterfaceError",
"InternalError",
"NotSupportedError",
"OperationalError",
"ProgrammingError",
"Warning",
]
apilevel = "2.0"
# Threads may share the module and connections
threadsafety = 2
paramstyle = "pyformat"
|
kawamon/hue
|
desktop/core/ext-py/pydruid-0.5.11/pydruid/db/__init__.py
|
Python
|
apache-2.0
| 653
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V2alpha1PodsMetricSource(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, metric_name=None, target_average_value=None):
"""
V2alpha1PodsMetricSource - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'metric_name': 'str',
'target_average_value': 'str'
}
self.attribute_map = {
'metric_name': 'metricName',
'target_average_value': 'targetAverageValue'
}
self._metric_name = metric_name
self._target_average_value = target_average_value
@property
def metric_name(self):
"""
Gets the metric_name of this V2alpha1PodsMetricSource.
metricName is the name of the metric in question
:return: The metric_name of this V2alpha1PodsMetricSource.
:rtype: str
"""
return self._metric_name
@metric_name.setter
def metric_name(self, metric_name):
"""
Sets the metric_name of this V2alpha1PodsMetricSource.
metricName is the name of the metric in question
:param metric_name: The metric_name of this V2alpha1PodsMetricSource.
:type: str
"""
if metric_name is None:
raise ValueError("Invalid value for `metric_name`, must not be `None`")
self._metric_name = metric_name
@property
def target_average_value(self):
"""
Gets the target_average_value of this V2alpha1PodsMetricSource.
targetAverageValue is the target value of the average of the metric across all relevant pods (as a quantity)
:return: The target_average_value of this V2alpha1PodsMetricSource.
:rtype: str
"""
return self._target_average_value
@target_average_value.setter
def target_average_value(self, target_average_value):
"""
Sets the target_average_value of this V2alpha1PodsMetricSource.
targetAverageValue is the target value of the average of the metric across all relevant pods (as a quantity)
:param target_average_value: The target_average_value of this V2alpha1PodsMetricSource.
:type: str
"""
if target_average_value is None:
raise ValueError("Invalid value for `target_average_value`, must not be `None`")
self._target_average_value = target_average_value
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V2alpha1PodsMetricSource):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
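# Minimal usage sketch (field values are illustrative):
# source = V2alpha1PodsMetricSource(metric_name="packets-per-second",
#                                   target_average_value="1k")
# source.to_dict()  # -> {'metric_name': 'packets-per-second',
#                   #     'target_average_value': '1k'}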
|
djkonro/client-python
|
kubernetes/client/models/v2alpha1_pods_metric_source.py
|
Python
|
apache-2.0
| 4,518
|
# -*- coding: utf-8 -*-
"""The FileVault Drive Encryption (FVDE) file-like object."""
import pyfvde
from dfvfs.file_io import file_object_io
from dfvfs.lib import errors
from dfvfs.lib import fvde_helper
from dfvfs.resolver import resolver
class FVDEFile(file_object_io.FileObjectIO):
"""File input/output (IO) object using pyfvde."""
def _Close(self):
"""Closes the file-like object."""
try:
# TODO: ensure pyfvde volume object is open.
self._file_object.close()
except IOError:
pass
self._file_object = None
def _OpenFileObject(self, path_spec):
"""Opens the file-like object defined by path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
FileIO: a file-like object.
Raises:
PathSpecError: if the path specification is incorrect.
"""
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
resolver.Resolver.key_chain.ExtractCredentialsFromPathSpec(path_spec)
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
fvde_volume = pyfvde.volume()
fvde_helper.FVDEOpenVolume(
fvde_volume, path_spec, file_object, resolver.Resolver.key_chain)
return fvde_volume
@property
def is_locked(self):
"""bool: True if the volume is locked."""
return self._file_object.is_locked()
|
joachimmetz/dfvfs
|
dfvfs/file_io/fvde_file_io.py
|
Python
|
apache-2.0
| 1,458
|
# Import the bemio.mesh_utilities module
from bemio.mesh_utilities import mesh
import numpy as np
# Read WAMIT mesh
sphere = mesh.read(file_name='sphere.gdf')
# Save to a NEMOH mesh
sphere.write(mesh_format='NEMOH')
|
bradling/WEC-Sim
|
tutorials/BEMIO/NEMOH/Sphere/Mesh/run.py
|
Python
|
apache-2.0
| 218
|
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import ast
def xproto_check_synchronizer(m):
try:
sync_step_path = "synchronizer/steps/sync_%s.py" % m["name"].lower()
sync_step = open(sync_step_path).read()
except IOError:
return "510 Model needs a sync step %s" % sync_step_path
try:
sync_step_ast = ast.parse(sync_step)
except SyntaxError:
return "511 Could not parse sync step %s" % sync_step_path
classes = [x for x in sync_step_ast.body if isinstance(x, ast.ClassDef)]
found_sync_step_class = False
for c in classes:
base_names = [v.id for v in c.bases]
if "SyncStep" in base_names or "SyncInstanceUsingAnsible" in base_names:
attributes = [x for x in c.body if isinstance(x, ast.Assign)]
for a in attributes:
target_names = [t.id for t in a.targets]
values = a.value.elts if isinstance(a.value, ast.List) else [a.value]
value_names = [v.id for v in values]
if "observes" in target_names and m["name"] in value_names:
found_sync_step_class = True
break
if not found_sync_step_class:
return (
"512 Synchronizer needs a sync step class with an observes field containing %s"
% m["name"]
)
else:
return "200 OK"
def xproto_check_policy(m):
try:
model_policy_path = (
"synchronizer/model_policies/model_policy_%s.py" % m["name"].lower()
)
model_policy = open(model_policy_path).read()
except IOError:
return "510 Model needs a model policy %s" % model_policy_path
try:
model_policy_ast = ast.parse(model_policy)
except SyntaxError:
return "511 Could not parse sync step %s" % model_policy_path
classes = [x for x in model_policy_ast.body if isinstance(x, ast.ClassDef)]
found_model_policy_class = False
for c in classes:
base_names = [v.id for v in c.bases]
if "Policy" in base_names or "TenantWithContainerPolicy" in base_names:
found_model_policy_class = True
break
if not found_model_policy_class:
return "513 Synchronizer needs a model policy class"
else:
return "200 OK"
|
opencord/xos
|
lib/xos-genx/xosgenx/jinja2_extensions/checklib.py
|
Python
|
apache-2.0
| 2,894
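xproto_check_synchronizer above only inspects the AST of synchronizer/steps/sync_<model>.py: it wants a class deriving from SyncStep (or SyncInstanceUsingAnsible) whose observes attribute names the model. Note that it reads every class-level assignment as a name or a list of names, so a plain literal assigned before observes would trip the AST walk. A minimal sync step that would satisfy the check for a hypothetical model named Widget (the import paths below are assumptions, not taken from the file above):

# synchronizer/steps/sync_widget.py  (hypothetical)
from synchronizers.new_base.syncstep import SyncStep          # assumed import path
from synchronizers.new_base.modelaccessor import Widget       # assumed import path


class SyncWidget(SyncStep):
    provides = [Widget]
    observes = [Widget]   # xproto_check_synchronizer requires the model name here

    def sync_record(self, o):
        pass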
|
from django.contrib import admin
from import_export import resources, fields
from import_export.widgets import ForeignKeyWidget, ManyToManyWidget
from import_export.admin import ImportExportModelAdmin
from simple_history.admin import SimpleHistoryAdmin
from .models import *
from workflow.models import Sector, WorkflowLevel1
class IndicatorResource(resources.ModelResource):
    indicator_type = fields.Field(column_name='indicator_type', attribute='indicator_type', widget=ManyToManyWidget(IndicatorType, separator=" | ", field="indicator_type"))
    objective = fields.Field(column_name='objective', attribute='objective', widget=ManyToManyWidget(Objective, separator=" | ", field="objective"))
    strategic_objective = fields.Field(column_name='strategic_objective', attribute='strategic_objective', widget=ManyToManyWidget(StrategicObjective, separator=" | ", field="strategic_objective"))
    level = fields.Field(column_name='level', attribute='level', widget=ManyToManyWidget(Level, separator=" | ", field="level"))
    reporting_frequency = fields.Field(column_name='reporting_frequency', attribute='reporting_frequency', widget=ForeignKeyWidget(Frequency, 'frequency'))
    sector = fields.Field(column_name='sector', attribute='sector', widget=ForeignKeyWidget(Sector, 'sector'))
    workflowlevel1 = fields.Field(column_name='workflowlevel1', attribute='workflowlevel1', widget=ManyToManyWidget(WorkflowLevel1, separator=" | ", field="name"))
class Meta:
model = Indicator
fields = ('id','indicator_key','indicator_type','level','objective','strategic_objective','name','number',\
'source','definition', 'justification', 'unit_of_measure', 'baseline','lop_target', 'rationale_for_target', 'means_of_verification','data_collection_method', 'data_collection_frequency', 'data_points', 'responsible_person',\
'method_of_analysis','information_use','reporting_frequency', 'quality_assurance', 'data_issues', 'indicator_changes', 'comments','disaggregation','sector',\
'workflowlevel1','key_performance_indicator')
#import_id_fields = ['id']
class IndicatorAdmin(ImportExportModelAdmin,SimpleHistoryAdmin):
resource_class = IndicatorResource
list_display = ('indicator_types','name','sector','key_performance_indicator')
search_fields = ('name','number','workflowlevel1__name')
list_filter = ('workflowlevel1','key_performance_indicator','sector')
display = 'Indicators'
filter_horizontal = ('workflowlevel1','objectives','strategic_objectives','disaggregation')
pass
class TolaTableResource(resources.ModelResource):
class Meta:
model = TolaTable
fields = ('id','name','table_id','owner','remote_owner','url')
#import_id_fields = ['id']
class TolaTableAdmin(ImportExportModelAdmin):
list_display = ('name','owner','url','create_date','edit_date')
search_fields = ('country__country','name')
list_filter = ('country__country',)
display = 'Tola Table'
pass
class CollectedDataResource(resources.ModelResource):
class Meta:
model = CollectedData
#import_id_fields = ['id']
class CollectedDataAdmin(ImportExportModelAdmin,SimpleHistoryAdmin):
resource_class = CollectedDataResource
list_display = ('indicator','workflowlevel1','workflowlevel2')
search_fields = ('indicator','workflowlevel1','owner__username')
list_filter = ('indicator__workflowlevel1__country__country','workflowlevel1','approved_by')
display = 'Collected Data on Indicators'
pass
class FrequencyAdmin(admin.ModelAdmin):
list_display = ('frequency','description','create_date','edit_date')
display = 'Reporting Frequency'
class LevelAdmin(admin.ModelAdmin):
    list_display = ('name',)
display = 'Levels'
class DisaggregationLabelAdmin(admin.ModelAdmin):
list_display = ('disaggregation_type', 'customsort', 'label',)
display = 'Disaggregation Label'
list_filter = ('disaggregation_type__disaggregation_type',)
class PeriodicTargetResource(resources.ModelResource):
class Meta:
model = PeriodicTarget
class PeriodicTargetAdmin(ImportExportModelAdmin):
resource_class = PeriodicTargetResource
list_display = ('period', 'target', 'customsort',)
display = 'Indicator Periodic Target'
list_filter = ('period',)
admin.site.register(IndicatorType)
admin.site.register(Indicator,IndicatorAdmin)
admin.site.register(Frequency)
admin.site.register(DisaggregationType, DisaggregationTypeAdmin)
admin.site.register(DisaggregationLabel, DisaggregationLabelAdmin)
admin.site.register(CollectedData, CollectedDataAdmin)
admin.site.register(Objective,ObjectiveAdmin)
admin.site.register(StrategicObjective, StrategicObjectiveAdmin)
admin.site.register(Level)
admin.site.register(ExternalService, ExternalServiceAdmin)
admin.site.register(ExternalServiceRecord, ExternalServiceRecordAdmin)
admin.site.register(TolaTable, TolaTableAdmin)
admin.site.register(PeriodicTarget, PeriodicTargetAdmin)
|
toladata/TolaActivity
|
indicators/admin.py
|
Python
|
apache-2.0
| 4,660
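The ModelResource subclasses above are what ImportExportModelAdmin uses for its import/export buttons, but they can also be driven programmatically. A minimal sketch of a CSV export through IndicatorResource (the output path is illustrative):

from indicators.admin import IndicatorResource

# Export every Indicator through the same resource the admin uses,
# then dump the resulting tablib dataset as CSV.
dataset = IndicatorResource().export()
with open('/tmp/indicators.csv', 'w') as output_file:
    output_file.write(dataset.csv)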
|
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from __future__ import print_function
__author__ = 'John Sirois'
from . import Command
from twitter.pants.base import BuildFile, Target
class Filemap(Command):
"""Outputs a mapping from source file to the target that owns the source file."""
__command__ = 'filemap'
def setup_parser(self, parser, args):
parser.set_usage("%prog filemap")
parser.epilog = """Outputs a mapping from source file to the target that owns the source file.
The mapping is output in 2 columns."""
def __init__(self, root_dir, parser, argv):
Command.__init__(self, root_dir, parser, argv)
if self.args:
self.error("The filemap subcommand accepts no arguments.")
def execute(self):
for buildfile in BuildFile.scan_buildfiles(self.root_dir):
for address in Target.get_all_addresses(buildfile):
target = Target.get(address)
if hasattr(target, 'sources') and target.sources is not None:
for sourcefile in target.sources:
print(sourcefile, address)
|
foursquare/commons-old
|
src/python/twitter/pants/commands/filemap.py
|
Python
|
apache-2.0
| 1,912
|
# Copyright 2014 Koert van der Veer
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import unittest
from eventlet.green import time
import logshipper.input
LOG = logging.getLogger(__name__)
class Command(unittest.TestCase):
def setUp(self):
self.messages = []
def handler(self, message):
LOG.debug("Produced message %r", message)
self.messages.append(message)
def test_shell1(self):
cmd = logshipper.input.Command(["echo", "'\""])
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.1)
cmd.stop()
self.assertEqual(self.messages[0]['message'],
"'\"")
def test_shell2(self):
cmd = logshipper.input.Command("echo \"'\\\"\"")
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.01)
cmd.stop()
self.assertEqual(self.messages[0]['message'], "'\"")
def test_alt_separator(self):
cmd = logshipper.input.Command(commandline="echo test__test2_boo",
separator="__")
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.1)
cmd.stop()
time.sleep(0.1)
self.assertEqual(self.messages[0]['message'], "test")
self.assertEqual(self.messages[1]['message'], "test2_boo\n")
def test_unicode1(self):
test_string = u"\u2713" # unicode checkmark
cmd = logshipper.input.Command(["echo", test_string])
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.1)
cmd.stop()
self.assertEqual(self.messages[0]['message'], test_string)
def test_unicode2(self):
cmd = logshipper.input.Command(u"echo \u2713") # unicode checkmark
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.1)
cmd.stop()
self.assertEqual(self.messages[0]['message'], u"\u2713")
def test_oneshot(self):
cmd = logshipper.input.Command("echo 123")
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.1)
cmd.stop()
self.assertEqual(len(self.messages), 1)
def test_repeat(self):
cmd = logshipper.input.Command("echo 123", interval=.1)
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.3)
cmd.stop()
self.assertGreater(len(self.messages), 1)
def test_kill(self):
cmd = logshipper.input.Command("sleep .2; echo 123")
cmd.set_handler(self.handler)
cmd.start()
time.sleep(0.01)
cmd.stop()
time.sleep(0.3)
self.assertEqual(len(self.messages), 0)
|
ondergetekende/logshipper
|
logshipper/test/test_command.py
|
Python
|
apache-2.0
| 3,240
|
'''
Copyright 2017 Roy E. Lowrance
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import pickle as pickle
import os.path
import pdb
def unpickle_file(
path=None,
process_unpickled_object=None,
on_EOFError=None,
on_ValueError=None,
on_FileNotExists=None,
):
'unpickle each object in the file at the path'
# NOTE: caller must define the type of the object by, for example, importing a class
if not os.path.isfile(path):
if on_FileNotExists is None:
return # simulate end of file
else:
on_FileNotExists('file does not exist: %s' % path)
return
    with open(path, 'rb') as f:
unpickler = pickle.Unpickler(f)
try:
while True:
obj = unpickler.load()
process_unpickled_object(obj)
except EOFError as e:
if on_EOFError is None:
raise e
else:
on_EOFError(e)
return
except ValueError as e:
if on_ValueError is None:
raise e
else:
on_ValueError(e)
return
if False:
# avoid pyflake8 warnings
pdb
|
rlowrance/python_lib
|
applied_data_science3/pickle_utilities.py
|
Python
|
apache-2.0
| 1,681
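A minimal usage sketch for unpickle_file above (the file path and payloads are illustrative): objects are pickled back to back into one file, then streamed through the process_unpickled_object callback; passing an on_EOFError handler turns end-of-file into a normal return instead of a raised exception.

import pickle

# Write a few objects back to back into a single pickle file.
with open('/tmp/objects.pkl', 'wb') as output_file:
    for record in ({'id': 1}, {'id': 2}, {'id': 3}):
        pickle.dump(record, output_file)

collected = []
unpickle_file(
    path='/tmp/objects.pkl',
    process_unpickled_object=collected.append,
    on_EOFError=lambda error: None,                  # end of file: stop quietly
    on_FileNotExists=lambda message: print(message),
)
print(collected)   # [{'id': 1}, {'id': 2}, {'id': 3}]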
|
#!/usr/bin/python
#
# Hosts artifacts via a local HTTP service.
# Produces a universe, and puts it in a host dir, then runs an HTTP server against that dir.
#
# Env:
# HTTP_DIR (default: /tmp/dcos-http-<pkgname>/)
# HTTP_HOST (default: 172.17.0.1, which is the ip of the VM when running dcos-docker)
# HTTP_PORT (default: 0, for an ephemeral port)
import json
import logging
import os
import os.path
import shutil
import socket
import subprocess
import sys
import github_update
import universe_builder
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
class HTTPPublisher(object):
def __init__(
self,
package_name,
input_dir_path,
artifact_paths,
package_version = 'stub-universe'):
self._pkg_name = package_name
self._pkg_version = package_version
self._input_dir_path = input_dir_path
self._http_dir = os.environ.get('HTTP_DIR', '/tmp/dcos-http-{}/'.format(self._pkg_name))
self._http_host = os.environ.get('HTTP_HOST', '172.17.0.1')
self._http_port = int(os.environ.get('HTTP_PORT', '0'))
self._github_updater = github_update.GithubStatusUpdater('upload:{}'.format(package_name))
if not os.path.isdir(input_dir_path):
err = 'Provided package path is not a directory: {}'.format(input_dir_path)
self._github_updater.update('error', err)
raise Exception(err)
self._artifact_paths = []
for artifact_path in artifact_paths:
if not os.path.isfile(artifact_path):
err = 'Provided package path is not a file: {} (full list: {})'.format(artifact_path, artifact_paths)
raise Exception(err)
            duplicates = [p for p in self._artifact_paths
                          if os.path.basename(p) == os.path.basename(artifact_path)]
            if duplicates:
                err = 'Duplicate filename between "{}" and "{}". Artifact filenames must be unique.'.format(duplicates[0], artifact_path)
                self._github_updater.update('error', err)
                raise Exception(err)
            self._artifact_paths.append(artifact_path)
def _copy_artifact(self, http_url_root, filepath):
filename = os.path.basename(filepath)
destpath = os.path.join(self._http_dir, filename)
logger.info('- {}'.format(destpath))
shutil.copyfile(filepath, destpath)
return '{}/{}'.format(http_url_root, filename)
def _spam_universe_url(self, universe_url):
# write jenkins properties file to $WORKSPACE/<pkg_version>.properties:
jenkins_workspace_path = os.environ.get('WORKSPACE', '')
if jenkins_workspace_path:
properties_file = open(os.path.join(jenkins_workspace_path, '{}.properties'.format(self._pkg_version)), 'w')
properties_file.write('STUB_UNIVERSE_URL={}\n'.format(universe_url))
properties_file.flush()
properties_file.close()
# write URL to provided text file path:
universe_url_path = os.environ.get('UNIVERSE_URL_PATH', '')
if universe_url_path:
universe_url_file = open(universe_url_path, 'w')
universe_url_file.write('{}\n'.format(universe_url))
universe_url_file.flush()
universe_url_file.close()
num_artifacts = len(self._artifact_paths)
if num_artifacts > 1:
suffix = 's'
else:
suffix = ''
self._github_updater.update(
'success',
'Copied stub universe and {} artifact{}'.format(num_artifacts, suffix),
universe_url)
def build(self, http_url_root):
'''copies artifacts and a new stub universe into the http root directory'''
try:
universe_path = universe_builder.UniversePackageBuilder(
self._pkg_name, self._pkg_version,
self._input_dir_path, http_url_root, self._artifact_paths).build_zip()
except Exception as e:
err = 'Failed to create stub universe: {}'.format(str(e))
self._github_updater.update('error', err)
raise
# wipe files in dir
if not os.path.isdir(self._http_dir):
os.makedirs(self._http_dir)
for filename in os.listdir(self._http_dir):
path = os.path.join(self._http_dir, filename)
logger.info('Deleting preexisting file in artifact dir: {}'.format(path))
os.remove(path)
# print universe url early
universe_url = self._copy_artifact(http_url_root, universe_path)
logger.info('---')
logger.info('Built and copied stub universe:')
logger.info(universe_url)
logger.info('---')
logger.info('Copying {} artifacts into {}:'.format(len(self._artifact_paths), self._http_dir))
for path in self._artifact_paths:
self._copy_artifact(http_url_root, path)
self._spam_universe_url(universe_url)
# print to stdout, while the rest is all stderr:
print(universe_url)
return universe_url
def launch_http(self):
# kill any prior matching process
procname = 'publish_httpd_{}.py'.format(self._pkg_name)
try:
subprocess.check_call('killall -9 {}'.format(procname).split())
logger.info("Killed previous HTTP process(es): {}".format(procname))
except:
logger.info("No previous HTTP process found: {}".format(procname))
if self._http_port == 0:
# hack: grab/release a suitable ephemeral port and hope nobody steals it in the meantime
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((self._http_host, 0))
port = sock.getsockname()[1]
sock.close()
else:
port = self._http_port
# hack: write httpd script then run it directly
httpd_py_content = '''#!/usr/bin/python
import os, SimpleHTTPServer, SocketServer
rootdir = '{}'
host = '{}'
port = {}
os.chdir(rootdir)
httpd = SocketServer.TCPServer((host, port), SimpleHTTPServer.SimpleHTTPRequestHandler)
print('Serving %s at http://%s:%s' % (rootdir, host, port))
httpd.serve_forever()
'''.format(self._http_dir, self._http_host, port)
httpd_py_path = os.path.join(self._http_dir, procname)
if not os.path.isdir(self._http_dir):
os.makedirs(self._http_dir)
        httpd_py_file = open(httpd_py_path, 'w+')
        httpd_py_file.write(httpd_py_content)
        httpd_py_file.flush()
        httpd_py_file.close()
        os.chmod(httpd_py_path, 0o744)
logger.info('Launching HTTPD: {}'.format(httpd_py_path))
        devnull = open(os.devnull, 'wb')
        subprocess.Popen([httpd_py_path], stdout=devnull, stderr=devnull)
return 'http://{}:{}'.format(self._http_host, port)
def add_repo_to_cli(self, repo_url):
try:
devnull = open(os.devnull,'wb')
subprocess.check_call('dcos -h'.split(), stdout=devnull, stderr=devnull)
except:
logger.info('No "dcos" command in $PATH, skipping automatic repo configuration')
return False
repo_name = self._pkg_name + '-local'
# check for any preexisting universes and remove them -- the cluster requires no duplicate uris
logger.info('Checking for duplicate repositories: name={}, url={}'.format(repo_name, repo_url))
cur_universes = subprocess.check_output('dcos package repo list --json'.split()).decode('utf-8')
for repo in json.loads(cur_universes)['repositories']:
# {u'name': u'Universe', u'uri': u'https://universe.mesosphere.com/repo'}
if repo['name'] == repo_name or repo['uri'] == repo_url:
logger.info('Removing duplicate repository: {} {}'.format(repo['name'], repo['uri']))
subprocess.check_call('dcos package repo remove {}'.format(repo['name']).split())
logger.info('Adding repository: {} {}'.format(repo_name, repo_url))
subprocess.check_call('dcos package repo add --index=0 {} {}'.format(repo_name, repo_url).split(' '))
return True
def print_help(argv):
logger.info('Syntax: {} <package-name> <template-package-dir> [artifact files ...]'.format(argv[0]))
logger.info(' Example: $ {} kafka /path/to/universe/jsons/ /path/to/artifact1.zip /path/to/artifact2.zip /path/to/artifact3.zip'.format(argv[0]))
logger.info('In addition, environment variables named \'TEMPLATE_SOME_PARAMETER\' will be inserted against the provided package template (with params of the form \'{{some-parameter}}\')')
def main(argv):
if len(argv) < 3:
print_help(argv)
return 1
# the package name:
package_name = argv[1]
# local path where the package template is located:
package_dir_path = argv[2].rstrip('/')
# artifact paths (to copy along with stub universe)
artifact_paths = argv[3:]
logger.info('''###
Package: {}
Template path: {}
Artifacts: {}
###'''.format(package_name, package_dir_path, ', '.join(artifact_paths)))
publisher = HTTPPublisher(package_name, package_dir_path, artifact_paths)
http_url_root = publisher.launch_http()
universe_url = publisher.build(http_url_root)
repo_added = publisher.add_repo_to_cli(universe_url)
logger.info('---')
logger.info('(Re)install your package using the following commands:')
logger.info('dcos package uninstall {}'.format(package_name))
logger.info('dcos node ssh --master-proxy --leader ' +
'"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'.format(package_name))
if not repo_added:
logger.info('dcos package repo remove {}-local'.format(package_name))
logger.info('dcos package repo add --index=0 {}-local {}'.format(package_name, universe_url))
logger.info('dcos package install --yes {}'.format(package_name))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
adragomir/dcos-commons
|
tools/publish_http.py
|
Python
|
apache-2.0
| 10,036
|
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from oslo.db import exception as db_exc
from six import moves
import sqlalchemy as sa
from sqlalchemy import sql
from neutron.common import exceptions as n_exc
from neutron.db import api as db_api
from neutron.db import model_base
from neutron.i18n import _LE, _LW
from neutron.openstack.common import log
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers import type_tunnel
LOG = log.getLogger(__name__)
gre_opts = [
cfg.ListOpt('tunnel_id_ranges',
default=[],
help=_("Comma-separated list of <tun_min>:<tun_max> tuples "
"enumerating ranges of GRE tunnel IDs that are "
"available for tenant network allocation"))
]
cfg.CONF.register_opts(gre_opts, "ml2_type_gre")
class GreAllocation(model_base.BASEV2):
__tablename__ = 'ml2_gre_allocations'
gre_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sql.false())
class GreEndpoints(model_base.BASEV2):
"""Represents tunnel endpoint in RPC mode."""
__tablename__ = 'ml2_gre_endpoints'
__table_args__ = (
sa.UniqueConstraint('host',
name='unique_ml2_gre_endpoints0host'),
)
ip_address = sa.Column(sa.String(64), primary_key=True)
host = sa.Column(sa.String(255), nullable=True)
def __repr__(self):
return "<GreTunnelEndpoint(%s)>" % self.ip_address
class GreTypeDriver(type_tunnel.TunnelTypeDriver):
def __init__(self):
super(GreTypeDriver, self).__init__(GreAllocation)
def get_type(self):
return p_const.TYPE_GRE
def initialize(self):
try:
self._initialize(cfg.CONF.ml2_type_gre.tunnel_id_ranges)
except n_exc.NetworkTunnelRangeError:
LOG.exception(_LE("Failed to parse tunnel_id_ranges. "
"Service terminated!"))
raise SystemExit()
def sync_allocations(self):
# determine current configured allocatable gres
gre_ids = set()
for gre_id_range in self.tunnel_ranges:
tun_min, tun_max = gre_id_range
if tun_max + 1 - tun_min > 1000000:
LOG.error(_LE("Skipping unreasonable gre ID range "
"%(tun_min)s:%(tun_max)s"),
{'tun_min': tun_min, 'tun_max': tun_max})
else:
gre_ids |= set(moves.xrange(tun_min, tun_max + 1))
session = db_api.get_session()
with session.begin(subtransactions=True):
# remove from table unallocated tunnels not currently allocatable
allocs = (session.query(GreAllocation).all())
for alloc in allocs:
try:
# see if tunnel is allocatable
gre_ids.remove(alloc.gre_id)
except KeyError:
# it's not allocatable, so check if its allocated
if not alloc.allocated:
# it's not, so remove it from table
LOG.debug("Removing tunnel %s from pool", alloc.gre_id)
session.delete(alloc)
# add missing allocatable tunnels to table
for gre_id in sorted(gre_ids):
alloc = GreAllocation(gre_id=gre_id)
session.add(alloc)
def get_endpoints(self):
"""Get every gre endpoints from database."""
LOG.debug("get_gre_endpoints() called")
session = db_api.get_session()
gre_endpoints = session.query(GreEndpoints)
return [{'ip_address': gre_endpoint.ip_address,
'host': gre_endpoint.host}
for gre_endpoint in gre_endpoints]
def get_endpoint_by_host(self, host):
LOG.debug("get_endpoint_by_host() called for host %s", host)
session = db_api.get_session()
return (session.query(GreEndpoints).
filter_by(host=host).first())
def get_endpoint_by_ip(self, ip):
LOG.debug("get_endpoint_by_ip() called for ip %s", ip)
session = db_api.get_session()
return (session.query(GreEndpoints).
filter_by(ip_address=ip).first())
def add_endpoint(self, ip, host):
LOG.debug("add_gre_endpoint() called for ip %s", ip)
session = db_api.get_session()
try:
gre_endpoint = GreEndpoints(ip_address=ip, host=host)
gre_endpoint.save(session)
except db_exc.DBDuplicateEntry:
gre_endpoint = (session.query(GreEndpoints).
filter_by(ip_address=ip).one())
LOG.warning(_LW("Gre endpoint with ip %s already exists"), ip)
return gre_endpoint
def delete_endpoint(self, ip):
LOG.debug("delete_gre_endpoint() called for ip %s", ip)
session = db_api.get_session()
with session.begin(subtransactions=True):
session.query(GreEndpoints).filter_by(ip_address=ip).delete()
|
projectcalico/calico-neutron
|
neutron/plugins/ml2/drivers/type_gre.py
|
Python
|
apache-2.0
| 5,810
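The tunnel_id_ranges option above is parsed into (tun_min, tun_max) tuples by the TunnelTypeDriver base class; sync_allocations then expands each reasonable range into the set of allocatable GRE IDs and reconciles that set with the ml2_gre_allocations table. A standalone sketch of the expansion step, with an illustrative configuration value:

# ml2_conf.ini would carry e.g.:
#   [ml2_type_gre]
#   tunnel_id_ranges = 1:1000,2000:2005
tunnel_ranges = [(1, 1000), (2000, 2005)]

gre_ids = set()
for tun_min, tun_max in tunnel_ranges:
    if tun_max + 1 - tun_min > 1000000:
        continue  # unreasonably large ranges are skipped, as in sync_allocations
    gre_ids |= set(range(tun_min, tun_max + 1))

print(len(gre_ids))  # 1006 allocatable GRE tunnel IDs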
|
"""Fast Shared Response Model (FastSRM)
The implementation is based on the following publications:
.. [Richard2019] "Fast Shared Response Model for fMRI data"
H. Richard, L. Martin, A. Pinho, J. Pillow, B. Thirion, 2019
https://arxiv.org/pdf/1909.12537.pdf
"""
# Author: Hugo Richard
import hashlib
import logging
import os
import numpy as np
import scipy
from joblib import Parallel, delayed
from brainiak.funcalign.srm import DetSRM
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.exceptions import NotFittedError
import uuid
__all__ = [
"FastSRM",
]
logger = logging.getLogger(__name__)
def get_shape(path):
"""Get shape of saved np array
Parameters
----------
path: str
path to np array
"""
f = open(path, "rb")
version = np.lib.format.read_magic(f)
shape, fortran_order, dtype = np.lib.format._read_array_header(f, version)
f.close()
return shape
def safe_load(data):
"""If data is an array returns data else returns np.load(data)"""
if isinstance(data, np.ndarray):
return data
else:
return np.load(data)
def safe_encode(img):
if isinstance(img, np.ndarray):
name = hashlib.md5(img.tostring()).hexdigest()
else:
name = hashlib.md5(img.encode()).hexdigest()
return name
def assert_non_empty_list(input_list, list_name):
"""
Check that input list is not empty
Parameters
----------
input_list: list
list_name: str
Name of the list
"""
if len(input_list) == 0:
raise ValueError("%s is a list of length 0 which is not valid" %
list_name)
def assert_array_2axis(array, name_array):
"""Check that input is an np array with 2 axes
Parameters
----------
array: np array
name_array: str
Name of the array
"""
if not isinstance(array, np.ndarray):
raise ValueError("%s should be of type "
"np.ndarray but is of type %s" %
(name_array, type(array)))
if len(array.shape) != 2:
raise ValueError("%s must have exactly 2 axes "
"but has %i axes" % (name_array, len(array.shape)))
def assert_valid_index(indexes, max_value, name_indexes):
"""
Check that indexes are between 0 and max_value and number
of indexes is less than max_value
"""
for i, ind_i in enumerate(indexes):
if ind_i < 0 or ind_i >= max_value:
raise ValueError("Index %i of %s has value %i "
"whereas value should be between 0 and %i" %
(i, name_indexes, ind_i, max_value - 1))
def _check_imgs_list(imgs):
"""
Checks that imgs is a non empty list of elements of the same type
Parameters
----------
imgs : list
"""
# Check the list is non empty
assert_non_empty_list(imgs, "imgs")
# Check that all input have same type
for i in range(len(imgs)):
if not isinstance(imgs[i], type(imgs[0])):
raise ValueError("imgs[%i] has type %s whereas "
"imgs[%i] has type %s. "
"This is inconsistent." %
(i, type(imgs[i]), 0, type(imgs[0])))
def _check_imgs_list_list(imgs):
"""
Check input images if they are list of list of arrays
Parameters
----------
imgs : list of list of array of shape [n_voxels, n_components]
imgs is a list of list of arrays where element i, j of
the array is a numpy array of shape [n_voxels, n_timeframes] that
contains the data of subject i collected during session j.
n_timeframes and n_voxels are assumed to be the same across
subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
Returns
-------
shapes: array
Shape of input images
"""
n_subjects = len(imgs)
# Check that the number of session is not 0
assert_non_empty_list(imgs[0], "imgs[%i]" % 0)
# Check that the number of sessions is the same for all subjects
n_sessions = None
for i in range(len(imgs)):
if n_sessions is None:
n_sessions = len(imgs[i])
if n_sessions != len(imgs[i]):
raise ValueError("imgs[%i] has length %i whereas imgs[%i] "
"has length %i. All subjects should have "
"the same number of sessions." %
(i, len(imgs[i]), 0, len(imgs[0])))
shapes = np.zeros((n_subjects, n_sessions, 2))
# Run array-level checks
for i in range(len(imgs)):
for j in range(len(imgs[i])):
assert_array_2axis(imgs[i][j], "imgs[%i][%i]" % (i, j))
shapes[i, j, :] = imgs[i][j].shape
return shapes
def _check_imgs_list_array(imgs):
"""
Check input images if they are list of arrays.
In this case returned images are a list of list of arrays
where element i,j of the array is a numpy array of
shape [n_voxels, n_timeframes] that contains the data of subject i
collected during session j.
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions]
imgs is a list of arrays where element i of the array is
a numpy array of shape [n_voxels, n_timeframes] that contains the
data of subject i (number of sessions is implicitly 1)
n_timeframes and n_voxels are assumed to be the same across
subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
Returns
-------
shapes: array
Shape of input images
new_imgs: list of list of array of shape [n_voxels, n_components]
"""
n_subjects = len(imgs)
n_sessions = 1
shapes = np.zeros((n_subjects, n_sessions, 2))
new_imgs = []
for i in range(len(imgs)):
assert_array_2axis(imgs[i], "imgs[%i]" % i)
shapes[i, 0, :] = imgs[i].shape
new_imgs.append([imgs[i]])
return new_imgs, shapes
def _check_imgs_array(imgs):
"""Check input image if it is an array
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected
shape is [n_voxels, n_timeframes]
n_timeframes and n_voxels are assumed to be the same across
subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
Returns
-------
shapes : array
Shape of input images
"""
assert_array_2axis(imgs, "imgs")
n_subjects, n_sessions = imgs.shape
shapes = np.zeros((n_subjects, n_sessions, 2))
for i in range(n_subjects):
for j in range(n_sessions):
if not (isinstance(imgs[i, j], str) or isinstance(
imgs[i, j], np.str_) or isinstance(imgs[i, j], np.str)):
raise ValueError("imgs[%i, %i] is stored using "
"type %s which is not a str" %
(i, j, type(imgs[i, j])))
shapes[i, j, :] = get_shape(imgs[i, j])
return shapes
def _check_shapes_components(n_components, n_timeframes):
"""Check that n_timeframes is greater than number of components"""
def _check_shapes_atlas_compatibility(n_voxels,
n_timeframes,
n_components=None,
atlas_shape=None):
if n_components is not None:
if np.sum(n_timeframes) < n_components:
raise ValueError("Total number of timeframes is shorter than "
"number of components (%i < %i)" %
(np.sum(n_timeframes), n_components))
if atlas_shape is not None:
n_supervoxels, n_atlas_voxels = atlas_shape
if n_atlas_voxels != n_voxels:
raise ValueError(
"Number of voxels in the atlas is not the same "
"as the number of voxels in input data (%i != %i)" %
(n_atlas_voxels, n_voxels))
def _check_shapes(shapes,
n_components=None,
atlas_shape=None,
ignore_nsubjects=False):
"""Check that number of voxels is the same for each subjects. Number of
timeframes can vary between sessions but must be consistent across
subjects
Parameters
----------
shapes : array of shape (n_subjects, n_sessions, 2)
Array of shapes of input images
"""
n_subjects, n_sessions, _ = shapes.shape
if n_subjects <= 1 and not ignore_nsubjects:
raise ValueError("The number of subjects should be greater than 1")
n_timeframes_list = [None] * n_sessions
n_voxels = None
for n in range(n_subjects):
for m in range(n_sessions):
if n_timeframes_list[m] is None:
n_timeframes_list[m] = shapes[n, m, 1]
if n_voxels is None:
                n_voxels = shapes[n, m, 0]
if n_timeframes_list[m] != shapes[n, m, 1]:
raise ValueError("Subject %i Session %i does not have the "
"same number of timeframes "
"as Subject %i Session %i" % (n, m, 0, m))
if n_voxels != shapes[n, m, 0]:
raise ValueError("Subject %i Session %i"
" does not have the same number of voxels as "
"Subject %i Session %i." % (n, m, 0, 0))
_check_shapes_atlas_compatibility(n_voxels, np.sum(n_timeframes_list),
n_components, atlas_shape)
def check_atlas(atlas, n_components=None):
""" Check input atlas
Parameters
----------
atlas : array, shape=[n_supervoxels, n_voxels] or array, shape=[n_voxels]
or str or None
Probabilistic or deterministic atlas on which to project the data
Deterministic atlas is an array of shape [n_voxels,] where values
range from 1 to n_supervoxels. Voxels labelled 0 will be ignored.
If atlas is a str the corresponding array is loaded with numpy.load
and expected shape is (n_voxels,) for a deterministic atlas and
(n_supervoxels, n_voxels) for a probabilistic atlas.
n_components : int
Number of timecourses of the shared coordinates
Returns
-------
shape : array or None
atlas shape
"""
if atlas is None:
return None
if not (isinstance(atlas, np.ndarray) or isinstance(atlas, str)
or isinstance(atlas, np.str_) or isinstance(atlas, np.str)):
raise ValueError("Atlas is stored using "
"type %s which is neither np.ndarray or str" %
type(atlas))
if isinstance(atlas, np.ndarray):
shape = atlas.shape
else:
shape = get_shape(atlas)
if len(shape) == 1:
# We have a deterministic atlas
atlas_array = safe_load(atlas)
n_voxels = atlas_array.shape[0]
n_supervoxels = len(np.unique(atlas_array)) - 1
shape = (n_supervoxels, n_voxels)
elif len(shape) != 2:
raise ValueError(
"Atlas has %i axes. It should have either 1 or 2 axes." %
len(shape))
n_supervoxels, n_voxels = shape
if n_supervoxels > n_voxels:
raise ValueError("Number of regions in the atlas is bigger than "
"the number of voxels (%i > %i)" %
(n_supervoxels, n_voxels))
if n_components is not None:
if n_supervoxels < n_components:
raise ValueError("Number of regions in the atlas is "
"lower than the number of components "
"(%i < %i)" % (n_supervoxels, n_components))
return shape
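# Illustrative sketch (hypothetical values, not part of the module above): the two
# atlas formats accepted by check_atlas, assuming 6 voxels and 2 supervoxels.
#
#   deterministic = np.array([1, 1, 2, 2, 0, 1])      # labels 0..n_supervoxels,
#                                                      # voxels labelled 0 are ignored
#   probabilistic = np.array([[.5, .5, 0., 0., 0., 1.],
#                             [.5, .5, 1., 1., 0., 0.]])   # (n_supervoxels, n_voxels)
#
#   check_atlas(deterministic)   # -> (2, 6)
#   check_atlas(probabilistic)   # -> (2, 6)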
def check_imgs(imgs,
n_components=None,
atlas_shape=None,
ignore_nsubjects=False):
"""
Check input images
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected
shape is [n_voxels, n_timeframes]
n_timeframes and n_voxels are assumed to be the same across
subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j of
the array is a numpy array of shape [n_voxels, n_timeframes] that
contains the data of subject i collected during session j.
imgs can also be a list of arrays where element i of the array is
a numpy array of shape [n_voxels, n_timeframes] that contains the
data of subject i (number of sessions is implicitly 1)
Returns
-------
reshaped_input: bool
True if input had to be reshaped to match the
n_subjects, n_sessions input
new_imgs: list of list of array or np array
input imgs reshaped if it is a list of arrays so that it becomes a
list of list of arrays
shapes: array
Shape of input images
"""
reshaped_input = False
new_imgs = imgs
if isinstance(imgs, list):
_check_imgs_list(imgs)
if isinstance(imgs[0], list):
shapes = _check_imgs_list_list(imgs)
elif isinstance(imgs[0], np.ndarray):
new_imgs, shapes = _check_imgs_list_array(imgs)
reshaped_input = True
else:
raise ValueError(
"Since imgs is a list, it should be a list of list "
"of arrays or a list of arrays but imgs[0] has type %s" %
type(imgs[0]))
elif isinstance(imgs, np.ndarray):
shapes = _check_imgs_array(imgs)
else:
raise ValueError(
"Input imgs should either be a list or an array but has type %s" %
type(imgs))
_check_shapes(shapes, n_components, atlas_shape, ignore_nsubjects)
return reshaped_input, new_imgs, shapes
def check_indexes(indexes, name):
if not (indexes is None or isinstance(indexes, list)
or isinstance(indexes, np.ndarray)):
raise ValueError(
"%s should be either a list, an array or None but received type %s"
% (name, type(indexes)))
def _check_shared_response_list_of_list(shared_response, n_components,
input_shapes):
# Check that shared_response is indeed a list of list of arrays
n_subjects = len(shared_response)
n_sessions = None
for i in range(len(shared_response)):
if not isinstance(shared_response[i], list):
raise ValueError("shared_response[0] is a list but "
"shared_response[%i] is not a list "
"this is incompatible." % i)
assert_non_empty_list(shared_response[i], "shared_response[%i]" % i)
if n_sessions is None:
n_sessions = len(shared_response[i])
elif n_sessions != len(shared_response[i]):
raise ValueError(
"shared_response[%i] has len %i whereas "
"shared_response[0] has len %i. They should "
"have same length" %
(i, len(shared_response[i]), len(shared_response[0])))
for j in range(len(shared_response[i])):
assert_array_2axis(shared_response[i][j],
"shared_response[%i][%i]" % (i, j))
return _check_shared_response_list_sessions([
np.mean([shared_response[i][j] for i in range(n_subjects)], axis=0)
for j in range(n_sessions)
], n_components, input_shapes)
def _check_shared_response_list_sessions(shared_response, n_components,
input_shapes):
for j in range(len(shared_response)):
assert_array_2axis(shared_response[j], "shared_response[%i]" % j)
if input_shapes is not None:
if shared_response[j].shape[1] != input_shapes[0][j][1]:
raise ValueError(
"Number of timeframes in input images during "
"session %i does not match the number of "
"timeframes during session %i "
"of shared_response (%i != %i)" %
(j, j, shared_response[j].shape[1], input_shapes[0, j, 1]))
if n_components is not None:
if shared_response[j].shape[0] != n_components:
raise ValueError(
"Number of components in "
"shared_response during session %i is "
"different than "
"the number of components of the model (%i != %i)" %
(j, shared_response[j].shape[0], n_components))
return shared_response
def _check_shared_response_list_subjects(shared_response, n_components,
input_shapes):
for i in range(len(shared_response)):
assert_array_2axis(shared_response[i], "shared_response[%i]" % i)
return _check_shared_response_array(np.mean(shared_response, axis=0),
n_components, input_shapes)
def _check_shared_response_array(shared_response, n_components, input_shapes):
assert_array_2axis(shared_response, "shared_response")
if input_shapes is None:
new_input_shapes = None
else:
n_subjects, n_sessions, _ = input_shapes.shape
new_input_shapes = np.zeros((n_subjects, 1, 2))
new_input_shapes[:, 0, 0] = input_shapes[:, 0, 0]
new_input_shapes[:, 0, 1] = np.sum(input_shapes[:, :, 1], axis=1)
return _check_shared_response_list_sessions([shared_response],
n_components, new_input_shapes)
def check_shared_response(shared_response,
aggregate="mean",
n_components=None,
input_shapes=None):
"""
Check that shared response has valid input and turn it into
a session-wise shared response
Returns
-------
added_session: bool
        True if an artificial session was added to match the list of
session input type for shared_response
reshaped_shared_response: list of arrays
shared response (reshaped to match the list of session input)
"""
# Depending on aggregate and shape of input we infer what to do
if isinstance(shared_response, list):
assert_non_empty_list(shared_response, "shared_response")
if isinstance(shared_response[0], list):
if aggregate == "mean":
raise ValueError("self.aggregate has value 'mean' but "
"shared response is a list of list. This is "
"incompatible")
return False, _check_shared_response_list_of_list(
shared_response, n_components, input_shapes)
elif isinstance(shared_response[0], np.ndarray):
if aggregate == "mean":
return False, _check_shared_response_list_sessions(
shared_response, n_components, input_shapes)
else:
return True, _check_shared_response_list_subjects(
shared_response, n_components, input_shapes)
else:
raise ValueError("shared_response is a list but "
"shared_response[0] is neither a list "
"or an array. This is invalid.")
elif isinstance(shared_response, np.ndarray):
return True, _check_shared_response_array(shared_response,
n_components, input_shapes)
else:
raise ValueError("shared_response should be either "
"a list or an array but is of type %s" %
type(shared_response))
def create_temp_dir(temp_dir):
"""
    This checks whether temp_dir exists and creates it otherwise
"""
if temp_dir is None:
return None
if not os.path.exists(temp_dir):
os.makedirs(temp_dir)
else:
raise ValueError("Path %s already exists. "
"When a model is used, filesystem should be cleaned "
"by using the .clean() method" % temp_dir)
def reduce_data_single(subject_index,
session_index,
img,
atlas=None,
inv_atlas=None,
low_ram=False,
temp_dir=None):
"""Reduce data using given atlas
Parameters
----------
subject_index : int
session_index : int
img : str or array
path to data.
Data are loaded with numpy.load and expected shape is
(n_voxels, n_timeframes)
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
img can also be an array of shape (n_voxels, n_timeframes)
atlas : array, shape=[n_supervoxels, n_voxels] or [n_voxels] or None
Probabilistic or deterministic atlas on which to project the data
Deterministic atlas is an array of shape [n_voxels,] where values
range from 1 to n_supervoxels. Voxels labelled 0 will be ignored.
inv_atlas : array, shape=[n_voxels, n_supervoxels] or None
Pseudo inverse of the atlas (only for probabilistic atlases)
temp_dir : str or None
path to dir where temporary results are stored
if None temporary results will be stored in memory. This
        can result in memory errors when the number of subjects
and / or sessions is large
low_ram : bool
if True and temp_dir is not None, reduced_data will be saved on disk
        this increases the number of IO operations but reduces memory
        complexity when the number of subjects and sessions is large
Returns
-------
reduced_data : array, shape=[n_timeframes, n_supervoxels]
reduced data
"""
# Here we return to the conventions of the paper
data = safe_load(img).T
n_timeframes, n_voxels = data.shape
# Here we check that input is normalized
    if (np.max(np.abs(np.mean(data, axis=0))) > 1e-6
            or np.max(np.abs(np.var(data, axis=0) - 1)) > 1e-6):
        raise ValueError(
            "Data in imgs[%i, %i] does not have 0 mean and unit variance. "
            "If you are using NiftiMasker to mask your data (nilearn) "
            "please use standardize=True." % (subject_index, session_index))
if inv_atlas is None and atlas is not None:
atlas_values = np.unique(atlas)
if 0 in atlas_values:
atlas_values = atlas_values[1:]
reduced_data = np.array(
[np.mean(data[:, atlas == c], axis=1) for c in atlas_values]).T
elif inv_atlas is not None and atlas is None:
# this means that it is a probabilistic atlas
reduced_data = data.dot(inv_atlas)
else:
reduced_data = data
if low_ram:
name = safe_encode(img)
path = os.path.join(temp_dir, "reduced_data_" + name)
np.save(path, reduced_data)
return path + ".npy"
else:
return reduced_data
def reduce_data(imgs, atlas, n_jobs=1, low_ram=False, temp_dir=None):
"""Reduce data using given atlas.
Work done in parallel across subjects.
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_voxels]
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j of
the array is a numpy array of shape [n_voxels, n_timeframes] that
contains the data of subject i collected during session j.
imgs can also be a list of arrays where element i of the array is
a numpy array of shape [n_voxels, n_timeframes] that contains the
data of subject i (number of sessions is implicitly 1)
atlas : array, shape=[n_supervoxels, n_voxels] or array, shape=[n_voxels]
or None
Probabilistic or deterministic atlas on which to project the data
Deterministic atlas is an array of shape [n_voxels,] where values
range from 1 to n_supervoxels. Voxels labelled 0 will be ignored.
n_jobs : integer, optional, default=1
The number of CPUs to use to do the computation.
-1 means all CPUs, -2 all CPUs but one, and so on.
temp_dir : str or None
path to dir where temporary results are stored
if None temporary results will be stored in memory. This
        can result in memory errors when the number of subjects
and / or sessions is large
low_ram : bool
if True and temp_dir is not None, reduced_data will be saved on disk
        this increases the number of IO operations but reduces memory
        complexity when the number of subjects and/or sessions is large
Returns
-------
reduced_data_list : array of str, shape=[n_subjects, n_sessions]
or array, shape=[n_subjects, n_sessions, n_timeframes, n_supervoxels]
Element i, j of the array is a path to the data of subject i collected
during session j.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_supervoxels]
or Element i, j of the array is the data in array of
shape=[n_timeframes, n_supervoxels]
n_timeframes and n_supervoxels
are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
"""
if atlas is None:
A = None
A_inv = None
else:
loaded_atlas = safe_load(atlas)
if len(loaded_atlas.shape) == 2:
A = None
A_inv = loaded_atlas.T.dot(
np.linalg.inv(loaded_atlas.dot(loaded_atlas.T)))
else:
A = loaded_atlas
A_inv = None
n_subjects = len(imgs)
n_sessions = len(imgs[0])
reduced_data_list = Parallel(n_jobs=n_jobs)(
delayed(reduce_data_single)(i,
j,
imgs[i][j],
atlas=A,
inv_atlas=A_inv,
low_ram=low_ram,
temp_dir=temp_dir)
for i in range(n_subjects) for j in range(n_sessions))
if low_ram:
reduced_data_list = np.reshape(reduced_data_list,
(n_subjects, n_sessions))
else:
if len(np.array(reduced_data_list).shape) == 1:
reduced_data_list = np.reshape(reduced_data_list,
(n_subjects, n_sessions))
else:
n_timeframes, n_supervoxels = np.array(reduced_data_list).shape[1:]
reduced_data_list = np.reshape(
reduced_data_list,
(n_subjects, n_sessions, n_timeframes, n_supervoxels))
return reduced_data_list
def _reduced_space_compute_shared_response(reduced_data_list,
reduced_basis_list,
n_components=50):
"""Compute shared response with basis fixed in reduced space
Parameters
----------
reduced_data_list : array of str, shape=[n_subjects, n_sessions]
or array, shape=[n_subjects, n_sessions, n_timeframes, n_supervoxels]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_supervoxels]
or Element i, j of the array is the data in array of
shape=[n_timeframes, n_supervoxels]
n_timeframes and n_supervoxels are
assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
reduced_basis_list : None or list of array, element i has
shape=[n_components, n_supervoxels]
each subject's reduced basis
if None the basis will be generated on the fly
n_components : int or None
number of components
Returns
-------
shared_response_list : list of array, element i has
shape=[n_timeframes, n_components]
shared response, element i is the shared response during session i
"""
n_subjects, n_sessions = reduced_data_list.shape[:2]
s = [None] * n_sessions
# This is just to check that all subjects have same number of
# timeframes in a given session
for n in range(n_subjects):
for m in range(n_sessions):
data_nm = safe_load(reduced_data_list[n][m])
n_timeframes, n_supervoxels = data_nm.shape
if reduced_basis_list is None:
reduced_basis_list = []
for subject in range(n_subjects):
q = np.eye(n_components, n_supervoxels)
reduced_basis_list.append(q)
basis_n = reduced_basis_list[n]
if s[m] is None:
s[m] = data_nm.dot(basis_n.T)
else:
s[m] = s[m] + data_nm.dot(basis_n.T)
for m in range(n_sessions):
s[m] = s[m] / float(n_subjects)
return s
def _compute_and_save_corr_mat(img, shared_response, temp_dir):
"""computes correlation matrix and stores it
Parameters
----------
img : str
path to data.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_voxels]
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
shared_response : array, shape=[n_timeframes, n_components]
shared response
"""
data = safe_load(img).T
name = safe_encode(img)
path = os.path.join(temp_dir, "corr_mat_" + name)
np.save(path, shared_response.T.dot(data))
def _compute_and_save_subject_basis(subject_number, sessions, temp_dir):
"""computes correlation matrix for all sessions
Parameters
----------
subject_number: int
Number that identifies the subject. Basis will be stored in
[temp_dir]/basis_[subject_number].npy
sessions : array of str
Element i of the array is a path to the data collected during
session i.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_voxels]
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
        Each voxel's timecourse is assumed to have mean 0 and variance 1
temp_dir : str or None
path to dir where temporary results are stored
if None temporary results will be stored in memory. This
        can result in memory errors when the number of subjects
and / or sessions is large
Returns
-------
basis: array, shape=[n_component, n_voxels] or str
basis of subject [subject_number] or path to this basis
"""
corr_mat = None
for session in sessions:
name = safe_encode(session)
path = os.path.join(temp_dir, "corr_mat_" + name + ".npy")
if corr_mat is None:
corr_mat = np.load(path)
else:
corr_mat += np.load(path)
os.remove(path)
basis_i = _compute_subject_basis(corr_mat)
path = os.path.join(temp_dir, "basis_%i" % subject_number)
np.save(path, basis_i)
return path + ".npy"
def _compute_subject_basis(corr_mat):
"""From correlation matrix between shared response and subject data,
Finds subject's basis
Parameters
----------
corr_mat: array, shape=[n_component, n_voxels]
or shape=[n_components, n_supervoxels]
correlation matrix between shared response and subject data or
subject reduced data
element k, v is given by S.T.dot(X_i) where S is the shared response
and X_i the data of subject i.
Returns
-------
basis: array, shape=[n_components, n_voxels]
or shape=[n_components, n_supervoxels]
basis of subject or reduced_basis of subject
"""
# The perturbation is only here to be
# consistent with current implementation
# of DetSRM.
perturbation = np.zeros(corr_mat.shape)
np.fill_diagonal(perturbation, 0.001)
U, _, V = scipy.linalg.svd(corr_mat + perturbation, full_matrices=False)
return U.dot(V)
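# Illustrative sketch (hypothetical values, not part of the module above):
# _compute_subject_basis returns the orthogonal Procrustes solution. With
# C = U diag(s) V being the SVD of the (slightly perturbed) correlation matrix,
# the basis is W = U V, whose rows are orthonormal:
#
#   rng = np.random.RandomState(0)
#   W = _compute_subject_basis(rng.randn(10, 50))    # shape (n_components, n_voxels)
#   np.allclose(W.dot(W.T), np.eye(10))              # -> True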
def fast_srm(reduced_data_list,
n_iter=10,
n_components=None,
low_ram=False,
seed=0):
"""Computes shared response and basis in reduced space
Parameters
----------
reduced_data_list : array, shape=[n_subjects, n_sessions]
or array, shape=[n_subjects, n_sessions, n_timeframes, n_supervoxels]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected
shape is [n_timeframes, n_supervoxels]
or Element i, j of the array is the data in array of
shape=[n_timeframes, n_supervoxels]
n_timeframes and n_supervoxels are
assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
n_iter : int
Number of iterations performed
n_components : int or None
number of components
Returns
-------
shared_response_list : list of array, element i has
shape=[n_timeframes, n_components]
shared response, element i is the shared response during session i
"""
if low_ram:
return lowram_srm(reduced_data_list, n_iter, n_components)
else:
# We need to switch data to DetSRM format
# Indeed in DetSRM all sessions are concatenated.
# Whereas in FastSRM multiple sessions are supported.
n_subjects, n_sessions = reduced_data_list.shape[:2]
# We store the correspondence between timeframes and session
timeframes_slices = []
current_j = 0
for j in range(n_sessions):
timeframes_slices.append(
slice(current_j, current_j + len(reduced_data_list[0, j])))
current_j += len(reduced_data_list[0][j])
# Now we can concatenate everything
X = [
np.concatenate(reduced_data_list[i], axis=0).T
for i in range(n_subjects)
]
srm = DetSRM(n_iter=n_iter, features=n_components, rand_seed=seed)
srm.fit(X)
# SRM gives a list of data projected in shared space
# we get the shared response by averaging those
concatenated_s = np.mean(srm.transform(X), axis=0).T
# Let us return the shared response sliced by sessions
return [concatenated_s[i] for i in timeframes_slices]
def lowram_srm(reduced_data_list, n_iter=10, n_components=None):
"""Computes shared response and basis in reduced space
Parameters
----------
reduced_data_list : array of str, shape=[n_subjects, n_sessions]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected
shape is [n_timeframes, n_supervoxels]
n_timeframes and n_supervoxels are
assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
n_iter : int
Number of iterations performed
n_components : int or None
number of components
Returns
-------
shared_response_list : list of array, element i has
shape=[n_timeframes, n_components]
shared response, element i is the shared response during session i
"""
n_subjects, n_sessions = reduced_data_list.shape[:2]
shared_response = _reduced_space_compute_shared_response(
reduced_data_list, None, n_components)
reduced_basis = [None] * n_subjects
for _ in range(n_iter):
for n in range(n_subjects):
cov = None
for m in range(n_sessions):
data_nm = np.load(reduced_data_list[n, m])
if cov is None:
cov = shared_response[m].T.dot(data_nm)
else:
cov += shared_response[m].T.dot(data_nm)
reduced_basis[n] = _compute_subject_basis(cov)
shared_response = _reduced_space_compute_shared_response(
reduced_data_list, reduced_basis, n_components)
return shared_response
def _compute_basis_subject_online(sessions, shared_response_list):
"""Computes subject's basis with shared response fixed
Parameters
----------
sessions : array of str
Element i of the array is a path to the data
collected during session i.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_voxels]
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
shared_response_list : list of array, element i has
shape=[n_timeframes, n_components]
shared response, element i is the shared response during session i
Returns
-------
basis: array, shape=[n_components, n_voxels]
basis
"""
basis_i = None
i = 0
for session in sessions:
data = safe_load(session).T
if basis_i is None:
basis_i = shared_response_list[i].T.dot(data)
else:
basis_i += shared_response_list[i].T.dot(data)
i += 1
del data
return _compute_subject_basis(basis_i)
def _compute_shared_response_online_single(subjects, basis_list, temp_dir,
subjects_indexes, aggregate):
"""Computes shared response during one session with basis fixed
Parameters
----------
subjects : array of str
Element i of the array is a path to the data of subject i.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_voxels]
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
basis_list : None or list of array, element i has
shape=[n_components, n_voxels]
basis of all subjects, element i is the basis of subject i
temp_dir : None or str
path to basis folder where file basis_%i.npy contains the basis of
subject i
subjects_indexes : list of int or None
list of indexes corresponding to the subjects to use to compute
shared response
aggregate: str or None, default="mean"
if "mean": returns the mean shared response S from all subjects
if None: returns the subject-specific response in shared space S_i
Returns
-------
shared_response : array, shape=[n_timeframes, n_components] or list
shared response
"""
n = 0
if aggregate == "mean":
shared_response = None
if aggregate is None:
shared_response = []
for k, i in enumerate(subjects_indexes):
subject = subjects[k]
# Transpose to be consistent with paper
data = safe_load(subject).T
if temp_dir is None:
basis_i = basis_list[i]
else:
basis_i = np.load(os.path.join(temp_dir, "basis_%i.npy" % i))
if aggregate == "mean":
if shared_response is None:
shared_response = data.dot(basis_i.T)
else:
shared_response += data.dot(basis_i.T)
n += 1
if aggregate is None:
shared_response.append(data.dot(basis_i.T))
if aggregate is None:
return shared_response
if aggregate == "mean":
return shared_response / float(n)
def _compute_shared_response_online(imgs, basis_list, temp_dir, n_jobs,
subjects_indexes, aggregate):
"""Computes shared response with basis fixed
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions]
Element i, j of the array is a path to the data of subject i
collected during session j.
Data are loaded with numpy.load and expected shape is
[n_timeframes, n_voxels]
n_timeframes and n_voxels are assumed to be the same across subjects
n_timeframes can vary across sessions
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j of
the array is a numpy array of shape [n_voxels, n_timeframes] that
contains the data of subject i collected during session j.
basis_list : None or list of array, element i has
shape=[n_components, n_voxels]
basis of all subjects, element i is the basis of subject i
temp_dir : None or str
path to basis folder where file basis_%i.npy contains the basis of
subject i
n_jobs : integer, optional, default=1
The number of CPUs to use to do the computation.
-1 means all CPUs, -2 all CPUs but one, and so on.
subjects_indexes : list or None
list of indexes corresponding to the subjects to use to compute
shared response
aggregate: str or None, default="mean"
if "mean": returns the mean shared response S from all subjects
if None: returns the subject-specific response in shared space S_i
Returns
-------
shared_response_list : list of array or list of list of array
shared response, element i is the shared response during session i
or element i, j is the shared response of subject i during session j
"""
n_subjects = len(subjects_indexes)
n_sessions = len(imgs[0])
shared_response_list = Parallel(n_jobs=n_jobs)(
delayed(_compute_shared_response_online_single)
([imgs[i][j] for i in range(n_subjects)], basis_list, temp_dir,
subjects_indexes, aggregate) for j in range(n_sessions))
if aggregate is None:
shared_response_list = [[
shared_response_list[j][i].T for j in range(n_sessions)
] for i in range(n_subjects)]
if aggregate == "mean":
shared_response_list = [
shared_response_list[j].T for j in range(n_sessions)
]
return shared_response_list
class FastSRM(BaseEstimator, TransformerMixin):
"""SRM decomposition using a very low amount of memory and \
computational power thanks to the use of an atlas \
as described in [Richard2019]_.
Given multi-subject data, factorize it as a shared response S \
among all subjects and an orthogonal transform (basis) W per subject:
.. math:: X_i \\approx W_i S, \\forall i=1 \\dots N
Parameters
----------
atlas : array, shape=[n_supervoxels, n_voxels] or array,\
shape=[n_voxels] or str or None, default=None
Probabilistic or deterministic atlas on which to project the data. \
Deterministic atlas is an array of shape [n_voxels,] \
where values range from 1 \
to n_supervoxels. Voxels labelled 0 will be ignored. If atlas is a str the \
corresponding array is loaded with numpy.load and expected shape \
is (n_voxels,) for a deterministic atlas and \
(n_supervoxels, n_voxels) for a probabilistic atlas.
n_components : int
Number of timecourses of the shared coordinates
n_iter : int
Number of iterations to perform
temp_dir : str or None
Path to dir where temporary results are stored. If None \
temporary results will be stored in memory. This can result in memory \
errors when the number of subjects and/or sessions is large
low_ram : bool
If True and temp_dir is not None, reduced_data will be saved on \
disk. This increases the number of IO but reduces memory complexity when \
the number of subject and/or sessions is large
seed : int
Seed used for random sampling.
n_jobs : int, optional, default=1
The number of CPUs to use to do the computation. \
-1 means all CPUs, -2 all CPUs but one, and so on.
verbose : bool or "warn"
If True, logs are enabled. If False, logs are disabled. \
If "warn" only warnings are printed.
aggregate: str or None, default="mean"
If "mean", shared_response is the mean shared response \
from all subjects. If None, shared_response contains all \
subject-specific responses in shared space
Attributes
----------
`basis_list`: list of array, element i has \
shape=[n_components, n_voxels] or list of str
- if basis is a list of array, element i is the basis of subject i
- if basis is a list of str, element i is the path to the basis \
of subject i that is loaded with np.load yielding an array of \
shape [n_components, n_voxels].
Note that any call to the clean method erases this attribute
Note
-----
**References:**
H. Richard, L. Martin, A. Pinho, J. Pillow, B. Thirion, 2019: \
Fast shared response model for fMRI data (https://arxiv.org/pdf/1909.12537.pdf)
"""
def __init__(self,
atlas=None,
n_components=20,
n_iter=100,
temp_dir=None,
low_ram=False,
seed=None,
n_jobs=1,
verbose="warn",
aggregate="mean"):
self.seed = seed
self.n_jobs = n_jobs
self.verbose = verbose
self.n_components = n_components
self.n_iter = n_iter
self.atlas = atlas
if aggregate is not None and aggregate != "mean":
raise ValueError("aggregate can have only value mean or None")
self.aggregate = aggregate
self.basis_list = None
if temp_dir is None:
if self.verbose == "warn" or self.verbose is True:
logger.warning("temp_dir has value None. "
"All basis (spatial maps) and reconstructed "
"data will therefore be kept in memory."
"This can lead to memory errors when the "
"number of subjects "
"and/or sessions is large.")
self.temp_dir = None
self.low_ram = False
if temp_dir is not None:
self.temp_dir = os.path.join(temp_dir,
"fastsrm" + str(uuid.uuid4()))
self.low_ram = low_ram
def clean(self):
"""This erases temporary files and basis_list attribute to \
free memory. This method should be called when fitted model \
is not needed anymore.
"""
if self.temp_dir is not None:
if os.path.exists(self.temp_dir):
for root, dirs, files in os.walk(self.temp_dir, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
os.rmdir(self.temp_dir)
if self.basis_list is not None:
            self.basis_list = None
def fit(self, imgs):
"""Computes basis across subjects from input imgs
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions] or \
list of list of arrays or list of arrays
Element i, j of the array is a path to the data of subject i \
collected during session j. Data are loaded with numpy.load and expected \
shape is [n_voxels, n_timeframes] n_timeframes and n_voxels are assumed \
to be the same across subjects n_timeframes can vary across sessions. \
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i collected during session j.
imgs can also be a list of arrays where element i \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i (number of sessions is implicitly 1)
Returns
-------
self : object
Returns the instance itself. Contains attributes listed \
at the object level.
"""
atlas_shape = check_atlas(self.atlas, self.n_components)
reshaped_input, imgs, shapes = check_imgs(
imgs, n_components=self.n_components, atlas_shape=atlas_shape)
self.clean()
create_temp_dir(self.temp_dir)
if self.verbose is True:
logger.info("[FastSRM.fit] Reducing data")
reduced_data = reduce_data(imgs,
atlas=self.atlas,
n_jobs=self.n_jobs,
low_ram=self.low_ram,
temp_dir=self.temp_dir)
if self.verbose is True:
logger.info("[FastSRM.fit] Finds shared "
"response using reduced data")
shared_response_list = fast_srm(reduced_data,
n_iter=self.n_iter,
n_components=self.n_components,
low_ram=self.low_ram,
seed=self.seed)
if self.verbose is True:
logger.info("[FastSRM.fit] Finds basis using "
"full data and shared response")
if self.n_jobs == 1:
basis = []
for i, sessions in enumerate(imgs):
basis_i = _compute_basis_subject_online(
sessions, shared_response_list)
if self.temp_dir is None:
basis.append(basis_i)
else:
path = os.path.join(self.temp_dir, "basis_%i" % i)
np.save(path, basis_i)
basis.append(path + ".npy")
del basis_i
else:
if self.temp_dir is None:
basis = Parallel(n_jobs=self.n_jobs)(
delayed(_compute_basis_subject_online)(
sessions, shared_response_list) for sessions in imgs)
else:
Parallel(n_jobs=self.n_jobs)(
delayed(_compute_and_save_corr_mat)(
imgs[i][j], shared_response_list[j], self.temp_dir)
for j in range(len(imgs[0])) for i in range(len(imgs)))
basis = Parallel(n_jobs=self.n_jobs)(
delayed(_compute_and_save_subject_basis)(i, sessions,
self.temp_dir)
for i, sessions in enumerate(imgs))
self.basis_list = basis
return self
def fit_transform(self, imgs, subjects_indexes=None):
"""Computes basis across subjects and shared response from input imgs
return shared response.
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions] or \
list of list of arrays or list of arrays
Element i, j of the array is a path to the data of subject i \
collected during session j. Data are loaded with numpy.load and expected \
shape is [n_voxels, n_timeframes] n_timeframes and n_voxels are assumed \
to be the same across subjects n_timeframes can vary across sessions. \
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i collected during session j.
imgs can also be a list of arrays where element i \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i (number of sessions is implicitly 1)
subjects_indexes : list or None:
if None imgs[i] will be transformed using basis_list[i]. \
Otherwise imgs[i] will be transformed using basis_list[subjects_indexes[i]]
Returns
--------
shared_response : list of arrays, list of list of arrays or array
- if imgs is a list of array and self.aggregate="mean": shared \
response is an array of shape (n_components, n_timeframes)
- if imgs is a list of array and self.aggregate=None: shared \
response is a list of array, element i is the projection of data of \
subject i in shared space.
- if imgs is an array or a list of list of array and \
self.aggregate="mean": shared response is a list of array, \
element j is the shared response during session j
- if imgs is an array or a list of list of array and \
self.aggregate=None: shared response is a list of list of array, \
element i, j is the projection of data of subject i collected \
during session j in shared space.
"""
self.fit(imgs)
return self.transform(imgs, subjects_indexes=subjects_indexes)
def transform(self, imgs, subjects_indexes=None):
"""From data in imgs and basis from training data,
computes shared response.
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions] or \
list of list of arrays or list of arrays
Element i, j of the array is a path to the data of subject i \
collected during session j. Data are loaded with numpy.load and expected \
shape is [n_voxels, n_timeframes] n_timeframes and n_voxels are assumed \
to be the same across subjects n_timeframes can vary across sessions. \
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i collected during session j.
imgs can also be a list of arrays where element i \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i (number of sessions is implicitly 1)
subjects_indexes : list or None:
if None imgs[i] will be transformed using basis_list[i]. \
Otherwise imgs[i] will be transformed using basis_list[subjects_indexes[i]]
Returns
--------
shared_response : list of arrays, list of list of arrays or array
- if imgs is a list of array and self.aggregate="mean": shared \
response is an array of shape (n_components, n_timeframes)
- if imgs is a list of array and self.aggregate=None: shared \
response is a list of array, element i is the projection of data of \
subject i in shared space.
- if imgs is an array or a list of list of array and \
self.aggregate="mean": shared response is a list of array, \
element j is the shared response during session j
- if imgs is an array or a list of list of array and \
self.aggregate=None: shared response is a list of list of array, \
element i, j is the projection of data of subject i collected \
during session j in shared space.
"""
aggregate = self.aggregate
if self.basis_list is None:
raise NotFittedError("The model fit has not been run yet.")
atlas_shape = check_atlas(self.atlas, self.n_components)
reshaped_input, imgs, shapes = check_imgs(
imgs,
n_components=self.n_components,
atlas_shape=atlas_shape,
ignore_nsubjects=True)
check_indexes(subjects_indexes, "subjects_indexes")
if subjects_indexes is None:
subjects_indexes = np.arange(len(imgs))
else:
subjects_indexes = np.array(subjects_indexes)
# Transform specific checks
if len(subjects_indexes) < len(imgs):
raise ValueError("Input data imgs has len %i whereas "
"subject_indexes has len %i. "
"The number of basis used to compute "
"the shared response should be equal "
"to the number of subjects in imgs" %
(len(imgs), len(subjects_indexes)))
assert_valid_index(subjects_indexes, len(self.basis_list),
"subjects_indexes")
shared_response = _compute_shared_response_online(
imgs, self.basis_list, self.temp_dir, self.n_jobs,
subjects_indexes, aggregate)
# If shared response has only 1 session we need to reshape it
if reshaped_input:
if aggregate == "mean":
shared_response = shared_response[0]
if aggregate is None:
shared_response = [
shared_response[i][0] for i in range(len(subjects_indexes))
]
return shared_response
def inverse_transform(
self,
shared_response,
subjects_indexes=None,
sessions_indexes=None,
):
"""From shared response and basis from training data
reconstruct subject's data
Parameters
----------
shared_response : list of arrays, list of list of arrays or array
- if imgs is a list of array and self.aggregate="mean": shared \
response is an array of shape (n_components, n_timeframes)
- if imgs is a list of array and self.aggregate=None: shared \
response is a list of array, element i is the projection of data of \
subject i in shared space.
- if imgs is an array or a list of list of array and \
self.aggregate="mean": shared response is a list of array, \
element j is the shared response during session j
- if imgs is an array or a list of list of array and \
self.aggregate=None: shared response is a list of list of array, \
element i, j is the projection of data of subject i collected \
during session j in shared space.
subjects_indexes : list or None
if None reconstructs data of all subjects used during train. \
Otherwise reconstructs data of subjects specified by subjects_indexes.
sessions_indexes : list or None
if None reconstructs data of all sessions. \
Otherwise reconstructs data of sessions specified by sessions_indexes.
Returns
-------
reconstructed_data: list of list of arrays or list of arrays
- if reconstructed_data is a list of list : element i, j is \
the reconstructed data for subject subjects_indexes[i] and \
session sessions_indexes[j] as an np array of shape n_voxels, \
n_timeframes
- if reconstructed_data is a list : element i is the \
reconstructed data for subject \
subject_indexes[i] as an np array of shape n_voxels, n_timeframes
"""
added_session, shared = check_shared_response(
shared_response, self.aggregate, n_components=self.n_components)
n_subjects = len(self.basis_list)
n_sessions = len(shared)
for j in range(n_sessions):
assert_array_2axis(shared[j], "shared_response[%i]" % j)
check_indexes(subjects_indexes, "subjects_indexes")
check_indexes(sessions_indexes, "sessions_indexes")
if subjects_indexes is None:
subjects_indexes = np.arange(n_subjects)
else:
subjects_indexes = np.array(subjects_indexes)
assert_valid_index(subjects_indexes, n_subjects, "subjects_indexes")
if sessions_indexes is None:
sessions_indexes = np.arange(len(shared))
else:
sessions_indexes = np.array(sessions_indexes)
assert_valid_index(sessions_indexes, n_sessions, "sessions_indexes")
data = []
for i in subjects_indexes:
data_ = []
basis_i = safe_load(self.basis_list[i])
if added_session:
data.append(basis_i.T.dot(shared[0]))
else:
for j in sessions_indexes:
data_.append(basis_i.T.dot(shared[j]))
data.append(data_)
return data
def add_subjects(self, imgs, shared_response):
""" Add subjects to the current fit. Each new basis will be \
appended at the end of the list of basis (which can \
be accessed using self.basis)
Parameters
----------
imgs : array of str, shape=[n_subjects, n_sessions] or \
list of list of arrays or list of arrays
Element i, j of the array is a path to the data of subject i \
collected during session j. Data are loaded with numpy.load and expected \
shape is [n_voxels, n_timeframes] n_timeframes and n_voxels are assumed \
to be the same across subjects n_timeframes can vary across sessions. \
Each voxel's timecourse is assumed to have mean 0 and variance 1
imgs can also be a list of list of arrays where element i, j \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i collected during session j.
imgs can also be a list of arrays where element i \
of the array is a numpy array of shape [n_voxels, n_timeframes] \
that contains the data of subject i (number of sessions is implicitly 1)
shared_response : list of arrays, list of list of arrays or array
- if imgs is a list of array and self.aggregate="mean": shared \
response is an array of shape (n_components, n_timeframes)
- if imgs is a list of array and self.aggregate=None: shared \
response is a list of array, element i is the projection of data of \
subject i in shared space.
- if imgs is an array or a list of list of array and \
self.aggregate="mean": shared response is a list of array, \
element j is the shared response during session j
- if imgs is an array or a list of list of array and \
self.aggregate=None: shared response is a list of list of array, \
element i, j is the projection of data of subject i collected \
during session j in shared space.
"""
atlas_shape = check_atlas(self.atlas, self.n_components)
reshaped_input, imgs, shapes = check_imgs(
imgs,
n_components=self.n_components,
atlas_shape=atlas_shape,
ignore_nsubjects=True)
_, shared_response_list = check_shared_response(
shared_response,
n_components=self.n_components,
aggregate=self.aggregate,
input_shapes=shapes)
# we need to transpose shared_response_list to be consistent with
# other functions
shared_response_list = [
shared_response_list[j].T for j in range(len(shared_response_list))
]
if self.n_jobs == 1:
basis = []
for i, sessions in enumerate(imgs):
basis_i = _compute_basis_subject_online(
sessions, shared_response_list)
if self.temp_dir is None:
basis.append(basis_i)
else:
path = os.path.join(
self.temp_dir, "basis_%i" % (len(self.basis_list) + i))
np.save(path, basis_i)
basis.append(path + ".npy")
del basis_i
else:
if self.temp_dir is None:
basis = Parallel(n_jobs=self.n_jobs)(
delayed(_compute_basis_subject_online)(
sessions, shared_response_list) for sessions in imgs)
else:
Parallel(n_jobs=self.n_jobs)(
delayed(_compute_and_save_corr_mat)(
imgs[i][j], shared_response_list[j], self.temp_dir)
for j in range(len(imgs[0])) for i in range(len(imgs)))
basis = Parallel(n_jobs=self.n_jobs)(
delayed(_compute_and_save_subject_basis)(
len(self.basis_list) + i, sessions, self.temp_dir)
for i, sessions in enumerate(imgs))
self.basis_list += basis
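# Illustrative end-to-end usage sketch (not part of the original module).
# File names, the atlas path and all shapes are hypothetical; each .npy file
# is expected to hold standardized data of shape [n_voxels, n_timeframes].
#
#     import numpy as np
#     imgs = np.array([["s1_run1.npy", "s1_run2.npy"],
#                      ["s2_run1.npy", "s2_run2.npy"]])
#     srm = FastSRM(atlas="atlas.npy", n_components=20, n_iter=10,
#                   temp_dir="/tmp", aggregate="mean")
#     srm.fit(imgs)
#     shared = srm.transform(imgs)          # one array per session
#     reconstructed = srm.inverse_transform(shared, subjects_indexes=[0])
#     srm.clean()                           # free temporary files and basis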
|
brainiak/brainiak
|
brainiak/funcalign/fastsrm.py
|
Python
|
apache-2.0
| 65,510
|
# -*- coding: utf-8 -*-
from couchdb.design import ViewDefinition
from openprocurement.api import design
FIELDS = [
'planID',
]
CHANGES_FIELDS = FIELDS + [
'dateModified',
]
def add_design():
for i, j in globals().items():
if "_view" in i:
setattr(design, i, j)
plans_all_view = ViewDefinition('plans', 'all', '''function(doc) {
if(doc.doc_type == 'Plan') {
emit(doc.planID, null);
}
}''')
plans_by_dateModified_view = ViewDefinition('plans', 'by_dateModified', '''function(doc) {
if(doc.doc_type == 'Plan') {
var fields=%s, data={};
for (var i in fields) {
if (doc[fields[i]]) {
data[fields[i]] = doc[fields[i]]
}
}
emit(doc.dateModified, data);
}
}''' % FIELDS)
plans_real_by_dateModified_view = ViewDefinition('plans', 'real_by_dateModified', '''function(doc) {
if(doc.doc_type == 'Plan' && !doc.mode) {
var fields=%s, data={};
for (var i in fields) {
if (doc[fields[i]]) {
data[fields[i]] = doc[fields[i]]
}
}
emit(doc.dateModified, data);
}
}''' % FIELDS)
plans_test_by_dateModified_view = ViewDefinition('plans', 'test_by_dateModified', '''function(doc) {
if(doc.doc_type == 'Plan' && doc.mode == 'test') {
var fields=%s, data={};
for (var i in fields) {
if (doc[fields[i]]) {
data[fields[i]] = doc[fields[i]]
}
}
emit(doc.dateModified, data);
}
}''' % FIELDS)
plans_by_local_seq_view = ViewDefinition('plans', 'by_local_seq', '''function(doc) {
if(doc.doc_type == 'Plan') {
var fields=%s, data={};
for (var i in fields) {
if (doc[fields[i]]) {
data[fields[i]] = doc[fields[i]]
}
}
emit(doc._local_seq, data);
}
}''' % CHANGES_FIELDS)
plans_real_by_local_seq_view = ViewDefinition('plans', 'real_by_local_seq', '''function(doc) {
if(doc.doc_type == 'Plan' && !doc.mode) {
var fields=%s, data={};
for (var i in fields) {
if (doc[fields[i]]) {
data[fields[i]] = doc[fields[i]]
}
}
emit(doc._local_seq, data);
}
}''' % CHANGES_FIELDS)
plans_test_by_local_seq_view = ViewDefinition('plans', 'test_by_local_seq', '''function(doc) {
if(doc.doc_type == 'Plan' && doc.mode == 'test') {
var fields=%s, data={};
for (var i in fields) {
if (doc[fields[i]]) {
data[fields[i]] = doc[fields[i]]
}
}
emit(doc._local_seq, data);
}
}''' % CHANGES_FIELDS)
conflicts_view = ViewDefinition('conflicts', 'all', '''function(doc) {
if (doc._conflicts) {
emit(doc._rev, [doc._rev].concat(doc._conflicts));
}
}''')
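# Illustrative sketch (not part of the original module): the views above can
# be pushed to a CouchDB database with couchdb-python, e.g. via
# ViewDefinition.sync_many. The server URL and database name are hypothetical.
#
#     import couchdb
#     db = couchdb.Server('http://localhost:5984/')['plans']
#     add_design()
#     ViewDefinition.sync_many(db, [plans_all_view, plans_by_dateModified_view])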
|
gorserg/openprocurement.planning.api
|
openprocurement/planning/api/design.py
|
Python
|
apache-2.0
| 2,854
|
# -*- coding: utf-8 -*-
'''
Module for managing logrotate.
'''
from __future__ import absolute_import
# Import python libs
import os
import logging
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
default_conf = '/etc/logrotate.conf'
# Define a function alias in order not to shadow built-in's
__func_alias__ = {
'set_': 'set'
}
def __virtual__():
'''
Only work on POSIX-like systems
'''
if salt.utils.is_windows():
return (False, 'The logrotate execution module cannot be loaded: only available on non-Windows systems.')
return True
def _parse_conf(conf_file=default_conf):
'''
Parse a logrotate configuration file.
Includes will also be parsed, and their configuration will be stored in the
return dict, as if they were part of the main config file. A dict of which
configs came from which includes will be stored in the 'include files' dict
inside the return dict, for later reference by the user or module.
'''
ret = {}
mode = 'single'
multi_names = []
multi = {}
prev_comps = None
with salt.utils.fopen(conf_file, 'r') as ifile:
for line in ifile:
line = line.strip()
if not line:
continue
if line.startswith('#'):
continue
comps = line.split()
if '{' in line and '}' not in line:
mode = 'multi'
if len(comps) == 1 and prev_comps:
multi_names = prev_comps
else:
multi_names = comps
multi_names.pop()
continue
if '}' in line:
mode = 'single'
for multi_name in multi_names:
ret[multi_name] = multi
multi_names = []
multi = {}
continue
if mode == 'single':
key = ret
else:
key = multi
if comps[0] == 'include':
if 'include files' not in ret:
ret['include files'] = {}
for include in os.listdir(comps[1]):
if include not in ret['include files']:
ret['include files'][include] = []
include_path = '{0}/{1}'.format(comps[1], include)
include_conf = _parse_conf(include_path)
for file_key in include_conf:
ret[file_key] = include_conf[file_key]
ret['include files'][include].append(file_key)
prev_comps = comps
if len(comps) > 1:
key[comps[0]] = ' '.join(comps[1:])
else:
key[comps[0]] = True
return ret
def show_conf(conf_file=default_conf):
'''
Show parsed configuration
CLI Example:
.. code-block:: bash
salt '*' logrotate.show_conf
'''
return _parse_conf(conf_file)
def set_(key, value, setting=None, conf_file=default_conf):
'''
Set a new value for a specific configuration line
CLI Example:
.. code-block:: bash
salt '*' logrotate.set rotate 2
Can also be used to set a single value inside a multiline configuration
block. For instance, to change rotate in the following block:
.. code-block:: text
/var/log/wtmp {
monthly
create 0664 root root
rotate 1
}
Use the following command:
.. code-block:: bash
salt '*' logrotate.set /var/log/wtmp rotate 2
This module also has the ability to scan files inside an include directory,
and make changes in the appropriate file.
'''
conf = _parse_conf(conf_file)
    for include in conf.get('include files', {}):
if key in conf['include files'][include]:
conf_file = os.path.join(conf['include'], include)
if isinstance(conf[key], dict) and not setting:
return (
'Error: {0} includes a dict, and a specific setting inside the '
'dict was not declared'.format(key)
)
if setting:
if isinstance(conf[key], str):
return ('Error: A setting for a dict was declared, but the '
'configuration line given is not a dict')
# We're going to be rewriting an entire stanza
stanza = conf[key]
if value == 'False':
del stanza[value]
else:
stanza[value] = setting
new_line = _dict_to_stanza(key, stanza)
log.debug(stanza)
log.debug(new_line)
log.debug(key)
__salt__['file.replace'](conf_file,
'{0}.*{{.*}}'.format(key),
new_line,
                                 flags=24,  # re.MULTILINE | re.DOTALL
backup=False)
else:
# This is the new config line that will be set
if value == 'True':
new_line = key
elif value == 'False':
new_line = ''
else:
new_line = '{0} {1}'.format(key, value)
log.debug(conf_file)
log.debug(key)
log.debug(new_line)
__salt__['file.replace'](conf_file,
'^{0}.*'.format(key),
new_line,
                                 flags=8,  # re.MULTILINE
backup=False)
def _dict_to_stanza(key, stanza):
'''
Convert a dict to a multi-line stanza
'''
ret = ''
for skey in stanza:
if stanza[skey] is True:
stanza[skey] = ''
ret += ' {0} {1}\n'.format(skey, stanza[skey])
return '{0} {{\n{1}}}'.format(key, ret)
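# Illustrative sketch (not part of the original module): roughly what
# _dict_to_stanza produces for a small block (modulo exact whitespace and
# dict ordering). The path and values are hypothetical.
#
#     _dict_to_stanza('/var/log/wtmp', {'monthly': True, 'rotate': '1'})
#     # -> "/var/log/wtmp {\n    monthly \n    rotate 1\n}"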
|
stephane-martin/salt-debian-packaging
|
salt-2016.3.3/salt/modules/logrotate.py
|
Python
|
apache-2.0
| 5,730
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova import exception
from nova.openstack.common import processutils
from nova import test
from nova.tests import utils as tests_utils
import nova.utils
from nova.virt.disk.vfs import localfs as vfsimpl
CONF = cfg.CONF
dirs = []
files = {}
commands = []
def fake_execute(*args, **kwargs):
commands.append({"args": args, "kwargs": kwargs})
if args[0] == "readlink":
if args[1] == "-nm":
if args[2] in ["/scratch/dir/some/file",
"/scratch/dir/some/dir",
"/scratch/dir/other/dir",
"/scratch/dir/other/file"]:
return args[2], ""
elif args[1] == "-e":
if args[2] in files:
return args[2], ""
return "", "No such file"
elif args[0] == "mkdir":
dirs.append(args[2])
elif args[0] == "chown":
owner = args[1]
path = args[2]
if path not in files:
raise Exception("No such file: " + path)
sep = owner.find(':')
if sep != -1:
user = owner[0:sep]
group = owner[sep + 1:]
else:
user = owner
group = None
if user:
if user == "fred":
uid = 105
else:
uid = 110
files[path]["uid"] = uid
if group:
if group == "users":
gid = 500
else:
gid = 600
files[path]["gid"] = gid
elif args[0] == "chgrp":
group = args[1]
path = args[2]
if path not in files:
raise Exception("No such file: " + path)
if group == "users":
gid = 500
else:
gid = 600
files[path]["gid"] = gid
elif args[0] == "chmod":
mode = args[1]
path = args[2]
if path not in files:
raise Exception("No such file: " + path)
files[path]["mode"] = int(mode, 8)
elif args[0] == "cat":
path = args[1]
if path not in files:
files[path] = {
"content": "Hello World",
"gid": 100,
"uid": 100,
"mode": 0o700
}
return files[path]["content"], ""
elif args[0] == "tee":
if args[1] == "-a":
path = args[2]
append = True
else:
path = args[1]
append = False
if path not in files:
files[path] = {
"content": "Hello World",
"gid": 100,
"uid": 100,
"mode": 0o700,
}
if append:
files[path]["content"] += kwargs["process_input"]
else:
files[path]["content"] = kwargs["process_input"]
class VirtDiskVFSLocalFSTestPaths(test.NoDBTestCase):
def setUp(self):
super(VirtDiskVFSLocalFSTestPaths, self).setUp()
real_execute = processutils.execute
def nonroot_execute(*cmd_parts, **kwargs):
kwargs.pop('run_as_root', None)
return real_execute(*cmd_parts, **kwargs)
self.stubs.Set(processutils, 'execute', nonroot_execute)
def test_check_safe_path(self):
if tests_utils.is_osx():
self.skipTest("Unable to test on OSX")
vfs = vfsimpl.VFSLocalFS("dummy.img")
vfs.imgdir = "/foo"
ret = vfs._canonical_path('etc/something.conf')
self.assertEqual(ret, '/foo/etc/something.conf')
def test_check_unsafe_path(self):
if tests_utils.is_osx():
self.skipTest("Unable to test on OSX")
vfs = vfsimpl.VFSLocalFS("dummy.img")
vfs.imgdir = "/foo"
self.assertRaises(exception.Invalid,
vfs._canonical_path,
'etc/../../../something.conf')
class VirtDiskVFSLocalFSTest(test.NoDBTestCase):
def test_makepath(self):
global dirs, commands
dirs = []
commands = []
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
vfs.make_path("/some/dir")
vfs.make_path("/other/dir")
self.assertEqual(dirs,
["/scratch/dir/some/dir", "/scratch/dir/other/dir"]),
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/dir'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('mkdir', '-p',
'/scratch/dir/some/dir'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/other/dir'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('mkdir', '-p',
'/scratch/dir/other/dir'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}}])
def test_append_file(self):
global files, commands
files = {}
commands = []
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
vfs.append_file("/some/file", " Goodbye")
self.assertIn("/scratch/dir/some/file", files)
self.assertEqual(files["/scratch/dir/some/file"]["content"],
"Hello World Goodbye")
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('tee', '-a',
'/scratch/dir/some/file'),
'kwargs': {'process_input': ' Goodbye',
'run_as_root': True,
'root_helper': root_helper}}])
def test_replace_file(self):
global files, commands
files = {}
commands = []
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
vfs.replace_file("/some/file", "Goodbye")
self.assertIn("/scratch/dir/some/file", files)
self.assertEqual(files["/scratch/dir/some/file"]["content"],
"Goodbye")
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('tee', '/scratch/dir/some/file'),
'kwargs': {'process_input': 'Goodbye',
'run_as_root': True,
'root_helper': root_helper}}])
def test_read_file(self):
global commands, files
files = {}
commands = []
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
self.assertEqual(vfs.read_file("/some/file"), "Hello World")
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('cat', '/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}}])
def test_has_file(self):
global commands, files
files = {}
commands = []
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
vfs.read_file("/some/file")
self.assertTrue(vfs.has_file("/some/file"))
self.assertFalse(vfs.has_file("/other/file"))
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('cat', '/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-e',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/other/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-e',
'/scratch/dir/other/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
])
def test_set_permissions(self):
global commands, files
commands = []
files = {}
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
vfs.read_file("/some/file")
vfs.set_permissions("/some/file", 0o777)
self.assertEqual(files["/scratch/dir/some/file"]["mode"], 0o777)
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('cat', '/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('chmod', '777',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}}])
def test_set_ownership(self):
global commands, files
commands = []
files = {}
self.stubs.Set(processutils, 'execute', fake_execute)
vfs = vfsimpl.VFSLocalFS(imgfile="/dummy.qcow2", imgfmt="qcow2")
vfs.imgdir = "/scratch/dir"
vfs.read_file("/some/file")
self.assertEqual(files["/scratch/dir/some/file"]["uid"], 100)
self.assertEqual(files["/scratch/dir/some/file"]["gid"], 100)
vfs.set_ownership("/some/file", "fred", None)
self.assertEqual(files["/scratch/dir/some/file"]["uid"], 105)
self.assertEqual(files["/scratch/dir/some/file"]["gid"], 100)
vfs.set_ownership("/some/file", None, "users")
self.assertEqual(files["/scratch/dir/some/file"]["uid"], 105)
self.assertEqual(files["/scratch/dir/some/file"]["gid"], 500)
vfs.set_ownership("/some/file", "joe", "admins")
self.assertEqual(files["/scratch/dir/some/file"]["uid"], 110)
self.assertEqual(files["/scratch/dir/some/file"]["gid"], 600)
root_helper = nova.utils._get_root_helper()
self.assertEqual(commands,
[{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('cat', '/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('chown', 'fred',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('chgrp', 'users',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('readlink', '-nm',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}},
{'args': ('chown', 'joe:admins',
'/scratch/dir/some/file'),
'kwargs': {'run_as_root': True,
'root_helper': root_helper}}])
|
sacharya/nova
|
nova/tests/virt/test_virt_disk_vfs_localfs.py
|
Python
|
apache-2.0
| 15,688
|
from setuptools import setup
install_requires = [
'psycopg2',
]
setup(
name='cockroachdb',
version='0.2.1',
author='Cockroach Labs',
author_email='cockroach-db@googlegroups.com',
url='https://github.com/cockroachdb/cockroachdb-python',
description='CockroachDB adapter for SQLAlchemy',
license="http://www.apache.org/licenses/LICENSE-2.0",
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
packages=['cockroachdb', 'cockroachdb.sqlalchemy'],
install_requires=install_requires,
entry_points={
'sqlalchemy.dialects': [
'cockroachdb = cockroachdb.sqlalchemy.dialect:CockroachDBDialect',
],
},
)
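# Illustrative sketch (not part of the original file): the entry point above
# lets SQLAlchemy resolve the dialect from the URL scheme once the package is
# installed. Host, port and database name below are hypothetical.
#
#     from sqlalchemy import create_engine
#     engine = create_engine("cockroachdb://root@localhost:26257/defaultdb")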
|
bdarnell/cockroachdb-python
|
setup.py
|
Python
|
apache-2.0
| 806
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import textwrap
import mock
import pep8
from nova.hacking import checks
from nova import test
class HackingTestCase(test.NoDBTestCase):
"""This class tests the hacking checks in nova.hacking.checks by passing
strings to the check methods like the pep8/flake8 parser would. The parser
loops over each line in the file and then passes the parameters to the
check method. The parameter names in the check method dictate what type of
object is passed to the check method. The parameter types are::
logical_line: A processed line with the following modifications:
- Multi-line statements converted to a single line.
- Stripped left and right.
- Contents of strings replaced with "xxx" of same length.
- Comments removed.
physical_line: Raw line of text from the input file.
lines: a list of the raw lines from the input file
tokens: the tokens that contribute to this logical line
line_number: line number in the input file
total_lines: number of lines in the input file
blank_lines: blank lines before this one
indent_char: indentation character in this file (" " or "\t")
indent_level: indentation (with tabs expanded to multiples of 8)
previous_indent_level: indentation on previous line
previous_logical: previous logical line
filename: Path of the file being run through pep8
When running a test on a check method the return will be False/None if
there is no violation in the sample input. If there is an error a tuple is
returned with a position in the line, and a message. So to check the result
just assertTrue if the check is expected to fail and assertFalse if it
should pass.
"""
def test_virt_driver_imports(self):
expect = (0, "N311: importing code from other virt drivers forbidden")
self.assertEqual(expect, checks.import_no_virt_driver_import_deps(
"from nova.virt.libvirt import utils as libvirt_utils",
"./nova/virt/xenapi/driver.py"))
self.assertEqual(expect, checks.import_no_virt_driver_import_deps(
"import nova.virt.libvirt.utils as libvirt_utils",
"./nova/virt/xenapi/driver.py"))
self.assertIsNone(checks.import_no_virt_driver_import_deps(
"from nova.virt.libvirt import utils as libvirt_utils",
"./nova/virt/libvirt/driver.py"))
self.assertIsNone(checks.import_no_virt_driver_import_deps(
"import nova.virt.firewall",
"./nova/virt/libvirt/firewall.py"))
def test_virt_driver_config_vars(self):
self.assertIsInstance(checks.import_no_virt_driver_config_deps(
"CONF.import_opt('volume_drivers', "
"'nova.virt.libvirt.driver', group='libvirt')",
"./nova/virt/xenapi/driver.py"), tuple)
self.assertIsNone(checks.import_no_virt_driver_config_deps(
"CONF.import_opt('volume_drivers', "
"'nova.virt.libvirt.driver', group='libvirt')",
"./nova/virt/libvirt/volume.py"))
def test_no_vi_headers(self):
lines = ['Line 1\n', 'Line 2\n', 'Line 3\n', 'Line 4\n', 'Line 5\n',
'Line 6\n', 'Line 7\n', 'Line 8\n', 'Line 9\n', 'Line 10\n',
'Line 11\n', 'Line 12\n', 'Line 13\n', 'Line14\n', 'Line15\n']
self.assertIsNone(checks.no_vi_headers(
"Test string foo", 1, lines))
self.assertEqual(len(list(checks.no_vi_headers(
"# vim: et tabstop=4 shiftwidth=4 softtabstop=4",
2, lines))), 2)
self.assertIsNone(checks.no_vi_headers(
"# vim: et tabstop=4 shiftwidth=4 softtabstop=4",
6, lines))
self.assertIsNone(checks.no_vi_headers(
"# vim: et tabstop=4 shiftwidth=4 softtabstop=4",
9, lines))
self.assertEqual(len(list(checks.no_vi_headers(
"# vim: et tabstop=4 shiftwidth=4 softtabstop=4",
14, lines))), 2)
self.assertIsNone(checks.no_vi_headers(
"Test end string for vi",
15, lines))
def test_assert_true_instance(self):
self.assertEqual(len(list(checks.assert_true_instance(
"self.assertTrue(isinstance(e, "
"exception.BuildAbortException))"))), 1)
self.assertEqual(
len(list(checks.assert_true_instance("self.assertTrue()"))), 0)
def test_assert_equal_type(self):
self.assertEqual(len(list(checks.assert_equal_type(
"self.assertEqual(type(als['QuicAssist']), list)"))), 1)
self.assertEqual(
len(list(checks.assert_equal_type("self.assertTrue()"))), 0)
def test_assert_equal_in(self):
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(a in b, True)"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual('str' in 'string', True)"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(any(a==1 for a in b), True)"))), 0)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(True, a in b)"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(True, 'str' in 'string')"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(True, any(a==1 for a in b))"))), 0)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(a in b, False)"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual('str' in 'string', False)"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(any(a==1 for a in b), False)"))), 0)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(False, a in b)"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(False, 'str' in 'string')"))), 1)
self.assertEqual(len(list(checks.assert_equal_in(
"self.assertEqual(False, any(a==1 for a in b))"))), 0)
def test_assert_equal_none(self):
self.assertEqual(len(list(checks.assert_equal_none(
"self.assertEqual(A, None)"))), 1)
self.assertEqual(len(list(checks.assert_equal_none(
"self.assertEqual(None, A)"))), 1)
self.assertEqual(
len(list(checks.assert_equal_none("self.assertIsNone()"))), 0)
def test_assert_true_or_false_with_in_or_not_in(self):
self.assertEqual(len(list(checks.assert_equal_none(
"self.assertEqual(A, None)"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A in B)"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertFalse(A in B)"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A not in B)"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertFalse(A not in B)"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A in B, 'some message')"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertFalse(A in B, 'some message')"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A not in B, 'some message')"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertFalse(A not in B, 'some message')"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A in 'some string with spaces')"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A in 'some string with spaces')"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A in ['1', '2', '3'])"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(A in [1, 2, 3])"))), 1)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(any(A > 5 for A in B))"))), 0)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertTrue(any(A > 5 for A in B), 'some message')"))), 0)
self.assertEqual(len(list(checks.assert_true_or_false_with_in(
"self.assertFalse(some in list1 and some2 in list2)"))), 0)
def test_no_translate_debug_logs(self):
self.assertEqual(len(list(checks.no_translate_debug_logs(
"LOG.debug(_('foo'))", "nova/scheduler/foo.py"))), 1)
self.assertEqual(len(list(checks.no_translate_debug_logs(
"LOG.debug('foo')", "nova/scheduler/foo.py"))), 0)
self.assertEqual(len(list(checks.no_translate_debug_logs(
"LOG.info(_('foo'))", "nova/scheduler/foo.py"))), 0)
def test_no_setting_conf_directly_in_tests(self):
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests(
"CONF.option = 1", "nova/tests/test_foo.py"))), 1)
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests(
"CONF.group.option = 1", "nova/tests/test_foo.py"))), 1)
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests(
"CONF.option = foo = 1", "nova/tests/test_foo.py"))), 1)
# Shouldn't fail with comparisons
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests(
"CONF.option == 'foo'", "nova/tests/test_foo.py"))), 0)
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests(
"CONF.option != 1", "nova/tests/test_foo.py"))), 0)
# Shouldn't fail since not in nova/tests/
self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests(
"CONF.option = 1", "nova/compute/foo.py"))), 0)
def test_log_translations(self):
logs = ['audit', 'error', 'info', 'warning', 'critical', 'warn',
'exception']
levels = ['_LI', '_LW', '_LE', '_LC']
debug = "LOG.debug('OK')"
self.assertEqual(
0, len(list(checks.validate_log_translations(debug, debug, 'f'))))
for log in logs:
bad = 'LOG.%s("Bad")' % log
self.assertEqual(1,
len(list(
checks.validate_log_translations(bad, bad, 'f'))))
ok = "LOG.%s('OK') # noqa" % log
self.assertEqual(0,
len(list(
checks.validate_log_translations(ok, ok, 'f'))))
ok = "LOG.%s(variable)" % log
self.assertEqual(0,
len(list(
checks.validate_log_translations(ok, ok, 'f'))))
for level in levels:
ok = "LOG.%s(%s('OK'))" % (log, level)
self.assertEqual(0,
len(list(
checks.validate_log_translations(ok, ok, 'f'))))
def test_no_mutable_default_args(self):
self.assertEqual(1, len(list(checks.no_mutable_default_args(
"def get_info_from_bdm(virt_type, bdm, mapping=[])"))))
self.assertEqual(0, len(list(checks.no_mutable_default_args(
"defined = []"))))
self.assertEqual(0, len(list(checks.no_mutable_default_args(
"defined, undefined = [], {}"))))
def test_check_explicit_underscore_import(self):
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"LOG.info(_('My info message'))",
"cinder/tests/other_files.py"))), 1)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"msg = _('My message')",
"cinder/tests/other_files.py"))), 1)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"from cinder.i18n import _",
"cinder/tests/other_files.py"))), 0)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"LOG.info(_('My info message'))",
"cinder/tests/other_files.py"))), 0)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"msg = _('My message')",
"cinder/tests/other_files.py"))), 0)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"from cinder.i18n import _, _LW",
"cinder/tests/other_files2.py"))), 0)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"msg = _('My message')",
"cinder/tests/other_files2.py"))), 0)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"_ = translations.ugettext",
"cinder/tests/other_files3.py"))), 0)
self.assertEqual(len(list(checks.check_explicit_underscore_import(
"msg = _('My message')",
"cinder/tests/other_files3.py"))), 0)
def test_use_jsonutils(self):
def __get_msg(fun):
msg = ("N324: jsonutils.%(fun)s must be used instead of "
"json.%(fun)s" % {'fun': fun})
return [(0, msg)]
for method in ('dump', 'dumps', 'load', 'loads'):
self.assertEqual(
__get_msg(method),
list(checks.use_jsonutils("json.%s(" % method,
"./nova/virt/xenapi/driver.py")))
self.assertEqual(0,
len(list(checks.use_jsonutils("json.%s(" % method,
"./plugins/xenserver/script.py"))))
self.assertEqual(0,
len(list(checks.use_jsonutils("jsonx.%s(" % method,
"./nova/virt/xenapi/driver.py"))))
self.assertEqual(0,
len(list(checks.use_jsonutils("json.dumb",
"./nova/virt/xenapi/driver.py"))))
# We are patching pep8 so that only the check under test is actually
# installed.
@mock.patch('pep8._checks',
{'physical_line': {}, 'logical_line': {}, 'tree': {}})
def _run_check(self, code, checker, filename=None):
pep8.register_check(checker)
lines = textwrap.dedent(code).strip().splitlines(True)
checker = pep8.Checker(filename=filename, lines=lines)
checker.check_all()
checker.report._deferred_print.sort()
return checker.report._deferred_print
def _assert_has_errors(self, code, checker, expected_errors=None,
filename=None):
actual_errors = [e[:3] for e in
self._run_check(code, checker, filename)]
self.assertEqual(expected_errors or [], actual_errors)
def _assert_has_no_errors(self, code, checker, filename=None):
self._assert_has_errors(code, checker, filename=filename)
def test_str_unicode_exception(self):
checker = checks.CheckForStrUnicodeExc
code = """
def f(a, b):
try:
p = str(a) + str(b)
except ValueError as e:
p = str(e)
return p
"""
errors = [(5, 16, 'N325')]
self._assert_has_errors(code, checker, expected_errors=errors)
code = """
def f(a, b):
try:
p = unicode(a) + str(b)
except ValueError as e:
p = e
return p
"""
self._assert_has_no_errors(code, checker)
code = """
def f(a, b):
try:
p = str(a) + str(b)
except ValueError as e:
p = unicode(e)
return p
"""
errors = [(5, 20, 'N325')]
self._assert_has_errors(code, checker, expected_errors=errors)
code = """
def f(a, b):
try:
p = str(a) + str(b)
except ValueError as e:
try:
p = unicode(a) + unicode(b)
except ValueError as ve:
p = str(e) + str(ve)
p = e
return p
"""
errors = [(8, 20, 'N325'), (8, 29, 'N325')]
self._assert_has_errors(code, checker, expected_errors=errors)
code = """
def f(a, b):
try:
p = str(a) + str(b)
except ValueError as e:
try:
p = unicode(a) + unicode(b)
except ValueError as ve:
p = str(e) + unicode(ve)
p = str(e)
return p
"""
errors = [(8, 20, 'N325'), (8, 33, 'N325'), (9, 16, 'N325')]
self._assert_has_errors(code, checker, expected_errors=errors)
def test_api_version_decorator_check(self):
code = """
@some_other_decorator
@wsgi.api_version("2.5")
def my_method():
pass
"""
self._assert_has_errors(code, checks.check_api_version_decorator,
expected_errors=[(2, 0, "N332")])
def test_oslo_assert_raises_regexp(self):
code = """
self.assertRaisesRegexp(ValueError,
"invalid literal for.*XYZ'$",
int,
'XYZ')
"""
self._assert_has_errors(code, checks.assert_raises_regexp,
expected_errors=[(1, 0, "N335")])
def test_api_version_decorator_check_no_errors(self):
code = """
class ControllerClass():
@wsgi.api_version("2.5")
def my_method():
pass
"""
self._assert_has_no_errors(code, checks.check_api_version_decorator)
def test_trans_add(self):
checker = checks.CheckForTransAdd
code = """
def fake_tran(msg):
return msg
_ = fake_tran
_LI = _
_LW = _
_LE = _
_LC = _
def f(a, b):
msg = _('test') + 'add me'
msg = _LI('test') + 'add me'
msg = _LW('test') + 'add me'
msg = _LE('test') + 'add me'
msg = _LC('test') + 'add me'
msg = 'add to me' + _('test')
return msg
"""
errors = [(13, 10, 'N326'), (14, 10, 'N326'), (15, 10, 'N326'),
(16, 10, 'N326'), (17, 10, 'N326'), (18, 24, 'N326')]
self._assert_has_errors(code, checker, expected_errors=errors)
code = """
def f(a, b):
msg = 'test' + 'add me'
return msg
"""
self._assert_has_no_errors(code, checker)
def test_dict_constructor_with_list_copy(self):
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
" dict([(i, connect_info[i])"))))
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
" attrs = dict([(k, _from_json(v))"))))
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
" type_names = dict((value, key) for key, value in"))))
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
" dict((value, key) for key, value in"))))
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
"foo(param=dict((k, v) for k, v in bar.items()))"))))
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
" dict([[i,i] for i in range(3)])"))))
self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
" dd = dict([i,i] for i in range(3))"))))
self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
" create_kwargs = dict(snapshot=snapshot,"))))
self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
" self._render_dict(xml, data_el, data.__dict__)"))))
def test_check_http_not_implemented(self):
code = """
except NotImplementedError:
common.raise_http_not_implemented_error()
"""
filename = "nova/api/openstack/compute/v21/test.py"
self._assert_has_no_errors(code, checks.check_http_not_implemented,
filename=filename)
code = """
except NotImplementedError:
msg = _("Unable to set password on instance")
raise exc.HTTPNotImplemented(explanation=msg)
"""
errors = [(3, 4, 'N339')]
self._assert_has_errors(code, checks.check_http_not_implemented,
expected_errors=errors, filename=filename)
filename = "nova/api/openstack/compute/legacy_v2/test.py"
self._assert_has_no_errors(code, checks.check_http_not_implemented,
filename=filename)
def test_check_contextlib_use(self):
code = """
with test.nested(
mock.patch.object(network_model.NetworkInfo, 'hydrate'),
mock.patch.object(objects.InstanceInfoCache, 'save'),
) as (
hydrate_mock, save_mock
)
"""
filename = "nova/api/openstack/compute/v21/test.py"
self._assert_has_no_errors(code, checks.check_no_contextlib_nested,
filename=filename)
code = """
with contextlib.nested(
mock.patch.object(network_model.NetworkInfo, 'hydrate'),
mock.patch.object(objects.InstanceInfoCache, 'save'),
) as (
hydrate_mock, save_mock
)
"""
filename = "nova/api/openstack/compute/legacy_v2/test.py"
errors = [(1, 0, 'N341')]
self._assert_has_errors(code, checks.check_no_contextlib_nested,
expected_errors=errors, filename=filename)
def test_check_greenthread_spawns(self):
errors = [(1, 0, "N340")]
code = "greenthread.spawn(func, arg1, kwarg1=kwarg1)"
self._assert_has_errors(code, checks.check_greenthread_spawns,
expected_errors=errors)
code = "greenthread.spawn_n(func, arg1, kwarg1=kwarg1)"
self._assert_has_errors(code, checks.check_greenthread_spawns,
expected_errors=errors)
code = "eventlet.greenthread.spawn(func, arg1, kwarg1=kwarg1)"
self._assert_has_errors(code, checks.check_greenthread_spawns,
expected_errors=errors)
code = "eventlet.spawn(func, arg1, kwarg1=kwarg1)"
self._assert_has_errors(code, checks.check_greenthread_spawns,
expected_errors=errors)
code = "eventlet.spawn_n(func, arg1, kwarg1=kwarg1)"
self._assert_has_errors(code, checks.check_greenthread_spawns,
expected_errors=errors)
code = "nova.utils.spawn(func, arg1, kwarg1=kwarg1)"
self._assert_has_no_errors(code, checks.check_greenthread_spawns)
code = "nova.utils.spawn_n(func, arg1, kwarg1=kwarg1)"
self._assert_has_no_errors(code, checks.check_greenthread_spawns)
def test_config_option_regex_match(self):
def should_match(code):
self.assertTrue(checks.cfg_opt_re.match(code))
def should_not_match(code):
self.assertFalse(checks.cfg_opt_re.match(code))
should_match("opt = cfg.StrOpt('opt_name')")
should_match("opt = cfg.IntOpt('opt_name')")
should_match("opt = cfg.DictOpt('opt_name')")
should_match("opt = cfg.Opt('opt_name')")
should_match("opts=[cfg.Opt('opt_name')]")
should_match(" cfg.Opt('opt_name')")
should_not_match("opt_group = cfg.OptGroup('opt_group_name')")
def test_check_config_option_in_central_place(self):
errors = [(1, 0, "N342")]
code = """
opts = [
cfg.StrOpt('random_opt',
default='foo',
help='I am here to do stuff'),
]
"""
# option at the right place in the tree
self._assert_has_no_errors(code,
checks.check_config_option_in_central_place,
filename="nova/conf/serial_console.py")
# option at a location which is not in scope right now
        # TODO(markus_z): This is temporary until all config options are
# moved to /nova/conf
self._assert_has_no_errors(code,
checks.check_config_option_in_central_place,
filename="nova/dummy/non_existent.py")
# option at the wrong place in the tree
self._assert_has_errors(code,
checks.check_config_option_in_central_place,
filename="nova/cmd/serialproxy.py",
expected_errors=errors)
|
apporc/nova
|
nova/tests/unit/test_hacking.py
|
Python
|
apache-2.0
| 26,493
|
# Copyright 2014 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from oslo_log import log as logging
import six
from neutron._i18n import _, _LE, _LW
from neutron.agent.linux import ip_link_support
from neutron.common import utils
from neutron.plugins.ml2.drivers.mech_sriov.agent.common \
import exceptions as exc
from neutron.plugins.ml2.drivers.mech_sriov.agent import pci_lib
LOG = logging.getLogger(__name__)
class PciOsWrapper(object):
"""OS wrapper for checking virtual functions"""
DEVICE_PATH = "/sys/class/net/%s/device"
PCI_PATH = "/sys/class/net/%s/device/virtfn%s/net"
VIRTFN_FORMAT = r"^virtfn(?P<vf_index>\d+)"
VIRTFN_REG_EX = re.compile(VIRTFN_FORMAT)
@classmethod
def scan_vf_devices(cls, dev_name):
"""Scan os directories to get VF devices
@param dev_name: pf network device name
@return: list of virtual functions
"""
vf_list = []
dev_path = cls.DEVICE_PATH % dev_name
if not os.path.isdir(dev_path):
LOG.error(_LE("Failed to get devices for %s"), dev_name)
raise exc.InvalidDeviceError(dev_name=dev_name,
reason=_("Device not found"))
file_list = os.listdir(dev_path)
for file_name in file_list:
pattern_match = cls.VIRTFN_REG_EX.match(file_name)
if pattern_match:
vf_index = int(pattern_match.group("vf_index"))
file_path = os.path.join(dev_path, file_name)
if os.path.islink(file_path):
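                    # Each virtfnN entry is a symlink to the VF's PCI device;
                    # its basename is the VF's PCI slot address.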
file_link = os.readlink(file_path)
pci_slot = os.path.basename(file_link)
vf_list.append((pci_slot, vf_index))
return vf_list
@classmethod
def is_assigned_vf(cls, dev_name, vf_index):
"""Check if VF is assigned.
Checks if a given vf index of a given device name is assigned
by checking the relevant path in the system:
VF is assigned if:
Direct VF: PCI_PATH does not exist.
Macvtap VF: macvtap@<vf interface> interface exists in ip link show
@param dev_name: pf network device name
@param vf_index: vf index
"""
path = cls.PCI_PATH % (dev_name, vf_index)
try:
ifname_list = os.listdir(path)
except OSError:
            # PCI_PATH not existing means that the direct VF is assigned
return True
# Note(moshele) kernel < 3.13 doesn't create symbolic link
# for macvtap interface. Therefore we workaround it
# by parsing ip link show and checking if macvtap interface exists
for ifname in ifname_list:
if pci_lib.PciDeviceIPWrapper.is_macvtap_assigned(ifname):
return True
return False
class EmbSwitch(object):
"""Class to manage logical embedded switch entity.
Embedded Switch object is logical entity representing all VFs
connected to same physical network
Each physical network is mapped to PF network device interface,
meaning all its VF, excluding the devices in exclude_device list.
@ivar pci_slot_map: dictionary for mapping each pci slot to vf index
@ivar pci_dev_wrapper: pci device wrapper
"""
def __init__(self, phys_net, dev_name, exclude_devices):
"""Constructor
@param phys_net: physical network
@param dev_name: network device name
@param exclude_devices: list of pci slots to exclude
"""
self.phys_net = phys_net
self.dev_name = dev_name
self.pci_slot_map = {}
self.pci_dev_wrapper = pci_lib.PciDeviceIPWrapper(dev_name)
self._load_devices(exclude_devices)
def _load_devices(self, exclude_devices):
"""Load devices from driver and filter if needed.
@param exclude_devices: excluded devices mapping device_name: pci slots
"""
scanned_pci_list = PciOsWrapper.scan_vf_devices(self.dev_name)
for pci_slot, vf_index in scanned_pci_list:
if pci_slot not in exclude_devices:
self.pci_slot_map[pci_slot] = vf_index
def get_pci_slot_list(self):
"""Get list of VF addresses."""
return self.pci_slot_map.keys()
def get_assigned_devices_info(self):
"""Get assigned Virtual Functions mac and pci slot
information and populates vf_to_pci_slot mappings
@return: list of VF pair (mac address, pci slot)
"""
vf_to_pci_slot_mapping = {}
assigned_devices_info = []
for pci_slot, vf_index in self.pci_slot_map.items():
if not PciOsWrapper.is_assigned_vf(self.dev_name, vf_index):
continue
vf_to_pci_slot_mapping[vf_index] = pci_slot
if vf_to_pci_slot_mapping:
vf_to_mac_mapping = self.pci_dev_wrapper.get_assigned_macs(
list(vf_to_pci_slot_mapping.keys()))
for vf_index, mac in vf_to_mac_mapping.items():
pci_slot = vf_to_pci_slot_mapping[vf_index]
assigned_devices_info.append((mac, pci_slot))
return assigned_devices_info
def get_device_state(self, pci_slot):
"""Get device state.
@param pci_slot: Virtual Function address
"""
vf_index = self._get_vf_index(pci_slot)
return self.pci_dev_wrapper.get_vf_state(vf_index)
def set_device_state(self, pci_slot, state):
"""Set device state.
@param pci_slot: Virtual Function address
@param state: link state
"""
vf_index = self._get_vf_index(pci_slot)
return self.pci_dev_wrapper.set_vf_state(vf_index, state)
def set_device_rate(self, pci_slot, rate_type, rate_kbps):
"""Set device rate: rate (max_tx_rate), min_tx_rate
@param pci_slot: Virtual Function address
@param rate_type: device rate name type. Could be 'rate' and
'min_tx_rate'.
@param rate_kbps: device rate in kbps
"""
vf_index = self._get_vf_index(pci_slot)
        # NOTE(ralonsoh): ip link sets rate in Mbps therefore we need to
        # convert the rate_kbps value from kbps to Mbps.
        # Zero means to disable the rate so the lowest rate available is 1Mbps.
        # Floating numbers are not allowed.
if rate_kbps > 0 and rate_kbps < 1000:
rate_mbps = 1
else:
rate_mbps = utils.round_val(rate_kbps / 1000.0)
log_dict = {
'rate_mbps': rate_mbps,
'rate_kbps': rate_kbps,
'vf_index': vf_index,
'rate_type': rate_type
}
if rate_kbps % 1000 != 0:
LOG.debug("'%(rate_type)s' for SR-IOV ports is counted in Mbps; "
"setting %(rate_mbps)s Mbps limit for port %(vf_index)s "
"instead of %(rate_kbps)s kbps",
log_dict)
else:
LOG.debug("Setting %(rate_mbps)s Mbps limit for port %(vf_index)s",
log_dict)
return self.pci_dev_wrapper.set_vf_rate(vf_index, rate_type, rate_mbps)
def _get_vf_index(self, pci_slot):
vf_index = self.pci_slot_map.get(pci_slot)
if vf_index is None:
LOG.warning(_LW("Cannot find vf index for pci slot %s"),
pci_slot)
raise exc.InvalidPciSlotError(pci_slot=pci_slot)
return vf_index
def set_device_spoofcheck(self, pci_slot, enabled):
"""Set device spoofchecking
@param pci_slot: Virtual Function address
@param enabled: True to enable spoofcheck, False to disable
"""
vf_index = self.pci_slot_map.get(pci_slot)
if vf_index is None:
raise exc.InvalidPciSlotError(pci_slot=pci_slot)
return self.pci_dev_wrapper.set_vf_spoofcheck(vf_index, enabled)
def get_pci_device(self, pci_slot):
"""Get mac address for given Virtual Function address
@param pci_slot: pci slot
@return: MAC address of virtual function
"""
vf_index = self.pci_slot_map.get(pci_slot)
mac = None
if vf_index is not None:
if PciOsWrapper.is_assigned_vf(self.dev_name, vf_index):
macs = self.pci_dev_wrapper.get_assigned_macs([vf_index])
mac = macs.get(vf_index)
return mac
class ESwitchManager(object):
"""Manages logical Embedded Switch entities for physical network."""
def __new__(cls):
# make it a singleton
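        # The maps are assigned on the class, so every instance shares the
        # same discovered switches and PCI slot mappings.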
if not hasattr(cls, '_instance'):
cls._instance = super(ESwitchManager, cls).__new__(cls)
cls.emb_switches_map = {}
cls.pci_slot_map = {}
return cls._instance
def device_exists(self, device_mac, pci_slot):
"""Verify if device exists.
Check if a device mac exists and matches the given VF pci slot
@param device_mac: device mac
@param pci_slot: VF address
"""
embedded_switch = self._get_emb_eswitch(device_mac, pci_slot)
if embedded_switch:
return True
return False
def get_assigned_devices_info(self, phys_net=None):
"""Get all assigned devices.
        Get all assigned devices belonging to the given embedded switch
@param phys_net: physical network, if none get all assigned devices
@return: set of assigned VFs (mac address, pci slot) pair
"""
if phys_net:
eswitch_objects = self.emb_switches_map.get(phys_net, set())
else:
eswitch_objects = set()
for eswitch_list in self.emb_switches_map.values():
eswitch_objects |= set(eswitch_list)
assigned_devices = set()
for embedded_switch in eswitch_objects:
for device in embedded_switch.get_assigned_devices_info():
assigned_devices.add(device)
return assigned_devices
def get_device_state(self, device_mac, pci_slot):
"""Get device state.
Get the device state (up/True or down/False)
@param device_mac: device mac
@param pci_slot: VF PCI slot
@return: device state (True/False) None if failed
"""
embedded_switch = self._get_emb_eswitch(device_mac, pci_slot)
if embedded_switch:
return embedded_switch.get_device_state(pci_slot)
return False
def set_device_max_rate(self, device_mac, pci_slot, max_kbps):
"""Set device max rate
Sets the device max rate in kbps
@param device_mac: device mac
@param pci_slot: pci slot
@param max_kbps: device max rate in kbps
"""
embedded_switch = self._get_emb_eswitch(device_mac, pci_slot)
if embedded_switch:
embedded_switch.set_device_rate(
pci_slot,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_RATE,
max_kbps)
def set_device_min_tx_rate(self, device_mac, pci_slot, min_kbps):
"""Set device min_tx_rate
Sets the device min_tx_rate in kbps
@param device_mac: device mac
@param pci_slot: pci slot
        @param min_kbps: device min_tx_rate in kbps
"""
embedded_switch = self._get_emb_eswitch(device_mac, pci_slot)
if embedded_switch:
embedded_switch.set_device_rate(
pci_slot,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_MIN_TX_RATE,
min_kbps)
def set_device_state(self, device_mac, pci_slot, admin_state_up):
"""Set device state
Sets the device state (up or down)
@param device_mac: device mac
@param pci_slot: pci slot
@param admin_state_up: device admin state True/False
"""
embedded_switch = self._get_emb_eswitch(device_mac, pci_slot)
if embedded_switch:
embedded_switch.set_device_state(pci_slot,
admin_state_up)
def set_device_spoofcheck(self, device_mac, pci_slot, enabled):
"""Set device spoofcheck
Sets device spoofchecking (enabled or disabled)
@param device_mac: device mac
@param pci_slot: pci slot
@param enabled: device spoofchecking
"""
embedded_switch = self._get_emb_eswitch(device_mac, pci_slot)
if embedded_switch:
embedded_switch.set_device_spoofcheck(pci_slot,
enabled)
def discover_devices(self, device_mappings, exclude_devices):
"""Discover which Virtual functions to manage.
Discover devices, and create embedded switch object for network device
@param device_mappings: device mapping physical_network:device_name
@param exclude_devices: excluded devices mapping device_name: pci slots
"""
if exclude_devices is None:
exclude_devices = {}
for phys_net, dev_names in six.iteritems(device_mappings):
for dev_name in dev_names:
self._create_emb_switch(phys_net, dev_name,
exclude_devices.get(dev_name, set()))
def _create_emb_switch(self, phys_net, dev_name, exclude_devices):
embedded_switch = EmbSwitch(phys_net, dev_name, exclude_devices)
self.emb_switches_map.setdefault(phys_net, []).append(embedded_switch)
for pci_slot in embedded_switch.get_pci_slot_list():
self.pci_slot_map[pci_slot] = embedded_switch
def _get_emb_eswitch(self, device_mac, pci_slot):
"""Get embedded switch.
Get embedded switch by pci slot and validate pci has device mac
@param device_mac: device mac
@param pci_slot: pci slot
"""
embedded_switch = self.pci_slot_map.get(pci_slot)
if embedded_switch:
used_device_mac = embedded_switch.get_pci_device(pci_slot)
if used_device_mac != device_mac:
LOG.warning(_LW("device pci mismatch: %(device_mac)s "
"- %(pci_slot)s"),
{"device_mac": device_mac, "pci_slot": pci_slot})
embedded_switch = None
return embedded_switch
def clear_max_rate(self, pci_slot):
"""Clear the VF "rate" parameter
Clear the "rate" configuration from VF by setting it to 0.
@param pci_slot: VF PCI slot
"""
self._clear_rate(pci_slot,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_RATE)
def clear_min_tx_rate(self, pci_slot):
"""Clear the VF "min_tx_rate" parameter
Clear the "min_tx_rate" configuration from VF by setting it to 0.
@param pci_slot: VF PCI slot
"""
self._clear_rate(pci_slot,
ip_link_support.IpLinkConstants.IP_LINK_CAPABILITY_MIN_TX_RATE)
def _clear_rate(self, pci_slot, rate_type):
"""Clear the VF rate parameter specified in rate_type
Clear the rate configuration from VF by setting it to 0.
@param pci_slot: VF PCI slot
@param rate_type: rate to clear ('rate', 'min_tx_rate')
"""
        # NOTE(Moshe Levi): we don't use self._get_emb_eswitch here, because
        # when clearing the VF it may not be assigned. This happens when
        # libvirt releases the VF back to the hypervisor on VM delete.
        # Therefore we should just clear the VF rate according to pci_slot
        # no matter whether the VF is assigned or not.
embedded_switch = self.pci_slot_map.get(pci_slot)
if embedded_switch:
            # NOTE(Moshe Levi): check the pci_slot is not assigned to some
            # other port before resetting the rate.
if embedded_switch.get_pci_device(pci_slot) is None:
embedded_switch.set_device_rate(pci_slot, rate_type, 0)
else:
LOG.warning(_LW("VF with PCI slot %(pci_slot)s is already "
"assigned; skipping reset for '%(rate_type)s' "
"device configuration parameter"),
{'pci_slot': pci_slot, 'rate_type': rate_type})
else:
LOG.error(_LE("PCI slot %(pci_slot)s has no mapping to Embedded "
"Switch; skipping"), {'pci_slot': pci_slot})
|
sebrandon1/neutron
|
neutron/plugins/ml2/drivers/mech_sriov/agent/eswitch_manager.py
|
Python
|
apache-2.0
| 16,900
|
# Copyright 2016 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from osprofiler.drivers import base
from osprofiler import exc
class MongoDB(base.Driver):
def __init__(self, connection_str, db_name="osprofiler", project=None,
service=None, host=None, **kwargs):
"""MongoDB driver for OSProfiler."""
super(MongoDB, self).__init__(connection_str, project=project,
service=service, host=host, **kwargs)
try:
from pymongo import MongoClient
except ImportError:
raise exc.CommandError(
"To use OSProfiler with MongoDB driver, "
"please install `pymongo` library. "
"To install with pip:\n `pip install pymongo`.")
client = MongoClient(self.connection_str, connect=False)
self.db = client[db_name]
@classmethod
def get_name(cls):
return "mongodb"
def notify(self, info):
"""Send notifications to MongoDB.
:param info: Contains information about trace element.
In payload dict there are always 3 ids:
"base_id" - uuid that is common for all notifications
related to one trace. Used to simplify
retrieving of all trace elements from
MongoDB.
"parent_id" - uuid of parent element in trace
"trace_id" - uuid of current element in trace
                     With parent_id and trace_id it's quite simple to build a
                     tree of trace elements, which simplifies analysis of the trace.
"""
data = info.copy()
data["project"] = self.project
data["service"] = self.service
self.db.profiler.insert_one(data)
if (self.filter_error_trace
and data.get("info", {}).get("etype") is not None):
self.notify_error_trace(data)
def notify_error_trace(self, data):
"""Store base_id and timestamp of error trace to a separate db."""
self.db.profiler_error.update(
{"base_id": data["base_id"]},
{"base_id": data["base_id"], "timestamp": data["timestamp"]},
upsert=True
)
def list_traces(self, fields=None):
"""Query all traces from the storage.
:param fields: Set of trace fields to return. Defaults to 'base_id'
and 'timestamp'
        :returns: List of traces, where each trace is a dictionary containing
at least `base_id` and `timestamp`.
"""
fields = set(fields or self.default_trace_fields)
ids = self.db.profiler.find({}).distinct("base_id")
out_format = {"base_id": 1, "timestamp": 1, "_id": 0}
out_format.update({i: 1 for i in fields})
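        # For each trace, return its earliest notification (sorted by
        # timestamp), projected onto the requested fields.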
return [self.db.profiler.find(
{"base_id": i}, out_format).sort("timestamp")[0] for i in ids]
def list_error_traces(self):
"""Returns all traces that have error/exception."""
out_format = {"base_id": 1, "timestamp": 1, "_id": 0}
return self.db.profiler_error.find({}, out_format)
def get_report(self, base_id):
"""Retrieves and parses notification from MongoDB.
:param base_id: Base id of trace elements.
"""
for n in self.db.profiler.find({"base_id": base_id}, {"_id": 0}):
trace_id = n["trace_id"]
parent_id = n["parent_id"]
name = n["name"]
project = n["project"]
service = n["service"]
host = n["info"]["host"]
timestamp = n["timestamp"]
self._append_results(trace_id, parent_id, name, project, service,
host, timestamp, n)
return self._parse_results()
|
openstack/osprofiler
|
osprofiler/drivers/mongodb.py
|
Python
|
apache-2.0
| 4,425
|
# -*- coding: utf-8 -*-
#
# google-cloud-storage documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
__version__ = "0.90.4"
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
"sphinx.ext.viewcode",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_flags = ["members"]
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# Allow markdown includes (so releases.md can include CHANGELOG.md)
# http://www.sphinx-doc.org/en/master/markdown.html
source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"google-cloud-storage"
copyright = u"2017, Google"
author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-storage-doc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"google-cloud-storage.tex",
u"google-cloud-storage Documentation",
author,
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
"google-cloud-storage",
u"google-cloud-storage Documentation",
[author],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"google-cloud-storage",
u"google-cloud-storage Documentation",
author,
"google-cloud-storage",
"GAPIC library for the {metadata.shortName} v1 service",
"APIs",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"gax": ("https://gax-python.readthedocs.org/en/latest/", None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
|
dhermes/gcloud-python
|
storage/docs/conf.py
|
Python
|
apache-2.0
| 10,612
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import keystone.conf
from keystone import exception
CONF = keystone.conf.CONF
def get_project_from_domain(domain_ref):
"""Create a project ref from the provided domain ref."""
project_ref = domain_ref.copy()
project_ref['is_domain'] = True
project_ref['domain_id'] = None
project_ref['parent_id'] = None
return project_ref
# The provided SQL driver uses a special value to represent a domain_id of
# None. See comment in Project class of resource/backends/sql.py for more
# details.
NULL_DOMAIN_ID = '<<keystone.domain.root>>'
class ResourceDriverBase(object, metaclass=abc.ABCMeta):
def _get_list_limit(self):
return CONF.resource.list_limit or CONF.list_limit
# project crud
@abc.abstractmethod
def list_projects(self, hints):
"""List projects in the system.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_from_ids(self, project_ids):
"""List projects for the provided list of ids.
:param project_ids: list of ids
:returns: a list of project_refs.
This method is used internally by the assignment manager to bulk read
a set of projects given their ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_ids_from_domain_ids(self, domain_ids):
"""List project ids for the provided list of domain ids.
:param domain_ids: list of domain ids
:returns: a list of project ids owned by the specified domain ids.
This method is used internally by the assignment manager to bulk read
a set of project ids given a list of domain ids.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_in_domain(self, domain_id):
"""List projects in the domain.
:param domain_id: the driver MUST only return projects
within this domain.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project(self, project_id):
"""Get a project by ID.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def update_project(self, project_id, project):
"""Update an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
:raises keystone.exception.Conflict: if project name already exists
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_project(self, project_id):
"""Delete an existing project.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_project_parents(self, project_id):
"""List all parents from a project by its ID.
:param project_id: the driver will list the parents of this
project.
:returns: a list of project_refs or an empty list.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def list_projects_in_subtree(self, project_id):
"""List all projects in the subtree of a given project.
:param project_id: the driver will get the subtree under
this project.
:returns: a list of project_refs or an empty list
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
@abc.abstractmethod
def is_leaf_project(self, project_id):
"""Check if a project is a leaf in the hierarchy.
:param project_id: the driver will check if this project
is a leaf in the hierarchy.
:raises keystone.exception.ProjectNotFound: if project_id does not
exist
"""
raise exception.NotImplemented()
def _validate_default_domain(self, ref):
"""Validate that either the default domain or nothing is specified.
Also removes the domain from the ref so that LDAP doesn't have to
persist the attribute.
"""
ref = ref.copy()
domain_id = ref.pop('domain_id', CONF.identity.default_domain_id)
self._validate_default_domain_id(domain_id)
return ref
def _validate_default_domain_id(self, domain_id):
"""Validate that the domain ID belongs to the default domain."""
if domain_id != CONF.identity.default_domain_id:
raise exception.DomainNotFound(domain_id=domain_id)
@abc.abstractmethod
def create_project(self, project_id, project):
"""Create a new project.
:param project_id: This parameter can be ignored.
:param dict project: The new project
Project schema::
type: object
properties:
id:
type: string
name:
type: string
domain_id:
type: [string, null]
description:
type: string
enabled:
type: boolean
parent_id:
type: string
is_domain:
type: boolean
required: [id, name, domain_id]
additionalProperties: true
If the project doesn't match the schema the behavior is undefined.
The driver can impose requirements such as the maximum length of a
field. If these requirements are not met the behavior is undefined.
:raises keystone.exception.Conflict: if the project id already exists
or the name already exists for the domain_id.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_project_by_name(self, project_name, domain_id):
"""Get a project by name.
:returns: project_ref
:raises keystone.exception.ProjectNotFound: if a project with the
project_name does not exist within the domain
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_projects_from_ids(self, project_ids):
"""Delete a given list of projects.
Deletes a list of projects. Ensures no project on the list exists
after it is successfully called. If an empty list is provided,
        it is silently ignored. In addition, if a project ID in the list
of project_ids is not found in the backend, no exception is raised,
but a message is logged.
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_projects_acting_as_domain(self, hints):
"""List all projects acting as domains.
:param hints: filter hints which the driver should
implement if at all possible.
:returns: a list of project_refs or an empty list.
"""
raise exception.NotImplemented() # pragma: no cover
def check_project_depth(self, max_depth):
"""Check the projects depth in the backend whether exceed the limit.
:param max_depth: the limit depth that project depth should not exceed.
:type max_depth: integer
:returns: the exceeded project's id or None if no exceeding.
"""
raise exception.NotImplemented() # pragma: no cover
|
openstack/keystone
|
keystone/resource/backends/base.py
|
Python
|
apache-2.0
| 8,923
|
"""
Support for Dovado router.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.dovado/
"""
import logging
import re
from datetime import timedelta
import voluptuous as vol
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from homeassistant.util import slugify
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
CONF_USERNAME, CONF_PASSWORD, CONF_HOST, CONF_PORT, CONF_SENSORS,
DEVICE_DEFAULT_NAME)
from homeassistant.components.sensor import (DOMAIN, PLATFORM_SCHEMA)
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['dovado==0.4.1']
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
SENSOR_UPLOAD = 'upload'
SENSOR_DOWNLOAD = 'download'
SENSOR_SIGNAL = 'signal'
SENSOR_NETWORK = 'network'
SENSOR_SMS_UNREAD = 'sms'
SENSORS = {
SENSOR_NETWORK: ('signal strength', 'Network', None,
'mdi:access-point-network'),
SENSOR_SIGNAL: ('signal strength', 'Signal Strength', '%',
'mdi:signal'),
SENSOR_SMS_UNREAD: ('sms unread', 'SMS unread', '',
'mdi:message-text-outline'),
SENSOR_UPLOAD: ('traffic modem tx', 'Sent', 'GB',
'mdi:cloud-upload'),
SENSOR_DOWNLOAD: ('traffic modem rx', 'Received', 'GB',
'mdi:cloud-download'),
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_SENSORS):
vol.All(cv.ensure_list, [vol.In(SENSORS)]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Dovado platform for sensors."""
return Dovado().setup(hass, config, add_entities)
class Dovado:
"""A connection to the router."""
def __init__(self):
"""Initialize."""
self.state = {}
self._dovado = None
def setup(self, hass, config, add_entities):
"""Set up the connection."""
import dovado
self._dovado = dovado.Dovado(
config.get(CONF_USERNAME), config.get(CONF_PASSWORD),
config.get(CONF_HOST), config.get(CONF_PORT))
if not self.update():
return False
def send_sms(service):
"""Send SMS through the router."""
number = service.data.get('number')
message = service.data.get('message')
_LOGGER.debug("message for %s: %s", number, message)
self._dovado.send_sms(number, message)
if self.state.get('sms') == 'enabled':
service_name = slugify("{} {}".format(self.name, 'send_sms'))
hass.services.register(DOMAIN, service_name, send_sms)
for sensor in SENSORS:
if sensor in config.get(CONF_SENSORS, [sensor]):
add_entities([DovadoSensor(self, sensor)])
return True
@property
def name(self):
"""Name of the router."""
return self.state.get("product name", DEVICE_DEFAULT_NAME)
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Update device state."""
_LOGGER.info("Updating")
try:
self.state = self._dovado.state or {}
if not self.state:
return False
self.state.update(
connected=self.state.get("modem status") == "CONNECTED")
_LOGGER.debug("Received: %s", self.state)
return True
except OSError as error:
_LOGGER.warning("Could not contact the router: %s", error)
class DovadoSensor(Entity):
"""Representation of a Dovado sensor."""
def __init__(self, dovado, sensor):
"""Initialize the sensor."""
self._dovado = dovado
self._sensor = sensor
self._state = self._compute_state()
def _compute_state(self):
state = self._dovado.state.get(SENSORS[self._sensor][0])
if self._sensor == SENSOR_NETWORK:
match = re.search(r"\((.+)\)", state)
return match.group(1) if match else None
if self._sensor == SENSOR_SIGNAL:
try:
return int(state.split()[0])
except ValueError:
return 0
if self._sensor == SENSOR_SMS_UNREAD:
return int(state)
if self._sensor in [SENSOR_UPLOAD, SENSOR_DOWNLOAD]:
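            # Scale the raw traffic counter by 1e-6 to match the GB unit
            # declared in SENSORS.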
return round(float(state) / 1e6, 1)
return state
def update(self):
"""Update sensor values."""
self._dovado.update()
self._state = self._compute_state()
@property
def name(self):
"""Return the name of the sensor."""
return "{} {}".format(self._dovado.name, SENSORS[self._sensor][1])
@property
def state(self):
"""Return the sensor state."""
return self._state
@property
def icon(self):
"""Return the icon for the sensor."""
return SENSORS[self._sensor][3]
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return SENSORS[self._sensor][2]
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {k: v for k, v in self._dovado.state.items()
if k not in ['date', 'time']}
|
tinloaf/home-assistant
|
homeassistant/components/sensor/dovado.py
|
Python
|
apache-2.0
| 5,408
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_utils import strutils
import webob
from manila.api.v2 import share_group_type_specs
from manila import exception
from manila import policy
from manila import test
from manila.tests.api import fakes
import manila.wsgi
CONSISTENT_SNAPSHOTS = 'consistent_snapshots'
def return_create_share_group_type_specs(context, share_group_type_id,
group_specs):
return stub_share_group_type_specs()
def return_share_group_type_specs(context, share_group_type_id):
return stub_share_group_type_specs()
def return_empty_share_group_type_specs(context, share_group_type_id):
return {}
def delete_share_group_type_specs(context, share_group_type_id, key):
pass
def delete_share_group_type_specs_not_found(context, share_group_type_id, key):
raise exception.ShareGroupTypeSpecsNotFound("Not Found")
def stub_share_group_type_specs():
return {"key%d" % i: "value%d" % i for i in (1, 2, 3, 4, 5)}
def get_large_string():
return "s" * 256
def get_group_specs_dict(group_specs, include_required=True):
if not group_specs:
group_specs = {}
return {'group_specs': group_specs}
def fake_request(url, admin=False, experimental=True, version='2.31',
**kwargs):
return fakes.HTTPRequest.blank(
url, use_admin_context=admin, experimental=experimental,
version=version, **kwargs)
@ddt.ddt
class ShareGroupTypesSpecsTest(test.TestCase):
def setUp(self):
super(ShareGroupTypesSpecsTest, self).setUp()
self.flags(host='fake')
self.mock_object(manila.db, 'share_group_type_get')
self.api_path = '/v2/fake/share-group-types/1/group_specs'
self.controller = (
share_group_type_specs.ShareGroupTypeSpecsController())
self.resource_name = self.controller.resource_name
self.mock_policy_check = self.mock_object(policy, 'check_policy')
def test_index(self):
self.mock_object(
manila.db, 'share_group_type_specs_get',
return_share_group_type_specs)
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
res_dict = self.controller.index(req, 1)
self.assertEqual('value1', res_dict['group_specs']['key1'])
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'index')
def test_index_no_data(self):
self.mock_object(manila.db, 'share_group_type_specs_get',
return_empty_share_group_type_specs)
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
res_dict = self.controller.index(req, 1)
self.assertEqual(0, len(res_dict['group_specs']))
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'index')
def test_show(self):
self.mock_object(manila.db, 'share_group_type_specs_get',
return_share_group_type_specs)
req = fake_request(self.api_path + '/key5')
req_context = req.environ['manila.context']
res_dict = self.controller.show(req, 1, 'key5')
self.assertEqual('value5', res_dict['key5'])
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'show')
def test_show_spec_not_found(self):
self.mock_object(manila.db, 'share_group_type_specs_get',
return_empty_share_group_type_specs)
req = fake_request(self.api_path + '/key6')
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
req, 1, 'key6')
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'show')
def test_delete(self):
self.mock_object(manila.db, 'share_group_type_specs_delete',
delete_share_group_type_specs)
req = fake_request(self.api_path + '/key5')
req_context = req.environ['manila.context']
self.controller.delete(req, 1, 'key5')
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'delete')
def test_delete_not_found(self):
self.mock_object(manila.db, 'share_group_type_specs_delete',
delete_share_group_type_specs_not_found)
req = fake_request(self.api_path + '/key6')
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, 1, 'key6')
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'delete')
@ddt.data(
get_group_specs_dict({}),
{'foo': 'bar'},
{CONSISTENT_SNAPSHOTS + 'foo': True},
{'foo' + CONSISTENT_SNAPSHOTS: False},
*[{CONSISTENT_SNAPSHOTS: v}
for v in strutils.TRUE_STRINGS + strutils.FALSE_STRINGS]
)
def test_create(self, data):
body = {'group_specs': data}
mock_spec_update_or_create = self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=return_create_share_group_type_specs))
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
res_dict = self.controller.create(req, 1, body)
for k, v in data.items():
self.assertIn(k, res_dict['group_specs'])
self.assertEqual(v, res_dict['group_specs'][k])
mock_spec_update_or_create.assert_called_once_with(
req.environ['manila.context'], 1, body['group_specs'])
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
def test_create_with_too_small_key(self):
self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=return_create_share_group_type_specs))
too_small_key = ""
body = {"group_specs": {too_small_key: "value"}}
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, 1, body)
self.assertFalse(
manila.db.share_group_type_specs_update_or_create.called)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
def test_create_with_too_big_key(self):
self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=return_create_share_group_type_specs))
too_big_key = "k" * 256
body = {"group_specs": {too_big_key: "value"}}
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, 1, body)
self.assertFalse(
manila.db.share_group_type_specs_update_or_create.called)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
def test_create_with_too_small_value(self):
self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=return_create_share_group_type_specs))
too_small_value = ""
body = {"group_specs": {"key": too_small_value}}
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, 1, body)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
self.assertFalse(
manila.db.share_group_type_specs_update_or_create.called)
def test_create_with_too_big_value(self):
self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=return_create_share_group_type_specs))
too_big_value = "v" * 256
body = {"extra_specs": {"key": too_big_value}}
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, 1, body)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
self.assertFalse(
manila.db.share_group_type_specs_update_or_create.called)
def test_create_key_allowed_chars(self):
mock_return_value = stub_share_group_type_specs()
mock_spec_update_or_create = self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=mock_return_value))
body = get_group_specs_dict({"other_alphanum.-_:": "value1"})
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
res_dict = self.controller.create(req, 1, body)
self.assertEqual(mock_return_value['key1'],
res_dict['group_specs']['other_alphanum.-_:'])
mock_spec_update_or_create.assert_called_once_with(
req.environ['manila.context'], 1, body['group_specs'])
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
def test_create_too_many_keys_allowed_chars(self):
mock_return_value = stub_share_group_type_specs()
mock_spec_update_or_create = self.mock_object(
manila.db, 'share_group_type_specs_update_or_create',
mock.Mock(return_value=mock_return_value))
body = get_group_specs_dict({
"other_alphanum.-_:": "value1",
"other2_alphanum.-_:": "value2",
"other3_alphanum.-_:": "value3",
})
req = fake_request(self.api_path)
req_context = req.environ['manila.context']
res_dict = self.controller.create(req, 1, body)
self.assertEqual(mock_return_value['key1'],
res_dict['group_specs']['other_alphanum.-_:'])
self.assertEqual(mock_return_value['key2'],
res_dict['group_specs']['other2_alphanum.-_:'])
self.assertEqual(mock_return_value['key3'],
res_dict['group_specs']['other3_alphanum.-_:'])
mock_spec_update_or_create.assert_called_once_with(
req_context, 1, body['group_specs'])
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
def test_update_item_too_many_keys(self):
self.mock_object(manila.db, 'share_group_type_specs_update_or_create')
body = {"key1": "value1", "key2": "value2"}
req = fake_request(self.api_path + '/key1')
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, 1, 'key1', body)
self.assertFalse(
manila.db.share_group_type_specs_update_or_create.called)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'update')
def test_update_item_body_uri_mismatch(self):
self.mock_object(manila.db, 'share_group_type_specs_update_or_create')
body = {"key1": "value1"}
req = fake_request(self.api_path + '/bad')
req_context = req.environ['manila.context']
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, 1, 'bad', body)
self.assertFalse(
manila.db.share_group_type_specs_update_or_create.called)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'update')
@ddt.data(None, {}, {"group_specs": {CONSISTENT_SNAPSHOTS: ""}})
def test_update_invalid_body(self, body):
req = fake_request('/v2/fake/share-group-types/1/group_specs')
req_context = req.environ['manila.context']
req.method = 'POST'
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, '1', body)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'update')
@ddt.data(
None, {}, {'foo': {'a': 'b'}}, {'group_specs': 'string'},
{"group_specs": {"ke/y1": "value1"}},
{"key1": "value1", "ke/y2": "value2", "key3": "value3"},
{"group_specs": {CONSISTENT_SNAPSHOTS: ""}},
{"group_specs": {"": "value"}},
{"group_specs": {"t": get_large_string()}},
{"group_specs": {get_large_string(): get_large_string()}},
{"group_specs": {get_large_string(): "v"}},
{"group_specs": {"k": ""}})
def test_create_invalid_body(self, body):
req = fake_request('/v2/fake/share-group-types/1/group_specs')
req_context = req.environ['manila.context']
req.method = 'POST'
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, '1', body)
self.mock_policy_check.assert_called_once_with(
req_context, self.resource_name, 'create')
|
bswartz/manila
|
manila/tests/api/v2/test_share_group_type_specs.py
|
Python
|
apache-2.0
| 14,031
|
# Copyright 2014, Jeff Buttars, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from __future__ import unicode_literals
from acos_client.v30 import base
class OverlayOptions(base.BaseV30):
url_prefix = "/overlay-tunnel/options"
def get(self, *args, **kwargs):
return self._get(self.url_prefix, **kwargs)
def update(self, gateway_mac, ip_dscp_preserve,
nvgre_disable_flow_id,
nvgre_key_mode_lower24,
tcp_mss_adjust_disable,
uuid,
vxlan_dest_port,
**kwargs):
options = {}
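        # Build the payload from only the parameters that were actually
        # supplied, so unset fields are omitted from the request.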
if gateway_mac:
options["gateway-mac"] = gateway_mac
if ip_dscp_preserve:
options["ip-dscp-preserve"] = ip_dscp_preserve
if nvgre_disable_flow_id:
options["nvgre-disable-flow-id"] = nvgre_disable_flow_id
if nvgre_key_mode_lower24:
options["nvgre-key-mode-lower24"] = nvgre_key_mode_lower24
if tcp_mss_adjust_disable:
options["tcp-mss-adjust-disable"] = tcp_mss_adjust_disable
if uuid:
options["uuid"] = uuid
if vxlan_dest_port:
options["vxlan-dest-port"] = vxlan_dest_port
payload = {
"options": options
}
return self._post(self.url_prefix + "/options", payload, **kwargs)
|
mdurrant-b3/acos-client
|
acos_client/v30/overlay/options.py
|
Python
|
apache-2.0
| 1,933
|
"""
Preliminary code for submissions on the
Microsoft Malware Classification challenge.
"""
__authors__ = 'Aaron Gonzales, Andres Ruiz'
__licence__ = 'Apache'
__email__ = 'afruizc@cs.unm.edu'
import argparse
import os
import sys
import numpy as np
from sklearn import linear_model
from sklearn.feature_extraction import DictVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from microsoft_malware_challenge.src.utils import utils
import joblib
class Executor(object):
"""
Executes the selected classification pipeline. Right now the
    customization process is done by hand, i.e. you have to code it. The
    idea is to have a couple of pipelines prepared.
"""
def __init__(self):
"""
Creates a new executor object and initializes the main
components.
"""
self.target_names = ['Ramnit', 'Lollipop', 'Kelihos_ver3',
'Vundo', 'Simda', 'Tracur', 'Kelihos_ver1',
'Obfuscator.ACY', 'Gatak']
self.db = utils.get_mongodb()
self.train = None
self.test = None
self.param_tunning = None
self.fitted_model = None
def _load_train(self):
"""
Loads the training dataset.
__THIS__ is the method you want to modify when querying
the database.
TODO: The data part can be just one function
"""
data_train = [(x['hexcode']['bigrams'], x['class'])
for x in self.db.samples.find({
"class": {"$exists": True}})]
return list(zip(*data_train))
def _load_test(self):
"""
Loads the testing dataset.
__THIS__ is the method you want to modify when querying
the database.
"""
data_test = [(x['hexcode']['bigrams'], '"{}"'.format(x['id']))
for x in self.db.test_samples.find({
"id":{"$exists": True}})]
return list(zip(*data_test))
def load_data(self, training=True, testing=False):
"""
Fetches the training data from the database. `training` and
testing indicate the datasets that should be loaded.
Arguments:
`training`: If False, the training dataset is NOT loaded.
            `testing`: If True, the testing dataset IS loaded
"""
if training:
temp = self._load_train()
self.train = {'data': (temp[0]), 'target': temp[1]}
if testing:
temp = self._load_test()
self.test = {'data': (temp[0]), 'names': temp[1]}
def config_model(self):
"""
Configures the pipeline
"""
pip = Pipeline([
('vectorizer', DictVectorizer()),
('freq_norm', TfidfTransformer()),
('classifier', linear_model.SGDClassifier(
loss='modified_huber',
penalty='elasticnet',
alpha=1e-2,
n_jobs=-1))
])
parameters = {}
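        # Illustrative tuning grid (a sketch only; these hypothetical values are
        # not used here -- the pipeline is currently run with an empty grid).
        # Parameter names follow sklearn's "<step>__<param>" convention:
        #   parameters = {'classifier__alpha': (1e-2, 1e-3),
        #                 'classifier__penalty': ('l2', 'elasticnet')}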
self.param_tunning = GridSearchCV(pip, parameters, n_jobs=-1)
def fit(self):
"""
        Fits the pipeline (via grid search) to the training data.
"""
self.fitted_model = self.param_tunning.fit(self.train['data'],
self.train['target'])
def _predict(self, X, create_submission=False, filename='submission.txt'):
"""
        Predicts a set of 9 probabilities per malware sample, one for
        each of the 9 malware classes. If `create_submission`
is True, then a text file named `filename` is created for
submission into Kaggle.
Arguments:
`X`: The data in which predictions will be made.
`create_submission`: Indicates whether a submission file should
be created or not.
`filename`: The file that will contain the submission.
"""
predicted_prob = self.fitted_model.predict_proba(X)
if create_submission:
to_print = np.column_stack((np.array(self.test['names']),
predicted_prob))
np.savetxt(filename, to_print, header=','.join(['"id"'] + \
['"Prediction%d"' % x for x in range(1, 10)]), \
fmt='%s', delimiter=',')
return predicted_prob
def predict_on_test(self, create_submission=False,
filename='submission.txt'):
"""
        Performs prediction on the test dataset. See `_predict` for
        the keyword arguments that can be used.
        Arguments:
            `create_submission`, `filename`: see `_predict`.
"""
        if self.test is None:
sys.stderr.write("Test set not loaded. Aborting prediction\n")
return
return self._predict(self.test['data'], create_submission, filename)
def load_model(self, filename='model.pkl'):
"""
        Attempts to load the already computed model from
        the `filename` file. If it is not found, an exception
        is raised.
        Arguments:
`filename`: The name of the file that contains the model
"""
self.fitted_model = joblib.load(filename)
def config_parser():
"""
Configures the parser for the command line arguments
"""
parser = argparse.ArgumentParser()
parser.add_argument('--save_model', default='model',
help='specifies the directory \
where the model will be saved')
return parser
def main():
"""
Runs the main program
"""
args = config_parser().parse_args()
executor = Executor()
print("Loading data...")
executor.load_data(testing=True)
print('Configuring the model...')
executor.config_model()
print('Fitting the model...')
executor.fit()
if args.save_model:
if not os.path.isdir(args.save_model):
os.mkdir(args.save_model)
save_path = os.path.join(args.save_model, 'model.pkl')
joblib.dump(executor.fitted_model, save_path)
print('Model saved on %s.' % save_path)
print('Predicting...')
executor.predict_on_test(create_submission=True)
if __name__ == '__main__':
main()
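# Illustrative sketch of reusing a previously saved model (hypothetical
# session; 'model/model.pkl' assumes the default --save_model directory):
#
#   executor = Executor()
#   executor.load_data(training=False, testing=True)
#   executor.load_model('model/model.pkl')
#   executor.predict_on_test(create_submission=True, filename='submission.txt')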
|
afruizc/microsoft_malware_challenge
|
src/models/svm_bytecode/svm_bytecode.py
|
Python
|
apache-2.0
| 6,432
|
#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Jay Baxter and Dan Lovell
# Authors: Jay Baxter, Dan Lovell, Baxter Eaves, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
import inspect
import sys
import pickle
import os
import numpy
import pytest
import random
import shutil
import pandas
from cStringIO import StringIO
import bayesdb.utils as utils
from bayesdb.client import Client
from bayesdb.engine import Engine
import bayesdb.bql_grammar as bql
test_tablenames = None
client = None
test_filenames = None
def setup_function(function):
global test_tablenames, client, test_filenames
test_tablenames = []
test_filenames = []
# Default upgrade_key_column is None, to let the user choose, but need to avoid
# user input during testing, so for testing just create a new key column.
client = Client(testing=True)
def teardown_function(function):
    global test_tablenames, test_filenames, client
for test_tablename in test_tablenames:
client.engine.drop_btable(test_tablename)
for test_filename in test_filenames:
if os.path.exists(test_filename):
os.remove(test_filename)
def create_dha(path='data/dha.csv', key_column=0):
test_tablename = 'dhatest' + str(int(time.time() * 1000000)) + str(int(random.random()*10000000))
csv_file_contents = open(path, 'r').read()
client('create btable %s from %s' % (test_tablename, path), debug=True, pretty=False, key_column=key_column)
global test_tablenames
test_tablenames.append(test_tablename)
return test_tablename
def test_drop_btable():
"""
Test to make sure drop btable prompts the user for confirmation, and responds appropriately when
given certain input.
"""
import sys
from cStringIO import StringIO
# setup the environment
backup = sys.stdout
sys.stdout = StringIO() # capture output
# TODO: not being tested at all yet...
out = sys.stdout.getvalue() # release output
sys.stdout.close() # close the stream
sys.stdout = backup # restore original stdout
def test_btable_list():
global client, test_filenames
out = set(client('list btables', pretty=False, debug=True)[0]['btable'])
init_btable_count = len(out)
test_tablename1 = create_dha()
out = set(client('list btables', pretty=False, debug=True)[0]['btable'])
assert len(out) == 1 + init_btable_count
assert test_tablename1 in out
test_tablename2 = create_dha()
out = set(client('list btables', pretty=False, debug=True)[0]['btable'])
assert len(out) == 2 + init_btable_count
assert test_tablename1 in out
assert test_tablename2 in out
client('drop btable %s' % test_tablename1, yes=True, debug=True, pretty=False)
out = set(client('list btables', pretty=False, debug=True)[0]['btable'])
assert len(out) == 1 + init_btable_count
assert test_tablename1 not in out
assert test_tablename2 in out
## test to make sure btable list is persisted
del client
client = Client()
out = set(client('list btables', pretty=False, debug=True)[0]['btable'])
assert len(out) == 1 + init_btable_count
assert test_tablename1 not in out
assert test_tablename2 in out
def test_save_and_load_models():
test_tablename1 = create_dha()
test_tablename2 = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename1), debug=True, pretty=False)
#client('analyze %s for 1 iteration' % (test_tablename1), debug=True, pretty=False)
pkl_path = 'test_models.pkl.gz'
test_filenames.append(pkl_path)
client('save models from %s to %s' % (test_tablename1, pkl_path), debug=True, pretty=False)
original_models = client.engine.save_models(test_tablename1)
client('load models %s into %s' % (pkl_path, test_tablename2), debug=True, pretty=False)
new_models = client.engine.save_models(test_tablename2)
assert new_models.values() == original_models.values()
# Models are saved with schema now, so check that they can be loaded into a table
# with the same schema that already has models, and can't be loaded into a table
# with a different schema that already has models.
# Should work - schemas are the same and table2 already has models
client('load models %s into %s' % (pkl_path, test_tablename2), debug=True, pretty=False)
test_tablename3 = create_dha()
client('update schema for %s set qual_score = categorical' % (test_tablename3), debug=True, pretty=False)
# Should work - schemas aren't the same, but table3 doesn't have any models, so the schema will be changed.
client('load models %s into %s' % (pkl_path, test_tablename3), debug=True, pretty=False)
test_tablename4 = create_dha()
client('update schema for %s set qual_score = categorical' % (test_tablename4), debug=True, pretty=False)
client('initialize 2 models for %s' % (test_tablename4), debug=True, pretty=False)
# Should fail - schemas aren't the same, and table4 already has models, so the models will be incompatible.
with pytest.raises(utils.BayesDBError):
client('load models %s into %s' % (pkl_path, test_tablename4), debug=True, pretty=False)
def test_column_lists():
""" smoke test """
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
cname1 = 'cname1'
cname2 = 'cname2'
client('show column lists for %s' % test_tablename, debug=True, pretty=False)
out = client('estimate columns from %s as %s' % (test_tablename, cname1), debug=True, pretty=False)[0]
assert type(out) == pandas.DataFrame
assert (out.columns == ['column label', 'column name']).all()
client('show column lists for %s' % test_tablename, debug=True, pretty=False)
#TODO grammar update, replace tests after implementing show columns for <column_list>
# client('show columns %s for %s' % (cname1, test_tablename), debug=True, pretty=False)
# with pytest.raises(utils.BayesDBColumnListDoesNotExistError):
# client('show columns %s from %s' % (cname2, test_tablename), debug=True, pretty=False)
out = client('estimate columns from %s order by typicality limit 5 as %s' % (test_tablename, cname1), debug=True, pretty=False)[0]
assert out.shape == (5, 3)
client('estimate columns from %s limit 5 as %s' % (test_tablename, cname2), debug=True, pretty=False)
client('show column lists for %s' % test_tablename, debug=True, pretty=False)
# TODO same todo as above
# client('show columns %s from %s' % (cname1, test_tablename), debug=True, pretty=False)
# client('show columns %s from %s' % (cname2, test_tablename), debug=True, pretty=False)
tmp = 'asdf_test.png'
test_filenames.append(tmp)
if os.path.exists(tmp):
os.remove(tmp)
# TODO for columns col_name
client('estimate pairwise dependence probability from %s for %s save to %s' % (test_tablename, cname1, tmp), debug=True, pretty=False)
test_ast = bql.bql_statement.parseString('estimate pairwise dependence probability from %s for %s save to %s' % (test_tablename, cname1, tmp),parseAll=True)
assert test_ast.filename == 'asdf_test.png'
#TODO current parsing breaks save (probably everything) after "for %s"
#assert os.path.exists(tmp)
client('estimate pairwise dependence probability from %s for %s' % (test_tablename, cname2), debug=True, pretty=False)
client('select %s from %s limit 10' % (cname1, test_tablename), debug=True, pretty=False)
client('select %s from %s limit 10' % (cname2, test_tablename), debug=True, pretty=False)
client('infer %s from %s with confidence 0.1 limit 10' % (cname1, test_tablename), debug=True, pretty=False)
client('infer %s from %s with confidence 0.1 limit 10' % (cname2, test_tablename), debug=True, pretty=False)
client('simulate %s from %s times 10' % (cname1, test_tablename), debug=True, pretty=False)
client('simulate %s from %s times 10' % (cname2, test_tablename), debug=True, pretty=False)
# Test dropping column list
client('drop column list %s from %s' % (cname1, test_tablename), debug=True, pretty=False)
client('drop column list %s from %s' % (cname2, test_tablename), debug=True, pretty=False)
# Assert there are now 0 row lists.
out = client('show column lists for %s' % test_tablename, debug=True, pretty=False)[0]
assert out.shape == (0, 1)
def test_simulate():
""" smoke test """
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
# TODO given documentation
assert len(client("simulate qual_score from %s given name='Albany NY' times 5" % test_tablename, debug=True, pretty=False)[0]) == 5
assert len(client("simulate qual_score from %s given name='Albany NY' and ami_score = 80 times 5" % test_tablename, debug=True, pretty=False)[0]) == 5
assert len(client("simulate name from %s given name='Albany NY' and ami_score = 80 times 5" % test_tablename, debug=True, pretty=False)[0]) == 5
assert len(client("simulate name from %s given name='Albany NY', ami_score = 80 times 5" % test_tablename, debug=True, pretty=False)[0]) == 5
assert len(client("simulate name from %s given name='Albany NY' AND ami_score = 80 times 5" % test_tablename, debug=True, pretty=False)[0]) == 5
assert len(client("simulate name from %s given ami_score = 80 times 5" % test_tablename, debug=True, pretty=False)[0]) == 5
def test_estimate_columns():
""" smoke test """
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
# client('estimate columns from %s' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s where typicality > 1' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s where typicality > 0' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s where typicality > 0 order by typicality' % test_tablename, debug=True, pretty=False)
# client('estimate columns from %s order by typicality limit 5' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s where dependence probability with qual_score > 0' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s order by dependence probability with qual_score' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s order by dependence probability with qual_score limit 5' % test_tablename, debug=True, pretty=False)
out = client('estimate columns from %s order by correlation with qual_score limit 5' % test_tablename, debug=True, pretty=False)[0]
scores = out["correlation with qual_score"]
assert (0 <= scores).all() and (scores <= 1).all()
out = client('estimate columns from %s where correlation with qual_score > 0 order by correlation with qual_score limit 5' % test_tablename, debug=True, pretty=False)[0]
scores = out["correlation with qual_score"]
assert (0 <= scores).all() and (scores <= 1).all()
client('estimate columns from %s order by mutual information with qual_score limit 5' % test_tablename, debug=True, pretty=False)
client('estimate columns from %s where mutual information with qual_score > 1 order by typicality' % test_tablename, debug=True, pretty=False)
def test_row_clusters():
""" smoke test """
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
row_lists = client('show row lists for %s' % test_tablename, debug=True, pretty=False)[0]
assert row_lists.shape == (0, 2)
client('estimate pairwise row similarity from %s save clusters with threshold 0.1 as rcc' % test_tablename, debug=True, pretty=False)
row_lists = client('show row lists for %s' % test_tablename, debug=True, pretty=False)[0]
assert row_lists.shape[0] > 0
client('select * from %s where key in rcc_0' % test_tablename, debug=True, pretty=False)
#client("select * from %s where similarity to name='McAllen TX' > 0.5 order by similarity to name='McAllen TX' as mcallenrows" % test_tablename, debug=True, pretty=False)
#client('select * from %s where key in mcallenrows' % test_tablename, debug=True, pretty=False)
# Test removing row lists
client('drop row list rcc from %s' % test_tablename, debug=True, pretty=False)
out = client('show row lists for %s' % test_tablename, debug=True, pretty=False)[0]
assert out.shape == (0, 2)
def test_select_whereclause_functions():
""" smoke test """
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
# similarity
client('select name from %s where similarity to 0 > 0' % (test_tablename), debug=True, pretty=False)
client('select name from %s where similarity to 0 = 0 order by similarity to 0' % (test_tablename), debug=True, pretty=False)
client('select name from %s where similarity to 1 with respect to qual_score > 0.01' % (test_tablename), debug=True, pretty=False)
client('select name from %s where similarity to 1 with respect to qual_score, ami_score > 0.01' % (test_tablename), debug=True, pretty=False)
# row typicality
client('select * from %s where typicality > 0.04' % (test_tablename), debug=True, pretty=False)
client('select *, typicality from %s where typicality > 0.06' % (test_tablename), debug=True, pretty=False)
# predictive probability
client("select qual_score from %s where predictive probability of qual_score > 0.01" % (test_tablename), debug=True, pretty=False)
client("select qual_score from %s where predictive probability of name > 0.01" % (test_tablename), debug=True, pretty=False)
# probability: aggregate, shouldn't work
with pytest.raises(utils.BayesDBError):
client('select qual_score from %s where probability of qual_score = 6 > 0.01' % (test_tablename), debug=True, pretty=False)
with pytest.raises(utils.BayesDBError):
client("select qual_score from %s where probability of name='Albany NY' > 0.01" % (test_tablename), debug=True, pretty=False)
def test_model_config():
test_tablename = create_dha()
global client, test_filenames
# test naive bayes
client('initialize 2 models for %s with config naive bayes' % (test_tablename), debug=True, pretty=False)
#client('analyze %s for 2 iterations wait' % (test_tablename), debug=True, pretty=False)
client.engine.analyze(test_tablename, model_indices=[0], iterations=2, background=False)
dep_mat = client('estimate pairwise dependence probability from %s' % test_tablename, debug=True, pretty=False)[0]['matrix']
## assert that all dependencies are _0_ (not 1, because there should only be 1 view and 1 cluster!)
## except the diagonal, where we've hardcoded every column to be dependent with itself
assert numpy.all(dep_mat == numpy.identity(dep_mat.shape[0]))
# test crp
with pytest.raises(utils.BayesDBNoModelsError):
client('drop models from %s' % test_tablename, yes=True, debug=True, pretty=False)
client('initialize 2 models for %s with config crp mixture' % (test_tablename), debug=True, pretty=False)
#client('analyze %s for 2 iterations wait' % (test_tablename), debug=True, pretty=False)
client.engine.analyze(test_tablename, model_indices='all', iterations=2, background=False)
dep_mat = client('estimate pairwise dependence probability from %s' % test_tablename, debug=True, pretty=False)[0]['matrix']
## assert that all dependencies are 1 (because there's 1 view, and many clusters)
## (with _very_ low probability, this test may fail due to bad luck)
assert numpy.all(dep_mat == 1)
# test crosscat
with pytest.raises(utils.BayesDBNoModelsError):
client('drop models from %s' % test_tablename, yes=True, debug=True, pretty=False)
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
#client('analyze %s for 2 iterations wait' % (test_tablename), debug=True, pretty=False)
client.engine.analyze(test_tablename, model_indices='all', iterations=2, background=False)
dep_mat = client('estimate pairwise dependence probability from %s' % test_tablename, debug=True, pretty=False)[0]['matrix']
## assert that all dependencies are not all the same
assert (not numpy.all(dep_mat == 1)) and (not numpy.all(dep_mat == 0))
# test that you can't change model config
with pytest.raises(utils.BayesDBError):
client.engine.initialize_models(test_tablename, 2, 'crp mixture')
@pytest.mark.xfail
def test_analyze():
""" test designed to make sure that analyze in background runs correct number of iterations """
    # 1 iteration works fine, but multiple iterations don't.
    # analyze_master is getting called multiple times: 9, 11, 11, 13 calls were observed,
    # with 4 iterations done on every model each of those times (except the first run, which once did 3 iterations).
test_tablename = create_dha(key_column=1) # key column is irrelevant, but let's use it
global client, test_filenames
models = 3
out = client('initialize %d models for %s' % (models, test_tablename), debug=True, pretty=False)[0]
iterations = 3
out = client('analyze %s for %d iterations' % (test_tablename, iterations), debug=True, pretty=False)[0]
out = ''
while 'not currently being analyzed' not in out:
out = client('show analyze for %s' % test_tablename, debug=True, pretty=False)[0]['message']
models = client('show models for %s' % test_tablename, debug=True, pretty=False)[0]['models']
iters_by_model = [v for k,v in models]
for i in iters_by_model:
assert i == iterations
def test_using_models():
""" smoke test """
test_tablename = create_dha(path='data/dha_missing.csv')
global client, test_filenames
client('initialize 3 models for %s' % (test_tablename), debug=True, pretty=False)
client('select name from %s using model 1' % test_tablename, debug=True, pretty=False)
with pytest.raises(utils.BayesDBError):
client('infer name from %s with confidence 0.1 using models 3' % test_tablename, debug=True, pretty=False)
with pytest.raises(utils.BayesDBError):
client("simulate qual_score from %s given name='Albany NY' times 5 using models 3" % test_tablename, debug=True, pretty=False)
with pytest.raises(utils.BayesDBError):
client('infer name from %s with confidence 0.1 using models 0-3' % test_tablename, debug=True, pretty=False)
client('infer name from %s with confidence 0.1 limit 10 using models 2' % test_tablename, debug=True, pretty=False)
client("simulate qual_score from %s given name='Albany NY' times 5 using models 1-2" % test_tablename, debug=True, pretty=False)
client('estimate columns from %s limit 5 using models 1-2' % test_tablename, debug=True, pretty=False)
client('estimate pairwise dependence probability from %s using models 1' % (test_tablename), debug=True, pretty=False)
client('estimate pairwise row similarity from %s save clusters with threshold 0.1 as rcc using models 1-2' % test_tablename, debug=True, pretty=False)
client('drop model 0 from %s' % test_tablename, debug=True, pretty=False, yes=True)
with pytest.raises(utils.BayesDBError):
client('infer name from %s with confidence 0.1 limit 10 using models 0-2' % test_tablename, debug=True, pretty=False)
def test_select():
""" smoke test """
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
client('select name, qual_score from %s' % (test_tablename), debug=True, pretty=False)
client('select name, qual_score from %s limit 10' % (test_tablename), debug=True, pretty=False)
client('select name, qual_score from %s order by qual_score limit 10' % (test_tablename), debug=True, pretty=False)
client('select name, qual_score from %s order by qual_score ASC limit 10' % (test_tablename), debug=True, pretty=False)
client('select name, qual_score from %s order by qual_score DESC limit 10' % (test_tablename), debug=True, pretty=False)
client('select * from %s order by qual_score DESC limit 10' % (test_tablename), debug=True, pretty=False)
client('select name, qual_score from %s where qual_score > 6' % (test_tablename), debug=True, pretty=False)
client('select * from %s where qual_score > 6' % (test_tablename), debug=True, pretty=False)
client("select * from %s where qual_score > 80 and name = 'Albany NY'" % (test_tablename), debug=True, pretty=False)
client("select * from %s where qual_score > 80 and ami_score > 85" % (test_tablename), debug=True, pretty=False)
# create a column list to be used in future queries
client('estimate columns from %s limit 5 as clist' % test_tablename, debug=True, pretty=False)
# similarity
client('select name, similarity to 0 from %s' % (test_tablename), debug=True, pretty=False)
client('select name from %s order by similarity to 0' % (test_tablename), debug=True, pretty=False)
client('select name, similarity to 0 from %s order by similarity to 0' % (test_tablename), debug=True, pretty=False)
client('select name, similarity to 0 with respect to name from %s order by similarity to 1 with respect to qual_score' % (test_tablename), debug=True, pretty=False)
client('select name, similarity to 0 from %s order by similarity to 1 with respect to qual_score, ami_score' % (test_tablename), debug=True, pretty=False)
client('select name, similarity to 0 from %s order by similarity to 1 with respect to clist' % (test_tablename), debug=True, pretty=False)
# Add some cases to be sure that referencing row index and column values produces the same output.
out1 = client('select name, qual_score, similarity to 161 from %s order by similarity to 161 limit 5' % (test_tablename), debug=True, pretty=False)[0]
out2 = client('select name, qual_score, similarity to name = "McAllen TX" from %s order by similarity to name = "McAllen TX" limit 5' % (test_tablename), debug=True, pretty=False)[0]
out3 = client('select name, qual_score, similarity to key = 161 from %s order by similarity to key = 161 limit 5' % (test_tablename), debug=True, pretty=False)[0]
# Assert columns are equal
for col_idx in range(out1.shape[1]):
assert (out1.iloc[col_idx] == out2.iloc[col_idx]).all()
assert (out2.iloc[col_idx] == out3.iloc[col_idx]).all()
# row typicality
client('select typicality from %s' % (test_tablename), debug=True, pretty=False)
client('select *, typicality from %s' % (test_tablename), debug=True, pretty=False)
client('select typicality from %s order by typicality limit 10' % (test_tablename), debug=True, pretty=False)
# probability
# why is this so slow, when predictive probability is really fast? these are _observed_
# for qual_score (numerical): probability takes 20 times longer than predictive prob (about 5 seconds total for 300 rows)
# for name (categorical): probability takes extremely long (about 75 seconds for 300 rows)
# while predictive probability takes under one second for 300 rows
st = time.time()
client('select probability of qual_score = 6 from %s' % (test_tablename), debug=True, pretty=False)
el = time.time() - st
st = time.time()
client("select probability of name='Albany NY' from %s" % (test_tablename), debug=True, pretty=False)
el2 = time.time() - st
#client("select name from %s order by probability of name='Albany NY' DESC" % (test_tablename), debug=True, pretty=False)
# TODO: test that probability function doesn't get evaluated 2x for each row
#client("select probability of name='Albany NY' from %s order by probability of name='Albany NY' DESC" % (test_tablename), debug=True, pretty=False)
# predictive probability
# these are really fast! :) simple predictive probability, unobserved
client("select predictive probability of qual_score from %s" % (test_tablename), debug=True, pretty=False)
client("select predictive probability of name from %s" % (test_tablename), debug=True, pretty=False)
client("select predictive probability of qual_score from %s order by predictive probability of name" % (test_tablename), debug=True, pretty=False)
client("select predictive probability of qual_score from %s order by predictive probability of qual_score" % (test_tablename), debug=True, pretty=False)
## Aggregate functions: can't order by these.
# mutual information
client("select name, qual_score, mutual information of name with qual_score from %s" % (test_tablename), debug=True, pretty=False)
# dependence probability
client("select dependence probability of name with qual_score from %s" % (test_tablename), debug=True, pretty=False)
client("select name, qual_score, dependence probability of name with qual_score from %s" % (test_tablename), debug=True, pretty=False)
# correlation
client("select name, qual_score, correlation of name with qual_score from %s" % (test_tablename), debug=True, pretty=False)
# column typicality
client("select typicality of qual_score, typicality of name from %s" % (test_tablename), debug=True, pretty=False)
client("select typicality of qual_score from %s" % (test_tablename), debug=True, pretty=False)
# correlation with missing values
test_tablename = create_dha(path='data/dha_missing.csv')
client("select name, qual_score, correlation of name with qual_score from %s" % (test_tablename), debug=True, pretty=False)
def test_into():
test_tablename = create_dha()
global client
client('drop btable test_btable_select', yes=True)
# Test that select can produce a new btable with INTO, and that it can be analyzed and manipulated like other btables
client('select name, qual_score from %s limit 5 into test_btable_select' % test_tablename, debug=True, pretty=False)
out = client('select * from test_btable_select', debug=True, pretty=False)[0]
assert len(out) == 5
assert (out.columns == ['key', 'name', 'qual_score']).all()
client('summarize select * from test_btable_select')
client('label columns for test_btable_select set qual_score = quality')
client('initialize 2 models for test_btable_select')
client('analyze test_btable_select for 2 iterations')
client('simulate * from test_btable_select times 5')
client('drop btable test_btable_select', yes=True)
def test_pandas():
test_tablename = create_dha()
global client
# Test that output is a dict if pretty=False and pandas_output=False
out = client("select name, qual_score from %s limit 10" % (test_tablename), debug=True, pretty=False, pandas_output=False)
assert type(out[0]) == dict
# Test that output is pandas DataFrame when pretty=False and a table-like object is returned (pandas_output=True by default)
out = client("select name, qual_score from %s limit 10" % (test_tablename), debug=True, pretty=False)
assert type(out[0]) == pandas.DataFrame
# Test that it still works when no rows are returned
client("select name, qual_score from %s where qual_score < 0" % (test_tablename), debug=True, pretty=False)
# Get the returned data frame from the first list element of the previous result.
test_df = out[0]
# Test creation of a btable from pandas DataFrame
client("drop btable %s" % (test_tablename), yes=True)
client("create btable %s from pandas" % (test_tablename), debug=True, pretty=False, pandas_df=test_df, key_column=1)
def test_summarize():
test_tablename = create_dha()
global client
# Test that the output is a pandas DataFrame when pretty=False
out = client('summarize select name, qual_score from %s' % (test_tablename), debug=True, pretty=False)[0]
assert type(out) == pandas.DataFrame
assert (out.columns == ['', 'name', 'qual_score']).all()
# Test that stats from summary_describe and summary_freqs made it into the output DataFrame
# Note that all of these stats won't be present in EVERY summarize output, but all should be in the output
# from the previous test.
expected_indices = ['type', 'count', 'unique', 'mean', 'std', 'min', '25%', '50%', '75%', 'max', \
'mode', 'prob_mode']
assert all([x in list(out['']) for x in expected_indices])
# Test that it works on columns of predictive functions.
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
client('summarize select correlation of name with qual_score from %s' % (test_tablename), debug=True, pretty=False)
# Test with fewer than 5 unique values (output should have fewer rows)
out = client('summarize select name, qual_score from %s limit 3' % (test_tablename), debug=True, pretty=False)[0]
assert out.shape == (12, 3)
# Test with no rows
out = client('summarize select name, qual_score from %s where qual_score < 0' % (test_tablename), debug=True, pretty=False)[0]
assert out.shape == (0, 3)
# Test with only a discrete column
client('summarize select name from %s' % (test_tablename), debug=True, pretty=False)
# Test with only a numerical column
client('summarize select qual_score from %s' % (test_tablename), debug=True, pretty=False)
def test_select_where_col_equal_val():
test_tablename = create_dha()
global client, test_filenames
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
basic_similarity = client('select * from %s where similarity to 1 > .6 limit 5' % (test_tablename),pretty=False, debug=True)[0]['key']
col_val_similarity = client('select * from %s where similarity to name = "Akron OH" > .6 limit 5' % (test_tablename),pretty=False, debug=True)[0]['key']
assert len(basic_similarity) == len(col_val_similarity)
def test_labeling():
test_tablename = create_dha()
global client, test_filenames
client('label columns for %s set name = Name of the hospital, qual_score = Overall quality score' % (test_tablename), debug=True, pretty=False)
client('show label for %s name, qual_score' % (test_tablename), debug=True, pretty=False)
client('show label for %s' % (test_tablename), debug=True, pretty=False)
# Test getting columns from CSV
client('label columns for %s from data/dha_labels.csv' % (test_tablename), debug=True, pretty=False)
def test_user_metadata():
test_tablename = create_dha()
global client, test_filenames
client('update metadata for %s set data_source = Dartmouth Atlas of Health, url = http://www.dartmouthatlas.org/tools/downloads.aspx' % (test_tablename), debug=True, pretty=False)
client('update metadata for %s from data/dha_user_metadata.csv' % (test_tablename), debug=True, pretty=False)
client('show metadata for %s data_source, url' % (test_tablename), debug=True, pretty=False)
# Test that show metadata also works when no keys are specified
client('show metadata for %s' % (test_tablename), debug=True, pretty=False)
def test_freq_hist():
test_tablename = create_dha()
global client, test_filenames
# Test that freq and hist work and return a DataFrame
out = client('freq select qual_score from %s' % (test_tablename), debug=True, pretty=False)[0]
assert type(out) == pandas.DataFrame
assert out['qual_score'][0] == 87.5
assert out['frequency'][0] == 7
out = client('hist select qual_score from %s' % (test_tablename), debug=True, pretty=False)[0]
assert type(out) == pandas.DataFrame
assert out.shape == (10, 4)
assert out['frequency'][0] == 1
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
client.engine.analyze(tablename=test_tablename, iterations=2, background=False)
# Results for infer should match select, since there are no missing values
out = client('freq infer qual_score from %s with confidence 0' % (test_tablename), debug=True, pretty=False)[0]
assert type(out) == pandas.DataFrame
assert out['qual_score'][0] == 87.5
assert out['frequency'][0] == 7
out = client('hist infer qual_score from %s with confidence 0' % (test_tablename), debug=True, pretty=False)[0]
assert type(out) == pandas.DataFrame
assert out.shape == (10, 4)
assert out['frequency'][0] == 1
# For simulate, we just have to go by size and expected range
out = client('freq simulate qual_score from %s times 20' % (test_tablename), debug=True, pretty=False)[0]
assert out.shape[1] == 3
assert (out['probability'] < 1).all()
out = client('hist simulate qual_score from %s times 20' % (test_tablename), debug=True, pretty=False)[0]
assert out.shape[1] == 4
assert (out['frequency'] <= 20).all()
assert (out['probability'] < 1).all()
def test_update_schema():
test_tablename = create_dha()
global client, test_filenames
# Test setting one column to each type other than numerical
out = client('update schema for %s set qual_score = ignore, ami_score = categorical, pneum_score = cyclic(0, 100)' % (test_tablename), debug=True, pretty=False)[0]
assert (out['datatype'][out['column'] == 'qual_score'] == 'ignore').all()
assert (out['datatype'][out['column'] == 'ami_score'] == 'categorical').all()
assert (out['datatype'][out['column'] == 'pneum_score'] == 'cyclic').all()
# Test setting categorical with a cardinality parameter
out = client('update schema for %s set name = categorical(350)' % (test_tablename), debug=True, pretty=False)[0]
assert (out['datatype'][out['column'] == 'name'] == 'categorical').all()
# Selecting qual_score should still work even after it's ignored, also should work in where clauses and order by clauses
client('select qual_score from %s' % (test_tablename), debug=True, pretty=False)
out = client('select name, qual_score, ami_score from %s where qual_score > 90 order by qual_score' % (test_tablename), debug=True, pretty=False)[0]
assert (out['qual_score'] > 90).all()
assert (out['qual_score'] == out['qual_score'].order(ascending=False)).all()
# Also test where clause with ignored text column
client('update schema for %s set name = ignore' % (test_tablename), debug=True, pretty=False)
out = client('select name, qual_score from %s where name = "Albany NY"' % (test_tablename), debug=True, pretty=False)[0]
assert out.shape == (1, 3)
assert (out['name'] == "Albany NY").all()
# Set qual_score, ami_score, pneum_score, and total_fte back to numerical, and select should work again
client('update schema for %s set qual_score = numerical, ami_score = numerical, pneum_score = numerical, total_fte = numerical' % (test_tablename), debug=True, pretty=False)
# Set back to ignore, run models, and then estimation shouldn't work for qual_score
client('update schema for %s set qual_score = ignore' % (test_tablename), debug=True, pretty=False)
client('initialize 2 models for %s' % (test_tablename), debug=True, pretty=False)
client.engine.analyze(tablename=test_tablename, iterations=2, background=False)
with pytest.raises(utils.BayesDBError):
# Next two statements should fail because they attempt functions on an 'ignore' column
client('estimate columns from %s order by correlation with qual_score limit 5' % (test_tablename), debug=True, pretty=False)
client('estimate columns from %s order by dependence probability with qual_score limit 5' % (test_tablename), debug=True, pretty=False)
# Next two statements should fail because they 1) try to set a new key and 2) try to change the key's type
client('update schema for %s set name = key' % (test_tablename), debug=True, pretty=False)
client('update schema for %s set key = numerical' % (test_tablename), debug=True, pretty=False)
# Next two statements should fail because they set parameters that don't contain the data.
client('update schema for %s set name = categorical(3)' % (test_tablename), debug=True, pretty=False)
client('update schema for %s set qual_score = cyclic(0, 10)' % (test_tablename), debug=True, pretty=False)
|
JDReutt/BayesDB
|
bayesdb/tests/test_client.py
|
Python
|
apache-2.0
| 36,133
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""PNASNet Faster R-CNN implementation.
Based on PNASNet model: https://arxiv.org/abs/1712.00559
"""
import tensorflow as tf
from object_detection.meta_architectures import faster_rcnn_meta_arch
from nets.nasnet import nasnet_utils
from nets.nasnet import pnasnet
arg_scope = tf.contrib.framework.arg_scope
slim = tf.contrib.slim
def pnasnet_large_arg_scope_for_detection(is_batch_norm_training=False):
"""Defines the default arg scope for the PNASNet Large for object detection.
This provides a small edit to switch batch norm training on and off.
Args:
is_batch_norm_training: Boolean indicating whether to train with batch norm.
Returns:
An `arg_scope` to use for the PNASNet Large Model.
"""
imagenet_scope = pnasnet.pnasnet_large_arg_scope()
with arg_scope(imagenet_scope):
with arg_scope([slim.batch_norm], is_training=is_batch_norm_training) as sc:
return sc
def _filter_scaling(reduction_indices, start_cell_num):
"""Compute the expected filter scaling at given PNASNet cell start_cell_num.
In the pnasnet.py code, filter_scaling starts at 1.0. We instead
adapt filter scaling to depend on the starting cell.
At first cells, before any reduction, filter_scalling is 1.0. With passing
any reduction cell, the filter_scaling is multiplied by 2.
Args:
reduction_indices: list of int indices.
start_cell_num: int.
Returns:
filter_scaling: float.
"""
filter_scaling = 1.0
for ind in reduction_indices:
if ind < start_cell_num:
filter_scaling *= 2.0
return filter_scaling
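  # Illustrative worked example (values assumed, not taken from the model
  # config): with reduction_indices=[4, 8] and start_cell_num=8, only the
  # reduction cell at index 4 precedes the start cell, so the function
  # returns 1.0 * 2.0 = 2.0.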
# Note: This is largely a copy of _build_pnasnet_base inside pnasnet.py but
# with special edits to remove instantiation of the stem and the special
# ability to receive as input a pair of hidden states. It constructs only
# a sub-network from the original PNASNet model, starting from the
# start_cell_num cell and with modified final layer.
def _build_pnasnet_base(
hidden_previous, hidden, normal_cell, hparams, true_cell_num,
start_cell_num):
"""Constructs a PNASNet image model for proposal classifier features."""
# Find where to place the reduction cells or stride normal cells
reduction_indices = nasnet_utils.calc_reduction_layers(
hparams.num_cells, hparams.num_reduction_layers)
filter_scaling = _filter_scaling(reduction_indices, start_cell_num)
# Note: The None is prepended to match the behavior of _imagenet_stem()
cell_outputs = [None, hidden_previous, hidden]
net = hidden
# Run the cells
for cell_num in range(start_cell_num, hparams.num_cells):
is_reduction = cell_num in reduction_indices
stride = 2 if is_reduction else 1
if is_reduction: filter_scaling *= hparams.filter_scaling_rate
prev_layer = cell_outputs[-2]
net = normal_cell(
net,
scope='cell_{}'.format(cell_num),
filter_scaling=filter_scaling,
stride=stride,
prev_layer=prev_layer,
cell_num=true_cell_num)
true_cell_num += 1
cell_outputs.append(net)
# Final nonlinearity.
# Note that we have dropped the final pooling, dropout and softmax layers
# from the default pnasnet version.
with tf.variable_scope('final_layer'):
net = tf.nn.relu(net)
return net
# TODO(shlens): Only fixed_shape_resizer is currently supported for PNASNet
# featurization. The reason for this is that pnasnet.py only supports
# inputs with fully known shapes. We need to update pnasnet.py to handle
# shapes not known at compile time.
class FasterRCNNPNASFeatureExtractor(
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor):
"""Faster R-CNN with PNASNet feature extractor implementation."""
def __init__(self,
is_training,
first_stage_features_stride,
batch_norm_trainable=False,
reuse_weights=None,
weight_decay=0.0):
"""Constructor.
Args:
is_training: See base class.
first_stage_features_stride: See base class.
batch_norm_trainable: See base class.
reuse_weights: See base class.
weight_decay: See base class.
Raises:
ValueError: If `first_stage_features_stride` is not 16.
"""
if first_stage_features_stride != 16:
raise ValueError('`first_stage_features_stride` must be 16.')
super(FasterRCNNPNASFeatureExtractor, self).__init__(
is_training, first_stage_features_stride, batch_norm_trainable,
reuse_weights, weight_decay)
def preprocess(self, resized_inputs):
"""Faster R-CNN with PNAS preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: A [batch, height_in, width_in, channels] float32 tensor
representing a batch of images with values between 0 and 255.0.
Returns:
preprocessed_inputs: A [batch, height_out, width_out, channels] float32
tensor representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
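    # Illustrative values for the linear mapping above: a pixel value of 0.0
    # maps to -1.0, 127.5 maps to 0.0, and 255.0 maps to +1.0.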
def _extract_proposal_features(self, preprocessed_inputs, scope):
"""Extracts first stage RPN features.
Extracts features using the first half of the PNASNet network.
We construct the network in `align_feature_maps=True` mode, which means
that all VALID paddings in the network are changed to SAME padding so that
the feature maps are aligned.
Args:
preprocessed_inputs: A [batch, height, width, channels] float32 tensor
representing a batch of images.
scope: A scope name.
Returns:
rpn_feature_map: A tensor with shape [batch, height, width, depth]
end_points: A dictionary mapping feature extractor tensor names to tensors
Raises:
ValueError: If the created network is missing the required activation.
"""
del scope
if len(preprocessed_inputs.get_shape().as_list()) != 4:
raise ValueError('`preprocessed_inputs` must be 4 dimensional, got a '
'tensor of shape %s' % preprocessed_inputs.get_shape())
with slim.arg_scope(pnasnet_large_arg_scope_for_detection(
is_batch_norm_training=self._train_batch_norm)):
with arg_scope([slim.conv2d,
slim.batch_norm,
slim.separable_conv2d],
reuse=self._reuse_weights):
_, end_points = pnasnet.build_pnasnet_large(
preprocessed_inputs, num_classes=None,
is_training=self._is_training,
final_endpoint='Cell_7')
# Note that both 'Cell_6' and 'Cell_7' have equal depth = 2160.
# Cell_7 is the last cell before second reduction.
rpn_feature_map = tf.concat([end_points['Cell_6'],
end_points['Cell_7']], 3)
# pnasnet.py does not maintain the batch size in the first dimension.
    # This workaround lets us retain the batch size for the set_shape call below.
batch = preprocessed_inputs.get_shape().as_list()[0]
shape_without_batch = rpn_feature_map.get_shape().as_list()[1:]
rpn_feature_map_shape = [batch] + shape_without_batch
rpn_feature_map.set_shape(rpn_feature_map_shape)
return rpn_feature_map, end_points
def _extract_box_classifier_features(self, proposal_feature_maps, scope):
"""Extracts second stage box classifier features.
This function reconstructs the "second half" of the PNASNet
network after the part defined in `_extract_proposal_features`.
Args:
proposal_feature_maps: A 4-D float tensor with shape
[batch_size * self.max_num_proposals, crop_height, crop_width, depth]
representing the feature map cropped to each proposal.
scope: A scope name.
Returns:
proposal_classifier_features: A 4-D float tensor with shape
[batch_size * self.max_num_proposals, height, width, depth]
representing box classifier features for each proposal.
"""
del scope
# Number of used stem cells.
num_stem_cells = 2
# Note that we always feed into 2 layers of equal depth
    # where the first N channels correspond to the previous hidden layer
# and the second N channels correspond to the final hidden layer.
hidden_previous, hidden = tf.split(proposal_feature_maps, 2, axis=3)
# Note that what follows is largely a copy of build_pnasnet_large() within
# pnasnet.py. We are copying to minimize code pollution in slim.
# TODO(shlens,skornblith): Determine the appropriate drop path schedule.
# For now the schedule is the default (1.0->0.7 over 250,000 train steps).
hparams = pnasnet.large_imagenet_config()
if not self._is_training:
hparams.set_hparam('drop_path_keep_prob', 1.0)
# Calculate the total number of cells in the network
total_num_cells = hparams.num_cells + num_stem_cells
normal_cell = pnasnet.PNasNetNormalCell(
hparams.num_conv_filters, hparams.drop_path_keep_prob,
total_num_cells, hparams.total_training_steps)
with arg_scope([slim.dropout, nasnet_utils.drop_path],
is_training=self._is_training):
with arg_scope([slim.batch_norm], is_training=self._train_batch_norm):
with arg_scope([slim.avg_pool2d,
slim.max_pool2d,
slim.conv2d,
slim.batch_norm,
slim.separable_conv2d,
nasnet_utils.factorized_reduction,
nasnet_utils.global_avg_pool,
nasnet_utils.get_channel_index,
nasnet_utils.get_channel_dim],
data_format=hparams.data_format):
# This corresponds to the cell number just past 'Cell_7' used by
# _extract_proposal_features().
start_cell_num = 8
true_cell_num = start_cell_num + num_stem_cells
with slim.arg_scope(pnasnet.pnasnet_large_arg_scope()):
net = _build_pnasnet_base(
hidden_previous,
hidden,
normal_cell=normal_cell,
hparams=hparams,
true_cell_num=true_cell_num,
start_cell_num=start_cell_num)
proposal_classifier_features = net
return proposal_classifier_features
def restore_from_classification_checkpoint_fn(
self,
first_stage_feature_extractor_scope,
second_stage_feature_extractor_scope):
"""Returns a map of variables to load from a foreign checkpoint.
Note that this overrides the default implementation in
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor which does not work for
PNASNet checkpoints.
Args:
first_stage_feature_extractor_scope: A scope name for the first stage
feature extractor.
second_stage_feature_extractor_scope: A scope name for the second stage
feature extractor.
Returns:
A dict mapping variable names (to load from a checkpoint) to variables in
the model graph.
"""
variables_to_restore = {}
for variable in tf.global_variables():
if variable.op.name.startswith(
first_stage_feature_extractor_scope):
var_name = variable.op.name.replace(
first_stage_feature_extractor_scope + '/', '')
var_name += '/ExponentialMovingAverage'
variables_to_restore[var_name] = variable
if variable.op.name.startswith(
second_stage_feature_extractor_scope):
var_name = variable.op.name.replace(
second_stage_feature_extractor_scope + '/', '')
var_name += '/ExponentialMovingAverage'
variables_to_restore[var_name] = variable
return variables_to_restore
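# Illustrative entry produced by the loop above (the scope and variable names
# are examples only, not taken from a real checkpoint):
#   variables_to_restore['cell_0/1x1/weights/ExponentialMovingAverage'] would
#   map to the graph variable 'FirstStageFeatureExtractor/cell_0/1x1/weights'.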
|
cshallue/models
|
research/object_detection/models/faster_rcnn_pnas_feature_extractor.py
|
Python
|
apache-2.0
| 12,248
|
"""
Load tables/views
"""
import yaml
import re
from jnpr.junos.factory import FactoryLoader
from os.path import splitext
def _preprocess_yml(path):
"""Dynamically create PY3 version of the file by re-writing 'unicode' to 'str'."""
with open(path) as f:
tmp_yaml = f.read()
return re.sub(r"unicode", "str", tmp_yaml)
def _loadyaml_bypass(yaml_str):
"""Bypass Juniper's loadyaml and directly call FactoryLoader"""
return FactoryLoader().load(yaml.safe_load(yaml_str))
_YAML_ = splitext(__file__)[0] + ".yml"
py3_yaml = _preprocess_yml(_YAML_)
globals().update(_loadyaml_bypass(py3_yaml))
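# Illustrative effect of the 'unicode' -> 'str' rewrite (the YAML line below is
# an assumed example, not taken from the bundled junos_views.yml):
#
#   before: "serial: { chassis-serial-number: unicode }"
#   after:  "serial: { chassis-serial-number: str }"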
|
spotify/napalm
|
napalm/junos/utils/junos_views.py
|
Python
|
apache-2.0
| 621
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-variable,too-many-locals,len-as-condition
"""Schedule for reduce operators"""
from __future__ import absolute_import as _abs
import tvm
from .. import tag
from .. import generic
from .injective import _schedule_injective
def _schedule_reduce(op, sch, is_idx_reduce=False):
if is_idx_reduce:
data_out = op.input_tensors[0]
else:
data_in = op.input_tensors[0]
data_out = op.output(0)
if not sch[data_out].op.reduce_axis:
return _schedule_injective(op, sch)
if len(sch[data_out].op.axis) > 0:
all_reduce = False
num_thread = 32
target = tvm.target.current_target()
if target and target.target_name == "opencl":
            # Without this, CL_INVALID_WORK_GROUP_SIZE occurred when running
            # test_topi_reduce.py; the root cause is unknown.
num_thread = 16
block_x = tvm.thread_axis("blockIdx.x")
thread_x = tvm.thread_axis((0, num_thread), "threadIdx.x")
thread_y = tvm.thread_axis((0, num_thread), "threadIdx.y")
else:
all_reduce = True
num_thread = tvm.target.current_target(allow_none=False).max_num_threads
thread_x = tvm.thread_axis((0, num_thread), "threadIdx.x")
# Fuse and refactor the reduce axis
fused_reduce = sch[data_out].fuse(*[sch[data_out].op.reduce_axis[i]
for i in range(len(sch[data_out].op.reduce_axis))])
ko, ki = sch[data_out].split(fused_reduce, factor=num_thread)
if is_idx_reduce:
data_out_rf, _ = sch.rfactor(data_out, ki)
else:
data_out_rf = sch.rfactor(data_out, ki)
tx = sch[data_out].op.reduce_axis[0]
sch[data_out].bind(tx, thread_x)
sch[data_out_rf].compute_at(sch[data_out], tx)
if is_idx_reduce:
real_output = op.output(0)
temp_idx_input = data_out.op.output(0)
temp_val_input = data_out.op.output(1)
else:
real_output = data_out
if not all_reduce:
# Fuse and split the axis
fused_outer = sch[real_output].fuse(*[sch[real_output].op.axis[i]
for i in range(len(sch[real_output].op.axis))])
bx, outer_in = sch[real_output].split(fused_outer, factor=num_thread)
# Bind the axes to threads and blocks
sch[real_output].bind(outer_in, thread_y)
sch[real_output].bind(bx, block_x)
if is_idx_reduce:
sch[temp_idx_input].compute_at(sch[real_output], outer_in)
sch[temp_val_input].compute_at(sch[real_output], outer_in)
else:
if is_idx_reduce:
spatial_axis = sch[real_output].fuse(*(sch[real_output].op.axis))
sch[real_output].bind(spatial_axis, tvm.thread_axis("blockIdx.x"))
sch[temp_idx_input].compute_at(sch[real_output],
spatial_axis)
sch[temp_val_input].compute_at(sch[real_output],
spatial_axis)
sch[real_output].set_store_predicate(thread_x.equal(0))
return sch
@generic.schedule_reduce.register(["cuda", "gpu"])
def schedule_reduce(outs):
"""Schedule for inject->reduce->bcast ops.
Parameters
----------
outs: Array of Tensor
The computation graph description of reduce in the format
of an array of tensors.
Returns
-------
sch: Schedule
The computation schedule for the op.
"""
outs = [outs] if isinstance(outs, tvm.tensor.Tensor) else outs
sch = tvm.create_schedule([x.op for x in outs])
scheduled_ops = []
def traverse_before_reduce(operator):
"""Internal travserse function"""
if isinstance(operator, tvm.tensor.PlaceholderOp):
return
if tag.is_injective(operator.tag):
sch[operator].compute_inline()
for tensor in operator.input_tensors:
if tensor.op not in scheduled_ops:
traverse_before_reduce(tensor.op)
else:
raise RuntimeError("Unsupported operator: %s" % operator.tag)
scheduled_ops.append(operator)
def traverse_after_reduce(operator):
"""Internal travserse function"""
if tag.is_broadcast(operator.tag):
if operator not in scheduled_ops:
_schedule_injective(operator, sch)
for tensor in operator.input_tensors:
traverse_after_reduce(tensor.op)
elif operator.tag == 'comm_reduce':
_schedule_reduce(operator, sch, is_idx_reduce=False)
for tensor in operator.input_tensors:
if tensor.op not in scheduled_ops:
traverse_before_reduce(tensor.op)
elif operator.tag == 'comm_reduce_idx':
_schedule_reduce(operator, sch, is_idx_reduce=True)
input_tensors = operator.input_tensors[0].op.input_tensors
for tensor in input_tensors:
if tensor.op not in scheduled_ops:
traverse_before_reduce(tensor.op)
else:
raise RuntimeError("Unsupported operator: %s" % operator.tag)
scheduled_ops.append(operator)
traverse_after_reduce(outs[0].op)
return sch
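# Minimal usage sketch (assumes the older TVM/TOPI API that this file targets;
# the tensor shape and the CUDA target are arbitrary examples):
#
#   import tvm
#   import topi
#   data = tvm.placeholder((1024, 64), name='data')
#   red = topi.sum(data, axis=1)
#   with tvm.target.create('cuda'):
#       s = topi.generic.schedule_reduce(red)
#   f = tvm.build(s, [data, red], 'cuda')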
|
Huyuwei/tvm
|
topi/python/topi/cuda/reduction.py
|
Python
|
apache-2.0
| 6,024
|
'''
====================================================================
Copyright (c) 2016 Barry A Scott. All rights reserved.
This software is licensed as described in the file LICENSE.txt,
which you should have received as part of this distribution.
====================================================================
wb_window_main.py
'''
from PyQt5 import QtWidgets
from PyQt5 import QtCore
class WbMainWindow(QtWidgets.QMainWindow):
focus_is_in_names = ('tree', 'table')
def __init__( self, app, debug_fn, parent=None ):
self.app = app
self.log = self.app.log
self.debugLog = app.debug_options.debugLogMainWindow
super().__init__( parent )
self.icon_size = QtCore.QSize( 32, 32 )
# list of all the WbActionState for the menus and toolbars
self.__action_state_manager = WbActionStateManager( self.debugLog )
self.__focus_is_in = self.focus_is_in_names[0]
# Often the rest of init has to be done after the widgets are rendered
# for example to set focus on a widget
self.__timer_init = QtCore.QTimer()
self.__timer_init.timeout.connect( self.__completeInit )
self.__timer_init.setSingleShot( True )
self.__timer_init.start( 0 )
def __completeInit( self ):
self.__timer_init = None
self.app.wrapWithThreadSwitcher( self.completeInit, 'main_window.completeInit' )()
def completeInit( self ):
pass
def close( self ):
super().close()
#------------------------------------------------------------
def setFocusIsIn( self, widget_type ):
assert widget_type in self.focus_is_in_names
self.__focus_is_in = widget_type
def focusIsIn( self ):
assert self.__focus_is_in in self.focus_is_in_names
return self.__focus_is_in
#------------------------------------------------------------
# override to do what is required on app becoming active.
def appActiveHandler( self ):
pass
def updateEnableStates( self, force_disabled=False ):
self.__action_state_manager.update( force_disabled )
def setupMenuBar( self, menu_bar ):
pass
def setupToolBar( self, tool_bar ):
pass
def setupStatusBar( self, status_bar ):
pass
def _addMenu( self, menu, name, handler, enabler=None, icon_name=None, checker=None, group=None, role=QtWidgets.QAction.NoRole ):
if icon_name is None:
icon_name = 'toolbar_images/blank.png'
icon = self.app.getQIcon( icon_name )
action = menu.addAction( icon, name )
if handler is not None:
handler = self.app.wrapWithThreadSwitcher( handler, 'menu: %s' % (name,) )
if checker is not None:
action.toggled.connect( handler )
else:
action.triggered.connect( handler )
if role is not None:
action.setMenuRole( role )
if enabler is not None:
self.__action_state_manager.addEnabler( action, enabler )
if checker is not None:
action.setCheckable( True )
self.__action_state_manager.addChecker( action, checker )
if group is not None:
group.addAction( action )
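    # A typical _addMenu() call might look like the line below (handler and
    # enabler names are hypothetical): the enabler is re-queried on every
    # updateEnableStates() call and the handler is wrapped by the app's
    # thread switcher before being connected to the action.
    #   self._addMenu( menu, 'Commit', self.onActionCommit, enabler=self.enablerCommit )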
def _addToolBar( self, name, style=None ):
bar = self.addToolBar( name )
bar.setIconSize( self.icon_size )
if style is not None:
bar.setStyleSheet( 'QToolButton{%s}' % (style,) )
return bar
def _addTool( self, bar, name, handler, enabler=None, icon_name=None, checker=None ):
if icon_name is None:
action = bar.addAction( name )
else:
icon = self.app.getQIcon( icon_name )
action = bar.addAction( icon, name )
if handler is not None:
handler = self.app.wrapWithThreadSwitcher( handler, 'toolbar: %s' % (name,) )
if checker is not None:
action.toggled.connect( handler )
else:
action.triggered.connect( handler )
if enabler is not None:
self.__action_state_manager.addEnabler( action, enabler )
if checker is not None:
action.setCheckable( True )
self.__action_state_manager.addChecker( action, checker )
class WbActionStateManager:
def __init__( self, debug_fn ):
self.debugLog = debug_fn
self.__all_action_enablers = []
self.__all_action_checkers = []
self.__update_running = False
def addEnabler( self, action, enabler_handler ):
self.__all_action_enablers.append( WbActionEnabledState( action, enabler_handler ) )
def addChecker( self, action, checker_handler ):
self.__all_action_checkers.append( WbActionCheckedState( action, checker_handler ) )
def update( self, force_disabled ):
if self.__update_running:
return
self.__update_running = True
self.debugLog( 'WbActionState.update running' )
        # use a cache to avoid calling state queries more than once on any one update
cache = {}
for enabler in self.__all_action_enablers:
enabler.setState( cache, force_disabled=force_disabled )
for checker in self.__all_action_checkers:
checker.setState( cache, force_disabled=False )
self.debugLog( 'WbActionState.update done' )
self.__update_running = False
class WbActionSetStateBase:
def __init__( self, action, handler ):
self.action = action
self.handler = handler
self.__key = self.handler.__name__
def __repr__( self ):
        return '<WbActionSetStateBase: %r>' % (self.handler,)
def setState( self, cache, force_disabled ):
state = False if force_disabled else self.__callHandler( cache )
        assert state in (True, False), 'setState %r returned %r, not bool' % (self.handler, state)
self.setActionState( state )
def setActionState( self, state ):
raise NotImplementedError()
def __callHandler( self, cache ):
if self.__key not in cache:
cache[ self.__key ] = self.handler()
return cache[ self.__key ]
class WbActionEnabledState(WbActionSetStateBase):
def __init__( self, action, enabler_handler ):
super().__init__( action, enabler_handler )
def __repr__( self ):
return '<WbActionEnabledState: %r>' % (self.handler,)
def setActionState( self, state ):
self.action.setEnabled( state )
class WbActionCheckedState(WbActionSetStateBase):
def __init__( self, action, checker_handler ):
super().__init__( action, checker_handler )
def __repr__( self ):
return '<WbActionCheckedState: %r>' % (self.handler,)
def setActionState( self, state ):
self.action.setChecked( state )
|
barry-scott/git-workbench
|
Source/Common/wb_main_window.py
|
Python
|
apache-2.0
| 6,870
|
#!/usr/bin/python
import json
import os.path
import re
import subprocess
zs_api_config_file = '/.zsapi.ini'
zs_api_target = 'localadmin'
if os.path.isfile("/usr/local/zend/bin/zs-client.sh"):
directives_details = subprocess.check_output(["/usr/local/zend/bin/zs-client.sh", "configurationDirectivesList", "--target=localadmin", "--output-format=json"])
## Strip the PHP notices from the json
directives_details = re.sub("Notice:.*\n", "", directives_details)
## Strip the newlines from the json
directives_details = re.sub("\n", "", directives_details)
    f1 = open('/tmp/puppet_zend_directives.txt', 'w')
    f1.write(directives_details)
    f1.close()
arr = json.loads(directives_details)
for directive in arr[u"responseData"][u"directives"]:
name = directive["name"]
for key, value in directive.iteritems():
if value and not isinstance(value, list) and not isinstance(value,dict):
print ('zend_directive_' + key + '_' + name + '=' + value)
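## Each printed line is a key=value fact of the form
## zend_directive_<attribute>_<directive name>=<value>; the attribute and
## value shown here are illustrative only:
##   zend_directive_value_max_execution_time=30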
|
ubellavance/ZendServerPuppet
|
facts.d/get_directives_details.py
|
Python
|
apache-2.0
| 967
|
# Out of order to fix circular import
from .term_reordering import (
chemist_ordered,
normal_ordered,
normal_ordered_ladder_term,
normal_ordered_quad_term,
reorder,
)
from .binary_codes import (
linearize_decoder,
checksum_code,
bravyi_kitaev_code,
jordan_wigner_code,
parity_code,
weight_one_binary_addressing_code,
weight_one_segment_code,
weight_two_segment_code,
interleaved_code,
)
from .binary_code_transform import (
binary_code_transform,
extractor,
dissolve,
make_parity_list,
)
from .bksf import (
bravyi_kitaev_fast,
bravyi_kitaev_fast_interaction_op,
bravyi_kitaev_fast_edge_matrix,
number_operator,
vacuum_operator,
edge_operator_aij,
edge_operator_b,
generate_fermions,
)
from .bravyi_kitaev import (
bravyi_kitaev,
inline_sum,
inline_product,
)
from .bravyi_kitaev_tree import bravyi_kitaev_tree
from .commutator_diagonal_coulomb_operator import (
commutator_ordered_diagonal_coulomb_with_two_body_operator,)
from .conversions import (
get_fermion_operator,
get_boson_operator,
get_majorana_operator,
get_quad_operator,
check_no_sympy,
)
from .fenwick_tree import (
FenwickNode,
FenwickTree,
)
from .jordan_wigner import (
jordan_wigner,
jordan_wigner_one_body,
jordan_wigner_two_body,
)
from .qubitoperator_to_paulisum import qubit_operator_to_pauli_sum
from .reverse_jordan_wigner import reverse_jordan_wigner
from .remove_symmetry_qubits import (
symmetry_conserving_bravyi_kitaev,
edit_hamiltonian_for_spin,
)
from .verstraete_cirac import (
verstraete_cirac_2d_square,
vertical_edges_snake,
)
|
quantumlib/OpenFermion
|
src/openfermion/transforms/opconversions/__init__.py
|
Python
|
apache-2.0
| 1,702
|
"""Provides tools for parsing and writing to a csv file.
"""
import collections
from typing import List, OrderedDict, Tuple, Union
import xlsxwriter
import pyexcel
from xlsxwriter.format import Format
from outputresult import OutputResult, ColumnStyle
Workbook = OrderedDict[str, List[List[str]]]
# The name for the results sheet
RESULTS_SHEET_NAME = 'results'
# The name for the sheet of deleted items
DELETED_SHEET_NAME = 'deleted'
# The name for the sheet of omitted items
OMITTED_SHEET_NAME = 'omitted'
# The name for the sheet of found items
FOUND_SHEET_NAME = 'found'
def excel_to_records(input_path: str) -> Workbook:
"""Reads rows to a excel file at the specified path.
Args:
input_path (str): The path where the excel file will be read.
"""
return pyexcel.get_book_dict(
file_name=input_path
)
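# A minimal read sketch (illustrative only; the file path is hypothetical):
#   book = excel_to_records('/tmp/example.xlsx')
#   for sheet_name, rows in book.items():
#       print(sheet_name, len(rows))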
def get_writer_format(workbook: xlsxwriter.Workbook, style: ColumnStyle) -> Union[Tuple[Format, int], None]:
    """Returns a (cell format, column width) tuple for the given column style
    (the format is None when the style does not request text wrapping), or
    None if no style is provided."""
    if style:
wb_format = workbook.add_format({'text_wrap': 1, 'valign': 'top'}) if style['wrap_text'] else None
return wb_format, style['width']
else:
return None
def write_results_to_xlsx(results: OutputResult, output_path: str):
"""
    Writes the result of processing to the output path as an xlsx file. Results will be written to a 'results' sheet.
Omitted results will be written to an 'omitted' sheet. Deleted results will be written to a 'deleted' sheet.
Args:
results: The results to be written.
output_path: The output path.
"""
workbook = collections.OrderedDict([(RESULTS_SHEET_NAME, results.results)])
if results.omitted:
workbook[OMITTED_SHEET_NAME] = results.omitted
if results.deleted:
workbook[DELETED_SHEET_NAME] = results.deleted
if results.found:
workbook[FOUND_SHEET_NAME] = results.found
wb_file = xlsxwriter.Workbook(output_path)
styles = []
if results.column_styles:
styles = list(map(lambda style: get_writer_format(wb_file, style), results.column_styles))
for sheet_name, values in workbook.items():
sheet = wb_file.add_worksheet(name=sheet_name)
if results.freeze_first_row:
sheet.freeze_panes(1, 0)
for col_idx in range(0, len(styles)):
if styles[col_idx]:
col_format, width = styles[col_idx]
sheet.set_column(col_idx, col_idx, width)
for row_idx in range(0, len(values)):
row = values[row_idx]
for col_idx in range(0, len(row)):
cell_format = None
if len(styles) > col_idx and styles[col_idx] and styles[col_idx][0]:
cell_format = styles[col_idx][0]
cell_value = row[col_idx]
sheet.write(row_idx, col_idx, cell_value, cell_format)
wb_file.close()
|
rcordovano/autopsy
|
release_scripts/localization_scripts/excelutil.py
|
Python
|
apache-2.0
| 2,871
|
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the del dns environment command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestDelDnsEnvironment(TestBrokerCommand):
def test_100_del_utenv(self):
command = ["del", "dns", "environment", "--dns_environment", "ut-env"]
self.noouttest(command)
def test_105_show_utenv(self):
command = ["show", "dns", "environment", "--dns_environment", "ut-env"]
out = self.notfoundtest(command)
self.matchoutput(out, "DNS Environment ut-env not found.", command)
    def test_200_del_nonexistent(self):
command = ["del", "dns", "environment", "--dns_environment", "no-such-env"]
out = self.notfoundtest(command)
self.matchoutput(out, "DNS Environment no-such-env not found.", command)
def test_200_del_internal(self):
command = ["del", "dns", "environment", "--dns_environment", "internal"]
out = self.badrequesttest(command)
self.matchoutput(out,
"DNS Environment internal is the default DNS "
"environment, therefore it cannot be deleted.",
command)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestDelDnsEnvironment)
unittest.TextTestRunner(verbosity=2).run(suite)
|
quattor/aquilon
|
tests/broker/test_del_dns_environment.py
|
Python
|
apache-2.0
| 2,135
|
#!/usr/bin/env python
import re
import sys
MINTEDOPS = r""" [
framesep=2mm,
baselinestretch=1.2,
fontsize=\footnotesize,
breaklines,
linenos
]
"""
MINTLANGMAPS={r'F\#': 'ocaml',
'Fsharp': 'ocaml',
'Scala': 'javascript',
'Go': 'go'}
def main():
TITLE=False
DIFFICULTY=False
CODE=False
LANG=None
TAGS=False
line=True
with sys.stdin as f:
while line:
line = f.readline().replace(r'\subsection{', r'\subsection*{').replace(r'\begin{verbatim}', r'\begin{lstlisting}').replace(r'\end{verbatim}', r'\end{lstlisting}')
if line.startswith(r'\subsection*{Tags}'):
TAGS=True
continue
if TAGS and len(line.strip()) > 0:
print '\n'.join(map(lambda x: r'\index{%s}' % x, map(str.strip, line.split(','))))
TAGS=False
continue
            if line.strip() == r'\documentclass[]{article}':
continue
if line.startswith(r'\section{Title}'):
TITLE=True
continue
if TITLE and len(line.strip()) > 0:
print r'\section{%s}' % line.strip()
TITLE=False
continue
if line.startswith(r'\subsection*{Difficulty}'):
DIFFICULTY=True
continue
if DIFFICULTY:
if line.startswith(r'\subsection*{'):
DIFFICULTY=False
print line,
continue
else:
continue
if line.startswith(r'\subsection*{'):
if 'Solution}' in line:
CODE=True
print line,
try: LANG=re.findall(r"section..([A-Za-z]+) [Ss]olution}", line, re.I)[0]
except IndexError:
LANG="javascript"
continue
if CODE and line.startswith(r'\begin{lstlisting}'):
print r'''\begin{mdframed}[linecolor=black, topline=true, bottomline=true, leftline=false, rightline=false, backgroundcolor=white, userdefinedwidth=\textwidth]'''
print r'\begin{minted}'
print MINTEDOPS,
print '{%s}' % MINTLANGMAPS.get(LANG, LANG.lower())
LANG=None
continue
if CODE and line.startswith(r'\end{lstlisting}'):
print r'\end{minted}'
print r'\end{mdframed}'
CODE=False
continue
print line,
if __name__ == '__main__':
main()
|
adrian17/ColossalOpera
|
book/process.py
|
Python
|
artistic-2.0
| 2,667
|
from __future__ import unicode_literals
try:
from django.urls import re_path, include
except ImportError:
# For Django 1.8 compatibility
from django.conf.urls import url as re_path, include
from django.contrib import admin
from django.shortcuts import render
from forms_builder.forms.models import Form
from forms_builder.forms import urls as form_urls
admin.autodiscover()
urlpatterns = [
re_path(r'^admin/', admin.site.urls),
re_path(r'^forms/', include(form_urls)),
re_path(r'^$', lambda request: render(request, "index.html",
{"forms": Form.objects.all()})),
]
|
nimbis/django-forms-builder
|
forms_builder/example_project/urls.py
|
Python
|
bsd-2-clause
| 632
|
# subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
r"""Subprocesses with accessible I/O streams
This module allows you to spawn processes, connect to their
input/output/error pipes, and obtain their return codes.
For a complete description of this module see the Python documentation.
Main API
========
run(...): Runs a command, waits for it to complete, then returns a
CompletedProcess instance.
Popen(...): A class for flexibly executing a command in a new process
Constants
---------
DEVNULL: Special value that indicates that os.devnull should be used
PIPE: Special value that indicates a pipe should be created
STDOUT: Special value that indicates that stderr should go to stdout
Older API
=========
call(...): Runs a command, waits for it to complete, then returns
the return code.
check_call(...): Same as call() but raises CalledProcessError()
if return code is not 0
check_output(...): Same as check_call() but returns the contents of
stdout instead of a return code
getoutput(...): Runs a command in the shell, waits for it to complete,
then returns the output
getstatusoutput(...): Runs a command in the shell, waits for it to complete,
then returns a (exitcode, output) tuple
"""
import builtins
import errno
import io
import os
import time
import signal
import sys
import threading
import warnings
import contextlib
from time import monotonic as _time
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
"getoutput", "check_output", "run", "CalledProcessError", "DEVNULL",
"SubprocessError", "TimeoutExpired", "CompletedProcess"]
# NOTE: We intentionally exclude list2cmdline as it is
# considered an internal implementation detail. issue10838.
try:
import msvcrt
import _winapi
_mswindows = True
except ModuleNotFoundError:
_mswindows = False
import _posixsubprocess
import select
import selectors
else:
from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
STD_ERROR_HANDLE, SW_HIDE,
STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW,
ABOVE_NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS,
HIGH_PRIORITY_CLASS, IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS, REALTIME_PRIORITY_CLASS,
CREATE_NO_WINDOW, DETACHED_PROCESS,
CREATE_DEFAULT_ERROR_MODE, CREATE_BREAKAWAY_FROM_JOB)
__all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
"STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
"STD_ERROR_HANDLE", "SW_HIDE",
"STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW",
"STARTUPINFO",
"ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
"HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
"NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS",
"CREATE_NO_WINDOW", "DETACHED_PROCESS",
"CREATE_DEFAULT_ERROR_MODE", "CREATE_BREAKAWAY_FROM_JOB"])
# Exception classes used by this module.
class SubprocessError(Exception): pass
class CalledProcessError(SubprocessError):
"""Raised when run() is called with check=True and the process
returns a non-zero exit status.
Attributes:
cmd, returncode, stdout, stderr, output
"""
def __init__(self, returncode, cmd, output=None, stderr=None):
self.returncode = returncode
self.cmd = cmd
self.output = output
self.stderr = stderr
def __str__(self):
if self.returncode and self.returncode < 0:
try:
return "Command '%s' died with %r." % (
self.cmd, signal.Signals(-self.returncode))
except ValueError:
return "Command '%s' died with unknown signal %d." % (
self.cmd, -self.returncode)
else:
return "Command '%s' returned non-zero exit status %d." % (
self.cmd, self.returncode)
@property
def stdout(self):
"""Alias for output attribute, to match stderr"""
return self.output
@stdout.setter
def stdout(self, value):
# There's no obvious reason to set this, but allow it anyway so
# .stdout is a transparent alias for .output
self.output = value
class TimeoutExpired(SubprocessError):
"""This exception is raised when the timeout expires while waiting for a
child process.
Attributes:
cmd, output, stdout, stderr, timeout
"""
def __init__(self, cmd, timeout, output=None, stderr=None):
self.cmd = cmd
self.timeout = timeout
self.output = output
self.stderr = stderr
def __str__(self):
return ("Command '%s' timed out after %s seconds" %
(self.cmd, self.timeout))
@property
def stdout(self):
return self.output
@stdout.setter
def stdout(self, value):
# There's no obvious reason to set this, but allow it anyway so
# .stdout is a transparent alias for .output
self.output = value
if _mswindows:
class STARTUPINFO:
def __init__(self, *, dwFlags=0, hStdInput=None, hStdOutput=None,
hStdError=None, wShowWindow=0, lpAttributeList=None):
self.dwFlags = dwFlags
self.hStdInput = hStdInput
self.hStdOutput = hStdOutput
self.hStdError = hStdError
self.wShowWindow = wShowWindow
self.lpAttributeList = lpAttributeList or {"handle_list": []}
def copy(self):
attr_list = self.lpAttributeList.copy()
if 'handle_list' in attr_list:
attr_list['handle_list'] = list(attr_list['handle_list'])
return STARTUPINFO(dwFlags=self.dwFlags,
hStdInput=self.hStdInput,
hStdOutput=self.hStdOutput,
hStdError=self.hStdError,
wShowWindow=self.wShowWindow,
lpAttributeList=attr_list)
class Handle(int):
closed = False
def Close(self, CloseHandle=_winapi.CloseHandle):
if not self.closed:
self.closed = True
CloseHandle(self)
def Detach(self):
if not self.closed:
self.closed = True
return int(self)
raise ValueError("already closed")
def __repr__(self):
return "%s(%d)" % (self.__class__.__name__, int(self))
__del__ = Close
else:
# When select or poll has indicated that the file is writable,
# we can write up to _PIPE_BUF bytes without risk of blocking.
# POSIX defines PIPE_BUF as >= 512.
_PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
# poll/select have the advantage of not requiring any extra file
# descriptor, contrarily to epoll/kqueue (also, they require a single
# syscall).
if hasattr(selectors, 'PollSelector'):
_PopenSelector = selectors.PollSelector
else:
_PopenSelector = selectors.SelectSelector
if _mswindows:
# On Windows we just need to close `Popen._handle` when we no longer need
# it, so that the kernel can free it. `Popen._handle` gets closed
# implicitly when the `Popen` instance is finalized (see `Handle.__del__`,
# which is calling `CloseHandle` as requested in [1]), so there is nothing
# for `_cleanup` to do.
#
# [1] https://docs.microsoft.com/en-us/windows/desktop/ProcThread/
# creating-processes
_active = None
def _cleanup():
pass
else:
    # This list holds Popen instances for which the underlying process had not
# exited at the time its __del__ method got called: those processes are
# wait()ed for synchronously from _cleanup() when a new Popen object is
# created, to avoid zombie processes.
_active = []
def _cleanup():
if _active is None:
return
for inst in _active[:]:
res = inst._internal_poll(_deadstate=sys.maxsize)
if res is not None:
try:
_active.remove(inst)
except ValueError:
# This can happen if two threads create a new Popen instance.
# It's harmless that it was already removed, so ignore.
pass
PIPE = -1
STDOUT = -2
DEVNULL = -3
# XXX This function is only used by multiprocessing and the test suite,
# but it's here so that it can be imported when Python is compiled without
# threads.
def _optim_args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
optimization settings in sys.flags."""
args = []
value = sys.flags.optimize
if value > 0:
args.append('-' + 'O' * value)
return args
def _args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
settings in sys.flags, sys.warnoptions and sys._xoptions."""
flag_opt_map = {
'debug': 'd',
# 'inspect': 'i',
# 'interactive': 'i',
'dont_write_bytecode': 'B',
'no_site': 'S',
'verbose': 'v',
'bytes_warning': 'b',
'quiet': 'q',
# -O is handled in _optim_args_from_interpreter_flags()
}
args = _optim_args_from_interpreter_flags()
for flag, opt in flag_opt_map.items():
v = getattr(sys.flags, flag)
if v > 0:
args.append('-' + opt * v)
if sys.flags.isolated:
args.append('-I')
else:
if sys.flags.ignore_environment:
args.append('-E')
if sys.flags.no_user_site:
args.append('-s')
# -W options
warnopts = sys.warnoptions[:]
bytes_warning = sys.flags.bytes_warning
xoptions = getattr(sys, '_xoptions', {})
dev_mode = ('dev' in xoptions)
if bytes_warning > 1:
warnopts.remove("error::BytesWarning")
elif bytes_warning:
warnopts.remove("default::BytesWarning")
if dev_mode:
warnopts.remove('default')
for opt in warnopts:
args.append('-W' + opt)
# -X options
if dev_mode:
args.extend(('-X', 'dev'))
for opt in ('faulthandler', 'tracemalloc', 'importtime',
'showalloccount', 'showrefcount', 'utf8'):
if opt in xoptions:
value = xoptions[opt]
if value is True:
arg = opt
else:
arg = '%s=%s' % (opt, value)
args.extend(('-X', arg))
return args
def call(*popenargs, timeout=None, **kwargs):
"""Run command with arguments. Wait for command to complete or
timeout, then return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
"""
with Popen(*popenargs, **kwargs) as p:
try:
return p.wait(timeout=timeout)
except: # Including KeyboardInterrupt, wait handled that.
p.kill()
# We don't call p.wait() again as p.__exit__ does that for us.
raise
def check_call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete. If
the exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
The arguments are the same as for the call function. Example:
check_call(["ls", "-l"])
"""
retcode = call(*popenargs, **kwargs)
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd)
return 0
def check_output(*popenargs, timeout=None, **kwargs):
r"""Run command with arguments and return its output.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
The arguments are the same as for the Popen constructor. Example:
>>> check_output(["ls", "-l", "/dev/null"])
b'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use stderr=STDOUT.
>>> check_output(["/bin/sh", "-c",
... "ls -l non_existent_file ; exit 0"],
... stderr=STDOUT)
b'ls: non_existent_file: No such file or directory\n'
There is an additional optional argument, "input", allowing you to
pass a string to the subprocess's stdin. If you use this argument
you may not also use the Popen constructor's "stdin" argument, as
it too will be used internally. Example:
>>> check_output(["sed", "-e", "s/foo/bar/"],
... input=b"when in the course of fooman events\n")
b'when in the course of barman events\n'
By default, all communication is in bytes, and therefore any "input"
should be bytes, and the return value will be bytes. If in text mode,
any "input" should be a string, and the return value will be a string
decoded according to locale encoding, or by "encoding" if set. Text mode
is triggered by setting any of text, encoding, errors or universal_newlines.
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
if 'input' in kwargs and kwargs['input'] is None:
# Explicitly passing input=None was previously equivalent to passing an
# empty string. That is maintained here for backwards compatibility.
kwargs['input'] = '' if kwargs.get('universal_newlines', False) else b''
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
**kwargs).stdout
class CompletedProcess(object):
"""A process that has finished running.
This is returned by run().
Attributes:
args: The list or str args passed to run().
returncode: The exit code of the process, negative for signals.
stdout: The standard output (None if not captured).
stderr: The standard error (None if not captured).
"""
def __init__(self, args, returncode, stdout=None, stderr=None):
self.args = args
self.returncode = returncode
self.stdout = stdout
self.stderr = stderr
def __repr__(self):
args = ['args={!r}'.format(self.args),
'returncode={!r}'.format(self.returncode)]
if self.stdout is not None:
args.append('stdout={!r}'.format(self.stdout))
if self.stderr is not None:
args.append('stderr={!r}'.format(self.stderr))
return "{}({})".format(type(self).__name__, ', '.join(args))
def check_returncode(self):
"""Raise CalledProcessError if the exit code is non-zero."""
if self.returncode:
raise CalledProcessError(self.returncode, self.args, self.stdout,
self.stderr)
def run(*popenargs,
input=None, capture_output=False, timeout=None, check=False, **kwargs):
"""Run command with arguments and return a CompletedProcess instance.
The returned instance will have attributes args, returncode, stdout and
stderr. By default, stdout and stderr are not captured, and those attributes
will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them.
If check is True and the exit code was non-zero, it raises a
CalledProcessError. The CalledProcessError object will have the return code
in the returncode attribute, and output & stderr attributes if those streams
were captured.
If timeout is given, and the process takes too long, a TimeoutExpired
exception will be raised.
There is an optional argument "input", allowing you to
pass bytes or a string to the subprocess's stdin. If you use this argument
you may not also use the Popen constructor's "stdin" argument, as
it will be used internally.
By default, all communication is in bytes, and therefore any "input" should
be bytes, and the stdout and stderr will be bytes. If in text mode, any
"input" should be a string, and stdout and stderr will be strings decoded
according to locale encoding, or by "encoding" if set. Text mode is
triggered by setting any of text, encoding, errors or universal_newlines.
The other arguments are the same as for the Popen constructor.
"""
if input is not None:
if kwargs.get('stdin') is not None:
raise ValueError('stdin and input arguments may not both be used.')
kwargs['stdin'] = PIPE
if capture_output:
if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
raise ValueError('stdout and stderr arguments may not be used '
'with capture_output.')
kwargs['stdout'] = PIPE
kwargs['stderr'] = PIPE
with Popen(*popenargs, **kwargs) as process:
try:
stdout, stderr = process.communicate(input, timeout=timeout)
except TimeoutExpired as exc:
process.kill()
if _mswindows:
# Windows accumulates the output in a single blocking
# read() call run on child threads, with the timeout
# being done in a join() on those threads. communicate()
# _after_ kill() is required to collect that and add it
# to the exception.
exc.stdout, exc.stderr = process.communicate()
else:
# POSIX _communicate already populated the output so
# far into the TimeoutExpired exception.
process.wait()
raise
except: # Including KeyboardInterrupt, communicate handled that.
process.kill()
# We don't call process.wait() as .__exit__ does that for us.
raise
retcode = process.poll()
if check and retcode:
raise CalledProcessError(retcode, process.args,
output=stdout, stderr=stderr)
return CompletedProcess(process.args, retcode, stdout, stderr)
def list2cmdline(seq):
"""
Translate a sequence of arguments into a command line
string, using the same rules as the MS C runtime:
1) Arguments are delimited by white space, which is either a
space or a tab.
2) A string surrounded by double quotation marks is
interpreted as a single argument, regardless of white space
contained within. A quoted string can be embedded in an
argument.
3) A double quotation mark preceded by a backslash is
interpreted as a literal double quotation mark.
4) Backslashes are interpreted literally, unless they
immediately precede a double quotation mark.
5) If backslashes immediately precede a double quotation mark,
every pair of backslashes is interpreted as a literal
backslash. If the number of backslashes is odd, the last
backslash escapes the next double quotation mark as
described in rule 3.
"""
# See
# http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
# or search http://msdn.microsoft.com for
# "Parsing C++ Command-Line Arguments"
result = []
needquote = False
for arg in map(os.fsdecode, seq):
bs_buf = []
# Add a space to separate this argument from the others
if result:
result.append(' ')
needquote = (" " in arg) or ("\t" in arg) or not arg
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
# Double backslashes.
result.append('\\' * len(bs_buf)*2)
bs_buf = []
result.append('\\"')
else:
# Normal char
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
# Add remaining backslashes, if any.
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
# Various tools for executing commands and looking at their output and status.
#
def getstatusoutput(cmd):
"""Return (exitcode, output) of executing cmd in a shell.
Execute the string 'cmd' in a shell with 'check_output' and
return a 2-tuple (status, output). The locale encoding is used
to decode the output and process newlines.
A trailing newline is stripped from the output.
The exit status for the command can be interpreted
according to the rules for the function 'wait'. Example:
>>> import subprocess
>>> subprocess.getstatusoutput('ls /bin/ls')
(0, '/bin/ls')
>>> subprocess.getstatusoutput('cat /bin/junk')
(1, 'cat: /bin/junk: No such file or directory')
>>> subprocess.getstatusoutput('/bin/junk')
(127, 'sh: /bin/junk: not found')
>>> subprocess.getstatusoutput('/bin/kill $$')
(-15, '')
"""
try:
data = check_output(cmd, shell=True, text=True, stderr=STDOUT)
exitcode = 0
except CalledProcessError as ex:
data = ex.output
exitcode = ex.returncode
if data[-1:] == '\n':
data = data[:-1]
return exitcode, data
def getoutput(cmd):
"""Return output (stdout or stderr) of executing cmd in a shell.
Like getstatusoutput(), except the exit status is ignored and the return
value is a string containing the command's output. Example:
>>> import subprocess
>>> subprocess.getoutput('ls /bin/ls')
'/bin/ls'
"""
return getstatusoutput(cmd)[1]
def _use_posix_spawn():
"""Check if posix_spawn() can be used for subprocess.
subprocess requires a posix_spawn() implementation that properly reports
errors to the parent process, & sets errno on the following failures:
* Process attribute actions failed.
* File actions failed.
* exec() failed.
Prefer an implementation which can use vfork() in some cases for best
performance.
"""
if _mswindows or not hasattr(os, 'posix_spawn'):
# os.posix_spawn() is not available
return False
if sys.platform == 'darwin':
# posix_spawn() is a syscall on macOS and properly reports errors
return True
# Check libc name and runtime libc version
try:
ver = os.confstr('CS_GNU_LIBC_VERSION')
# parse 'glibc 2.28' as ('glibc', (2, 28))
parts = ver.split(maxsplit=1)
if len(parts) != 2:
# reject unknown format
raise ValueError
libc = parts[0]
version = tuple(map(int, parts[1].split('.')))
if sys.platform == 'linux' and libc == 'glibc' and version >= (2, 24):
# glibc 2.24 has a new Linux posix_spawn implementation using vfork
# which properly reports errors to the parent process.
return True
# Note: Don't use the implementation in earlier glibc because it doesn't
# use vfork (even if glibc 2.26 added a pipe to properly report errors
# to the parent process).
except (AttributeError, ValueError, OSError):
# os.confstr() or CS_GNU_LIBC_VERSION value not available
pass
# By default, assume that posix_spawn() does not properly report errors.
return False
_USE_POSIX_SPAWN = _use_posix_spawn()
class Popen(object):
""" Execute a child program in a new process.
For a complete description of the arguments see the Python documentation.
Arguments:
args: A string, or a sequence of program arguments.
bufsize: supplied as the buffering argument to the open() function when
creating the stdin/stdout/stderr pipe file objects
executable: A replacement program to execute.
stdin, stdout and stderr: These specify the executed programs' standard
input, standard output and standard error file handles, respectively.
preexec_fn: (POSIX only) An object to be called in the child process
just before the child is executed.
close_fds: Controls closing or inheriting of file descriptors.
shell: If true, the command will be executed through the shell.
cwd: Sets the current directory before the child is executed.
env: Defines the environment variables for the new process.
text: If true, decode stdin, stdout and stderr using the given encoding
(if set) or the system default otherwise.
universal_newlines: Alias of text, provided for backwards compatibility.
startupinfo and creationflags (Windows only)
restore_signals (POSIX only)
start_new_session (POSIX only)
pass_fds (POSIX only)
encoding and errors: Text mode encoding and error handling to use for
file objects stdin, stdout and stderr.
Attributes:
stdin, stdout, stderr, pid, returncode
"""
_child_created = False # Set here since __del__ checks it
def __init__(self, args, bufsize=-1, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=True,
shell=False, cwd=None, env=None, universal_newlines=None,
startupinfo=None, creationflags=0,
restore_signals=True, start_new_session=False,
pass_fds=(), *, encoding=None, errors=None, text=None):
"""Create new Popen instance."""
_cleanup()
# Held while anything is calling waitpid before returncode has been
# updated to prevent clobbering returncode if wait() or poll() are
# called from multiple threads at once. After acquiring the lock,
# code must re-check self.returncode to see if another thread just
# finished a waitpid() call.
self._waitpid_lock = threading.Lock()
self._input = None
self._communication_started = False
if bufsize is None:
bufsize = -1 # Restore default
if not isinstance(bufsize, int):
raise TypeError("bufsize must be an integer")
if _mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows "
"platforms")
else:
# POSIX
if pass_fds and not close_fds:
warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
close_fds = True
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows "
"platforms")
if creationflags != 0:
raise ValueError("creationflags is only supported on Windows "
"platforms")
self.args = args
self.stdin = None
self.stdout = None
self.stderr = None
self.pid = None
self.returncode = None
self.encoding = encoding
self.errors = errors
# Validate the combinations of text and universal_newlines
if (text is not None and universal_newlines is not None
and bool(universal_newlines) != bool(text)):
raise SubprocessError('Cannot disambiguate when both text '
'and universal_newlines are supplied but '
'different. Pass one or the other.')
# Input and output objects. The general principle is like
# this:
#
# Parent Child
# ------ -----
# p2cwrite ---stdin---> p2cread
# c2pread <--stdout--- c2pwrite
# errread <--stderr--- errwrite
#
# On POSIX, the child objects are file descriptors. On
# Windows, these are Windows file handles. The parent objects
# are file descriptors on both platforms. The parent objects
# are -1 when not using PIPEs. The child objects are -1
# when not redirecting.
(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
# We wrap OS handles *before* launching the child, otherwise a
# quickly terminating child could make our fds unwrappable
# (see #8458).
if _mswindows:
if p2cwrite != -1:
p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
if c2pread != -1:
c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
if errread != -1:
errread = msvcrt.open_osfhandle(errread.Detach(), 0)
self.text_mode = encoding or errors or text or universal_newlines
# How long to resume waiting on a child after the first ^C.
# There is no right value for this. The purpose is to be polite
# yet remain good for interactive users trying to exit a tool.
self._sigint_wait_secs = 0.25 # 1/xkcd221.getRandomNumber()
self._closed_child_pipe_fds = False
if self.text_mode:
if bufsize == 1:
line_buffering = True
# Use the default buffer size for the underlying binary streams
# since they don't support line buffering.
bufsize = -1
else:
line_buffering = False
try:
if p2cwrite != -1:
self.stdin = io.open(p2cwrite, 'wb', bufsize)
if self.text_mode:
self.stdin = io.TextIOWrapper(self.stdin, write_through=True,
line_buffering=line_buffering,
encoding=encoding, errors=errors)
if c2pread != -1:
self.stdout = io.open(c2pread, 'rb', bufsize)
if self.text_mode:
self.stdout = io.TextIOWrapper(self.stdout,
encoding=encoding, errors=errors)
if errread != -1:
self.stderr = io.open(errread, 'rb', bufsize)
if self.text_mode:
self.stderr = io.TextIOWrapper(self.stderr,
encoding=encoding, errors=errors)
self._execute_child(args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
restore_signals, start_new_session)
except:
# Cleanup if the child failed starting.
for f in filter(None, (self.stdin, self.stdout, self.stderr)):
try:
f.close()
except OSError:
pass # Ignore EBADF or other errors.
if not self._closed_child_pipe_fds:
to_close = []
if stdin == PIPE:
to_close.append(p2cread)
if stdout == PIPE:
to_close.append(c2pwrite)
if stderr == PIPE:
to_close.append(errwrite)
if hasattr(self, '_devnull'):
to_close.append(self._devnull)
for fd in to_close:
try:
if _mswindows and isinstance(fd, Handle):
fd.Close()
else:
os.close(fd)
except OSError:
pass
raise
@property
def universal_newlines(self):
        # universal_newlines is retained as an alias of text_mode for API
        # compatibility. bpo-31756
return self.text_mode
@universal_newlines.setter
def universal_newlines(self, universal_newlines):
self.text_mode = bool(universal_newlines)
def _translate_newlines(self, data, encoding, errors):
data = data.decode(encoding, errors)
return data.replace("\r\n", "\n").replace("\r", "\n")
def __enter__(self):
return self
def __exit__(self, exc_type, value, traceback):
if self.stdout:
self.stdout.close()
if self.stderr:
self.stderr.close()
try: # Flushing a BufferedWriter may raise an error
if self.stdin:
self.stdin.close()
finally:
if exc_type == KeyboardInterrupt:
# https://bugs.python.org/issue25942
# In the case of a KeyboardInterrupt we assume the SIGINT
# was also already sent to our child processes. We can't
# block indefinitely as that is not user friendly.
# If we have not already waited a brief amount of time in
# an interrupted .wait() or .communicate() call, do so here
# for consistency.
if self._sigint_wait_secs > 0:
try:
self._wait(timeout=self._sigint_wait_secs)
except TimeoutExpired:
pass
self._sigint_wait_secs = 0 # Note that this has been done.
return # resume the KeyboardInterrupt
# Wait for the process to terminate, to avoid zombies.
self.wait()
def __del__(self, _maxsize=sys.maxsize, _warn=warnings.warn):
if not self._child_created:
# We didn't get to successfully create a child process.
return
if self.returncode is None:
# Not reading subprocess exit status creates a zombie process which
# is only destroyed at the parent python process exit
_warn("subprocess %s is still running" % self.pid,
ResourceWarning, source=self)
# In case the child hasn't been waited on, check if it's done.
self._internal_poll(_deadstate=_maxsize)
if self.returncode is None and _active is not None:
# Child is still running, keep us alive until we can wait on it.
_active.append(self)
def _get_devnull(self):
if not hasattr(self, '_devnull'):
self._devnull = os.open(os.devnull, os.O_RDWR)
return self._devnull
def _stdin_write(self, input):
if input:
try:
self.stdin.write(input)
except BrokenPipeError:
pass # communicate() must ignore broken pipe errors.
except OSError as exc:
if exc.errno == errno.EINVAL:
# bpo-19612, bpo-30418: On Windows, stdin.write() fails
# with EINVAL if the child process exited or if the child
# process is still running but closed the pipe.
pass
else:
raise
try:
self.stdin.close()
except BrokenPipeError:
pass # communicate() must ignore broken pipe errors.
except OSError as exc:
if exc.errno == errno.EINVAL:
pass
else:
raise
def communicate(self, input=None, timeout=None):
"""Interact with process: Send data to stdin and close it.
Read data from stdout and stderr, until end-of-file is
reached. Wait for process to terminate.
The optional "input" argument should be data to be sent to the
child process, or None, if no data should be sent to the child.
communicate() returns a tuple (stdout, stderr).
By default, all communication is in bytes, and therefore any
"input" should be bytes, and the (stdout, stderr) will be bytes.
If in text mode (indicated by self.text_mode), any "input" should
be a string, and (stdout, stderr) will be strings decoded
according to locale encoding, or by "encoding" if set. Text mode
is triggered by setting any of text, encoding, errors or
universal_newlines.
"""
if self._communication_started and input:
raise ValueError("Cannot send input after starting communication")
# Optimization: If we are not worried about timeouts, we haven't
# started communicating, and we have one or zero pipes, using select()
# or threads is unnecessary.
if (timeout is None and not self._communication_started and
[self.stdin, self.stdout, self.stderr].count(None) >= 2):
stdout = None
stderr = None
if self.stdin:
self._stdin_write(input)
elif self.stdout:
stdout = self.stdout.read()
self.stdout.close()
elif self.stderr:
stderr = self.stderr.read()
self.stderr.close()
self.wait()
else:
if timeout is not None:
endtime = _time() + timeout
else:
endtime = None
try:
stdout, stderr = self._communicate(input, endtime, timeout)
except KeyboardInterrupt:
# https://bugs.python.org/issue25942
# See the detailed comment in .wait().
if timeout is not None:
sigint_timeout = min(self._sigint_wait_secs,
self._remaining_time(endtime))
else:
sigint_timeout = self._sigint_wait_secs
self._sigint_wait_secs = 0 # nothing else should wait.
try:
self._wait(timeout=sigint_timeout)
except TimeoutExpired:
pass
raise # resume the KeyboardInterrupt
finally:
self._communication_started = True
sts = self.wait(timeout=self._remaining_time(endtime))
return (stdout, stderr)
def poll(self):
"""Check if child process has terminated. Set and return returncode
attribute."""
return self._internal_poll()
def _remaining_time(self, endtime):
"""Convenience for _communicate when computing timeouts."""
if endtime is None:
return None
else:
return endtime - _time()
def _check_timeout(self, endtime, orig_timeout, stdout_seq, stderr_seq,
skip_check_and_raise=False):
"""Convenience for checking if a timeout has expired."""
if endtime is None:
return
if skip_check_and_raise or _time() > endtime:
raise TimeoutExpired(
self.args, orig_timeout,
output=b''.join(stdout_seq) if stdout_seq else None,
stderr=b''.join(stderr_seq) if stderr_seq else None)
def wait(self, timeout=None):
"""Wait for child process to terminate; returns self.returncode."""
if timeout is not None:
endtime = _time() + timeout
try:
return self._wait(timeout=timeout)
except KeyboardInterrupt:
# https://bugs.python.org/issue25942
# The first keyboard interrupt waits briefly for the child to
# exit under the common assumption that it also received the ^C
# generated SIGINT and will exit rapidly.
if timeout is not None:
sigint_timeout = min(self._sigint_wait_secs,
self._remaining_time(endtime))
else:
sigint_timeout = self._sigint_wait_secs
self._sigint_wait_secs = 0 # nothing else should wait.
try:
self._wait(timeout=sigint_timeout)
except TimeoutExpired:
pass
raise # resume the KeyboardInterrupt
def _close_pipe_fds(self,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
# self._devnull is not always defined.
devnull_fd = getattr(self, '_devnull', None)
with contextlib.ExitStack() as stack:
if _mswindows:
if p2cread != -1:
stack.callback(p2cread.Close)
if c2pwrite != -1:
stack.callback(c2pwrite.Close)
if errwrite != -1:
stack.callback(errwrite.Close)
else:
if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
stack.callback(os.close, p2cread)
if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
stack.callback(os.close, c2pwrite)
if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
stack.callback(os.close, errwrite)
if devnull_fd is not None:
stack.callback(os.close, devnull_fd)
# Prevent a double close of these handles/fds from __init__ on error.
self._closed_child_pipe_fds = True
if _mswindows:
#
# Windows methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
if stdin is None and stdout is None and stderr is None:
return (-1, -1, -1, -1, -1, -1)
p2cread, p2cwrite = -1, -1
c2pread, c2pwrite = -1, -1
errread, errwrite = -1, -1
if stdin is None:
p2cread = _winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)
if p2cread is None:
p2cread, _ = _winapi.CreatePipe(None, 0)
p2cread = Handle(p2cread)
_winapi.CloseHandle(_)
elif stdin == PIPE:
p2cread, p2cwrite = _winapi.CreatePipe(None, 0)
p2cread, p2cwrite = Handle(p2cread), Handle(p2cwrite)
elif stdin == DEVNULL:
p2cread = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdin, int):
p2cread = msvcrt.get_osfhandle(stdin)
else:
# Assuming file-like object
p2cread = msvcrt.get_osfhandle(stdin.fileno())
p2cread = self._make_inheritable(p2cread)
if stdout is None:
c2pwrite = _winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)
if c2pwrite is None:
_, c2pwrite = _winapi.CreatePipe(None, 0)
c2pwrite = Handle(c2pwrite)
_winapi.CloseHandle(_)
elif stdout == PIPE:
c2pread, c2pwrite = _winapi.CreatePipe(None, 0)
c2pread, c2pwrite = Handle(c2pread), Handle(c2pwrite)
elif stdout == DEVNULL:
c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdout, int):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
# Assuming file-like object
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
c2pwrite = self._make_inheritable(c2pwrite)
if stderr is None:
errwrite = _winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)
if errwrite is None:
_, errwrite = _winapi.CreatePipe(None, 0)
errwrite = Handle(errwrite)
_winapi.CloseHandle(_)
elif stderr == PIPE:
errread, errwrite = _winapi.CreatePipe(None, 0)
errread, errwrite = Handle(errread), Handle(errwrite)
elif stderr == STDOUT:
errwrite = c2pwrite
elif stderr == DEVNULL:
errwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
# Assuming file-like object
errwrite = msvcrt.get_osfhandle(stderr.fileno())
errwrite = self._make_inheritable(errwrite)
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _make_inheritable(self, handle):
"""Return a duplicate of handle, which is inheritable"""
h = _winapi.DuplicateHandle(
_winapi.GetCurrentProcess(), handle,
_winapi.GetCurrentProcess(), 0, 1,
_winapi.DUPLICATE_SAME_ACCESS)
return Handle(h)
def _filter_handle_list(self, handle_list):
"""Filter out console handles that can't be used
in lpAttributeList["handle_list"] and make sure the list
isn't empty. This also removes duplicate handles."""
            # A handle with its lowest two bits set might be a special console
            # handle that, if passed in lpAttributeList["handle_list"], will
            # cause it to fail.
return list({handle for handle in handle_list
if handle & 0x3 != 0x3
or _winapi.GetFileType(handle) !=
_winapi.FILE_TYPE_CHAR})
def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
unused_restore_signals, unused_start_new_session):
"""Execute program (MS Windows version)"""
assert not pass_fds, "pass_fds not supported on Windows."
if isinstance(args, str):
pass
elif isinstance(args, bytes):
if shell:
raise TypeError('bytes args is not allowed on Windows')
args = list2cmdline([args])
elif isinstance(args, os.PathLike):
if shell:
raise TypeError('path-like args is not allowed when '
'shell is true')
args = list2cmdline([args])
else:
args = list2cmdline(args)
if executable is not None:
executable = os.fsdecode(executable)
# Process startup details
if startupinfo is None:
startupinfo = STARTUPINFO()
else:
# bpo-34044: Copy STARTUPINFO since it is modified above,
# so the caller can reuse it multiple times.
startupinfo = startupinfo.copy()
use_std_handles = -1 not in (p2cread, c2pwrite, errwrite)
if use_std_handles:
startupinfo.dwFlags |= _winapi.STARTF_USESTDHANDLES
startupinfo.hStdInput = p2cread
startupinfo.hStdOutput = c2pwrite
startupinfo.hStdError = errwrite
attribute_list = startupinfo.lpAttributeList
have_handle_list = bool(attribute_list and
"handle_list" in attribute_list and
attribute_list["handle_list"])
            # If we were given a handle_list or need to create one
if have_handle_list or (use_std_handles and close_fds):
if attribute_list is None:
attribute_list = startupinfo.lpAttributeList = {}
handle_list = attribute_list["handle_list"] = \
list(attribute_list.get("handle_list", []))
if use_std_handles:
handle_list += [int(p2cread), int(c2pwrite), int(errwrite)]
handle_list[:] = self._filter_handle_list(handle_list)
if handle_list:
if not close_fds:
warnings.warn("startupinfo.lpAttributeList['handle_list'] "
"overriding close_fds", RuntimeWarning)
# When using the handle_list we always request to inherit
# handles but the only handles that will be inherited are
# the ones in the handle_list
close_fds = False
if shell:
startupinfo.dwFlags |= _winapi.STARTF_USESHOWWINDOW
startupinfo.wShowWindow = _winapi.SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = '{} /c "{}"'.format (comspec, args)
if cwd is not None:
cwd = os.fsdecode(cwd)
sys.audit("subprocess.Popen", executable, args, cwd, env)
# Start the process
try:
hp, ht, pid, tid = _winapi.CreateProcess(executable, args,
# no special security
None, None,
int(not close_fds),
creationflags,
env,
cwd,
startupinfo)
finally:
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
# to make sure that no handles to the write end of the
# output pipe are maintained in this process or else the
# pipe will not close when the child process exits and the
# ReadFile will hang.
self._close_pipe_fds(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
# Retain the process handle, but close the thread handle
self._child_created = True
self._handle = Handle(hp)
self.pid = pid
_winapi.CloseHandle(ht)
def _internal_poll(self, _deadstate=None,
_WaitForSingleObject=_winapi.WaitForSingleObject,
_WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,
_GetExitCodeProcess=_winapi.GetExitCodeProcess):
"""Check if child process has terminated. Returns returncode
attribute.
This method is called by __del__, so it can only refer to objects
in its local scope.
"""
if self.returncode is None:
if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
self.returncode = _GetExitCodeProcess(self._handle)
return self.returncode
def _wait(self, timeout):
"""Internal implementation of wait() on Windows."""
if timeout is None:
timeout_millis = _winapi.INFINITE
else:
timeout_millis = int(timeout * 1000)
if self.returncode is None:
# API note: Returns immediately if timeout_millis == 0.
result = _winapi.WaitForSingleObject(self._handle,
timeout_millis)
if result == _winapi.WAIT_TIMEOUT:
raise TimeoutExpired(self.args, timeout)
self.returncode = _winapi.GetExitCodeProcess(self._handle)
return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
fh.close()
def _communicate(self, input, endtime, orig_timeout):
# Start reader threads feeding into a list hanging off of this
# object, unless they've already been started.
if self.stdout and not hasattr(self, "_stdout_buff"):
self._stdout_buff = []
self.stdout_thread = \
threading.Thread(target=self._readerthread,
args=(self.stdout, self._stdout_buff))
self.stdout_thread.daemon = True
self.stdout_thread.start()
if self.stderr and not hasattr(self, "_stderr_buff"):
self._stderr_buff = []
self.stderr_thread = \
threading.Thread(target=self._readerthread,
args=(self.stderr, self._stderr_buff))
self.stderr_thread.daemon = True
self.stderr_thread.start()
if self.stdin:
self._stdin_write(input)
# Wait for the reader threads, or time out. If we time out, the
# threads remain reading and the fds left open in case the user
# calls communicate again.
if self.stdout is not None:
self.stdout_thread.join(self._remaining_time(endtime))
if self.stdout_thread.is_alive():
raise TimeoutExpired(self.args, orig_timeout)
if self.stderr is not None:
self.stderr_thread.join(self._remaining_time(endtime))
if self.stderr_thread.is_alive():
raise TimeoutExpired(self.args, orig_timeout)
# Collect the output from and close both pipes, now that we know
# both have been read successfully.
stdout = None
stderr = None
if self.stdout:
stdout = self._stdout_buff
self.stdout.close()
if self.stderr:
stderr = self._stderr_buff
self.stderr.close()
# All data exchanged. Translate lists into strings.
if stdout is not None:
stdout = stdout[0]
if stderr is not None:
stderr = stderr[0]
return (stdout, stderr)
def send_signal(self, sig):
"""Send a signal to the process."""
# Don't signal a process that we know has already died.
if self.returncode is not None:
return
if sig == signal.SIGTERM:
self.terminate()
elif sig == signal.CTRL_C_EVENT:
os.kill(self.pid, signal.CTRL_C_EVENT)
elif sig == signal.CTRL_BREAK_EVENT:
os.kill(self.pid, signal.CTRL_BREAK_EVENT)
else:
raise ValueError("Unsupported signal: {}".format(sig))
def terminate(self):
"""Terminates the process."""
# Don't terminate a process that we know has already died.
if self.returncode is not None:
return
try:
_winapi.TerminateProcess(self._handle, 1)
except PermissionError:
# ERROR_ACCESS_DENIED (winerror 5) is received when the
# process already died.
rc = _winapi.GetExitCodeProcess(self._handle)
if rc == _winapi.STILL_ACTIVE:
raise
self.returncode = rc
kill = terminate
else:
#
# POSIX methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
p2cread, p2cwrite = -1, -1
c2pread, c2pwrite = -1, -1
errread, errwrite = -1, -1
if stdin is None:
pass
elif stdin == PIPE:
p2cread, p2cwrite = os.pipe()
elif stdin == DEVNULL:
p2cread = self._get_devnull()
elif isinstance(stdin, int):
p2cread = stdin
else:
# Assuming file-like object
p2cread = stdin.fileno()
if stdout is None:
pass
elif stdout == PIPE:
c2pread, c2pwrite = os.pipe()
elif stdout == DEVNULL:
c2pwrite = self._get_devnull()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
# Assuming file-like object
c2pwrite = stdout.fileno()
if stderr is None:
pass
elif stderr == PIPE:
errread, errwrite = os.pipe()
elif stderr == STDOUT:
if c2pwrite != -1:
errwrite = c2pwrite
else: # child's stdout is not set, use parent's stdout
errwrite = sys.__stdout__.fileno()
elif stderr == DEVNULL:
errwrite = self._get_devnull()
elif isinstance(stderr, int):
errwrite = stderr
else:
# Assuming file-like object
errwrite = stderr.fileno()
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _posix_spawn(self, args, executable, env, restore_signals,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program using os.posix_spawn()."""
if env is None:
env = os.environ
kwargs = {}
if restore_signals:
# See _Py_RestoreSignals() in Python/pylifecycle.c
sigset = []
for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
signum = getattr(signal, signame, None)
if signum is not None:
sigset.append(signum)
kwargs['setsigdef'] = sigset
file_actions = []
for fd in (p2cwrite, c2pread, errread):
if fd != -1:
file_actions.append((os.POSIX_SPAWN_CLOSE, fd))
for fd, fd2 in (
(p2cread, 0),
(c2pwrite, 1),
(errwrite, 2),
):
if fd != -1:
file_actions.append((os.POSIX_SPAWN_DUP2, fd, fd2))
if file_actions:
kwargs['file_actions'] = file_actions
self.pid = os.posix_spawn(executable, args, env, **kwargs)
self._child_created = True
self._close_pipe_fds(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
restore_signals, start_new_session):
"""Execute program (POSIX version)"""
if isinstance(args, (str, bytes)):
args = [args]
elif isinstance(args, os.PathLike):
if shell:
raise TypeError('path-like args is not allowed when '
'shell is true')
args = [args]
else:
args = list(args)
if shell:
# On Android the default shell is at '/system/bin/sh'.
unix_shell = ('/system/bin/sh' if
hasattr(sys, 'getandroidapilevel') else '/bin/sh')
args = [unix_shell, "-c"] + args
if executable:
args[0] = executable
if executable is None:
executable = args[0]
sys.audit("subprocess.Popen", executable, args, cwd, env)
if (_USE_POSIX_SPAWN
and os.path.dirname(executable)
and preexec_fn is None
and not close_fds
and not pass_fds
and cwd is None
and (p2cread == -1 or p2cread > 2)
and (c2pwrite == -1 or c2pwrite > 2)
and (errwrite == -1 or errwrite > 2)
and not start_new_session):
self._posix_spawn(args, executable, env, restore_signals,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
return
orig_executable = executable
# For transferring possible exec failure from child to parent.
# Data format: "exception name:hex errno:description"
# Pickle is not used; it is complex and involves memory allocation.
errpipe_read, errpipe_write = os.pipe()
# errpipe_write must not be in the standard io 0, 1, or 2 fd range.
low_fds_to_close = []
while errpipe_write < 3:
low_fds_to_close.append(errpipe_write)
errpipe_write = os.dup(errpipe_write)
for low_fd in low_fds_to_close:
os.close(low_fd)
try:
try:
# We must avoid complex work that could involve
# malloc or free in the child process to avoid
# potential deadlocks, thus we do all this here.
# and pass it to fork_exec()
if env is not None:
env_list = []
for k, v in env.items():
k = os.fsencode(k)
if b'=' in k:
raise ValueError("illegal environment variable name")
env_list.append(k + b'=' + os.fsencode(v))
else:
env_list = None # Use execv instead of execve.
executable = os.fsencode(executable)
if os.path.dirname(executable):
executable_list = (executable,)
else:
# This matches the behavior of os._execvpe().
executable_list = tuple(
os.path.join(os.fsencode(dir), executable)
for dir in os.get_exec_path(env))
fds_to_keep = set(pass_fds)
fds_to_keep.add(errpipe_write)
self.pid = _posixsubprocess.fork_exec(
args, executable_list,
close_fds, tuple(sorted(map(int, fds_to_keep))),
cwd, env_list,
p2cread, p2cwrite, c2pread, c2pwrite,
errread, errwrite,
errpipe_read, errpipe_write,
restore_signals, start_new_session, preexec_fn)
self._child_created = True
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)
self._close_pipe_fds(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
errpipe_data = bytearray()
while True:
part = os.read(errpipe_read, 50000)
errpipe_data += part
if not part or len(errpipe_data) > 50000:
break
finally:
# be sure the FD is closed no matter what
os.close(errpipe_read)
if errpipe_data:
try:
pid, sts = os.waitpid(self.pid, 0)
if pid == self.pid:
self._handle_exitstatus(sts)
else:
self.returncode = sys.maxsize
except ChildProcessError:
pass
try:
exception_name, hex_errno, err_msg = (
errpipe_data.split(b':', 2))
# The encoding here should match the encoding
# written in by the subprocess implementations
# like _posixsubprocess
err_msg = err_msg.decode()
except ValueError:
exception_name = b'SubprocessError'
hex_errno = b'0'
err_msg = 'Bad exception data from child: {!r}'.format(
bytes(errpipe_data))
child_exception_type = getattr(
builtins, exception_name.decode('ascii'),
SubprocessError)
if issubclass(child_exception_type, OSError) and hex_errno:
errno_num = int(hex_errno, 16)
child_exec_never_called = (err_msg == "noexec")
if child_exec_never_called:
err_msg = ""
# The error must be from chdir(cwd).
err_filename = cwd
else:
err_filename = orig_executable
if errno_num != 0:
err_msg = os.strerror(errno_num)
raise child_exception_type(errno_num, err_msg, err_filename)
raise child_exception_type(err_msg)
def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED,
_WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED,
_WEXITSTATUS=os.WEXITSTATUS, _WIFSTOPPED=os.WIFSTOPPED,
_WSTOPSIG=os.WSTOPSIG):
"""All callers to this function MUST hold self._waitpid_lock."""
# This method is called (indirectly) by __del__, so it cannot
# refer to anything outside of its local scope.
if _WIFSIGNALED(sts):
self.returncode = -_WTERMSIG(sts)
elif _WIFEXITED(sts):
self.returncode = _WEXITSTATUS(sts)
elif _WIFSTOPPED(sts):
self.returncode = -_WSTOPSIG(sts)
else:
# Should never happen
raise SubprocessError("Unknown child exit status!")
def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
_WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
"""Check if child process has terminated. Returns returncode
attribute.
This method is called by __del__, so it cannot reference anything
outside of the local scope (nor can any methods it calls).
"""
if self.returncode is None:
if not self._waitpid_lock.acquire(False):
# Something else is busy calling waitpid. Don't allow two
# at once. We know nothing yet.
return None
try:
if self.returncode is not None:
return self.returncode # Another thread waited.
pid, sts = _waitpid(self.pid, _WNOHANG)
if pid == self.pid:
self._handle_exitstatus(sts)
except OSError as e:
if _deadstate is not None:
self.returncode = _deadstate
elif e.errno == _ECHILD:
# This happens if SIGCLD is set to be ignored or
# waiting for child processes has otherwise been
# disabled for our process. This child is dead, we
# can't get the status.
# http://bugs.python.org/issue15756
self.returncode = 0
finally:
self._waitpid_lock.release()
return self.returncode
def _try_wait(self, wait_flags):
"""All callers to this function MUST hold self._waitpid_lock."""
try:
(pid, sts) = os.waitpid(self.pid, wait_flags)
except ChildProcessError:
# This happens if SIGCLD is set to be ignored or waiting
# for child processes has otherwise been disabled for our
# process. This child is dead, we can't get the status.
pid = self.pid
sts = 0
return (pid, sts)
def _wait(self, timeout):
"""Internal implementation of wait() on POSIX."""
if self.returncode is not None:
return self.returncode
if timeout is not None:
endtime = _time() + timeout
# Enter a busy loop if we have a timeout. This busy loop was
# cribbed from Lib/threading.py in Thread.wait() at r71065.
delay = 0.0005 # 500 us -> initial delay of 1 ms
while True:
if self._waitpid_lock.acquire(False):
try:
if self.returncode is not None:
break # Another thread waited.
(pid, sts) = self._try_wait(os.WNOHANG)
assert pid == self.pid or pid == 0
if pid == self.pid:
self._handle_exitstatus(sts)
break
finally:
self._waitpid_lock.release()
remaining = self._remaining_time(endtime)
if remaining <= 0:
raise TimeoutExpired(self.args, timeout)
delay = min(delay * 2, remaining, .05)
time.sleep(delay)
else:
while self.returncode is None:
with self._waitpid_lock:
if self.returncode is not None:
break # Another thread waited.
(pid, sts) = self._try_wait(0)
# Check the pid and loop as waitpid has been known to
# return 0 even without WNOHANG in odd situations.
# http://bugs.python.org/issue14396.
if pid == self.pid:
self._handle_exitstatus(sts)
return self.returncode
def _communicate(self, input, endtime, orig_timeout):
if self.stdin and not self._communication_started:
# Flush stdio buffer. This might block, if the user has
# been writing to .stdin in an uncontrolled fashion.
try:
self.stdin.flush()
except BrokenPipeError:
pass # communicate() must ignore BrokenPipeError.
if not input:
try:
self.stdin.close()
except BrokenPipeError:
pass # communicate() must ignore BrokenPipeError.
stdout = None
stderr = None
# Only create this mapping if we haven't already.
if not self._communication_started:
self._fileobj2output = {}
if self.stdout:
self._fileobj2output[self.stdout] = []
if self.stderr:
self._fileobj2output[self.stderr] = []
if self.stdout:
stdout = self._fileobj2output[self.stdout]
if self.stderr:
stderr = self._fileobj2output[self.stderr]
self._save_input(input)
if self._input:
input_view = memoryview(self._input)
with _PopenSelector() as selector:
if self.stdin and input:
selector.register(self.stdin, selectors.EVENT_WRITE)
if self.stdout and not self.stdout.closed:
selector.register(self.stdout, selectors.EVENT_READ)
if self.stderr and not self.stderr.closed:
selector.register(self.stderr, selectors.EVENT_READ)
while selector.get_map():
timeout = self._remaining_time(endtime)
if timeout is not None and timeout < 0:
self._check_timeout(endtime, orig_timeout,
stdout, stderr,
skip_check_and_raise=True)
raise RuntimeError( # Impossible :)
'_check_timeout(..., skip_check_and_raise=True) '
'failed to raise TimeoutExpired.')
ready = selector.select(timeout)
self._check_timeout(endtime, orig_timeout, stdout, stderr)
# XXX Rewrite these to use non-blocking I/O on the file
# objects; they are no longer using C stdio!
for key, events in ready:
if key.fileobj is self.stdin:
chunk = input_view[self._input_offset :
self._input_offset + _PIPE_BUF]
try:
self._input_offset += os.write(key.fd, chunk)
except BrokenPipeError:
selector.unregister(key.fileobj)
key.fileobj.close()
else:
if self._input_offset >= len(self._input):
selector.unregister(key.fileobj)
key.fileobj.close()
elif key.fileobj in (self.stdout, self.stderr):
data = os.read(key.fd, 32768)
if not data:
selector.unregister(key.fileobj)
key.fileobj.close()
self._fileobj2output[key.fileobj].append(data)
self.wait(timeout=self._remaining_time(endtime))
# All data exchanged. Translate lists into strings.
if stdout is not None:
stdout = b''.join(stdout)
if stderr is not None:
stderr = b''.join(stderr)
# Translate newlines, if requested.
# This also turns bytes into strings.
if self.text_mode:
if stdout is not None:
stdout = self._translate_newlines(stdout,
self.stdout.encoding,
self.stdout.errors)
if stderr is not None:
stderr = self._translate_newlines(stderr,
self.stderr.encoding,
self.stderr.errors)
return (stdout, stderr)
def _save_input(self, input):
# This method is called from the _communicate_with_*() methods
# so that if we time out while communicating, we can continue
# sending input if we retry.
if self.stdin and self._input is None:
self._input_offset = 0
self._input = input
if input is not None and self.text_mode:
self._input = self._input.encode(self.stdin.encoding,
self.stdin.errors)
def send_signal(self, sig):
"""Send a signal to the process."""
# Skip signalling a process that we know has already died.
if self.returncode is None:
os.kill(self.pid, sig)
def terminate(self):
"""Terminate the process with SIGTERM
"""
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the process with SIGKILL
"""
self.send_signal(signal.SIGKILL)
|
xyuanmu/XX-Net
|
python3.8.2/Lib/subprocess.py
|
Python
|
bsd-2-clause
| 77,289
|
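The subprocess.py excerpt above implements the timeout handling behind
Popen.communicate(): the reader threads (Windows) or the selector loop (POSIX)
are left running when TimeoutExpired is raised, so a second communicate() call
can finish the exchange. A minimal caller-side sketch of that contract (the
command is illustrative and not part of the file above):

import subprocess

proc = subprocess.Popen(["sleep", "10"], stdout=subprocess.PIPE)
try:
    # Raises subprocess.TimeoutExpired via the _wait/_communicate paths above.
    out, _ = proc.communicate(timeout=1)
except subprocess.TimeoutExpired:
    proc.kill()                  # SIGKILL on POSIX, TerminateProcess on Windows
    out, _ = proc.communicate()  # reader state was preserved, so this completes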
#!/usr/bin/env python3
import cmd
import sqlite3
import sys
import os
import requests
from time import sleep
from terminaltables import AsciiTable
from cme.msfrpc import Msfrpc, MsfAuthError
import configparser
from cme.loaders.protocol_loader import protocol_loader
from requests import ConnectionError
# The following disables the InsecureRequests warning and the 'Starting new HTTPS connection' log message
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
class UserExitedProto(Exception):
pass
class DatabaseNavigator(cmd.Cmd):
def __init__(self, main_menu, database, proto):
cmd.Cmd.__init__(self)
self.main_menu = main_menu
self.config = main_menu.config
self.proto = proto
self.db = database
self.prompt = 'cmedb ({})({}) > '.format(main_menu.workspace, proto)
def do_back(self, line):
raise UserExitedProto
def do_exit(self, line):
sys.exit(0)
def print_table(self, data, title=None):
print("")
table = AsciiTable(data)
if title:
table.title = title
print(table.table)
print("")
def do_export(self, line):
if not line:
print("[-] not enough arguments")
return
line = line.split()
if line[0].lower() == 'creds':
if len(line) < 3:
print("[-] invalid arguments, export creds <plaintext|hashes|both|csv> <filename>")
return
if line[1].lower() == 'plaintext':
creds = self.db.get_credentials(credtype="plaintext")
elif line[1].lower() == 'hashes':
creds = self.db.get_credentials(credtype="hash")
else:
creds = self.db.get_credentials()
with open(os.path.expanduser(line[2]), 'w') as export_file:
for cred in creds:
credid, domain, user, password, credtype, fromhost = cred
if line[1].lower() == 'csv':
export_file.write('{},{},{},{},{},{}\n'.format(credid,domain,user,password,credtype,fromhost))
else:
export_file.write('{}\n'.format(password))
print('[+] creds exported')
elif line[0].lower() == 'hosts':
if len(line) < 2:
print("[-] invalid arguments, export hosts <filename>")
return
hosts = self.db.get_computers()
with open(os.path.expanduser(line[1]), 'w') as export_file:
for host in hosts:
hostid,ipaddress,hostname,domain,opsys,dc = host
export_file.write('{},{},{},{},{},{}\n'.format(hostid,ipaddress,hostname,domain,opsys,dc))
print('[+] hosts exported')
else:
print('[-] invalid argument, specify creds or hosts')
def do_import(self, line):
if not line:
return
if line == 'empire':
headers = {'Content-Type': 'application/json'}
# Pull the username and password from the config file
payload = {'username': self.config.get('Empire', 'username'),
'password': self.config.get('Empire', 'password')}
# Pull the host and port from the config file
base_url = 'https://{}:{}'.format(self.config.get('Empire', 'api_host'), self.config.get('Empire', 'api_port'))
try:
r = requests.post(base_url + '/api/admin/login', json=payload, headers=headers, verify=False)
if r.status_code == 200:
token = r.json()['token']
url_params = {'token': token}
r = requests.get(base_url + '/api/creds', headers=headers, params=url_params, verify=False)
creds = r.json()
for cred in creds['creds']:
if cred['credtype'] == 'token' or cred['credtype'] == 'krbtgt' or cred['username'].endswith('$'):
continue
self.db.add_credential(cred['credtype'], cred['domain'], cred['username'], cred['password'])
print("[+] Empire credential import successful")
else:
print("[-] Error authenticating to Empire's RESTful API server!")
except ConnectionError as e:
print("[-] Unable to connect to Empire's RESTful API server: {}".format(e))
elif line == 'metasploit':
msf = Msfrpc({'host': self.config.get('Metasploit', 'rpc_host'),
'port': self.config.get('Metasploit', 'rpc_port')})
try:
msf.login('msf', self.config.get('Metasploit', 'password'))
except MsfAuthError:
print("[-] Error authenticating to Metasploit's MSGRPC server!")
return
console_id = str(msf.call('console.create')['id'])
msf.call('console.write', [console_id, 'creds\n'])
sleep(2)
creds = msf.call('console.read', [console_id])
for entry in creds['data'].split('\n'):
cred = entry.split()
try:
# host = cred[0]
# port = cred[2]
proto = cred[3]
username = cred[4]
password = cred[5]
cred_type = cred[6]
if proto == '({})'.format(self.proto) and cred_type == 'Password':
self.db.add_credential('plaintext', '', username, password)
except IndexError:
continue
msf.call('console.destroy', [console_id])
print("[+] Metasploit credential import successful")
def complete_import(self, text, line, begidx, endidx):
"Tab-complete 'import' commands."
commands = ["empire", "metasploit"]
mline = line.partition(' ')[2]
offs = len(mline) - len(text)
return [s[offs:] for s in commands if s.startswith(mline)]
def complete_export(self, text, line, begidx, endidx):
"Tab-complete 'creds' commands."
commands = ["creds", "plaintext", "hashes"]
mline = line.partition(' ')[2]
offs = len(mline) - len(text)
return [s[offs:] for s in commands if s.startswith(mline)]
class CMEDBMenu(cmd.Cmd):
def __init__(self, config_path):
cmd.Cmd.__init__(self)
self.config_path = config_path
try:
self.config = configparser.ConfigParser()
self.config.read(self.config_path)
except Exception as e:
print("[-] Error reading cme.conf: {}".format(e))
sys.exit(1)
self.workspace_dir = os.path.expanduser('~/.cme/workspaces')
self.conn = None
self.p_loader = protocol_loader()
self.protocols = self.p_loader.get_protocols()
self.workspace = self.config.get('CME', 'workspace')
self.do_workspace(self.workspace)
self.db = self.config.get('CME', 'last_used_db')
if self.db:
self.do_proto(self.db)
def open_proto_db(self, db_path):
# Set the database connection to autocommit w/ isolation level
self.conn = sqlite3.connect(db_path, check_same_thread=False)
self.conn.text_factory = str
self.conn.isolation_level = None
def write_configfile(self):
with open(self.config_path, 'w') as configfile:
self.config.write(configfile)
def do_proto(self, proto):
if not proto:
return
proto_db_path = os.path.join(self.workspace_dir, self.workspace, proto + '.db')
if os.path.exists(proto_db_path):
self.open_proto_db(proto_db_path)
db_nav_object = self.p_loader.load_protocol(self.protocols[proto]['nvpath'])
db_object = self.p_loader.load_protocol(self.protocols[proto]['dbpath'])
self.config.set('CME', 'last_used_db', proto)
self.write_configfile()
try:
proto_menu = getattr(db_nav_object, 'navigator')(self, getattr(db_object, 'database')(self.conn), proto)
proto_menu.cmdloop()
except UserExitedProto:
pass
def do_workspace(self, line):
if not line:
return
line = line.strip()
if line.split()[0] == 'create':
new_workspace = line.split()[1].strip()
print("[*] Creating workspace '{}'".format(new_workspace))
os.mkdir(os.path.join(self.workspace_dir, new_workspace))
for protocol in self.protocols.keys():
try:
protocol_object = self.p_loader.load_protocol(self.protocols[protocol]['dbpath'])
except KeyError:
continue
proto_db_path = os.path.join(self.workspace_dir, new_workspace, protocol + '.db')
if not os.path.exists(proto_db_path):
print('[*] Initializing {} protocol database'.format(protocol.upper()))
conn = sqlite3.connect(proto_db_path)
c = conn.cursor()
# try to prevent some of the weird sqlite I/O errors
c.execute('PRAGMA journal_mode = OFF')
c.execute('PRAGMA foreign_keys = 1')
getattr(protocol_object, 'database').db_schema(c)
# commit the changes and close everything off
conn.commit()
conn.close()
self.do_workspace(new_workspace)
elif os.path.exists(os.path.join(self.workspace_dir, line)):
self.config.set('CME', 'workspace', line)
self.write_configfile()
self.workspace = line
self.prompt = 'cmedb ({}) > '.format(line)
def do_exit(self, line):
sys.exit(0)
def main():
config_path = os.path.expanduser('~/.cme/cme.conf')
if not os.path.exists(config_path):
print("[-] Unable to find config file")
sys.exit(1)
try:
cmedbnav = CMEDBMenu(config_path)
cmedbnav.cmdloop()
except KeyboardInterrupt:
pass
|
jorik041/CrackMapExec
|
cme/cmedb.py
|
Python
|
bsd-2-clause
| 10,376
|
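cmedb.py above builds its shell on the standard library cmd module: each
do_<command> method gains tab completion from a matching complete_<command>
method. A self-contained sketch of that pattern (class and command names here
are illustrative, not part of CrackMapExec):

import cmd

class DemoShell(cmd.Cmd):
    prompt = "demo > "

    def do_import(self, line):
        print("importing from: {}".format(line))

    def complete_import(self, text, line, begidx, endidx):
        # Same completion idiom as complete_import() above: match the text
        # typed after the command name against the known sub-commands.
        commands = ["empire", "metasploit"]
        mline = line.partition(' ')[2]
        offs = len(mline) - len(text)
        return [s[offs:] for s in commands if s.startswith(mline)]

if __name__ == "__main__":
    DemoShell().cmdloop()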
import numpy
import glob
import double_hitfinder
from os import listdir
filepath = '/home/toli/Python/Test_and_Learning/data/'
image_names = listdir(filepath)
print 'found data:'
print image_names
#image_names = glob.glob('r0074*.dat')
print 'load first image'
img = numpy.loadtxt(filepath + image_names[0], delimiter = '\t')
print 'generate masks'
mask,gmask,centermask = double_hitfinder.generate_masks(img)
print 'calculating hitscores for files:'
print image_names
for idx, imname in enumerate(image_names):
print 'loading image #' + str(idx)
img = numpy.loadtxt(filepath + imname, delimiter = '\t')
hitscore = double_hitfinder.double_hit_finder(img,mask,gmask,centermask,imname=imname)
print imname, hitscore
|
SPIhub/hummingbird
|
examples/in-flight-holography/calc_hitscores.py
|
Python
|
bsd-2-clause
| 732
|
# coding: utf-8
from __future__ import unicode_literals
import inspect
import pickle
import re
import unittest
from collections import Mapping
import pytest
from django.db import models
from rest_framework import fields, relations, serializers
from rest_framework.compat import unicode_repr
from rest_framework.fields import Field
from .utils import MockObject
try:
from collections import ChainMap
except ImportError:
ChainMap = False
# Test serializer fields imports.
# -------------------------------
class TestFieldImports:
def is_field(self, name, value):
return (
isinstance(value, type) and
issubclass(value, Field) and
not name.startswith('_')
)
def test_fields(self):
msg = "Expected `fields.%s` to be imported in `serializers`"
field_classes = [
key for key, value
in inspect.getmembers(fields)
if self.is_field(key, value)
]
# sanity check
assert 'Field' in field_classes
assert 'BooleanField' in field_classes
for field in field_classes:
assert hasattr(serializers, field), msg % field
def test_relations(self):
msg = "Expected `relations.%s` to be imported in `serializers`"
field_classes = [
key for key, value
in inspect.getmembers(relations)
if self.is_field(key, value)
]
# sanity check
assert 'RelatedField' in field_classes
for field in field_classes:
assert hasattr(serializers, field), msg % field
# Tests for core functionality.
# -----------------------------
class TestSerializer:
def setup(self):
class ExampleSerializer(serializers.Serializer):
char = serializers.CharField()
integer = serializers.IntegerField()
self.Serializer = ExampleSerializer
def test_valid_serializer(self):
serializer = self.Serializer(data={'char': 'abc', 'integer': 123})
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc', 'integer': 123}
assert serializer.data == {'char': 'abc', 'integer': 123}
assert serializer.errors == {}
def test_invalid_serializer(self):
serializer = self.Serializer(data={'char': 'abc'})
assert not serializer.is_valid()
assert serializer.validated_data == {}
assert serializer.data == {'char': 'abc'}
assert serializer.errors == {'integer': ['This field is required.']}
def test_invalid_datatype(self):
serializer = self.Serializer(data=[{'char': 'abc'}])
assert not serializer.is_valid()
assert serializer.validated_data == {}
assert serializer.data == {}
assert serializer.errors == {'non_field_errors': ['Invalid data. Expected a dictionary, but got list.']}
def test_partial_validation(self):
serializer = self.Serializer(data={'char': 'abc'}, partial=True)
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc'}
assert serializer.errors == {}
def test_empty_serializer(self):
serializer = self.Serializer()
assert serializer.data == {'char': '', 'integer': None}
def test_missing_attribute_during_serialization(self):
class MissingAttributes:
pass
instance = MissingAttributes()
serializer = self.Serializer(instance)
with pytest.raises(AttributeError):
serializer.data
def test_data_access_before_save_raises_error(self):
def create(validated_data):
return validated_data
serializer = self.Serializer(data={'char': 'abc', 'integer': 123})
serializer.create = create
assert serializer.is_valid()
assert serializer.data == {'char': 'abc', 'integer': 123}
with pytest.raises(AssertionError):
serializer.save()
def test_validate_none_data(self):
data = None
serializer = self.Serializer(data=data)
assert not serializer.is_valid()
assert serializer.errors == {'non_field_errors': ['No data provided']}
@unittest.skipUnless(ChainMap, 'requires python 3.3')
def test_serialize_chainmap(self):
data = ChainMap({'char': 'abc'}, {'integer': 123})
serializer = self.Serializer(data=data)
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc', 'integer': 123}
assert serializer.errors == {}
def test_serialize_custom_mapping(self):
class SinglePurposeMapping(Mapping):
def __getitem__(self, key):
return 'abc' if key == 'char' else 123
def __iter__(self):
yield 'char'
yield 'integer'
def __len__(self):
return 2
serializer = self.Serializer(data=SinglePurposeMapping())
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc', 'integer': 123}
assert serializer.errors == {}
class TestValidateMethod:
def test_non_field_error_validate_method(self):
class ExampleSerializer(serializers.Serializer):
char = serializers.CharField()
integer = serializers.IntegerField()
def validate(self, attrs):
raise serializers.ValidationError('Non field error')
serializer = ExampleSerializer(data={'char': 'abc', 'integer': 123})
assert not serializer.is_valid()
assert serializer.errors == {'non_field_errors': ['Non field error']}
def test_field_error_validate_method(self):
class ExampleSerializer(serializers.Serializer):
char = serializers.CharField()
integer = serializers.IntegerField()
def validate(self, attrs):
raise serializers.ValidationError({'char': 'Field error'})
serializer = ExampleSerializer(data={'char': 'abc', 'integer': 123})
assert not serializer.is_valid()
assert serializer.errors == {'char': ['Field error']}
class TestBaseSerializer:
def setup(self):
class ExampleSerializer(serializers.BaseSerializer):
def to_representation(self, obj):
return {
'id': obj['id'],
'email': obj['name'] + '@' + obj['domain']
}
def to_internal_value(self, data):
name, domain = str(data['email']).split('@')
return {
'id': int(data['id']),
'name': name,
'domain': domain,
}
self.Serializer = ExampleSerializer
def test_abstract_methods_raise_proper_errors(self):
serializer = serializers.BaseSerializer()
with pytest.raises(NotImplementedError):
serializer.to_internal_value(None)
with pytest.raises(NotImplementedError):
serializer.to_representation(None)
with pytest.raises(NotImplementedError):
serializer.update(None, None)
with pytest.raises(NotImplementedError):
serializer.create(None)
def test_access_to_data_attribute_before_validation_raises_error(self):
serializer = serializers.BaseSerializer(data={'foo': 'bar'})
with pytest.raises(AssertionError):
serializer.data
def test_access_to_errors_attribute_before_validation_raises_error(self):
serializer = serializers.BaseSerializer(data={'foo': 'bar'})
with pytest.raises(AssertionError):
serializer.errors
def test_access_to_validated_data_attribute_before_validation_raises_error(self):
serializer = serializers.BaseSerializer(data={'foo': 'bar'})
with pytest.raises(AssertionError):
serializer.validated_data
def test_serialize_instance(self):
instance = {'id': 1, 'name': 'tom', 'domain': 'example.com'}
serializer = self.Serializer(instance)
assert serializer.data == {'id': 1, 'email': 'tom@example.com'}
def test_serialize_list(self):
instances = [
{'id': 1, 'name': 'tom', 'domain': 'example.com'},
{'id': 2, 'name': 'ann', 'domain': 'example.com'},
]
serializer = self.Serializer(instances, many=True)
assert serializer.data == [
{'id': 1, 'email': 'tom@example.com'},
{'id': 2, 'email': 'ann@example.com'}
]
def test_validate_data(self):
data = {'id': 1, 'email': 'tom@example.com'}
serializer = self.Serializer(data=data)
assert serializer.is_valid()
assert serializer.validated_data == {
'id': 1,
'name': 'tom',
'domain': 'example.com'
}
def test_validate_list(self):
data = [
{'id': 1, 'email': 'tom@example.com'},
{'id': 2, 'email': 'ann@example.com'},
]
serializer = self.Serializer(data=data, many=True)
assert serializer.is_valid()
assert serializer.validated_data == [
{'id': 1, 'name': 'tom', 'domain': 'example.com'},
{'id': 2, 'name': 'ann', 'domain': 'example.com'}
]
class TestStarredSource:
"""
Tests for `source='*'` argument, which is used for nested representations.
For example:
nested_field = NestedField(source='*')
"""
data = {
'nested1': {'a': 1, 'b': 2},
'nested2': {'c': 3, 'd': 4}
}
def setup(self):
class NestedSerializer1(serializers.Serializer):
a = serializers.IntegerField()
b = serializers.IntegerField()
class NestedSerializer2(serializers.Serializer):
c = serializers.IntegerField()
d = serializers.IntegerField()
class TestSerializer(serializers.Serializer):
nested1 = NestedSerializer1(source='*')
nested2 = NestedSerializer2(source='*')
self.Serializer = TestSerializer
def test_nested_validate(self):
"""
A nested representation is validated into a flat internal object.
"""
serializer = self.Serializer(data=self.data)
assert serializer.is_valid()
assert serializer.validated_data == {
'a': 1,
'b': 2,
'c': 3,
'd': 4
}
def test_nested_serialize(self):
"""
An object can be serialized into a nested representation.
"""
instance = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
serializer = self.Serializer(instance)
assert serializer.data == self.data
class TestIncorrectlyConfigured:
def test_incorrect_field_name(self):
class ExampleSerializer(serializers.Serializer):
incorrect_name = serializers.IntegerField()
class ExampleObject:
def __init__(self):
self.correct_name = 123
instance = ExampleObject()
serializer = ExampleSerializer(instance)
with pytest.raises(AttributeError) as exc_info:
serializer.data
msg = str(exc_info.value)
assert msg.startswith(
"Got AttributeError when attempting to get a value for field `incorrect_name` on serializer `ExampleSerializer`.\n"
"The serializer field might be named incorrectly and not match any attribute or key on the `ExampleObject` instance.\n"
"Original exception text was:"
)
class TestUnicodeRepr:
def test_unicode_repr(self):
class ExampleSerializer(serializers.Serializer):
example = serializers.CharField()
class ExampleObject:
def __init__(self):
self.example = '한국'
def __repr__(self):
return unicode_repr(self.example)
instance = ExampleObject()
serializer = ExampleSerializer(instance)
repr(serializer) # Should not error.
class TestNotRequiredOutput:
def test_not_required_output_for_dict(self):
"""
'required=False' should allow a dictionary key to be missing in output.
"""
class ExampleSerializer(serializers.Serializer):
omitted = serializers.CharField(required=False)
included = serializers.CharField()
serializer = ExampleSerializer(data={'included': 'abc'})
serializer.is_valid()
assert serializer.data == {'included': 'abc'}
def test_not_required_output_for_object(self):
"""
'required=False' should allow an object attribute to be missing in output.
"""
class ExampleSerializer(serializers.Serializer):
omitted = serializers.CharField(required=False)
included = serializers.CharField()
def create(self, validated_data):
return MockObject(**validated_data)
serializer = ExampleSerializer(data={'included': 'abc'})
serializer.is_valid()
serializer.save()
assert serializer.data == {'included': 'abc'}
def test_not_required_output_for_allow_null_field(self):
class ExampleSerializer(serializers.Serializer):
omitted = serializers.CharField(required=False, allow_null=True)
included = serializers.CharField()
serializer = ExampleSerializer({'included': 'abc'})
assert 'omitted' not in serializer.data
class TestDefaultOutput:
def setup(self):
class ExampleSerializer(serializers.Serializer):
has_default = serializers.CharField(default='x')
has_default_callable = serializers.CharField(default=lambda: 'y')
no_default = serializers.CharField()
self.Serializer = ExampleSerializer
def test_default_used_for_dict(self):
"""
'default="something"' should be used if dictionary key is missing from input.
"""
serializer = self.Serializer({'no_default': 'abc'})
assert serializer.data == {'has_default': 'x', 'has_default_callable': 'y', 'no_default': 'abc'}
def test_default_used_for_object(self):
"""
'default="something"' should be used if object attribute is missing from input.
"""
instance = MockObject(no_default='abc')
serializer = self.Serializer(instance)
assert serializer.data == {'has_default': 'x', 'has_default_callable': 'y', 'no_default': 'abc'}
def test_default_not_used_when_in_dict(self):
"""
'default="something"' should not be used if dictionary key is present in input.
"""
serializer = self.Serializer({'has_default': 'def', 'has_default_callable': 'ghi', 'no_default': 'abc'})
assert serializer.data == {'has_default': 'def', 'has_default_callable': 'ghi', 'no_default': 'abc'}
def test_default_not_used_when_in_object(self):
"""
'default="something"' should not be used if object attribute is present in input.
"""
instance = MockObject(has_default='def', has_default_callable='ghi', no_default='abc')
serializer = self.Serializer(instance)
assert serializer.data == {'has_default': 'def', 'has_default_callable': 'ghi', 'no_default': 'abc'}
def test_default_for_dotted_source(self):
"""
'default="something"' should be used when a traversed attribute is missing from input.
"""
class Serializer(serializers.Serializer):
traversed = serializers.CharField(default='x', source='traversed.attr')
assert Serializer({}).data == {'traversed': 'x'}
assert Serializer({'traversed': {}}).data == {'traversed': 'x'}
assert Serializer({'traversed': None}).data == {'traversed': 'x'}
assert Serializer({'traversed': {'attr': 'abc'}}).data == {'traversed': 'abc'}
def test_default_for_multiple_dotted_source(self):
class Serializer(serializers.Serializer):
c = serializers.CharField(default='x', source='a.b.c')
assert Serializer({}).data == {'c': 'x'}
assert Serializer({'a': {}}).data == {'c': 'x'}
assert Serializer({'a': None}).data == {'c': 'x'}
assert Serializer({'a': {'b': {}}}).data == {'c': 'x'}
assert Serializer({'a': {'b': None}}).data == {'c': 'x'}
assert Serializer({'a': {'b': {'c': 'abc'}}}).data == {'c': 'abc'}
def test_default_for_nested_serializer(self):
class NestedSerializer(serializers.Serializer):
a = serializers.CharField(default='1')
c = serializers.CharField(default='2', source='b.c')
class Serializer(serializers.Serializer):
nested = NestedSerializer()
assert Serializer({'nested': None}).data == {'nested': None}
assert Serializer({'nested': {}}).data == {'nested': {'a': '1', 'c': '2'}}
assert Serializer({'nested': {'a': '3', 'b': {}}}).data == {'nested': {'a': '3', 'c': '2'}}
assert Serializer({'nested': {'a': '3', 'b': {'c': '4'}}}).data == {'nested': {'a': '3', 'c': '4'}}
def test_default_for_allow_null(self):
# allow_null=True should imply default=None
class Serializer(serializers.Serializer):
foo = serializers.CharField()
bar = serializers.CharField(source='foo.bar', allow_null=True)
assert Serializer({'foo': None}).data == {'foo': None, 'bar': None}
class TestCacheSerializerData:
def test_cache_serializer_data(self):
"""
Caching serializer data with pickle will drop the serializer info,
but does preserve the data itself.
"""
class ExampleSerializer(serializers.Serializer):
field1 = serializers.CharField()
field2 = serializers.CharField()
serializer = ExampleSerializer({'field1': 'a', 'field2': 'b'})
pickled = pickle.dumps(serializer.data)
data = pickle.loads(pickled)
assert data == {'field1': 'a', 'field2': 'b'}
class TestDefaultInclusions:
def setup(self):
class ExampleSerializer(serializers.Serializer):
char = serializers.CharField(read_only=True, default='abc')
integer = serializers.IntegerField()
self.Serializer = ExampleSerializer
def test_default_should_included_on_create(self):
serializer = self.Serializer(data={'integer': 456})
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc', 'integer': 456}
assert serializer.errors == {}
def test_default_should_be_included_on_update(self):
instance = MockObject(char='def', integer=123)
serializer = self.Serializer(instance, data={'integer': 456})
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc', 'integer': 456}
assert serializer.errors == {}
def test_default_should_not_be_included_on_partial_update(self):
instance = MockObject(char='def', integer=123)
serializer = self.Serializer(instance, data={'integer': 456}, partial=True)
assert serializer.is_valid()
assert serializer.validated_data == {'integer': 456}
assert serializer.errors == {}
class TestSerializerValidationWithCompiledRegexField:
def setup(self):
class ExampleSerializer(serializers.Serializer):
name = serializers.RegexField(re.compile(r'\d'), required=True)
self.Serializer = ExampleSerializer
def test_validation_success(self):
serializer = self.Serializer(data={'name': '2'})
assert serializer.is_valid()
assert serializer.validated_data == {'name': '2'}
assert serializer.errors == {}
class Test2555Regression:
def test_serializer_context(self):
class NestedSerializer(serializers.Serializer):
def __init__(self, *args, **kwargs):
super(NestedSerializer, self).__init__(*args, **kwargs)
# .context should not cache
self.context
class ParentSerializer(serializers.Serializer):
nested = NestedSerializer()
serializer = ParentSerializer(data={}, context={'foo': 'bar'})
assert serializer.context == {'foo': 'bar'}
assert serializer.fields['nested'].context == {'foo': 'bar'}
class Test4606Regression:
def setup(self):
class ExampleSerializer(serializers.Serializer):
name = serializers.CharField(required=True)
choices = serializers.CharField(required=True)
self.Serializer = ExampleSerializer
def test_4606_regression(self):
serializer = self.Serializer(data=[{"name": "liz"}], many=True)
with pytest.raises(serializers.ValidationError):
serializer.is_valid(raise_exception=True)
class TestDeclaredFieldInheritance:
def test_declared_field_disabling(self):
class Parent(serializers.Serializer):
f1 = serializers.CharField()
f2 = serializers.CharField()
class Child(Parent):
f1 = None
class Grandchild(Child):
pass
assert len(Parent._declared_fields) == 2
assert len(Child._declared_fields) == 1
assert len(Grandchild._declared_fields) == 1
def test_meta_field_disabling(self):
# Declaratively setting a field on a child class will *not* prevent
# the ModelSerializer from generating a default field.
class MyModel(models.Model):
f1 = models.CharField(max_length=10)
f2 = models.CharField(max_length=10)
class Parent(serializers.ModelSerializer):
class Meta:
model = MyModel
fields = ['f1', 'f2']
class Child(Parent):
f1 = None
class Grandchild(Child):
pass
assert len(Parent().get_fields()) == 2
assert len(Child().get_fields()) == 2
assert len(Grandchild().get_fields()) == 2
|
kgeorgy/django-rest-framework
|
tests/test_serializer.py
|
Python
|
bsd-2-clause
| 21,976
|
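The django-rest-framework tests above exercise the core Serializer contract:
declare fields on a Serializer subclass, pass input through data=, call
is_valid(), then read validated_data or errors. A minimal recap of that flow,
mirroring the fixtures used in the tests rather than introducing new API:

from rest_framework import serializers

class ExampleSerializer(serializers.Serializer):
    char = serializers.CharField()
    integer = serializers.IntegerField()

serializer = ExampleSerializer(data={'char': 'abc', 'integer': 123})
assert serializer.is_valid()
assert serializer.validated_data == {'char': 'abc', 'integer': 123}

bad = ExampleSerializer(data={'char': 'abc'})
assert not bad.is_valid()
assert bad.errors == {'integer': ['This field is required.']}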
from django.db.models import get_model
from django.conf import settings
from cms.models import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugins.text.models import Text
from cms.api import add_plugin
def convert(action = "dryrun"):
# this dictionary store the information for the conversions
execute=action
models_dictionary = {
"messages": {}, # a general set of messages for the user
"modules": {
"news_and_events.models": { # each module containing the models must be represented, like this
"application": "News & Events", # this is the human-friendly name of the module
"models": { # a dictionary with each model in that module
"NewsArticle": { # the actual name of the class
"fields": [ # a list of the fields we're working on
{ # a dictionary for each field
"old_field": "content",
"new_field": "body",
"slot": "body",
},
],
"model": "News articles", # the human-friendly name of the model
"actions": {}, # an empty dictionary where we we store the results
},
"Event": { # a second model in that module
"fields": [
{
"old_field": "content",
"new_field": "body",
"slot": "body",
},
],
"model": "Events",
"actions": {},
},
},
},
"vacancies_and_studentships.models": { # and a second module
"application": "Vacancies & Studentships",
"models": {
"Vacancy": {
"fields": [
{
"old_field": "description",
"new_field": "body",
"slot": "body",
},
],
"model": "Vacancies",
"actions": {},
},
"Studentship": {
"fields": [
{
"old_field": "description",
"new_field": "body",
"slot": "body",
},
],
"model": "Studentships",
"actions": {},
},
},
},
"publications.models": {
"application": "Publications",
"models": {
"Researcher": {
"fields": [
{
"old_field": "research_synopsis",
"new_field": "synopsis",
"slot": "body",
},
{
"old_field": "research_description",
"new_field": "description",
"slot": "body",
},
],
"model": "Researcher",
"actions": {},
},
},
},
},
}
print "------executing --------"
# loop over the modules
for module, module_values in models_dictionary["modules"].items():
# loop over the models in the module
for model, model_values in module_values["models"].items():
# mmodel is the human-readable name of the model, used for the report summary
mmodel = models_dictionary["modules"][module]["models"][model]["model"]
models_dictionary["messages"][mmodel]={}
# import the model
actual_model = getattr(__import__(module, globals(), locals(), module_values["models"], -1), model)
# loop over the fields that need converting
for field in model_values["fields"]:
old_field = field["old_field"]
new_field = field["new_field"]
slot = field["slot"]
# create a summary report for this field
models_dictionary["messages"][mmodel][old_field]={}
try:
getattr(actual_model, new_field)
except AttributeError:
message = "field " + new_field + " is missing - check the model and try agin"
models_dictionary["messages"][mmodel][old_field]["Error"]=message
continue
junk_content = [] # a record of those items with nothing but <br /> in them
moved_items =[] # a record of the items we migrated to placeholders
# loop over each item in the class
for item in actual_model.objects.all():
old_field_content = getattr(item, old_field) # the old field we want to convert
# now the serious business of converting the fields
# if the item lacks a placeholder, create the placeholder and the reference to it
if old_field_content and not getattr(item, new_field, None):
# check to see if it's worth converting
if len(old_field_content) > 10:
# create the placeholder
placeholder=Placeholder(slot=slot)
if execute == "execute":
placeholder.save()
# refer to the placeholder from the item
setattr(item, new_field, placeholder)
if execute == "execute":
add_plugin(placeholder, "SemanticTextPlugin", settings.CMS_LANGUAGES[0][0], body = old_field_content)
# setattr(item, old_field, "")
if execute == "execute":
item.status = "Converted to placeholder"
else:
item.status = "Unconverted"
else:
# this item is so short it must be junk
if execute == "execute":
setattr(item, old_field, "")
item.status = "Junk field - too short; was deleted instead of converted:" + old_field_content
else:
item.status = "Junk field - too short; will be deleted instead of converted:" + old_field_content
# make a note that this was a junk item
junk_content.append(item)
# make a note that we moved this item
moved_items.append(item)
if execute == "execute":
item.save()
# information about junk content items
if execute == "execute":
message = " ".join((str(len(junk_content)), "junk items not converted items"))
else:
message = " ".join((str(len(junk_content)), "junk items found"))
models_dictionary["messages"][mmodel][old_field]["Junk fields"]=message
# information about items that have been/need to be converted
if execute == "execute":
message = str(len(moved_items)) + " items were converted to placeholder " + new_field
else:
message = str(len(moved_items)) + " items need to be converted to placeholder " + new_field
models_dictionary["messages"][mmodel][old_field]["Conversions"]=message
# list every item that was copied for the full report
if execute == "execute":
action = "Fields that were copied"
else:
action = "Fields to be copied"
models_dictionary["modules"][module]["models"][model]["actions"][action]=moved_items
report = {
"action": execute,
"task": "convert-placeholders",
"converted": models_dictionary,
"template": "housekeeping/convert_to_placeholders.html"
}
return report
|
evildmp/Arkestra
|
housekeeping/convert_to_placeholders.py
|
Python
|
bsd-2-clause
| 9,889
|
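convert() above follows a dry-run convention: every persisting call
(placeholder.save(), add_plugin(), item.save(), clearing the old field) is
guarded by the action flag, so nothing is written unless action == "execute".
A sketch of how it is meant to be driven (assuming the surrounding Arkestra
housekeeping machinery renders the returned report):

report = convert(action="dryrun")    # inspect report["converted"]["messages"] first
report = convert(action="execute")   # then perform the actual migration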
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2012, Yung-Yu Chen <yyc@solvcon.net>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
A two-/three-dimensional, second order CESE solver for generic linear PDEs. It
uses :py:mod:`solvcon._algorithm`.
"""
import os
import warnings
import numpy as np
import solvcon as sc
from solvcon import solver
from solvcon import boundcond
# for readthedocs to work.
sc.import_module_may_fail('._algorithm')
class LinearSolver(solver.MeshSolver):
"""This class controls the underneath algorithm :py:class:`LinearAlgorithm
<._algorithm.LinearAlgorithm>`.
"""
_interface_init_ = ['cecnd', 'cevol', 'sfmrc']
_solution_array_ = ['solt', 'sol', 'soln', 'dsol', 'dsoln']
def __init__(self, blk, **kw):
"""
A linear solver needs a :py:class:`Block <solvcon.block.Block>` having
at least one group:
>>> from solvcon.testing import create_trivial_2d_blk
>>> blk = create_trivial_2d_blk()
>>> blk.clgrp.fill(0)
>>> blk.grpnames.append('blank')
A linear solver can't be instantiated directly:
>>> svr = LinearSolver(blk, neq=1) # doctest: +ELLIPSIS
Traceback (most recent call last):
...
TypeError: ...
To instantiate the linear solver, at least :py:attr:`gdlen` needs to be
implemented:
>>> class SubSolver(LinearSolver):
... @property
... def gdlen(self):
... return 1
>>> svr = SubSolver(blk, neq=1)
"""
# meta data.
self.neq = kw.pop('neq')
super(LinearSolver, self).__init__(blk, **kw)
self.substep_run = 2
ndim = blk.ndim
ncell = blk.ncell
ngstcell = blk.ngstcell
fpdtype = 'float64'
# scheme parameters.
self.alpha = int(kw.pop('alpha', 0))
self.sigma0 = int(kw.pop('sigma0', 3.0))
self.taylor = float(kw.pop('taylor', 1.0)) # dirty hack.
self.cnbfac = float(kw.pop('cnbfac', 1.0)) # dirty hack.
self.sftfac = float(kw.pop('sftfac', 1.0)) # dirty hack.
self.taumin = float(kw.pop('taumin', 0.0))
self.tauscale = float(kw.pop('tauscale', 1.0))
# dual mesh.
self.cecnd = np.empty(
(ngstcell+ncell, blk.CLMFC+1, ndim), dtype=fpdtype)
self.cevol = np.empty(
(ngstcell+ncell, blk.CLMFC+1), dtype=fpdtype)
self.sfmrc = np.empty((ncell, blk.CLMFC, blk.FCMND, 2, ndim),
dtype=fpdtype)
# parameters.
self.grpda = np.empty((self.ngroup, self.gdlen), dtype=fpdtype)
nsca = kw.pop('nsca', 0)
nvec = kw.pop('nvec', 0)
self.amsca = np.empty((ngstcell+ncell, nsca), dtype=fpdtype)
self.amvec = np.empty((ngstcell+ncell, nvec, ndim), dtype=fpdtype)
# solutions.
neq = self.neq
self.sol = np.empty((ngstcell+ncell, neq), dtype=fpdtype)
self.soln = np.empty((ngstcell+ncell, neq), dtype=fpdtype)
self.solt = np.empty((ngstcell+ncell, neq), dtype=fpdtype)
self.dsol = np.empty((ngstcell+ncell, neq, ndim), dtype=fpdtype)
self.dsoln = np.empty((ngstcell+ncell, neq, ndim), dtype=fpdtype)
self.stm = np.empty((ngstcell+ncell, neq), dtype=fpdtype)
self.cfl = np.empty(ngstcell+ncell, dtype=fpdtype)
self.ocfl = np.empty(ngstcell+ncell, dtype=fpdtype)
@property
def gdlen(self):
return None
def create_alg(self):
"""
Create a :py:class:`._algorithm.LinearAlgorithm` object.
>>> # create a valid solver as the test fixture.
>>> from solvcon.testing import create_trivial_2d_blk
>>> blk = create_trivial_2d_blk()
>>> blk.clgrp.fill(0)
>>> blk.grpnames.append('blank')
>>> class SubSolver(LinearSolver):
... @property
... def gdlen(self):
... return 1
>>> svr = SubSolver(blk, neq=1)
Create an associated algorithm object is straight-forward:
>>> alg = svr.create_alg()
"""
alg = _algorithm.LinearAlgorithm()
alg.setup_mesh(self.blk)
alg.setup_algorithm(self)
return alg
def init(self, **kw):
self.create_alg().prepare_ce()
super(LinearSolver, self).init(**kw)
self.create_alg().prepare_sf()
def provide(self):
# fill group data array.
self._make_grpda()
# pre-calculate CFL.
self.create_alg().calc_cfl()
self.ocfl[:] = self.cfl[:]
# super method.
super(LinearSolver, self).provide()
def apply_bc(self):
super(LinearSolver, self).apply_bc()
self.call_non_interface_bc('soln')
self.call_non_interface_bc('dsoln')
def _make_grpda(self):
raise NotImplementedError
###########################################################################
# Begin marching algorithm.
@sc.MeshSolver.register_marcher
def update(self, worker=None):
self.sol[:,:] = self.soln[:,:]
self.dsol[:,:,:] = self.dsoln[:,:,:]
@sc.MeshSolver.register_marcher
def calcsolt(self, worker=None):
self.create_alg().calc_solt()
@sc.MeshSolver.register_marcher
def calcsoln(self, worker=None):
self.create_alg().calc_soln()
@sc.MeshSolver.register_marcher
def ibcsoln(self, worker=None):
if worker: self.exchangeibc('soln', worker=worker)
@sc.MeshSolver.register_marcher
def bcsoln(self, worker=None):
self.call_non_interface_bc('soln')
@sc.MeshSolver.register_marcher
def calcdsoln(self, worker=None):
self.create_alg().calc_dsoln()
@sc.MeshSolver.register_marcher
def ibcdsoln(self, worker=None):
if worker: self.exchangeibc('dsoln', worker=worker)
@sc.MeshSolver.register_marcher
def bcdsoln(self, worker=None):
self.call_non_interface_bc('dsoln')
# End marching algorithm.
###########################################################################
class LinearPeriodic(boundcond.periodic):
"""
General periodic boundary condition for sequential runs.
"""
def init(self, **kw):
svr = self.svr
blk = svr.blk
ngstcell = blk.ngstcell
ngstface = blk.ngstface
facn = self.facn
slctm = self.rclp[:,0] + ngstcell
slctr = self.rclp[:,1] + ngstcell
# move coordinates.
shf = svr.cecnd[slctr,0,:] - blk.shfccnd[facn[:,2]+ngstface,:]
svr.cecnd[slctm,0,:] = blk.shfccnd[facn[:,0]+ngstface,:] + shf
def soln(self):
svr = self.svr
blk = svr.blk
slctm = self.rclp[:,0] + blk.ngstcell
slctr = self.rclp[:,1] + blk.ngstcell
svr.soln[slctm,:] = svr.soln[slctr,:]
def dsoln(self):
svr = self.svr
blk = svr.blk
slctm = self.rclp[:,0] + blk.ngstcell
slctr = self.rclp[:,1] + blk.ngstcell
svr.dsoln[slctm,:,:] = svr.dsoln[slctr,:,:]
# vim: set ff=unix fenc=utf8 ft=python ai et sw=4 ts=4 tw=79:
|
yungyuc/solvcon
|
solvcon/parcel/linear/solver.py
|
Python
|
bsd-3-clause
| 8,514
|
import sys
import os.path
import numpy as np
from numpy.testing import *
from numpy.testing.decorators import skipif
from skimage import data_dir
from skimage.io import ImageCollection, MultiImage
from skimage.io.collection import alphanumeric_key
from skimage.io import Image as ioImage
try:
from PIL import Image
except ImportError:
PIL_available = False
else:
PIL_available = True
if sys.version_info[0] > 2:
basestring = str
class TestAlphanumericKey():
def setUp(self):
self.test_string = 'z23a'
self.test_str_result = ['z', 23, 'a']
self.filenames = ['f9.10.png', 'f9.9.png', 'f10.10.png', 'f10.9.png',
'e9.png', 'e10.png', 'em.png']
self.sorted_filenames = \
['e9.png', 'e10.png', 'em.png', 'f9.9.png', 'f9.10.png',
'f10.9.png', 'f10.10.png']
def test_string_split(self):
assert_equal(alphanumeric_key(self.test_string), self.test_str_result)
def test_string_sort(self):
sorted_filenames = sorted(self.filenames, key=alphanumeric_key)
assert_equal(sorted_filenames, self.sorted_filenames)
class TestImageCollection():
pattern = [os.path.join(data_dir, pic) for pic in ['camera.png',
'color.png']]
pattern_matched = [os.path.join(data_dir, pic) for pic in
['camera.png', 'moon.png']]
def setUp(self):
self.collection = ImageCollection(self.pattern)
self.collection_matched = ImageCollection(self.pattern_matched)
def test_len(self):
assert len(self.collection) == 2
def test_getitem(self):
num = len(self.collection)
for i in range(-num, num):
assert type(self.collection[i]) is ioImage
assert_array_almost_equal(self.collection[0],
self.collection[-num])
#assert_raises expects a callable, hence this do-very-little func
def return_img(n):
return self.collection[n]
assert_raises(IndexError, return_img, num)
assert_raises(IndexError, return_img, -num - 1)
def test_slicing(self):
assert type(self.collection[:]) is ImageCollection
assert len(self.collection[:]) == 2
assert len(self.collection[:1]) == 1
assert len(self.collection[1:]) == 1
assert_array_almost_equal(self.collection[0], self.collection[:1][0])
assert_array_almost_equal(self.collection[1], self.collection[1:][0])
assert_array_almost_equal(self.collection[1], self.collection[::-1][0])
assert_array_almost_equal(self.collection[0], self.collection[::-1][1])
def test_files_property(self):
assert isinstance(self.collection.files, list)
def set_files(f):
self.collection.files = f
assert_raises(AttributeError, set_files, 'newfiles')
def test_custom_load(self):
load_pattern = [(1, 'one'), (2, 'two')]
def load_fn(x):
return x
ic = ImageCollection(load_pattern, load_func=load_fn)
assert_equal(ic[1], (2, 'two'))
def test_concatenate(self):
ar = self.collection_matched.concatenate()
assert_equal(ar.shape, (len(self.collection_matched),) +
self.collection[0].shape)
assert_raises(ValueError, self.collection.concatenate)
class TestMultiImage():
def setUp(self):
# This multipage TIF file was created with imagemagick:
# convert im1.tif im2.tif -adjoin multipage.tif
if PIL_available:
self.img = MultiImage(os.path.join(data_dir, 'multipage.tif'))
@skipif(not PIL_available)
def test_len(self):
assert len(self.img) == 2
@skipif(not PIL_available)
def test_getitem(self):
num = len(self.img)
for i in range(-num, num):
assert type(self.img[i]) is np.ndarray
assert_array_almost_equal(self.img[0],
self.img[-num])
#assert_raises expects a callable, hence this do-very-little func
def return_img(n):
return self.img[n]
assert_raises(IndexError, return_img, num)
assert_raises(IndexError, return_img, -num - 1)
@skipif(not PIL_available)
def test_files_property(self):
assert isinstance(self.img.filename, basestring)
def set_filename(f):
self.img.filename = f
assert_raises(AttributeError, set_filename, 'newfile')
@skipif(not PIL_available)
def test_conserve_memory_property(self):
assert isinstance(self.img.conserve_memory, bool)
def set_mem(val):
self.img.conserve_memory = val
assert_raises(AttributeError, set_mem, True)
@skipif(not PIL_available)
def test_concatenate(self):
ar = self.img.concatenate()
assert_equal(ar.shape, (len(self.img),) +
self.img[0].shape)
if __name__ == "__main__":
run_module_suite()
|
emmanuelle/scikits.image
|
skimage/io/tests/test_collection.py
|
Python
|
bsd-3-clause
| 5,053
|
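The scikit-image tests above pin down the ImageCollection behaviour worth
remembering: indexing loads a single image, slicing returns another
ImageCollection, and concatenate() only works when all frames share a shape.
A short sketch of that usage (the glob pattern is hypothetical):

from skimage.io import ImageCollection

ic = ImageCollection('frames/*.png')   # load lazily from a file pattern
first = ic[0]                          # a single image (array-like)
subset = ic[:2]                        # still an ImageCollection
stack = subset.concatenate()           # ndarray of shape (len(subset),) + first.shape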
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: José Sánchez-Gallego (gallegoj@uw.edu)
# @Date: 2018-07-14
# @Filename: aperture.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
#
# @Last modified by: José Sánchez-Gallego (gallegoj@uw.edu)
# @Last modified time: 2018-07-27 13:02:19
import astropy.coordinates
import astropy.units
import numpy
try:
import photutils
except ImportError:
photutils = None
__all__ = ['GetApertureMixIn', 'MarvinAperture']
class MarvinAperture(photutils.Aperture if photutils else object):
"""Extends `photutils.Aperture` allowing to extract spaxels in the aperture.
This class is not intended for general use and it is dynamically set as
the base for a `~photutils.Aperture` instance.
"""
@property
def parent(self):
"""Returns or sets the parent object."""
if not hasattr(self, '_parent'):
return None
return self._parent
@parent.setter
def parent(self, value):
self._parent = value
@property
def mask(self):
"""Returns the fractional overlap mask.
Equivalent to using `photutils.PixelAperture.to_mask` followed
by `photutils.ApertureMask.to_image` using the shape of the parent
object. Combines all the apertures in a single mask.
"""
assert self.parent is not None, 'no parent set'
if isinstance(self, photutils.SkyAperture):
aperture = self.to_pixel(self.parent.wcs)
else:
aperture = self
mask = numpy.zeros(self.parent._shape)
for ap in aperture.to_mask(method='exact'):
mask += ap.to_image(shape=(self.parent._shape))
return mask
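    # Usage sketch (names are illustrative, not from this module): given a
    # Marvin cube and an aperture created through ``getAperture``,
    #
    #     aperture = cube.getAperture((17, 15), 3)
    #     overlap = aperture.mask   # 2D array matching the parent's shape
    #
    # each pixel of ``overlap`` holds the fractional coverage (0-1) summed
    # over all apertures.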
def getSpaxels(self, threshold=0.5, lazy=True, mask=None, **kwargs):
"""Returns the spaxels that fall within the aperture.
Parameters
----------
threshold : float
The minimum fractional overlap between the spaxel and the aperture
grid for the spaxel to be returned.
lazy : bool
Whether the returned spaxels must be fully loaded or lazily
instantiated.
mask : numpy.ndarray
A mask that defines the fractional pixel overlap with the
apertures. If ``None``, the mask returned by `.MarvinAperture.mask`
will be used.
kwargs : dict
Additional parameters to be passed to the parent ``getSpaxel``
method. Can be used to define what information is loaded
in the spaxels.
"""
assert threshold > 0 and threshold <= 1, 'invalid threshold value'
if mask is None:
mask = self.mask
else:
assert mask.shape == self.parent._shape, 'invalid mask shape'
spaxel_coords = numpy.where(mask >= threshold)
if len(spaxel_coords[0]) == 0:
return []
return self.parent.getSpaxel(x=spaxel_coords[1], y=spaxel_coords[0],
xyorig='lower', lazy=lazy, **kwargs)
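    # Example sketch (hypothetical values): keep only spaxels that cover at
    # least 80% of a pixel and instantiate them lazily,
    #
    #     spaxels = aperture.getSpaxels(threshold=0.8, lazy=True)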
class GetApertureMixIn(object):
def getAperture(self, coords, aperture_params, aperture_type='circular',
coord_type='pixel'):
"""Defines an aperture.
        This method is mostly a wrapper around the aperture classes defined in
        `photutils <http://photutils.readthedocs.io>`_. It allows one to define
        circular, elliptical, or rectangular apertures, in either pixel or sky
        coordinates, and returns them as `.MarvinAperture` objects.
Parameters
----------
coords : tuple or `~numpy.ndarray`
Either a 2-element tuple ``(x, y)`` or ``(ra, dec)`` to define the
            centre of a single aperture, or a list of N tuples or an Nx2 array
to define N apertures.
aperture_params : tuple
A tuple with the parameters of the aperture.
* For ``aperture_type='rectangular'``:
* If ``coord_type='pixel'``, a tuple ``(w, h, theta)`` where
``w`` is the full width of the aperture (for ``theta=0`` the
width side is along the ``x`` axis); ``h`` is the full height
of the aperture (for ``theta=0`` the height side is along the
``y`` axis); and ``theta`` is the rotation angle in radians of
the width (``w``) side from the positive ``x`` axis (the
rotation angle increases counterclockwise).
* If ``coord_type='sky'``, same format but ``w`` and ``h`` must
be in arcsec and ``theta`` is the position angle (in degrees)
of the width side. The position angle increases
counterclockwise, from North (PA=0) towards East.
* For ``aperture_type='circular'``:
* The radius ``r`` in units of pixels or arcsec depending on the
value of ``coord_type``. Can be a tuple or a float.
* For ``aperture_type='elliptical'``:
* If ``coord_type='pixel'``, a tuple ``(a, b, theta)`` where
``a`` and ``b`` are the semi-major and semi-minor axes of the
ellipse, respectively, and ``theta`` is the rotation angle in
radians of the semi-major axis from the positive x axis (the
rotation angle increases counterclockwise). If
``coord_type='sky'``, ``a`` and ``b`` must be in arcsec, and
``theta`` is the position angle (in degrees) of the semi-major
axis. The position angle increases counterclockwise, from North
(PA=0) towards East.
aperture_type : {'rectangular', 'circular', 'elliptical'}
The type of aperture to define.
coord_type : {'pixel', 'sky'}
Determines whether the coordinates and aperture parameters refer
            to the frame of the image or to sky coordinates. The conversion
between the image and sky frames is determined using the WCS
headers from the image.
Returns
-------
marvin_aperture : MarvinAperture object
A `.MarvinAperture` instance with the definition of the aperture,
which can be used to extract the associated spaxels or to return
the mask.
Examples
--------
        A single circular aperture with a radius of 3 pixels can be created as ::
>>> cube = marvin.tools.Cube('8485-1901')
>>> aperture = cube.getAperture((17, 15), 3)
``aperture`` can then be used to return all spaxels that have a
        fractional pixel overlap with the aperture of more than 0.6 ::
>>> spaxels = aperture.getSpaxels(threshold=0.6, lazy=True)
>>> spaxels[0]
<Marvin Spaxel (x=15, y=13, loaded=False)
Apertures can also be defined from sky coordinates. To define two
elliptical apertures with semi-axes 3 and 1 arcsec and rotated
30 degrees we do ::
>>> ap_ell = cube.getAperture([(232.546173, 48.6892288), (232.544069, 48.6906177)],
(3, 1, 30), aperture_type='elliptical')
>>> ap_ell
<MarvinAperture([[232.546173 , 48.6892288],
[232.544069 , 48.6906177]], a=3.0, b=1.0, theta=30.0)>
"""
if photutils is None:
raise ImportError('this feature requires photutils. Install it by '
'doing pip install photutils.')
assert coord_type in ['pixel', 'sky'], 'invalid coord_type'
if isinstance(coords, astropy.coordinates.SkyCoord):
coord_type = 'sky'
else:
coords = numpy.atleast_2d(coords)
if coord_type == 'sky':
coords = astropy.coordinates.SkyCoord(coords, unit='deg')
aperture_params = numpy.atleast_1d(aperture_params).tolist()
if aperture_type == 'circular':
if coord_type == 'pixel':
ApertureClass = photutils.CircularAperture
else:
ApertureClass = photutils.SkyCircularAperture
elif aperture_type == 'elliptical':
if coord_type == 'pixel':
ApertureClass = photutils.EllipticalAperture
else:
ApertureClass = photutils.SkyEllipticalAperture
elif aperture_type == 'rectangular':
if coord_type == 'pixel':
ApertureClass = photutils.RectangularAperture
else:
ApertureClass = photutils.SkyRectangularAperture
else:
raise ValueError('invalid aperture_type')
# If on-sky, converts aperture parameters to quantities
if coord_type == 'sky':
if aperture_type == 'circular':
n_params = 1
else:
n_params = 3
assert len(aperture_params) == n_params, 'invalid number of parameters'
units = [astropy.units.arcsec, astropy.units.arcsec, astropy.units.deg]
for ii in range(n_params):
if not isinstance(aperture_params[ii], astropy.units.Quantity):
aperture_params[ii] *= units[ii]
aperture = ApertureClass(coords, *aperture_params)
# Overrides the aperture class so that it inherits from MarvinAperture and
# can gain the methods we defined there. Sets the parent to self.
aperture.__class__ = type('MarvinAperture', (ApertureClass, MarvinAperture), {})
aperture.parent = self
return aperture
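# Additional usage sketch (not part of the original module): following the
# parameter conventions documented above, a 5 x 2 arcsec rectangular aperture
# with a position angle of 45 degrees at hypothetical sky coordinates would be
#
#     ap_rect = cube.getAperture((232.5447, 48.6902), (5, 2, 45),
#                                aperture_type='rectangular', coord_type='sky')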
| sdss/marvin | python/marvin/tools/mixins/aperture.py | Python | bsd-3-clause | 9,499 |
import unittest
from graphserver.core import *
from random import randint
class TestTripBoard(unittest.TestCase):
def test_basic(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
assert tb.int_service_id == 0
assert tb.timezone.soul == tz.soul
assert tb.calendar.soul == sc.soul
assert tb.agency == 0
assert tb.overage == -1
assert tb.num_boardings == 0
assert tb.type==8
assert tb.soul
tb.destroy()
try:
print tb
raise Exception( "should have failed by now" )
except:
pass
def test_get_boarding_by_trip_id(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
tb.add_boarding( "trip1", 0, 0 )
assert tb.get_boarding_by_trip_id( "trip1" ) == ("trip1", 0, 0)
assert tb.get_boarding_by_trip_id( "bogus" ) == None
tb.add_boarding( "trip2", 1, 1 )
assert tb.get_boarding_by_trip_id( "trip1" ) == ("trip1", 0, 0 )
assert tb.get_boarding_by_trip_id( "trip2" ) == ("trip2", 1, 1 )
assert tb.get_boarding_by_trip_id( "bogus" ) == None
def test_overage(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
assert tb.overage == -1
tb.add_boarding( "midnight", 24*3600, 0 )
assert tb.overage == 0
tb.add_boarding( "nightowl1", 24*3600+1, 0 )
assert tb.overage == 1
tb.add_boarding( "nightowl2", 24*3600+3600, 0 )
assert tb.overage == 3600
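    # The pattern above suggests that ``overage`` records how far the latest
    # boarding extends past the 24-hour service-day boundary, with -1 meaning
    # no boarding crosses it (inferred from these assertions, not documented).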
def test_tripboard_over_midnight(self):
sc = ServiceCalendar()
sc.add_period(0, 1*3600*24, ['WKDY'])
sc.add_period(1*3600*24,2*3600*24, ['SAT'])
tz = Timezone()
tz.add_period( TimezonePeriod(0,2*3600*24,0) )
tb = TripBoard( "WKDY", sc, tz, 0 )
tb.add_boarding( "eleven", 23*3600, 0 )
tb.add_boarding( "midnight", 24*3600, 0 )
tb.add_boarding( "one", 25*3600, 0 )
tb.add_boarding( "two", 26*3600, 0 )
s0 = State(1, 0)
s1 = tb.walk(s0,WalkOptions())
self.assertEqual( s1.weight , 82801 )
assert s1.service_period(0).service_ids == [0]
s0 = State(1, 23*3600 )
s1 = tb.walk(s0,WalkOptions())
assert s1.weight == 1
assert s1.service_period(0).service_ids == [0]
s0 = State(1, 24*3600 )
s1 = tb.walk(s0,WalkOptions())
assert s1.weight == 1
assert s1.service_period(0).service_ids == [1]
s0 = State(1, 25*3600 )
s1 = tb.walk(s0,WalkOptions())
assert s1.time == 25*3600
assert s1.weight == 1
assert s1.service_period(0).service_ids == [1]
s0 = State(1, 26*3600 )
s1 = tb.walk(s0,WalkOptions())
assert s1.time == 26*3600
assert s1.weight == 1
assert s1.service_period(0).service_ids == [1]
s0 = State(1, 26*3600+1)
s1 = tb.walk(s0,WalkOptions())
print s1
self.assertEqual( s1 , None )
def test_tripboard_over_midnight_without_hope(self):
sc = ServiceCalendar()
sc.add_period(0, 1*3600*24, ['WKDY'])
sc.add_period(1*3600*24,2*3600*24, ['SAT'])
sc.add_period(2*3600*24,3*3600*24, ['SUN'])
tz = Timezone()
tz.add_period( TimezonePeriod(0,3*3600*24,0) )
tb = TripBoard( "WKDY", sc, tz, 0 )
tb.add_boarding( "eleven", 23*3600, 0 )
tb.add_boarding( "midnight", 24*3600, 0 )
tb.add_boarding( "one", 25*3600, 0 )
tb.add_boarding( "two", 26*3600, 0 )
s0 = State(1,3*3600*24) #midnight sunday
s1 = tb.walk(s0,WalkOptions())
assert s1 == None
def test_add_single_trip(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
try:
tb.get_boarding( 0 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 0 out of bounds"
tb.add_boarding( "morning", 0, 0 )
assert tb.num_boardings == 1
assert tb.get_boarding( 0 ) == ("morning", 0, 0)
try:
tb.get_boarding( -1 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index -1 out of bounds"
try:
tb.get_boarding( 1 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 1 out of bounds"
def test_add_several_in_order(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
try:
tb.get_boarding( 0 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 0 out of bounds"
tb.add_boarding( "first", 0, 0 )
assert tb.num_boardings == 1
assert tb.get_boarding( 0 ) == ('first', 0, 0)
tb.add_boarding( "second", 50, 0 )
assert tb.num_boardings == 2
assert tb.get_boarding( 0 ) == ('first', 0, 0)
assert tb.get_boarding( 1 ) == ('second', 50, 0)
try:
tb.get_boarding( -1 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index -1 out of bounds"
try:
tb.get_boarding( 2 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 2 out of bounds"
tb.add_boarding( "third", 150, 0 )
assert tb.num_boardings == 3
assert tb.get_boarding( 0 ) == ('first', 0, 0)
assert tb.get_boarding( 1 ) == ('second', 50, 0)
assert tb.get_boarding( 2 ) == ('third', 150, 0)
try:
tb.get_boarding( -1 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index -1 out of bounds"
try:
tb.get_boarding( 3 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 3 out of bounds"
tb.add_boarding( "fourth", 150, 0 )
assert tb.num_boardings == 4
assert tb.get_boarding( 0 ) == ('first', 0, 0)
assert tb.get_boarding( 1 ) == ('second', 50, 0)
assert tb.get_boarding( 2 ) == ('third', 150, 0) or tb.get_boarding( 2 ) == ('fourth', 150, 0)
assert tb.get_boarding( 3 ) == ('third', 150, 0) or tb.get_boarding( 3 ) == ('fourth', 150, 0)
def test_add_several_out_of_order(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
try:
tb.get_boarding( 0 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 0 out of bounds"
tb.add_boarding( "fourth", 150, 0 )
assert tb.num_boardings == 1
assert tb.get_boarding( 0 ) == ('fourth', 150, 0)
tb.add_boarding( "first", 0, 0 )
assert tb.num_boardings == 2
assert tb.get_boarding( 0 ) == ('first', 0, 0)
assert tb.get_boarding( 1 ) == ('fourth', 150, 0)
try:
tb.get_boarding( -1 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index -1 out of bounds"
try:
tb.get_boarding( 2 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 2 out of bounds"
tb.add_boarding( "third", 150, 0 )
assert tb.num_boardings == 3
assert tb.get_boarding( 0 ) == ('first', 0, 0)
assert tb.get_boarding( 1 ) == ('third', 150, 0)
assert tb.get_boarding( 2 ) == ('fourth', 150, 0)
try:
tb.get_boarding( -1 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index -1 out of bounds"
try:
tb.get_boarding( 3 )
raise Exception( "should have popped error by now" )
except Exception, ex:
assert str(ex) == "Index 3 out of bounds"
tb.add_boarding( "second", 50, 0 )
assert tb.num_boardings == 4
assert tb.get_boarding( 0 ) == ('first', 0, 0)
assert tb.get_boarding( 1 ) == ('second', 50, 0)
assert tb.get_boarding( 2 ) == ('third', 150, 0) or tb.get_boarding( 2 ) == ('fourth', 150, 0)
assert tb.get_boarding( 3 ) == ('third', 150, 0) or tb.get_boarding( 3 ) == ('fourth', 150, 0)
def test_add_several_random(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
for i in range(1000):
tb.add_boarding( str(i), randint(0,10000), 0 )
last_depart = -1
for i in range(tb.num_boardings):
trip_id, depart, stop_sequence = tb.get_boarding(i)
assert last_depart <= depart
last_depart = depart
def test_search_boardings_list_single(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
assert tb.search_boardings_list(0) == 0
tb.add_boarding( "morning", 15, 0 )
assert tb.search_boardings_list(5) == 0
assert tb.search_boardings_list(15) == 0
assert tb.search_boardings_list(20) == 1
def test_get_next_boarding_index_single(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
assert tb.get_next_boarding_index(0) == -1
tb.add_boarding( "morning", 15, 0 )
assert tb.get_next_boarding_index(5) == 0
assert tb.get_next_boarding_index(15) == 0
assert tb.get_next_boarding_index(20) == -1
def test_get_next_boarding_single(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
assert tb.get_next_boarding(0) == None
tb.add_boarding( "morning", 15, 0 )
assert tb.get_next_boarding(5) == ( "morning", 15, 0 )
assert tb.get_next_boarding(15) == ( "morning", 15, 0 )
assert tb.get_next_boarding(20) == None
def test_get_next_boarding_several(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24, ['WKDY','SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard("WKDY", sc, tz, 0)
assert tb.get_next_boarding(0) == None
tb.add_boarding( "1", 15, 0 )
assert tb.get_next_boarding(5) == ( "1", 15, 0 )
assert tb.get_next_boarding(15) == ( "1", 15, 0 )
assert tb.get_next_boarding(20) == None
tb.add_boarding( "2", 25, 0 )
assert tb.get_next_boarding(5) == ( "1", 15, 0 )
assert tb.get_next_boarding(15) == ( "1", 15, 0 )
assert tb.get_next_boarding(20) == ( "2", 25, 0 )
assert tb.get_next_boarding(25) == ( "2", 25, 0 )
assert tb.get_next_boarding(30) == None
def test_walk(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24-1, ['WKDY'] )
sc.add_period( 1*3600*25, 2*3600*25-1, ['SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard( "WKDY", sc, tz, 0 )
tb.add_boarding( "1", 50, 0 )
tb.add_boarding( "2", 100, 0 )
tb.add_boarding( "3", 200, 0 )
#wrong day
s = State(1, 1*3600*24)
ret = tb.walk( s,WalkOptions() )
assert ret == None
s = State(1, 0)
ret = tb.walk(s,WalkOptions())
self.assertEqual( ret.time , 50 )
self.assertEqual( ret.weight , 51 )
self.assertEqual( ret.num_transfers , 1 )
self.assertEqual( ret.dist_walked , 0.0 )
s = State(1, 2)
ret = tb.walk(s,WalkOptions())
assert ret.time == 50
assert ret.weight == 49
assert ret.num_transfers == 1
assert ret.dist_walked == 0.0
s = State(1, 50)
ret = tb.walk(s,WalkOptions())
assert ret.time == 50
assert ret.weight == 1
assert ret.num_transfers == 1
assert ret.dist_walked == 0.0
s = State(1, 100)
ret = tb.walk(s,WalkOptions())
assert ret.time == 100
assert ret.weight == 1
assert ret.num_transfers == 1
assert ret.dist_walked == 0.0
s = State(1, 200)
ret = tb.walk(s,WalkOptions())
assert ret.time == 200
assert ret.weight == 1
assert ret.num_transfers == 1
assert ret.dist_walked == 0.0
s = State(1, 201)
ret = tb.walk(s,WalkOptions())
assert ret == None
def test_walk_back(self):
sc = ServiceCalendar()
sc.add_period( 0, 1*3600*24-1, ['WKDY'] )
sc.add_period( 1*3600*25, 2*3600*25-1, ['SAT'] )
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
tb = TripBoard( "WKDY", sc, tz, 0 )
tb.add_boarding( "1", 50, 0 )
tb.add_boarding( "2", 100, 0 )
tb.add_boarding( "3", 200, 0 )
s = State(1,100)
ret = tb.walk_back( s, WalkOptions() )
assert ret.time == 100
assert ret.weight == 0
def test_check_yesterday(self):
"""check the previous day for viable departures"""
# the service calendar has two weekdays, back to back
sc = ServiceCalendar()
sc.add_period( 0, 3600*24, ["WKDY"] )
sc.add_period( 3600*24, 2*3600*24, ["WKDY"] )
# the timezone lasts for two days and has no offset
# this is just boilerplate
tz = Timezone()
tz.add_period( TimezonePeriod(0, 2*3600*24, 0) )
# tripboard runs on weekdays for agency 0
tb = TripBoard( "WKDY", sc, tz, 0 )
# one boarding - one second after midnight
tb.add_boarding( "1", 86400+1, 0 )
# our starting state is midnight between the two days
s0 = State(1, 86400)
# it should be one second until the next boarding
s1 = tb.walk( s0, WalkOptions() )
self.assertEquals( s1.time, 86401 )
def test_check_today(self):
"""given a schedule that runs two consecutive days, find a departure
given a state on midnight between the two days"""
# the service calendar has two weekdays, back to back
sc = ServiceCalendar()
sc.add_period( 0, 3600*24, ["WKDY"] )
sc.add_period( 3600*24, 2*3600*24, ["WKDY"] )
# the timezone lasts for two days and has no offset
# this is just boilerplate
tz = Timezone()
tz.add_period( TimezonePeriod(0, 1*3600*24, 0) )
# tripboard runs on weekdays for agency 0
tb = TripBoard( "WKDY", sc, tz, 0 )
# one boarding - pretty early in the morning
tb.add_boarding( "21SFO1", 26340, 1 )
# our starting state is midnight between the two days
s0 = State(1, 86400)
# it should be early morning on the second day
s1 = tb.walk( s0, WalkOptions() )
self.assertEquals( s1.time, 26340+86400 )
if __name__ == '__main__':
tl = unittest.TestLoader()
suite = tl.loadTestsFromTestCase(TestTripBoard)
unittest.TextTestRunner(verbosity=2).run(suite)
| graphserver/graphserver | pygs/test/unit_test/test_tripboard.py | Python | bsd-3-clause | 17,728 |
from __future__ import unicode_literals
import datetime
import re
from datetime import date
from decimal import Decimal
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.forms.models import (_get_foreign_key, inlineformset_factory,
modelformset_factory, BaseModelFormSet)
from django.test import TestCase, skipUnlessDBFeature
from django.utils import six
from .models import (Author, BetterAuthor, Book, BookWithCustomPK,
BookWithOptionalAltEditor, AlternateBook, AuthorMeeting, CustomPrimaryKey,
Place, Owner, Location, OwnerProfile, Restaurant, Product, Price,
MexicanRestaurant, ClassyMexicanRestaurant, Repository, Revision,
Person, Membership, Team, Player, Poet, Poem, Post)
class DeletionTests(TestCase):
def test_deletion(self):
PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
poet = Poet.objects.create(name='test')
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': str(poet.pk),
'form-0-name': 'test',
'form-0-DELETE': 'on',
}
formset = PoetFormSet(data, queryset=Poet.objects.all())
formset.save(commit=False)
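        # commit=False must not perform the deletion; the poet is only removed
        # once save() is called below.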
self.assertEqual(Poet.objects.count(), 1)
formset.save()
self.assertTrue(formset.is_valid())
self.assertEqual(Poet.objects.count(), 0)
def test_add_form_deletion_when_invalid(self):
"""
Make sure that an add form that is filled out, but marked for deletion
doesn't cause validation errors.
"""
PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
poet = Poet.objects.create(name='test')
        # One existing untouched and two new invalid forms
data = {
'form-TOTAL_FORMS': '3',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': six.text_type(poet.id),
'form-0-name': 'test',
'form-1-id': '',
'form-1-name': 'x' * 1000, # Too long
'form-2-id': six.text_type(poet.id), # Violate unique constraint
'form-2-name': 'test2',
}
formset = PoetFormSet(data, queryset=Poet.objects.all())
# Make sure this form doesn't pass validation.
self.assertEqual(formset.is_valid(), False)
self.assertEqual(Poet.objects.count(), 1)
# Then make sure that it *does* pass validation and delete the object,
        # even though the data in the new forms isn't actually valid.
data['form-0-DELETE'] = 'on'
data['form-1-DELETE'] = 'on'
data['form-2-DELETE'] = 'on'
formset = PoetFormSet(data, queryset=Poet.objects.all())
self.assertEqual(formset.is_valid(), True)
formset.save()
self.assertEqual(Poet.objects.count(), 0)
def test_change_form_deletion_when_invalid(self):
"""
Make sure that a change form that is filled out, but marked for deletion
doesn't cause validation errors.
"""
PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
poet = Poet.objects.create(name='test')
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': six.text_type(poet.id),
'form-0-name': 'x' * 1000,
}
formset = PoetFormSet(data, queryset=Poet.objects.all())
# Make sure this form doesn't pass validation.
self.assertEqual(formset.is_valid(), False)
self.assertEqual(Poet.objects.count(), 1)
# Then make sure that it *does* pass validation and delete the object,
# even though the data isn't actually valid.
data['form-0-DELETE'] = 'on'
formset = PoetFormSet(data, queryset=Poet.objects.all())
self.assertEqual(formset.is_valid(), True)
formset.save()
self.assertEqual(Poet.objects.count(), 0)
def test_outdated_deletion(self):
poet = Poet.objects.create(name='test')
poem = Poem.objects.create(name='Brevity is the soul of wit', poet=poet)
PoemFormSet = inlineformset_factory(Poet, Poem, fields="__all__", can_delete=True)
# Simulate deletion of an object that doesn't exist in the database
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '2',
'form-0-id': str(poem.pk),
'form-0-name': 'foo',
'form-1-id': str(poem.pk + 1), # doesn't exist
'form-1-name': 'bar',
'form-1-DELETE': 'on',
}
formset = PoemFormSet(data, instance=poet, prefix="form")
# The formset is valid even though poem.pk + 1 doesn't exist,
# because it's marked for deletion anyway
self.assertTrue(formset.is_valid())
formset.save()
# Make sure the save went through correctly
self.assertEqual(Poem.objects.get(pk=poem.pk).name, "foo")
self.assertEqual(poet.poem_set.count(), 1)
self.assertFalse(Poem.objects.filter(pk=poem.pk + 1).exists())
class ModelFormsetTest(TestCase):
def test_modelformset_factory_without_fields(self):
""" Regression for #19733 """
message = (
"Calling modelformset_factory without defining 'fields' or 'exclude' "
"explicitly is prohibited."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
modelformset_factory(Author)
def test_simple_save(self):
qs = Author.objects.all()
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label> <input id="id_form-0-name" type="text" name="form-0-name" maxlength="100" /><input type="hidden" name="form-0-id" id="id_form-0-id" /></p>')
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label> <input id="id_form-1-name" type="text" name="form-1-name" maxlength="100" /><input type="hidden" name="form-1-id" id="id_form-1-id" /></p>')
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_form-2-name">Name:</label> <input id="id_form-2-name" type="text" name="form-2-name" maxlength="100" /><input type="hidden" name="form-2-id" id="id_form-2-id" /></p>')
data = {
'form-TOTAL_FORMS': '3', # the number of forms rendered
'form-INITIAL_FORMS': '0', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-name': 'Charles Baudelaire',
'form-1-name': 'Arthur Rimbaud',
'form-2-name': '',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 2)
author1, author2 = saved
self.assertEqual(author1, Author.objects.get(name='Charles Baudelaire'))
self.assertEqual(author2, Author.objects.get(name='Arthur Rimbaud'))
authors = list(Author.objects.order_by('name'))
self.assertEqual(authors, [author2, author1])
# Gah! We forgot Paul Verlaine. Let's create a formset to edit the
# existing authors with an extra form to add him. We *could* pass in a
# queryset to restrict the Author objects we edit, but in this case
# we'll use it to display them in alphabetical order by name.
qs = Author.objects.order_by('name')
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=False)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label> <input id="id_form-0-name" type="text" name="form-0-name" value="Arthur Rimbaud" maxlength="100" /><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></p>' % author2.id)
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label> <input id="id_form-1-name" type="text" name="form-1-name" value="Charles Baudelaire" maxlength="100" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" /></p>' % author1.id)
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_form-2-name">Name:</label> <input id="id_form-2-name" type="text" name="form-2-name" maxlength="100" /><input type="hidden" name="form-2-id" id="id_form-2-id" /></p>')
data = {
'form-TOTAL_FORMS': '3', # the number of forms rendered
'form-INITIAL_FORMS': '2', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(author2.id),
'form-0-name': 'Arthur Rimbaud',
'form-1-id': str(author1.id),
'form-1-name': 'Charles Baudelaire',
'form-2-name': 'Paul Verlaine',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
# Only changed or new objects are returned from formset.save()
saved = formset.save()
self.assertEqual(len(saved), 1)
author3 = saved[0]
self.assertEqual(author3, Author.objects.get(name='Paul Verlaine'))
authors = list(Author.objects.order_by('name'))
self.assertEqual(authors, [author2, author1, author3])
# This probably shouldn't happen, but it will. If an add form was
# marked for deletion, make sure we don't save that form.
qs = Author.objects.order_by('name')
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=True)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 4)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label> <input id="id_form-0-name" type="text" name="form-0-name" value="Arthur Rimbaud" maxlength="100" /></p>\n'
'<p><label for="id_form-0-DELETE">Delete:</label> <input type="checkbox" name="form-0-DELETE" id="id_form-0-DELETE" /><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></p>' % author2.id)
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label> <input id="id_form-1-name" type="text" name="form-1-name" value="Charles Baudelaire" maxlength="100" /></p>\n'
'<p><label for="id_form-1-DELETE">Delete:</label> <input type="checkbox" name="form-1-DELETE" id="id_form-1-DELETE" /><input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" /></p>' % author1.id)
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_form-2-name">Name:</label> <input id="id_form-2-name" type="text" name="form-2-name" value="Paul Verlaine" maxlength="100" /></p>\n'
'<p><label for="id_form-2-DELETE">Delete:</label> <input type="checkbox" name="form-2-DELETE" id="id_form-2-DELETE" /><input type="hidden" name="form-2-id" value="%d" id="id_form-2-id" /></p>' % author3.id)
self.assertHTMLEqual(formset.forms[3].as_p(),
'<p><label for="id_form-3-name">Name:</label> <input id="id_form-3-name" type="text" name="form-3-name" maxlength="100" /></p>\n'
'<p><label for="id_form-3-DELETE">Delete:</label> <input type="checkbox" name="form-3-DELETE" id="id_form-3-DELETE" /><input type="hidden" name="form-3-id" id="id_form-3-id" /></p>')
data = {
'form-TOTAL_FORMS': '4', # the number of forms rendered
'form-INITIAL_FORMS': '3', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(author2.id),
'form-0-name': 'Arthur Rimbaud',
'form-1-id': str(author1.id),
'form-1-name': 'Charles Baudelaire',
'form-2-id': str(author3.id),
'form-2-name': 'Paul Verlaine',
'form-3-name': 'Walt Whitman',
'form-3-DELETE': 'on',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
# No objects were changed or saved so nothing will come back.
self.assertEqual(formset.save(), [])
authors = list(Author.objects.order_by('name'))
self.assertEqual(authors, [author2, author1, author3])
# Let's edit a record to ensure save only returns that one record.
data = {
'form-TOTAL_FORMS': '4', # the number of forms rendered
'form-INITIAL_FORMS': '3', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(author2.id),
'form-0-name': 'Walt Whitman',
'form-1-id': str(author1.id),
'form-1-name': 'Charles Baudelaire',
'form-2-id': str(author3.id),
'form-2-name': 'Paul Verlaine',
'form-3-name': '',
'form-3-DELETE': '',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
# One record has changed.
saved = formset.save()
self.assertEqual(len(saved), 1)
self.assertEqual(saved[0], Author.objects.get(name='Walt Whitman'))
def test_commit_false(self):
# Test the behavior of commit=False and save_m2m
author1 = Author.objects.create(name='Charles Baudelaire')
author2 = Author.objects.create(name='Paul Verlaine')
author3 = Author.objects.create(name='Walt Whitman')
meeting = AuthorMeeting.objects.create(created=date.today())
meeting.authors = Author.objects.all()
# create an Author instance to add to the meeting.
author4 = Author.objects.create(name='John Steinbeck')
AuthorMeetingFormSet = modelformset_factory(AuthorMeeting, fields="__all__", extra=1, can_delete=True)
data = {
'form-TOTAL_FORMS': '2', # the number of forms rendered
'form-INITIAL_FORMS': '1', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(meeting.id),
'form-0-name': '2nd Tuesday of the Week Meeting',
'form-0-authors': [author2.id, author1.id, author3.id, author4.id],
'form-1-name': '',
'form-1-authors': '',
'form-1-DELETE': '',
}
formset = AuthorMeetingFormSet(data=data, queryset=AuthorMeeting.objects.all())
self.assertTrue(formset.is_valid())
instances = formset.save(commit=False)
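        # With commit=False the formset returns unsaved instances and defers
        # the many-to-many data; save_m2m() below writes the authors relation.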
for instance in instances:
instance.created = date.today()
instance.save()
formset.save_m2m()
self.assertQuerysetEqual(instances[0].authors.all(), [
'<Author: Charles Baudelaire>',
'<Author: John Steinbeck>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
def test_max_num(self):
# Test the behavior of max_num with model formsets. It should allow
# all existing related objects/inlines for a given object to be
# displayed, but not allow the creation of new inlines beyond max_num.
Author.objects.create(name='Charles Baudelaire')
Author.objects.create(name='Paul Verlaine')
Author.objects.create(name='Walt Whitman')
qs = Author.objects.order_by('name')
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None, extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 6)
self.assertEqual(len(formset.extra_forms), 3)
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4, extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 4)
self.assertEqual(len(formset.extra_forms), 1)
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0, extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 3)
self.assertEqual(len(formset.extra_forms), 0)
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None)
formset = AuthorFormSet(queryset=qs)
self.assertQuerysetEqual(formset.get_queryset(), [
'<Author: Charles Baudelaire>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0)
formset = AuthorFormSet(queryset=qs)
self.assertQuerysetEqual(formset.get_queryset(), [
'<Author: Charles Baudelaire>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4)
formset = AuthorFormSet(queryset=qs)
self.assertQuerysetEqual(formset.get_queryset(), [
'<Author: Charles Baudelaire>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
def test_custom_save_method(self):
class PoetForm(forms.ModelForm):
def save(self, commit=True):
# change the name to "Vladimir Mayakovsky" just to be a jerk.
author = super(PoetForm, self).save(commit=False)
author.name = "Vladimir Mayakovsky"
if commit:
author.save()
return author
PoetFormSet = modelformset_factory(Poet, fields="__all__", form=PoetForm)
data = {
'form-TOTAL_FORMS': '3', # the number of forms rendered
'form-INITIAL_FORMS': '0', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-name': 'Walt Whitman',
'form-1-name': 'Charles Baudelaire',
'form-2-name': '',
}
qs = Poet.objects.all()
formset = PoetFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
poets = formset.save()
self.assertEqual(len(poets), 2)
poet1, poet2 = poets
self.assertEqual(poet1.name, 'Vladimir Mayakovsky')
self.assertEqual(poet2.name, 'Vladimir Mayakovsky')
def test_custom_form(self):
""" Test that model_formset respects fields and exclude parameters of
custom form
"""
class PostForm1(forms.ModelForm):
class Meta:
model = Post
fields = ('title', 'posted')
class PostForm2(forms.ModelForm):
class Meta:
model = Post
exclude = ('subtitle',)
PostFormSet = modelformset_factory(Post, form=PostForm1)
formset = PostFormSet()
self.assertFalse("subtitle" in formset.forms[0].fields)
PostFormSet = modelformset_factory(Post, form=PostForm2)
formset = PostFormSet()
self.assertFalse("subtitle" in formset.forms[0].fields)
def test_custom_queryset_init(self):
"""
Test that a queryset can be overridden in the __init__ method.
https://docs.djangoproject.com/en/dev/topics/forms/modelforms/#changing-the-queryset
"""
Author.objects.create(name='Charles Baudelaire')
Author.objects.create(name='Paul Verlaine')
class BaseAuthorFormSet(BaseModelFormSet):
def __init__(self, *args, **kwargs):
super(BaseAuthorFormSet, self).__init__(*args, **kwargs)
self.queryset = Author.objects.filter(name__startswith='Charles')
AuthorFormSet = modelformset_factory(Author, fields='__all__', formset=BaseAuthorFormSet)
formset = AuthorFormSet()
self.assertEqual(len(formset.get_queryset()), 1)
def test_model_inheritance(self):
BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields="__all__")
formset = BetterAuthorFormSet()
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label> <input id="id_form-0-name" type="text" name="form-0-name" maxlength="100" /></p>\n'
'<p><label for="id_form-0-write_speed">Write speed:</label> <input type="number" name="form-0-write_speed" id="id_form-0-write_speed" /><input type="hidden" name="form-0-author_ptr" id="id_form-0-author_ptr" /></p>')
data = {
'form-TOTAL_FORMS': '1', # the number of forms rendered
'form-INITIAL_FORMS': '0', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-author_ptr': '',
'form-0-name': 'Ernest Hemingway',
'form-0-write_speed': '10',
}
formset = BetterAuthorFormSet(data)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
author1, = saved
self.assertEqual(author1, BetterAuthor.objects.get(name='Ernest Hemingway'))
hemingway_id = BetterAuthor.objects.get(name="Ernest Hemingway").pk
formset = BetterAuthorFormSet()
self.assertEqual(len(formset.forms), 2)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label> <input id="id_form-0-name" type="text" name="form-0-name" value="Ernest Hemingway" maxlength="100" /></p>\n'
'<p><label for="id_form-0-write_speed">Write speed:</label> <input type="number" name="form-0-write_speed" value="10" id="id_form-0-write_speed" /><input type="hidden" name="form-0-author_ptr" value="%d" id="id_form-0-author_ptr" /></p>' % hemingway_id)
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label> <input id="id_form-1-name" type="text" name="form-1-name" maxlength="100" /></p>\n'
'<p><label for="id_form-1-write_speed">Write speed:</label> <input type="number" name="form-1-write_speed" id="id_form-1-write_speed" /><input type="hidden" name="form-1-author_ptr" id="id_form-1-author_ptr" /></p>')
data = {
'form-TOTAL_FORMS': '2', # the number of forms rendered
'form-INITIAL_FORMS': '1', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-author_ptr': hemingway_id,
'form-0-name': 'Ernest Hemingway',
'form-0-write_speed': '10',
'form-1-author_ptr': '',
'form-1-name': '',
'form-1-write_speed': '',
}
formset = BetterAuthorFormSet(data)
self.assertTrue(formset.is_valid())
self.assertEqual(formset.save(), [])
def test_inline_formsets(self):
# We can also create a formset that is tied to a parent model. This is
# how the admin system's edit inline functionality works.
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=3, fields="__all__")
author = Author.objects.create(name='Charles Baudelaire')
formset = AuthorBooksFormSet(instance=author)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" name="book_set-0-title" maxlength="100" /><input type="hidden" name="book_set-0-author" value="%d" id="id_book_set-0-author" /><input type="hidden" name="book_set-0-id" id="id_book_set-0-id" /></p>' % author.id)
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label> <input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100" /><input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author" /><input type="hidden" name="book_set-1-id" id="id_book_set-1-id" /></p>' % author.id)
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label> <input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100" /><input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author" /><input type="hidden" name="book_set-2-id" id="id_book_set-2-id" /></p>' % author.id)
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '0', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-title': 'Les Fleurs du Mal',
'book_set-1-title': '',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book1, = saved
self.assertEqual(book1, Book.objects.get(title='Les Fleurs du Mal'))
self.assertQuerysetEqual(author.book_set.all(), ['<Book: Les Fleurs du Mal>'])
# Now that we've added a book to Charles Baudelaire, let's try adding
# another one. This time though, an edit form will be available for
# every existing book.
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__")
author = Author.objects.get(name='Charles Baudelaire')
formset = AuthorBooksFormSet(instance=author)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" name="book_set-0-title" value="Les Fleurs du Mal" maxlength="100" /><input type="hidden" name="book_set-0-author" value="%d" id="id_book_set-0-author" /><input type="hidden" name="book_set-0-id" value="%d" id="id_book_set-0-id" /></p>' % (author.id, book1.id))
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label> <input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100" /><input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author" /><input type="hidden" name="book_set-1-id" id="id_book_set-1-id" /></p>' % author.id)
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label> <input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100" /><input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author" /><input type="hidden" name="book_set-2-id" id="id_book_set-2-id" /></p>' % author.id)
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '1', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': str(book1.id),
'book_set-0-title': 'Les Fleurs du Mal',
'book_set-1-title': 'Les Paradis Artificiels',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book2, = saved
self.assertEqual(book2, Book.objects.get(title='Les Paradis Artificiels'))
# As you can see, 'Les Paradis Artificiels' is now a book belonging to
# Charles Baudelaire.
self.assertQuerysetEqual(author.book_set.order_by('title'), [
'<Book: Les Fleurs du Mal>',
'<Book: Les Paradis Artificiels>',
])
def test_inline_formsets_save_as_new(self):
# The save_as_new parameter lets you re-associate the data to a new
# instance. This is used in the admin for save_as functionality.
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__")
Author.objects.create(name='Charles Baudelaire')
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '2', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': '1',
'book_set-0-title': 'Les Fleurs du Mal',
'book_set-1-id': '2',
'book_set-1-title': 'Les Paradis Artificiels',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=Author(), save_as_new=True)
self.assertTrue(formset.is_valid())
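        # save_as_new drops the posted primary keys, so the forms are treated
        # as new objects and attached to whichever instance the formset is
        # bound to.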
new_author = Author.objects.create(name='Charles Baudelaire')
formset = AuthorBooksFormSet(data, instance=new_author, save_as_new=True)
saved = formset.save()
self.assertEqual(len(saved), 2)
book1, book2 = saved
self.assertEqual(book1.title, 'Les Fleurs du Mal')
self.assertEqual(book2.title, 'Les Paradis Artificiels')
# Test using a custom prefix on an inline formset.
formset = AuthorBooksFormSet(prefix="test")
self.assertEqual(len(formset.forms), 2)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_test-0-title">Title:</label> <input id="id_test-0-title" type="text" name="test-0-title" maxlength="100" /><input type="hidden" name="test-0-author" id="id_test-0-author" /><input type="hidden" name="test-0-id" id="id_test-0-id" /></p>')
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_test-1-title">Title:</label> <input id="id_test-1-title" type="text" name="test-1-title" maxlength="100" /><input type="hidden" name="test-1-author" id="id_test-1-author" /><input type="hidden" name="test-1-id" id="id_test-1-id" /></p>')
def test_inline_formsets_with_custom_pk(self):
# Test inline formsets where the inline-edited object has a custom
# primary key that is not the fk to the parent object.
self.maxDiff = 1024
AuthorBooksFormSet2 = inlineformset_factory(Author, BookWithCustomPK, can_delete=False, extra=1, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
formset = AuthorBooksFormSet2(instance=author)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_bookwithcustompk_set-0-my_pk">My pk:</label> <input id="id_bookwithcustompk_set-0-my_pk" type="number" name="bookwithcustompk_set-0-my_pk" step="1" /></p>\n'
'<p><label for="id_bookwithcustompk_set-0-title">Title:</label> <input id="id_bookwithcustompk_set-0-title" type="text" name="bookwithcustompk_set-0-title" maxlength="100" /><input type="hidden" name="bookwithcustompk_set-0-author" value="1" id="id_bookwithcustompk_set-0-author" /></p>')
data = {
'bookwithcustompk_set-TOTAL_FORMS': '1', # the number of forms rendered
'bookwithcustompk_set-INITIAL_FORMS': '0', # the number of forms with initial data
'bookwithcustompk_set-MAX_NUM_FORMS': '', # the max number of forms
'bookwithcustompk_set-0-my_pk': '77777',
'bookwithcustompk_set-0-title': 'Les Fleurs du Mal',
}
formset = AuthorBooksFormSet2(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book1, = saved
self.assertEqual(book1.pk, 77777)
book1 = author.bookwithcustompk_set.get()
self.assertEqual(book1.title, 'Les Fleurs du Mal')
def test_inline_formsets_with_multi_table_inheritance(self):
# Test inline formsets where the inline-edited object uses multi-table
# inheritance, thus has a non AutoField yet auto-created primary key.
AuthorBooksFormSet3 = inlineformset_factory(Author, AlternateBook, can_delete=False, extra=1, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
formset = AuthorBooksFormSet3(instance=author)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_alternatebook_set-0-title">Title:</label> <input id="id_alternatebook_set-0-title" type="text" name="alternatebook_set-0-title" maxlength="100" /></p>\n'
'<p><label for="id_alternatebook_set-0-notes">Notes:</label> <input id="id_alternatebook_set-0-notes" type="text" name="alternatebook_set-0-notes" maxlength="100" /><input type="hidden" name="alternatebook_set-0-author" value="1" id="id_alternatebook_set-0-author" /><input type="hidden" name="alternatebook_set-0-book_ptr" id="id_alternatebook_set-0-book_ptr" /></p>')
data = {
'alternatebook_set-TOTAL_FORMS': '1', # the number of forms rendered
'alternatebook_set-INITIAL_FORMS': '0', # the number of forms with initial data
'alternatebook_set-MAX_NUM_FORMS': '', # the max number of forms
'alternatebook_set-0-title': 'Flowers of Evil',
'alternatebook_set-0-notes': 'English translation of Les Fleurs du Mal'
}
formset = AuthorBooksFormSet3(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book1, = saved
self.assertEqual(book1.title, 'Flowers of Evil')
self.assertEqual(book1.notes, 'English translation of Les Fleurs du Mal')
@skipUnlessDBFeature('ignores_nulls_in_unique_constraints')
def test_inline_formsets_with_nullable_unique_together(self):
# Test inline formsets where the inline-edited object has a
# unique_together constraint with a nullable member
AuthorBooksFormSet4 = inlineformset_factory(Author, BookWithOptionalAltEditor, can_delete=False, extra=2, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
data = {
'bookwithoptionalalteditor_set-TOTAL_FORMS': '2', # the number of forms rendered
'bookwithoptionalalteditor_set-INITIAL_FORMS': '0', # the number of forms with initial data
'bookwithoptionalalteditor_set-MAX_NUM_FORMS': '', # the max number of forms
'bookwithoptionalalteditor_set-0-author': '1',
'bookwithoptionalalteditor_set-0-title': 'Les Fleurs du Mal',
'bookwithoptionalalteditor_set-1-author': '1',
'bookwithoptionalalteditor_set-1-title': 'Les Fleurs du Mal',
}
formset = AuthorBooksFormSet4(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 2)
book1, book2 = saved
self.assertEqual(book1.author_id, 1)
self.assertEqual(book1.title, 'Les Fleurs du Mal')
self.assertEqual(book2.author_id, 1)
self.assertEqual(book2.title, 'Les Fleurs du Mal')
def test_inline_formsets_with_custom_save_method(self):
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
book1 = Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels')
book2 = Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal')
book3 = Book.objects.create(pk=3, author=author, title='Flowers of Evil')
class PoemForm(forms.ModelForm):
def save(self, commit=True):
# change the name to "Brooklyn Bridge" just to be a jerk.
poem = super(PoemForm, self).save(commit=False)
poem.name = "Brooklyn Bridge"
if commit:
poem.save()
return poem
PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm, fields="__all__")
data = {
'poem_set-TOTAL_FORMS': '3', # the number of forms rendered
'poem_set-INITIAL_FORMS': '0', # the number of forms with initial data
'poem_set-MAX_NUM_FORMS': '', # the max number of forms
'poem_set-0-name': 'The Cloud in Trousers',
'poem_set-1-name': 'I',
'poem_set-2-name': '',
}
poet = Poet.objects.create(name='Vladimir Mayakovsky')
formset = PoemFormSet(data=data, instance=poet)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 2)
poem1, poem2 = saved
self.assertEqual(poem1.name, 'Brooklyn Bridge')
self.assertEqual(poem2.name, 'Brooklyn Bridge')
# We can provide a custom queryset to our InlineFormSet:
custom_qs = Book.objects.order_by('-title')
formset = AuthorBooksFormSet(instance=author, queryset=custom_qs)
self.assertEqual(len(formset.forms), 5)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" name="book_set-0-title" value="Les Paradis Artificiels" maxlength="100" /><input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author" /><input type="hidden" name="book_set-0-id" value="1" id="id_book_set-0-id" /></p>')
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label> <input id="id_book_set-1-title" type="text" name="book_set-1-title" value="Les Fleurs du Mal" maxlength="100" /><input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author" /><input type="hidden" name="book_set-1-id" value="2" id="id_book_set-1-id" /></p>')
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label> <input id="id_book_set-2-title" type="text" name="book_set-2-title" value="Flowers of Evil" maxlength="100" /><input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author" /><input type="hidden" name="book_set-2-id" value="3" id="id_book_set-2-id" /></p>')
self.assertHTMLEqual(formset.forms[3].as_p(),
'<p><label for="id_book_set-3-title">Title:</label> <input id="id_book_set-3-title" type="text" name="book_set-3-title" maxlength="100" /><input type="hidden" name="book_set-3-author" value="1" id="id_book_set-3-author" /><input type="hidden" name="book_set-3-id" id="id_book_set-3-id" /></p>')
self.assertHTMLEqual(formset.forms[4].as_p(),
'<p><label for="id_book_set-4-title">Title:</label> <input id="id_book_set-4-title" type="text" name="book_set-4-title" maxlength="100" /><input type="hidden" name="book_set-4-author" value="1" id="id_book_set-4-author" /><input type="hidden" name="book_set-4-id" id="id_book_set-4-id" /></p>')
data = {
'book_set-TOTAL_FORMS': '5', # the number of forms rendered
'book_set-INITIAL_FORMS': '3', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': str(book1.id),
'book_set-0-title': 'Les Paradis Artificiels',
'book_set-1-id': str(book2.id),
'book_set-1-title': 'Les Fleurs du Mal',
'book_set-2-id': str(book3.id),
'book_set-2-title': 'Flowers of Evil',
'book_set-3-title': 'Revue des deux mondes',
'book_set-4-title': '',
}
formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs)
self.assertTrue(formset.is_valid())
custom_qs = Book.objects.filter(title__startswith='F')
formset = AuthorBooksFormSet(instance=author, queryset=custom_qs)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" name="book_set-0-title" value="Flowers of Evil" maxlength="100" /><input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author" /><input type="hidden" name="book_set-0-id" value="3" id="id_book_set-0-id" /></p>')
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label> <input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100" /><input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author" /><input type="hidden" name="book_set-1-id" id="id_book_set-1-id" /></p>')
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label> <input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100" /><input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author" /><input type="hidden" name="book_set-2-id" id="id_book_set-2-id" /></p>')
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '1', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': str(book3.id),
'book_set-0-title': 'Flowers of Evil',
'book_set-1-title': 'Revue des deux mondes',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs)
self.assertTrue(formset.is_valid())
def test_custom_pk(self):
        # A custom primary key field must be displayed so the user can supply its value.
CustomPrimaryKeyFormSet = modelformset_factory(CustomPrimaryKey, fields="__all__")
formset = CustomPrimaryKeyFormSet()
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-my_pk">My pk:</label> <input id="id_form-0-my_pk" type="text" name="form-0-my_pk" maxlength="10" /></p>\n'
'<p><label for="id_form-0-some_field">Some field:</label> <input id="id_form-0-some_field" type="text" name="form-0-some_field" maxlength="100" /></p>')
# Custom primary keys with ForeignKey, OneToOneField and AutoField ############
place = Place.objects.create(pk=1, name='Giordanos', city='Chicago')
FormSet = inlineformset_factory(Place, Owner, extra=2, can_delete=False, fields="__all__")
formset = FormSet(instance=place)
self.assertEqual(len(formset.forms), 2)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_owner_set-0-name">Name:</label> <input id="id_owner_set-0-name" type="text" name="owner_set-0-name" maxlength="100" /><input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place" /><input type="hidden" name="owner_set-0-auto_id" id="id_owner_set-0-auto_id" /></p>')
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_owner_set-1-name">Name:</label> <input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100" /><input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place" /><input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id" /></p>')
data = {
'owner_set-TOTAL_FORMS': '2',
'owner_set-INITIAL_FORMS': '0',
'owner_set-MAX_NUM_FORMS': '',
'owner_set-0-auto_id': '',
'owner_set-0-name': 'Joe Perry',
'owner_set-1-auto_id': '',
'owner_set-1-name': '',
}
formset = FormSet(data, instance=place)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
owner1, = saved
self.assertEqual(owner1.name, 'Joe Perry')
self.assertEqual(owner1.place.name, 'Giordanos')
formset = FormSet(instance=place)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_owner_set-0-name">Name:</label> <input id="id_owner_set-0-name" type="text" name="owner_set-0-name" value="Joe Perry" maxlength="100" /><input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place" /><input type="hidden" name="owner_set-0-auto_id" value="%d" id="id_owner_set-0-auto_id" /></p>'
% owner1.auto_id)
self.assertHTMLEqual(formset.forms[1].as_p(),
'<p><label for="id_owner_set-1-name">Name:</label> <input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100" /><input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place" /><input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id" /></p>')
self.assertHTMLEqual(formset.forms[2].as_p(),
'<p><label for="id_owner_set-2-name">Name:</label> <input id="id_owner_set-2-name" type="text" name="owner_set-2-name" maxlength="100" /><input type="hidden" name="owner_set-2-place" value="1" id="id_owner_set-2-place" /><input type="hidden" name="owner_set-2-auto_id" id="id_owner_set-2-auto_id" /></p>')
data = {
'owner_set-TOTAL_FORMS': '3',
'owner_set-INITIAL_FORMS': '1',
'owner_set-MAX_NUM_FORMS': '',
'owner_set-0-auto_id': six.text_type(owner1.auto_id),
'owner_set-0-name': 'Joe Perry',
'owner_set-1-auto_id': '',
'owner_set-1-name': 'Jack Berry',
'owner_set-2-auto_id': '',
'owner_set-2-name': '',
}
formset = FormSet(data, instance=place)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
owner2, = saved
self.assertEqual(owner2.name, 'Jack Berry')
self.assertEqual(owner2.place.name, 'Giordanos')
# Ensure a custom primary key that is a ForeignKey or OneToOneField gets rendered for the user to choose.
FormSet = modelformset_factory(OwnerProfile, fields="__all__")
formset = FormSet()
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_form-0-owner">Owner:</label> <select name="form-0-owner" id="id_form-0-owner">\n'
'<option value="" selected="selected">---------</option>\n'
'<option value="%d">Joe Perry at Giordanos</option>\n'
'<option value="%d">Jack Berry at Giordanos</option>\n'
'</select></p>\n'
'<p><label for="id_form-0-age">Age:</label> <input type="number" name="form-0-age" id="id_form-0-age" min="0" /></p>'
% (owner1.auto_id, owner2.auto_id))
owner1 = Owner.objects.get(name='Joe Perry')
FormSet = inlineformset_factory(Owner, OwnerProfile, max_num=1, can_delete=False, fields="__all__")
self.assertEqual(FormSet.max_num, 1)
formset = FormSet(instance=owner1)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_ownerprofile-0-age">Age:</label> <input type="number" name="ownerprofile-0-age" id="id_ownerprofile-0-age" min="0" /><input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner" /></p>'
% owner1.auto_id)
data = {
'ownerprofile-TOTAL_FORMS': '1',
'ownerprofile-INITIAL_FORMS': '0',
'ownerprofile-MAX_NUM_FORMS': '1',
'ownerprofile-0-owner': '',
'ownerprofile-0-age': '54',
}
formset = FormSet(data, instance=owner1)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
profile1, = saved
self.assertEqual(profile1.owner, owner1)
self.assertEqual(profile1.age, 54)
formset = FormSet(instance=owner1)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_ownerprofile-0-age">Age:</label> <input type="number" name="ownerprofile-0-age" value="54" id="id_ownerprofile-0-age" min="0" /><input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner" /></p>'
% owner1.auto_id)
data = {
'ownerprofile-TOTAL_FORMS': '1',
'ownerprofile-INITIAL_FORMS': '1',
'ownerprofile-MAX_NUM_FORMS': '1',
'ownerprofile-0-owner': six.text_type(owner1.auto_id),
'ownerprofile-0-age': '55',
}
formset = FormSet(data, instance=owner1)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
profile1, = saved
self.assertEqual(profile1.owner, owner1)
self.assertEqual(profile1.age, 55)
def test_unique_true_enforces_max_num_one(self):
# ForeignKey with unique=True should enforce max_num=1
place = Place.objects.create(pk=1, name='Giordanos', city='Chicago')
FormSet = inlineformset_factory(Place, Location, can_delete=False, fields="__all__")
self.assertEqual(FormSet.max_num, 1)
formset = FormSet(instance=place)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(formset.forms[0].as_p(),
'<p><label for="id_location_set-0-lat">Lat:</label> <input id="id_location_set-0-lat" type="text" name="location_set-0-lat" maxlength="100" /></p>\n'
'<p><label for="id_location_set-0-lon">Lon:</label> <input id="id_location_set-0-lon" type="text" name="location_set-0-lon" maxlength="100" /><input type="hidden" name="location_set-0-place" value="1" id="id_location_set-0-place" /><input type="hidden" name="location_set-0-id" id="id_location_set-0-id" /></p>')
def test_foreign_keys_in_parents(self):
self.assertEqual(type(_get_foreign_key(Restaurant, Owner)), models.ForeignKey)
self.assertEqual(type(_get_foreign_key(MexicanRestaurant, Owner)), models.ForeignKey)
def test_unique_validation(self):
FormSet = modelformset_factory(Product, fields="__all__", extra=1)
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-slug': 'car-red',
}
formset = FormSet(data)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
product1, = saved
self.assertEqual(product1.slug, 'car-red')
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-slug': 'car-red',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'slug': ['Product with this Slug already exists.']}])
def test_modelformset_validate_max_flag(self):
# If validate_max is set and max_num is less than TOTAL_FORMS in the
# data, a non-form validation error is raised. MAX_NUM_FORMS in the data
# is irrelevant here (it's output as a hint for the client but its
# value in the returned data is not checked).
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '2', # should be ignored
'form-0-price': '12.00',
'form-0-quantity': '1',
'form-1-price': '24.00',
'form-1-quantity': '2',
}
FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1, validate_max=True)
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['Please submit 1 or fewer forms.'])
# Now test the same thing without the validate_max flag to ensure
# default behavior is unchanged
FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1)
formset = FormSet(data)
self.assertTrue(formset.is_valid())
def test_unique_together_validation(self):
FormSet = modelformset_factory(Price, fields="__all__", extra=1)
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-price': '12.00',
'form-0-quantity': '1',
}
formset = FormSet(data)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
price1, = saved
self.assertEqual(price1.price, Decimal('12.00'))
self.assertEqual(price1.quantity, 1)
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-price': '12.00',
'form-0-quantity': '1',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'__all__': ['Price with this Price and Quantity already exists.']}])
def test_unique_together_with_inlineformset_factory(self):
# Also see bug #8882.
repository = Repository.objects.create(name='Test Repo')
FormSet = inlineformset_factory(Repository, Revision, extra=1, fields="__all__")
data = {
'revision_set-TOTAL_FORMS': '1',
'revision_set-INITIAL_FORMS': '0',
'revision_set-MAX_NUM_FORMS': '',
'revision_set-0-repository': repository.pk,
'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76',
'revision_set-0-DELETE': '',
}
formset = FormSet(data, instance=repository)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
revision1, = saved
self.assertEqual(revision1.repository, repository)
self.assertEqual(revision1.revision, '146239817507f148d448db38840db7c3cbf47c76')
# Attempt to save the same revision against the same repo.
data = {
'revision_set-TOTAL_FORMS': '1',
'revision_set-INITIAL_FORMS': '0',
'revision_set-MAX_NUM_FORMS': '',
'revision_set-0-repository': repository.pk,
'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76',
'revision_set-0-DELETE': '',
}
formset = FormSet(data, instance=repository)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'__all__': ['Revision with this Repository and Revision already exists.']}])
# unique_together with inlineformset_factory with overridden form fields
# Also see #9494
FormSet = inlineformset_factory(Repository, Revision, fields=('revision',), extra=1)
data = {
'revision_set-TOTAL_FORMS': '1',
'revision_set-INITIAL_FORMS': '0',
'revision_set-MAX_NUM_FORMS': '',
'revision_set-0-repository': repository.pk,
'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76',
'revision_set-0-DELETE': '',
}
formset = FormSet(data, instance=repository)
self.assertFalse(formset.is_valid())
def test_callable_defaults(self):
# Use of callable defaults (see bug #7975).
person = Person.objects.create(name='Ringo')
FormSet = inlineformset_factory(Person, Membership, can_delete=False, extra=1, fields="__all__")
formset = FormSet(instance=person)
# Django will render a hidden field for model fields that have a callable
# default. This is required to ensure the value is tested for change correctly
# when determining which extra forms have changed and need to be saved.
self.assertEqual(len(formset.forms), 1) # this formset only has one form
form = formset.forms[0]
now = form.fields['date_joined'].initial()
result = form.as_p()
result = re.sub(r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(?:\.\d+)?', '__DATETIME__', result)
self.assertHTMLEqual(result,
'<p><label for="id_membership_set-0-date_joined">Date joined:</label> <input type="text" name="membership_set-0-date_joined" value="__DATETIME__" id="id_membership_set-0-date_joined" /><input type="hidden" name="initial-membership_set-0-date_joined" value="__DATETIME__" id="initial-membership_set-0-id_membership_set-0-date_joined" /></p>\n'
'<p><label for="id_membership_set-0-karma">Karma:</label> <input type="number" name="membership_set-0-karma" id="id_membership_set-0-karma" /><input type="hidden" name="membership_set-0-person" value="%d" id="id_membership_set-0-person" /><input type="hidden" name="membership_set-0-id" id="id_membership_set-0-id" /></p>'
% person.id)
# test for validation with callable defaults. Validations rely on hidden fields
data = {
'membership_set-TOTAL_FORMS': '1',
'membership_set-INITIAL_FORMS': '0',
'membership_set-MAX_NUM_FORMS': '',
'membership_set-0-date_joined': six.text_type(now.strftime('%Y-%m-%d %H:%M:%S')),
'initial-membership_set-0-date_joined': six.text_type(now.strftime('%Y-%m-%d %H:%M:%S')),
'membership_set-0-karma': '',
}
formset = FormSet(data, instance=person)
self.assertTrue(formset.is_valid())
# now test for when the data changes
one_day_later = now + datetime.timedelta(days=1)
filled_data = {
'membership_set-TOTAL_FORMS': '1',
'membership_set-INITIAL_FORMS': '0',
'membership_set-MAX_NUM_FORMS': '',
'membership_set-0-date_joined': six.text_type(one_day_later.strftime('%Y-%m-%d %H:%M:%S')),
'initial-membership_set-0-date_joined': six.text_type(now.strftime('%Y-%m-%d %H:%M:%S')),
'membership_set-0-karma': '',
}
formset = FormSet(filled_data, instance=person)
self.assertFalse(formset.is_valid())
# now test with split datetime fields
class MembershipForm(forms.ModelForm):
date_joined = forms.SplitDateTimeField(initial=now)
class Meta:
model = Membership
fields = "__all__"
def __init__(self, **kwargs):
super(MembershipForm, self).__init__(**kwargs)
self.fields['date_joined'].widget = forms.SplitDateTimeWidget()
FormSet = inlineformset_factory(Person, Membership, form=MembershipForm, can_delete=False, extra=1, fields="__all__")
data = {
'membership_set-TOTAL_FORMS': '1',
'membership_set-INITIAL_FORMS': '0',
'membership_set-MAX_NUM_FORMS': '',
'membership_set-0-date_joined_0': six.text_type(now.strftime('%Y-%m-%d')),
'membership_set-0-date_joined_1': six.text_type(now.strftime('%H:%M:%S')),
'initial-membership_set-0-date_joined': six.text_type(now.strftime('%Y-%m-%d %H:%M:%S')),
'membership_set-0-karma': '',
}
formset = FormSet(data, instance=person)
self.assertTrue(formset.is_valid())
def test_inlineformset_factory_with_null_fk(self):
# inlineformset_factory tests with fk having null=True. see #9462.
# Create some data that will exhibit the issue.
team = Team.objects.create(name="Red Vipers")
Player(name="Timmy").save()
Player(name="Bobby", team=team).save()
PlayerInlineFormSet = inlineformset_factory(Team, Player, fields="__all__")
formset = PlayerInlineFormSet()
self.assertQuerysetEqual(formset.get_queryset(), [])
formset = PlayerInlineFormSet(instance=team)
players = formset.get_queryset()
self.assertEqual(len(players), 1)
player1, = players
self.assertEqual(player1.team, team)
self.assertEqual(player1.name, 'Bobby')
def test_model_formset_with_custom_pk(self):
# a formset for a Model that has a custom primary key that still needs to be
# added to the formset automatically
FormSet = modelformset_factory(ClassyMexicanRestaurant, fields=["tacos_are_yummy"])
self.assertEqual(sorted(FormSet().forms[0].fields.keys()), ['restaurant', 'tacos_are_yummy'])
def test_model_formset_with_initial_model_instance(self):
# has_changed should compare model instance and primary key
# see #18898
FormSet = modelformset_factory(Poem, fields='__all__')
john_milton = Poet(name="John Milton")
john_milton.save()
data = {
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-name': '',
'form-0-poet': str(john_milton.id),
}
formset = FormSet(initial=[{'poet': john_milton}], data=data)
self.assertFalse(formset.extra_forms[0].has_changed())
def test_model_formset_with_initial_queryset(self):
# has_changed should work with queryset and list of pk's
# see #18898
FormSet = modelformset_factory(AuthorMeeting, fields='__all__')
Author.objects.create(pk=1, name='Charles Baudelaire')
data = {
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-name': '',
'form-0-created': '',
'form-0-authors': list(Author.objects.values_list('id', flat=True)),
}
formset = FormSet(initial=[{'authors': Author.objects.all()}], data=data)
self.assertFalse(formset.extra_forms[0].has_changed())
def test_prevent_duplicates_from_with_the_same_formset(self):
FormSet = modelformset_factory(Product, fields="__all__", extra=2)
data = {
'form-TOTAL_FORMS': 2,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-slug': 'red_car',
'form-1-slug': 'red_car',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors,
['Please correct the duplicate data for slug.'])
FormSet = modelformset_factory(Price, fields="__all__", extra=2)
data = {
'form-TOTAL_FORMS': 2,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-price': '25',
'form-0-quantity': '7',
'form-1-price': '25',
'form-1-quantity': '7',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors,
['Please correct the duplicate data for price and quantity, which must be unique.'])
# Only the price field is specified; this should skip any unique checks since
# the unique_together constraint is not fulfilled. This will fail with a KeyError if broken.
FormSet = modelformset_factory(Price, fields=("price",), extra=2)
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-price': '24',
'form-1-price': '24',
}
formset = FormSet(data)
self.assertTrue(formset.is_valid())
FormSet = inlineformset_factory(Author, Book, extra=0, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels')
Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal')
Book.objects.create(pk=3, author=author, title='Flowers of Evil')
book_ids = author.book_set.order_by('id').values_list('id', flat=True)
data = {
'book_set-TOTAL_FORMS': '2',
'book_set-INITIAL_FORMS': '2',
'book_set-MAX_NUM_FORMS': '',
'book_set-0-title': 'The 2008 Election',
'book_set-0-author': str(author.id),
'book_set-0-id': str(book_ids[0]),
'book_set-1-title': 'The 2008 Election',
'book_set-1-author': str(author.id),
'book_set-1-id': str(book_ids[1]),
}
formset = FormSet(data=data, instance=author)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors,
['Please correct the duplicate data for title.'])
self.assertEqual(formset.errors,
[{}, {'__all__': ['Please correct the duplicate values below.']}])
FormSet = modelformset_factory(Post, fields="__all__", extra=2)
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'blah',
'form-0-slug': 'Morning',
'form-0-subtitle': 'foo',
'form-0-posted': '2009-01-01',
'form-1-title': 'blah',
'form-1-slug': 'Morning in Prague',
'form-1-subtitle': 'rawr',
'form-1-posted': '2009-01-01'
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors,
['Please correct the duplicate data for title which must be unique for the date in posted.'])
self.assertEqual(formset.errors,
[{}, {'__all__': ['Please correct the duplicate values below.']}])
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
'form-0-slug': 'Morning in Prague',
'form-0-subtitle': 'foo',
'form-0-posted': '2009-01-01',
'form-1-title': 'blah',
'form-1-slug': 'Morning in Prague',
'form-1-subtitle': 'rawr',
'form-1-posted': '2009-08-02'
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors,
['Please correct the duplicate data for slug which must be unique for the year in posted.'])
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
'form-0-slug': 'Morning in Prague',
'form-0-subtitle': 'rawr',
'form-0-posted': '2008-08-01',
'form-1-title': 'blah',
'form-1-slug': 'Prague',
'form-1-subtitle': 'rawr',
'form-1-posted': '2009-08-02'
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors,
['Please correct the duplicate data for subtitle which must be unique for the month in posted.'])
class TestModelFormsetOverridesTroughFormMeta(TestCase):
def test_modelformset_factory_widgets(self):
widgets = {
'name': forms.TextInput(attrs={'class': 'poet'})
}
PoetFormSet = modelformset_factory(Poet, fields="__all__", widgets=widgets)
form = PoetFormSet.form()
self.assertHTMLEqual(
"%s" % form['name'],
'<input id="id_name" maxlength="100" type="text" class="poet" name="name" />'
)
def test_inlineformset_factory_widgets(self):
widgets = {
'title': forms.TextInput(attrs={'class': 'book'})
}
BookFormSet = inlineformset_factory(Author, Book, widgets=widgets, fields="__all__")
form = BookFormSet.form()
self.assertHTMLEqual(
"%s" % form['title'],
'<input class="book" id="id_title" maxlength="100" name="title" type="text" />'
)
def test_modelformset_factory_labels_overrides(self):
BookFormSet = modelformset_factory(Book, fields="__all__", labels={
'title': 'Name'
})
form = BookFormSet.form()
self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>')
def test_inlineformset_factory_labels_overrides(self):
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", labels={
'title': 'Name'
})
form = BookFormSet.form()
self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>')
def test_modelformset_factory_help_text_overrides(self):
BookFormSet = modelformset_factory(Book, fields="__all__", help_texts={
'title': 'Choose carefully.'
})
form = BookFormSet.form()
self.assertEqual(form['title'].help_text, 'Choose carefully.')
def test_inlineformset_factory_help_text_overrides(self):
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", help_texts={
'title': 'Choose carefully.'
})
form = BookFormSet.form()
self.assertEqual(form['title'].help_text, 'Choose carefully.')
def test_modelformset_factory_error_messages_overrides(self):
author = Author.objects.create(pk=1, name='Charles Baudelaire')
BookFormSet = modelformset_factory(Book, fields="__all__", error_messages={
'title': {
'max_length': 'Title too long!!'
}
})
form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
form.full_clean()
self.assertEqual(form.errors, {'title': ['Title too long!!']})
def test_inlineformset_factory_error_messages_overrides(self):
author = Author.objects.create(pk=1, name='Charles Baudelaire')
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", error_messages={
'title': {
'max_length': 'Title too long!!'
}
})
form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
form.full_clean()
self.assertEqual(form.errors, {'title': ['Title too long!!']})
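# --- Illustrative sketch (not part of the original test suite) ---------------
# The tests above build POST-style dicts by hand, always starting with the
# management-form keys (TOTAL_FORMS, INITIAL_FORMS, MAX_NUM_FORMS) and then one
# "<prefix>-<index>-<field>" entry per form field. The helper below is a
# minimal sketch of that shape; the prefix and field names in the example call
# are placeholders, not taken from the suite.
def build_formset_data(prefix, forms, initial=0, max_num=''):
    """Return a dict shaped like the hand-written formset POST data above."""
    data = {
        '%s-TOTAL_FORMS' % prefix: str(len(forms)),
        '%s-INITIAL_FORMS' % prefix: str(initial),
        '%s-MAX_NUM_FORMS' % prefix: str(max_num),
    }
    for index, fields in enumerate(forms):
        for name, value in fields.items():
            data['%s-%s-%s' % (prefix, index, name)] = value
    return data

# Example: two book forms for an inline formset using the "book_set" prefix.
# build_formset_data('book_set', [{'title': 'Les Fleurs du Mal'}, {'title': ''}])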
|
liavkoren/djangoDev
|
tests/model_formsets/tests.py
|
Python
|
bsd-3-clause
| 70,571
|
import socket, cPickle
import Config, PickleSocket, Gps
def makeClient( host = "localhost" , port = Config.PORT , direct = False , dummy = False , ignoreLostPackets = False ) :
if dummy : #just simulate the resources
return DummyResources()
elif direct : #skip the server, directly connect to the resources on my machine
return None #FIXME: not implemented yet
else : #connect to the server
return Client( host , port , ignoreLostPackets )
class DummyResources( object ) :
def __init__( self ) :
self.gps = Gps.DummyGps()
self.roboMote = None
self.tosBase = None
class Error( Exception ) :
pass
class Timeout( Error ) :
pass
class Client( object ) :
def __init__( self , host = "localhost" , port = Config.PORT , timeout = Config.CLIENT_DEFAULT_TIMEOUT , ignoreLostPackets = False ) :
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(( host , port ))
self.__pickleSocket = PickleSocket.PickleSocket( s )
self.__timeout = timeout
self.__ignoreLostPackets = ignoreLostPackets
def __send( self , msg ) :
self.__pickleSocket.send( msg )
try :
results = self.__pickleSocket.recv( self.__timeout )
return results
except PickleSocket.Timeout :
if self.__ignoreLostPackets :
return None
else :
raise Timeout
except PickleSocket.Error :
if self.__ignoreLostPackets :
return None
else :
raise Error
def __getattr__( self , objName ) :
return MethodName( objName , self.__send )
class MethodName( object ) :
def __init__( self , objName , sendFcn ) :
self.__objName = objName
self.__sendFcn = sendFcn
def __getattr__( self , methodName ) :
return lambda *args , **argd : self.__sendFcn(( self.__objName , methodName , args , argd ))
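# --- Illustrative sketch (not part of the original module) -------------------
# MethodName above turns attribute access into a picklable message tuple that
# Client.__send() ships over the wire. The small demo below shows, without any
# sockets, what a call like client.gps.getPos(1, verbose=True) is translated
# into; the "gps"/"getPos" names are placeholders, not a real resource API.
def _demoProxyMessage() :
    captured = []
    proxy = MethodName( "gps" , captured.append )
    proxy.getPos( 1 , verbose = True )
    # captured[0] is now ( "gps" , "getPos" , ( 1 , ) , { "verbose" : True } )
    return captured[0]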
if __name__ == "__main__" :
# connect to server
c = makeClient()
roboMote = c.roboMote
# after connecting, cmds look local, just like before
print roboMote.disableMotors()
print roboMote.enableMotors()
print roboMote.getMovement()
print roboMote.setMovement( -.5 , .5 , .75 , 1.0 )
print roboMote.getMovement()
print roboMote.getAllDict()
# connect to (dummy) gps
# c = makeClient( dummy = True )
# gps = c.gps
# use dummy gps
# print gps.iterate() # request new readings
# print gps.getPos() # get new x,y
# print gps.getTime() # get time (float) (seconds into the week)
|
fresskarma/tinyos-1.x
|
contrib/ucb/apps/Monstro/lib/Robot/Client.py
|
Python
|
bsd-3-clause
| 2,663
|
from __future__ import print_function
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.embed import file_html
from bokeh.models.glyphs import Circle
from bokeh.models import (
Plot, DataRange1d, LinearAxis, ColumnDataSource, PanTool, WheelZoomTool
)
from bokeh.resources import INLINE
source = ColumnDataSource(
data = dict(
x = [1, 2, 3, 4, 4, 5, 5],
y = [5, 4, 3, 2, 2.1, 1, 1.1],
color = ["rgb(0, 100, 120)", "green", "blue", "#2c7fb8", "#2c7fb8", "rgba(120, 230, 150, 0.5)", "rgba(120, 230, 150, 0.5)"]
)
)
xdr = DataRange1d()
ydr = DataRange1d()
plot = Plot(x_range=xdr, y_range=ydr)
circle = Circle(x="x", y="y", radius=0.2,
# Set the fill color to be dependent on the "color" field of the
# data source. If the field is missing, then the default value is
# used. Since no explicit default is provided, this picks up the
# default in FillProps, which is "gray".
fill_color="color",
# As an alternative to using fill_color with rgba values, you can also use
# the fill_alpha setting to set the alpha values of your circle or
# other glyphs. This can be set as a single value or powered by a
# column in your data source. Uncomment the following line
# to see the effect.
# fill_alpha=0.2,
# An alternative form that explicitly sets a default value:
#fill_color={"default": "red", "field": "color"},
# Note that line_color is set to a fixed value. This can be any of
# the 147 named SVG colors, or a hex color string starting with "#",
# or a string "rgb(r,g,b)" or "rgba(r,g,b,a)".
# Any other string will be interpreted as a field name to look up
# on the datasource.
line_color="black")
plot.add_glyph(source, circle)
plot.add_layout(LinearAxis(), 'below')
plot.add_layout(LinearAxis(), 'left')
plot.add_tools(PanTool(), WheelZoomTool())
doc = Document()
doc.add_root(plot)
if __name__ == "__main__":
filename = "colorspec.html"
with open(filename, "w") as f:
f.write(file_html(doc, INLINE, "Demonstration of ColorSpec"))
print("Wrote %s" % filename)
view(filename)
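# --- Illustrative sketch (not part of the original example) ------------------
# The comments on the Circle glyph above describe the accepted forms of a
# ColorSpec such as fill_color. The helper below restates them side by side;
# it is never called by this example and the glyphs it builds are not added to
# the plot.
def _colorspec_forms():
    fixed = Circle(x="x", y="y", radius=0.2, fill_color="#2c7fb8")          # fixed value
    from_field = Circle(x="x", y="y", radius=0.2, fill_color="color")       # column lookup
    with_default = Circle(x="x", y="y", radius=0.2,
                          fill_color={"field": "color", "default": "red"})  # explicit default
    return fixed, from_field, with_default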
|
maxalbert/bokeh
|
examples/glyphs/colorspec.py
|
Python
|
bsd-3-clause
| 2,151
|
# -*- coding:utf-8 -*-
from django.views.generic.base import RedirectView
from django.core.urlresolvers import reverse
from registration.models import UserRegistration
class HomeView(RedirectView):
def get_user_registration(self):
return self.request.user.userregistration
def get_redirect_url(self):
try:
user_registration = self.get_user_registration()
except UserRegistration.DoesNotExist:
return reverse('registration:home')
if user_registration.is_employer:
return reverse('employer:dashboard')
return reverse('employee:dashboard')
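# --- Illustrative sketch (not part of the original module) -------------------
# A minimal sketch of how HomeView might be wired into a URLconf so that "/"
# redirects each user to the right dashboard; the module path and the url name
# used here are assumptions, not taken from this project.
#
#     from django.conf.urls import url
#     from home.views import HomeView
#
#     urlpatterns = [
#         url(r'^$', HomeView.as_view(), name='home'),
#     ]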
|
hellhovnd/dentexchange
|
dentexchange/apps/home/views.py
|
Python
|
bsd-3-clause
| 625
|
"""
Copyright (C) 2012 Raniere Silva <ra092767@ime.unicamp.br>
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see <http://www.gnu.org/licenses/>.
"""
def fb_atom(fbid):
"""Return the url for the atom format."""
if fbid is None:
raise TypeError("fbid can't be None")
return "http://www.facebook.com/feeds/page.php?format=atom10&id={0}".format(fbid)
def fb_rss(fbid):
"""Return the url for the rss format."""
if fbid is None:
raise TypeError("fbid can't be None")
return "http://www.facebook.com/feeds/page.php?format=rss20&id={0}".format(fbid)
def get_fbid(name):
"""Return the Facebook ID for the give name."""
import urllib.request
import re
fbid = None
f = urllib.request.urlopen("https://www.facebook.com/{0}".format(name))
for s in f.readlines():
m = re.search(b'ajaxify="([^ ]*)" ', s)
if m:
ajaxify = re.search(b'set=a\.\d*\.\d*\.(\d*)', m.group(1))
if ajaxify:
fbid = ajaxify.group(1).decode()
break
return fbid
if __name__ == "__main__":
"""Call the url retrieve from the command line interpreter. ::
$ python classification.py --help
"""
import argparse
from argparse import RawTextHelpFormatter
# Parse of flags.
parser = argparse.ArgumentParser(description='Get the url of the feed for Facebook timeline.',
formatter_class=RawTextHelpFormatter)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--rss', action='store_true',
help='use the rss format.')
group.add_argument('--atom', action='store_true',
help='use the atom format.')
parser.add_argument('args', nargs=argparse.REMAINDER,
help='List of the profile name to retrieve.')
args = parser.parse_args()
for name in args.args:
if args.rss:
print(fb_rss(get_fbid(name)))
elif args.atom:
print(fb_atom(get_fbid(name)))
|
r-gaia-cs/dotfiles
|
.newsbeuter/newsbeuter-fb.py
|
Python
|
bsd-3-clause
| 2,527
|
from django.apps import AppConfig
class TextImageConfig(AppConfig):
name = 'glitter.blocks.text_image'
label = 'glitter_text_image'
|
developersociety/django-glitter
|
glitter/blocks/text_image/apps.py
|
Python
|
bsd-3-clause
| 142
|
from base.utils import get_model_object
from challenges.models import Challenge
from .models import ChallengeHost, ChallengeHostTeam
def get_challenge_host_teams_for_user(user):
"""Returns challenge host team ids for a particular user"""
return ChallengeHost.objects.filter(user=user).values_list('team_name', flat=True)
def is_user_a_host_of_challenge(user, challenge_pk):
"""Returns boolean if the user is host of a challenge."""
challenge_host_teams = get_challenge_host_teams_for_user(user)
return Challenge.objects.filter(pk=challenge_pk, creator_id__in=challenge_host_teams).exists()
get_challenge_host_team_model = get_model_object(ChallengeHostTeam)
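# --- Illustrative sketch (not part of the original module) -------------------
# Typical use of the helper above inside a view or permission check; `request`
# and `challenge_pk` are assumed to come from the caller, and PermissionDenied
# from django.core.exceptions.
#
#     if not is_user_a_host_of_challenge(request.user, challenge_pk):
#         raise PermissionDenied('Only challenge hosts may do this.')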
|
taranjeet/EvalAI
|
apps/hosts/utils.py
|
Python
|
bsd-3-clause
| 685
|
# -*- coding: utf-8 -*-
"""Mayavi/traits GUI visualization elements."""
# Authors: Christian Brodbeck <christianbrodbeck@nyu.edu>
#
# License: BSD-3-Clause
import numpy as np
from mayavi.mlab import pipeline, text3d
from mayavi.modules.glyph import Glyph
from mayavi.modules.surface import Surface
from mayavi.sources.vtk_data_source import VTKDataSource
from mayavi.tools.mlab_scene_model import MlabSceneModel
from traits.api import (HasTraits, HasPrivateTraits, on_trait_change,
Instance, Array, Bool, Button, Enum, Float, Int, List,
Range, Str, Property, cached_property, ArrayOrNone)
from traitsui.api import (View, Item, HGroup, VGrid, VGroup, Spring,
TextEditor)
from tvtk.api import tvtk
from ..defaults import DEFAULTS
from ..surface import _CheckInside, _DistanceQuery
from ..transforms import apply_trans, rotation
from ..utils import SilenceStdout
from ..viz.backends._pysurfer_mayavi import (_create_mesh_surf, _oct_glyph,
_toggle_mlab_render)
try:
from traitsui.api import RGBColor
except ImportError:
from traits.api import RGBColor
headview_borders = VGroup(Item('headview', style='custom', show_label=False),
show_border=True, label='View')
def _mm_fmt(x):
"""Format data in units of mm."""
return '%0.1f' % x
laggy_float_editor_mm = TextEditor(auto_set=False, enter_set=True,
evaluate=float,
format_func=lambda x: '%0.1f' % x)
laggy_float_editor_scale = TextEditor(auto_set=False, enter_set=True,
evaluate=float,
format_func=lambda x: '%0.1f' % x)
laggy_float_editor_headscale = TextEditor(auto_set=False, enter_set=True,
evaluate=float,
format_func=lambda x: '%0.3f' % x)
laggy_float_editor_weight = TextEditor(auto_set=False, enter_set=True,
evaluate=float,
format_func=lambda x: '%0.2f' % x)
laggy_float_editor_deg = TextEditor(auto_set=False, enter_set=True,
evaluate=float,
format_func=lambda x: '%0.1f' % x)
_BUTTON_WIDTH = -80
_DEG_WIDTH = -50 # degree floats
_MM_WIDTH = _DEG_WIDTH # mm floats
_SCALE_WIDTH = _DEG_WIDTH # scale floats
_INC_BUTTON_WIDTH = -25 # inc/dec buttons
_DEG_STEP_WIDTH = -50
_MM_STEP_WIDTH = _DEG_STEP_WIDTH
_SCALE_STEP_WIDTH = _DEG_STEP_WIDTH
_WEIGHT_WIDTH = -60 # weight floats
_VIEW_BUTTON_WIDTH = -60
# width is optimized for macOS and Linux to avoid a horizontal scroll-bar
# even when a vertical one is present
_COREG_WIDTH = -290
_TEXT_WIDTH = -260
_REDUCED_TEXT_WIDTH = _TEXT_WIDTH - 40 * np.sign(_TEXT_WIDTH)
_DIG_SOURCE_WIDTH = _TEXT_WIDTH - 50 * np.sign(_TEXT_WIDTH)
_MRI_FIDUCIALS_WIDTH = _TEXT_WIDTH - 60 * np.sign(_TEXT_WIDTH)
_SHOW_BORDER = True
_RESET_LABEL = u"↻"
_RESET_WIDTH = _INC_BUTTON_WIDTH
class HeadViewController(HasTraits):
"""Set head views for the given coordinate system.
Parameters
----------
system : 'RAS' | 'ALS' | 'ARI'
Coordinate system described as initials for directions associated with
the x, y, and z axes. Relevant terms are: Anterior, Right, Left,
Superior, Inferior.
"""
system = Enum("RAS", "ALS", "ARI", desc="Coordinate system: directions of "
"the x, y, and z axis.")
right = Button()
front = Button()
left = Button()
top = Button()
interaction = Enum('trackball', 'terrain')
scale = Float(0.16)
scene = Instance(MlabSceneModel)
view = View(VGroup(
VGrid('0', Item('top', width=_VIEW_BUTTON_WIDTH), '0',
Item('right', width=_VIEW_BUTTON_WIDTH),
Item('front', width=_VIEW_BUTTON_WIDTH),
Item('left', width=_VIEW_BUTTON_WIDTH),
columns=3, show_labels=False),
'_',
HGroup(Item('scale', label='Scale',
editor=laggy_float_editor_headscale,
width=_SCALE_WIDTH, show_label=True),
Item('interaction', tooltip='Mouse interaction mode',
show_label=False), Spring()),
show_labels=False))
@on_trait_change('scene.activated')
def _init_view(self):
self.scene.parallel_projection = True
# apparently scene.activated happens several times
if self.scene.renderer:
self.sync_trait('scale', self.scene.camera, 'parallel_scale')
# and apparently this does not happen by default:
self.on_trait_change(self.scene.render, 'scale')
self.interaction = self.interaction # could be delayed
@on_trait_change('interaction')
def on_set_interaction(self, _, interaction):
if self.scene is None or self.scene.interactor is None:
return
# Ensure we're in the correct orientation for the
# InteractorStyleTerrain to have the correct "up"
self.on_set_view('front', '')
self.scene.mlab.draw()
self.scene.interactor.interactor_style = \
tvtk.InteractorStyleTerrain() if interaction == 'terrain' else \
tvtk.InteractorStyleTrackballCamera()
# self.scene.interactor.interactor_style.
self.on_set_view('front', '')
self.scene.mlab.draw()
@on_trait_change('top,left,right,front')
def on_set_view(self, view, _):
if self.scene is None:
return
system = self.system
kwargs = dict(ALS=dict(front=(0, 90, -90),
left=(90, 90, 180),
right=(-90, 90, 0),
top=(0, 0, -90)),
RAS=dict(front=(90., 90., 180),
left=(180, 90, 90),
right=(0., 90, 270),
top=(90, 0, 180)),
ARI=dict(front=(0, 90, 90),
left=(-90, 90, 180),
right=(90, 90, 0),
top=(0, 180, 90)))
if system not in kwargs:
raise ValueError("Invalid system: %r" % system)
if view not in kwargs[system]:
raise ValueError("Invalid view: %r" % view)
kwargs = dict(zip(('azimuth', 'elevation', 'roll'),
kwargs[system][view]))
kwargs['focalpoint'] = (0., 0., 0.)
with SilenceStdout():
self.scene.mlab.view(distance=None, reset_roll=True,
figure=self.scene.mayavi_scene, **kwargs)
class Object(HasPrivateTraits):
"""Represent a 3d object in a mayavi scene."""
points = Array(float, shape=(None, 3))
nn = Array(float, shape=(None, 3))
name = Str
scene = Instance(MlabSceneModel, ())
src = Instance(VTKDataSource)
# This should be Tuple, but it is broken on Anaconda as of 2016/12/16
color = RGBColor((1., 1., 1.))
# Due to a MESA bug, we use 0.99 opacity to force alpha blending
opacity = Range(low=0., high=1., value=0.99)
visible = Bool(True)
def _update_points(self):
"""Update the location of the plotted points."""
if hasattr(self.src, 'data'):
self.src.data.points = self.points
return True
class PointObject(Object):
"""Represent a group of individual points in a mayavi scene."""
label = Bool(False)
label_scale = Float(0.01)
projectable = Bool(False) # set based on type of points
orientable = Property(depends_on=['nearest'])
text3d = List
point_scale = Float(10, label='Point Scale')
# projection onto a surface
nearest = Instance(_DistanceQuery)
check_inside = Instance(_CheckInside)
project_to_trans = ArrayOrNone(float, shape=(4, 4))
project_to_surface = Bool(False, label='Project', desc='project points '
'onto the surface')
orient_to_surface = Bool(False, label='Orient', desc='orient points '
'toward the surface')
scale_by_distance = Bool(False, label='Dist.', desc='scale points by '
'distance from the surface')
mark_inside = Bool(False, label='Mark', desc='mark points inside the '
'surface in a different color')
inside_color = RGBColor((0., 0., 0.))
glyph = Instance(Glyph)
resolution = Int(8)
view = View(HGroup(Item('visible', show_label=False),
Item('color', show_label=False),
Item('opacity')))
def __init__(self, view='points', has_norm=False, *args, **kwargs):
"""Init.
Parameters
----------
view : 'points' | 'cloud' | 'arrow' | 'oct'
Whether the view options should be tailored to individual points
or a point cloud.
has_norm : bool
Whether a norm can be defined; adds view options based on point
norms (default False).
"""
assert view in ('points', 'cloud', 'arrow', 'oct')
self._view = view
self._has_norm = bool(has_norm)
super(PointObject, self).__init__(*args, **kwargs)
def default_traits_view(self): # noqa: D102
color = Item('color', show_label=False)
scale = Item('point_scale', label='Size', width=_SCALE_WIDTH,
editor=laggy_float_editor_headscale)
orient = Item('orient_to_surface',
enabled_when='orientable and not project_to_surface',
tooltip='Orient points toward the surface')
dist = Item('scale_by_distance',
enabled_when='orientable and not project_to_surface',
tooltip='Scale points by distance from the surface')
mark = Item('mark_inside',
enabled_when='orientable and not project_to_surface',
tooltip='Mark points inside the surface using a different '
'color')
if self._view == 'arrow':
visible = Item('visible', label='Show', show_label=False)
return View(HGroup(visible, scale, 'opacity', 'label', Spring()))
elif self._view in ('points', 'oct'):
visible = Item('visible', label='Show', show_label=True)
views = (visible, color, scale, 'label')
else:
assert self._view == 'cloud'
visible = Item('visible', show_label=False)
views = (visible, color, scale)
if not self._has_norm:
return View(HGroup(*views))
group2 = HGroup(dist, Item('project_to_surface', show_label=True,
enabled_when='projectable',
tooltip='Project points onto the surface '
'(for visualization, does not affect '
'fitting)'),
orient, mark, Spring(), show_left=False)
return View(HGroup(HGroup(*views), group2))
@on_trait_change('label')
def _show_labels(self, show):
_toggle_mlab_render(self, False)
while self.text3d:
text = self.text3d.pop()
text.remove()
if show and len(self.src.data.points) > 0:
fig = self.scene.mayavi_scene
if self._view == 'arrow': # for axes
x, y, z = self.src.data.points[0]
self.text3d.append(text3d(
x, y, z, self.name, scale=self.label_scale,
color=self.color, figure=fig))
else:
for i, (x, y, z) in enumerate(np.array(self.src.data.points)):
self.text3d.append(text3d(
x, y, z, ' %i' % i, scale=self.label_scale,
color=self.color, figure=fig))
_toggle_mlab_render(self, True)
@on_trait_change('visible')
def _on_hide(self):
if not self.visible:
self.label = False
@on_trait_change('scene.activated')
def _plot_points(self):
"""Add the points to the mayavi pipeline"""
if self.scene is None:
return
if hasattr(self.glyph, 'remove'):
self.glyph.remove()
if hasattr(self.src, 'remove'):
self.src.remove()
_toggle_mlab_render(self, False)
x, y, z = self.points.T
fig = self.scene.mayavi_scene
scatter = pipeline.scalar_scatter(x, y, z, fig=fig)
if not scatter.running:
# this can occur sometimes during testing w/ui.dispose()
return
# fig.scene.engine.current_object is scatter
mode = {'cloud': 'sphere', 'points': 'sphere', 'oct': 'sphere'}.get(
self._view, self._view)
assert mode in ('sphere', 'arrow')
glyph = pipeline.glyph(scatter, color=self.color,
figure=fig, scale_factor=self.point_scale,
opacity=1., resolution=self.resolution,
mode=mode)
if self._view == 'oct':
_oct_glyph(glyph.glyph.glyph_source, rotation(0, 0, np.pi / 4))
glyph.actor.property.backface_culling = True
glyph.glyph.glyph.vector_mode = 'use_normal'
glyph.glyph.glyph.clamping = False
if mode == 'arrow':
glyph.glyph.glyph_source.glyph_position = 'tail'
glyph.actor.mapper.color_mode = 'map_scalars'
glyph.actor.mapper.scalar_mode = 'use_point_data'
glyph.actor.mapper.use_lookup_table_scalar_range = False
self.src = scatter
self.glyph = glyph
self.sync_trait('point_scale', self.glyph.glyph.glyph, 'scale_factor')
self.sync_trait('color', self.glyph.actor.property, mutual=False)
self.sync_trait('visible', self.glyph)
self.sync_trait('opacity', self.glyph.actor.property)
self.sync_trait('mark_inside', self.glyph.actor.mapper,
'scalar_visibility')
self.on_trait_change(self._update_points, 'points')
self._update_marker_scaling()
self._update_marker_type()
self._update_colors()
_toggle_mlab_render(self, True)
# self.scene.camera.parallel_scale = _scale
def _nearest_default(self):
return _DistanceQuery(np.zeros((1, 3)))
def _get_nearest(self, proj_rr):
idx = self.nearest.query(proj_rr)[1]
proj_pts = apply_trans(
self.project_to_trans, self.nearest.data[idx])
proj_nn = apply_trans(
self.project_to_trans, self.check_inside.surf['nn'][idx],
move=False)
return proj_pts, proj_nn
@on_trait_change('points,project_to_trans,project_to_surface,mark_inside,'
'nearest')
def _update_projections(self):
"""Update the styles of the plotted points."""
if not hasattr(self.src, 'data'):
return
if self._view == 'arrow':
self.src.data.point_data.normals = self.nn
self.src.data.point_data.update()
return
# projections
if len(self.nearest.data) <= 1 or len(self.points) == 0:
return
# Do the projections
pts = self.points
inv_trans = np.linalg.inv(self.project_to_trans)
proj_rr = apply_trans(inv_trans, self.points)
proj_pts, proj_nn = self._get_nearest(proj_rr)
vec = pts - proj_pts # point to the surface
if self.project_to_surface:
pts = proj_pts
nn = proj_nn
if self.mark_inside and not self.project_to_surface:
scalars = (~self.check_inside(proj_rr, verbose=False)).astype(int)
else:
scalars = np.ones(len(pts))
# With this, a point exactly on the surface is of size point_scale
dist = np.linalg.norm(vec, axis=-1, keepdims=True)
self.src.data.point_data.normals = (250 * dist + 1) * nn
self.src.data.point_data.scalars = scalars
self.glyph.actor.mapper.scalar_range = [0., 1.]
self.src.data.points = pts # projection can change this
self.src.data.point_data.update()
@on_trait_change('color,inside_color')
def _update_colors(self):
if self.glyph is None:
return
# inside_color is the surface color, let's try to get far
# from that
inside = np.array(self.inside_color)
# if it's too close to gray, just use black:
if np.mean(np.abs(inside - 0.5)) < 0.2:
inside.fill(0.)
else:
inside = 1 - inside
colors = np.array([tuple(inside) + (1,),
tuple(self.color) + (1,)]) * 255.
self.glyph.module_manager.scalar_lut_manager.lut.table = colors
@on_trait_change('project_to_surface,orient_to_surface')
def _update_marker_type(self):
# not implemented for arrow
if self.glyph is None or self._view == 'arrow':
return
defaults = DEFAULTS['coreg']
gs = self.glyph.glyph.glyph_source
res = getattr(gs.glyph_source, 'theta_resolution',
getattr(gs.glyph_source, 'resolution', None))
if res is None:
return
if self.project_to_surface or self.orient_to_surface:
gs.glyph_source = tvtk.CylinderSource()
gs.glyph_source.height = defaults['eegp_height']
gs.glyph_source.center = (0., -defaults['eegp_height'], 0)
gs.glyph_source.resolution = res
else:
gs.glyph_source = tvtk.SphereSource()
gs.glyph_source.phi_resolution = res
gs.glyph_source.theta_resolution = res
@on_trait_change('scale_by_distance,project_to_surface')
def _update_marker_scaling(self):
if self.glyph is None:
return
if self.scale_by_distance and not self.project_to_surface:
self.glyph.glyph.scale_mode = 'scale_by_vector'
else:
self.glyph.glyph.scale_mode = 'data_scaling_off'
def _resolution_changed(self, new):
if not self.glyph:
return
gs = self.glyph.glyph.glyph_source.glyph_source
if isinstance(gs, tvtk.SphereSource):
gs.phi_resolution = new
gs.theta_resolution = new
elif isinstance(gs, tvtk.CylinderSource):
gs.resolution = new
else: # ArrowSource
gs.tip_resolution = new
gs.shaft_resolution = new
@cached_property
def _get_orientable(self):
return len(self.nearest.data) > 1
class SurfaceObject(Object):
"""Represent a solid object in a mayavi scene.
Notes
-----
Doesn't automatically update plot because update requires both
:attr:`points` and :attr:`tris`. Call :meth:`plot` after updating both
attributes.
"""
rep = Enum("Surface", "Wireframe")
tris = Array(int, shape=(None, 3))
surf = Instance(Surface)
surf_rear = Instance(Surface)
rear_opacity = Float(1.)
view = View(HGroup(Item('visible', show_label=False),
Item('color', show_label=False),
Item('opacity')))
def __init__(self, block_behind=False, **kwargs): # noqa: D102
self._block_behind = block_behind
self._deferred_tris_update = False
super(SurfaceObject, self).__init__(**kwargs)
def clear(self): # noqa: D102
if hasattr(self.src, 'remove'):
self.src.remove()
if hasattr(self.surf, 'remove'):
self.surf.remove()
if hasattr(self.surf_rear, 'remove'):
self.surf_rear.remove()
self.reset_traits(['src', 'surf'])
@on_trait_change('scene.activated')
def plot(self):
"""Add the points to the mayavi pipeline"""
_scale = self.scene.camera.parallel_scale
self.clear()
if not np.any(self.tris):
return
fig = self.scene.mayavi_scene
surf = dict(rr=self.points, tris=self.tris)
normals = _create_mesh_surf(surf, fig=fig)
self.src = normals.parent
rep = 'wireframe' if self.rep == 'Wireframe' else 'surface'
# Add the opaque "inside" first to avoid the translucent "outside"
# from being occluded (gh-5152)
if self._block_behind:
surf_rear = pipeline.surface(
normals, figure=fig, color=self.color, representation=rep,
line_width=1)
surf_rear.actor.property.frontface_culling = True
self.surf_rear = surf_rear
self.sync_trait('color', self.surf_rear.actor.property,
mutual=False)
self.sync_trait('visible', self.surf_rear, 'visible')
self.surf_rear.actor.property.opacity = self.rear_opacity
self.sync_trait(
'rear_opacity', self.surf_rear.actor.property, 'opacity')
surf = pipeline.surface(
normals, figure=fig, color=self.color, representation=rep,
line_width=1)
surf.actor.property.backface_culling = True
self.surf = surf
self.sync_trait('visible', self.surf, 'visible')
self.sync_trait('color', self.surf.actor.property, mutual=False)
self.sync_trait('opacity', self.surf.actor.property)
self.scene.camera.parallel_scale = _scale
@on_trait_change('tris')
def _update_tris(self):
self._deferred_tris_update = True
@on_trait_change('points')
def _update_points(self):
# Nuke the tris before setting the points otherwise we can get
# a nasty segfault (gh-5728)
if self._deferred_tris_update and self.src is not None:
self.src.data.polys = None
if Object._update_points(self):
if self._deferred_tris_update:
self.src.data.polys = self.tris
self._deferred_tris_update = False
self.src.update() # necessary for SurfaceObject since Mayavi 4.5.0
|
bloyl/mne-python
|
mne/gui/_viewer.py
|
Python
|
bsd-3-clause
| 22,230
|
#!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Script for generating the Android framework's version of Skia from gyp
files.
"""
import os
import shutil
import sys
import tempfile
# Find the top of trunk
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SKIA_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
os.pardir))
# Find the directory with our helper files, and add it to the path.
ANDROID_TOOLS = os.path.join(SKIA_DIR, 'platform_tools', 'android')
sys.path.append(ANDROID_TOOLS)
import gyp_gen.android_framework_gyp as android_framework_gyp
import gyp_gen.gypd_parser as gypd_parser
import gyp_gen.generate_user_config as generate_user_config
import gyp_gen.makefile_writer as makefile_writer
import gyp_gen.tool_makefile_writer as tool_makefile_writer
import gyp_gen.vars_dict_lib as vars_dict_lib
# Folder containing all gyp files and generated gypd files.
GYP_FOLDER = 'gyp'
def generate_var_dict(target_dir, target_file, skia_arch_type, have_neon,
gyp_source_dir):
"""Create a VarsDict for a particular arch type.
Each parameter is passed directly to android_framework_gyp.main().
Args:
target_dir: Directory containing gyp files.
target_file: Target gyp file.
skia_arch_type: Target architecture.
have_neon: Whether the target should build for neon.
gyp_source_dir: Directory for gyp source.
Returns:
A VarsDict containing the variable definitions determined by gyp.
"""
result_file = android_framework_gyp.main(target_dir, target_file,
skia_arch_type, have_neon,
gyp_source_dir)
var_dict = vars_dict_lib.VarsDict()
gypd_parser.parse_gypd(var_dict, result_file, '.')
android_framework_gyp.clean_gypd_files(target_dir)
print '.',
return var_dict
def main(target_dir=None, require_sk_user_config=False, gyp_source_dir=None):
"""Create Android.mk for the Android framework's external/skia.
Builds Android.mk using Skia's gyp files.
Args:
target_dir: Directory in which to place 'Android.mk'. If None, the file
will be placed in skia's root directory.
require_sk_user_config: If True, raise an AssertionError if
SkUserConfig.h does not exist.
gyp_source_dir: Source directory for gyp.
"""
# Create a temporary folder to hold gyp and gypd files. Create it in SKIA_DIR
# so that it is a sibling of gyp/ and the relationships between gyp files and
# other files (e.g. platform_tools/android/gyp/dependencies.gypi, referenced
# by android_deps.gyp as a relative path) are unchanged.
# Use mkdtemp to find an unused folder name, but then delete it so copytree
# can be called with a non-existent directory.
tmp_folder = tempfile.mkdtemp(dir=SKIA_DIR)
os.rmdir(tmp_folder)
shutil.copytree(os.path.join(SKIA_DIR, GYP_FOLDER), tmp_folder)
try:
main_gyp_file = 'android_framework_lib.gyp'
print 'Creating Android.mk',
# Generate a separate VarsDict for each architecture type. For each
# archtype:
# 1. call android_framework_gyp.main() to generate gypd files
# 2. call parse_gypd to read those gypd files into the VarsDict
# 3. delete the gypd files
#
# Once we have the VarsDict for each architecture type, we combine them all
# into a single Android.mk file, which can build targets of any
# architecture type.
# The default uses a non-existent arch type, to find all the general
# variable definitions.
default_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'other',
False, gyp_source_dir)
arm_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm', False,
gyp_source_dir)
arm_neon_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm',
True, gyp_source_dir)
x86_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'x86', False,
gyp_source_dir)
mips_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'mips', False,
gyp_source_dir)
mips64_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'mips64',
False, gyp_source_dir)
arm64_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm64',
False, gyp_source_dir)
# Compute the intersection of all targets. All the files in the intersection
# should always be part of the makefile. Each dict will now contain trimmed
# lists containing only variable definitions specific to that configuration.
var_dict_list = [default_var_dict, arm_var_dict, arm_neon_var_dict,
x86_var_dict, mips_var_dict, mips64_var_dict,
arm64_var_dict]
common = vars_dict_lib.intersect(var_dict_list)
common.LOCAL_MODULE.add('libskia')
# Create SkUserConfig
user_config = os.path.join(SKIA_DIR, 'include', 'config', 'SkUserConfig.h')
if target_dir:
dst_dir = target_dir
else:
dst_dir = os.path.join(SKIA_DIR, 'include', 'core')
generate_user_config.generate_user_config(
original_sk_user_config=user_config,
require_sk_user_config=require_sk_user_config, target_dir=dst_dir,
defines=common.DEFINES)
tool_makefile_writer.generate_tool(gyp_dir=tmp_folder,
target_file='bench.gyp',
skia_trunk=target_dir,
dest_dir='bench',
skia_lib_var_dict=common,
local_module_name='skia_nanobench',
local_module_tags=['tests'],
desired_targets=['nanobench'],
gyp_source_dir=gyp_source_dir)
tool_makefile_writer.generate_tool(gyp_dir=tmp_folder,
target_file='gm.gyp',
skia_trunk=target_dir,
dest_dir='gm',
skia_lib_var_dict=common,
local_module_name='skia_gm',
local_module_tags=['tests'],
desired_targets=['gm'],
gyp_source_dir=gyp_source_dir)
tool_makefile_writer.generate_tool(gyp_dir=tmp_folder,
target_file='dm.gyp',
skia_trunk=target_dir,
dest_dir='dm',
skia_lib_var_dict=common,
local_module_name='skia_dm',
local_module_tags=['tests'],
desired_targets=['dm'],
gyp_source_dir=gyp_source_dir)
# Now that the defines have been written to SkUserConfig and they've been
# used to skip adding them to the tools makefiles, they are not needed in
# Android.mk. Reset DEFINES.
common.DEFINES.reset()
# Further trim arm_neon_var_dict with arm_var_dict. After this call,
# arm_var_dict (which will now be the intersection) includes all definitions
# used by both arm and arm + neon, and arm_neon_var_dict will only contain
# those specific to arm + neon.
arm_var_dict = vars_dict_lib.intersect([arm_var_dict, arm_neon_var_dict])
# Now create a list of VarsDictData holding everything but common.
deviations_from_common = []
deviations_from_common.append(makefile_writer.VarsDictData(
arm_var_dict, 'arm'))
deviations_from_common.append(makefile_writer.VarsDictData(
arm_neon_var_dict, 'arm', 'ARCH_ARM_HAVE_NEON'))
deviations_from_common.append(makefile_writer.VarsDictData(x86_var_dict,
'x86'))
# Currently, x86_64 is identical to x86
deviations_from_common.append(makefile_writer.VarsDictData(x86_var_dict,
'x86_64'))
deviations_from_common.append(makefile_writer.VarsDictData(mips_var_dict,
'mips'))
deviations_from_common.append(makefile_writer.VarsDictData(mips64_var_dict,
'mips64'))
deviations_from_common.append(makefile_writer.VarsDictData(arm64_var_dict,
'arm64'))
makefile_writer.write_android_mk(target_dir=target_dir,
common=common, deviations_from_common=deviations_from_common)
finally:
shutil.rmtree(tmp_folder)
if __name__ == '__main__':
main()
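# --- Illustrative sketch (not part of the build script) ----------------------
# A simplified picture of what the comments in main() describe: take the
# per-architecture variable dicts, pull out the entries shared by all of them
# (the "common" part that is always built), and keep only the architecture
# specific leftovers as deviations. Plain sets and made-up file names are used
# here instead of the real VarsDict type.
def _demo_intersection():
    arm = {'SkCommon.cpp', 'SkArmOnly.cpp'}
    x86 = {'SkCommon.cpp', 'SkX86Only.cpp'}
    common = arm & x86                      # -> {'SkCommon.cpp'}
    deviations = {'arm': arm - common, 'x86': x86 - common}
    return common, deviations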
|
DiamondLovesYou/skia-sys
|
platform_tools/android/bin/gyp_to_android.py
|
Python
|
bsd-3-clause
| 9,176
|
'''
Write the sequence of each exon to a separate text file.
'''
from seq_generator import *
import os
g = Genome()
for gene, exon_count in g.genes_exons():
for exon in range(int(exon_count)):
seq = g.sequence(gene, exon)
filename = "{0}_{1}".format(gene, exon)
path = os.path.join('static/data/GRCh37_exons/', filename)
print "writing data to " + path
with open(path, 'w') as outfile:
outfile.write(str(seq))
|
sanjanalab/GUIDES
|
seperate_exons.py
|
Python
|
bsd-3-clause
| 426
|
#!/usr/bin/env python
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
# Copyright (c) 2001-2004, MetaSlash Inc. All rights reserved.
# Portions Copyright (c) 2005, Google, Inc. All rights reserved.
"""
Check python source code files for possible errors and print warnings
Contact Info:
http://pychecker.sourceforge.net/
pychecker-list@lists.sourceforge.net
"""
import string
import types
import sys
import imp
import os
import glob
# see __init__.py for meaning, this must match the version there
LOCAL_MAIN_VERSION = 3
def setupNamespace(path) :
# remove pychecker if it's the first component, it needs to be last
if sys.path[0][-9:] == 'pychecker' :
del sys.path[0]
# make sure pychecker is last in path, so we can import
checker_path = os.path.dirname(os.path.dirname(path))
if checker_path not in sys.path :
sys.path.append(checker_path)
def setupSysPathForDevelopment():
import pychecker
this_module = sys.modules[__name__]
# in 2.2 and older, this_module might not have __file__ at all
if not hasattr(this_module, '__file__'):
return
this_path = os.path.normpath(os.path.dirname(this_module.__file__))
pkg_path = os.path.normpath(os.path.dirname(pychecker.__file__))
if pkg_path != this_path:
# pychecker was probably found in site-packages; insert this
# directory before the other one so we can do development and run
# our local version and not the version from site-packages.
pkg_dir = os.path.dirname(pkg_path)
i = 0
for p in sys.path:
if os.path.normpath(p) == pkg_dir:
sys.path.insert(i-1, os.path.dirname(this_path))
break
i = i + 1
del sys.modules['pychecker']
if __name__ == '__main__' :
setupNamespace(sys.argv[0])
setupSysPathForDevelopment()
from pychecker import utils
from pychecker import printer
from pychecker import warn
from pychecker import OP
from pychecker import Config
from pychecker import function
from pychecker import msgs
from pychecker import pcmodules
from pychecker.Warning import Warning
_cfg = None
_VERSION_MISMATCH_ERROR = '''
There seem to be two versions of PyChecker being used.
One is probably in python/site-packages, the other in a local directory.
If you want to run the local version, you must remove the version
from site-packages. Or you can install the current version
by doing python setup.py install.
'''
def _printWarnings(warnings, stream=None):
if stream is None:
stream = sys.stdout
warnings.sort()
lastWarning = None
for warning in warnings :
if lastWarning is not None:
# ignore duplicate warnings
if cmp(lastWarning, warning) == 0:
continue
# print blank line between files
if lastWarning.file != warning.file:
stream.write("\n")
lastWarning = warning
warning.output(stream, removeSysPath=True)
def main(argv) :
__pychecker__ = 'no-miximport'
import pychecker
if LOCAL_MAIN_VERSION != pychecker.MAIN_MODULE_VERSION :
sys.stderr.write(_VERSION_MISMATCH_ERROR)
sys.exit(100)
# remove empty arguments
argv = filter(None, argv)
# if the first arg starts with an @, read options from the file
# after the @ (this is mostly for windows)
if len(argv) >= 2 and argv[1][0] == '@':
# read data from the file
command_file = argv[1][1:]
try:
f = open(command_file, 'r')
command_line = f.read()
f.close()
except IOError, err:
sys.stderr.write("Unable to read commands from file: %s\n %s\n" % \
(command_file, err))
sys.exit(101)
# convert to an argv list, keeping argv[0] and the files to process
argv = argv[:1] + string.split(command_line) + argv[2:]
global _cfg
_cfg, files, suppressions = Config.setupFromArgs(argv[1:])
utils.initConfig(_cfg)
if not files :
return 0
# Now that we've got the args, update the list of evil C objects
for evil_doer in _cfg.evil:
pcmodules.EVIL_C_OBJECTS[evil_doer] = None
# insert this here, so we find files in the local dir before std library
sys.path.insert(0, '')
# import here, because sys.path is not set up at the top for pychecker dir
from pychecker import check
warnings = check._check(files,
cfg=_cfg,
suppressions=suppressions, printProcessing=True)
if not _cfg.quiet :
print "\nWarnings...\n"
if warnings:
_printWarnings(warnings)
return 1
if not _cfg.quiet :
print "None"
return 0
# FIXME: this is a nasty side effect for import checker
if __name__ == '__main__' :
try :
sys.exit(main(sys.argv))
except Config.UsageError :
sys.exit(127)
else :
_orig__import__ = None
_suppressions = None
_warnings_cache = {}
def _get_unique_warnings(warnings):
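        # Filter out warnings whose formatted text was already reported in an
        # earlier call; _warnings_cache is module-level, so duplicates are
        # suppressed across every module checked by the import hook.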
for i in range(len(warnings)-1, -1, -1):
w = warnings[i].format()
if _warnings_cache.has_key(w):
del warnings[i]
else:
_warnings_cache[w] = 1
return warnings
def __import__(name, globals=None, locals=None, fromlist=None):
if globals is None:
globals = {}
if locals is None:
locals = {}
if fromlist is None:
fromlist = []
check = not sys.modules.has_key(name) and name[:10] != 'pychecker.'
pymodule = _orig__import__(name, globals, locals, fromlist)
if check :
try :
# FIXME: can we find a good moduleDir ?
# based on possible module.__file__, check if it's from
# sys.path, and if not, extract moduleDir
moduleDir = os.path.dirname(pymodule.__file__)
for path in sys.path:
if os.path.abspath(moduleDir) == os.path.abspath(path):
moduleDir = None
break
# FIXME: could it possibly be from a higher-level package,
# instead of the current dir ? Loop up with __init__.py ?
module = pcmodules.PyCheckerModule(pymodule.__name__,
moduleDir=moduleDir)
if module.initModule(pymodule):
warnings = warn.find([module], _cfg, _suppressions)
_printWarnings(_get_unique_warnings(warnings))
else :
print 'Unable to load module', pymodule.__name__
except Exception:
name = getattr(pymodule, '__name__', utils.safestr(pymodule))
# FIXME: can we use it here ?
utils.importError(name)
return pymodule
def _init() :
global _cfg, _suppressions, _orig__import__
args = string.split(os.environ.get('PYCHECKER', ''))
_cfg, files, _suppressions = Config.setupFromArgs(args)
utils.initConfig(_cfg)
check.fixupBuiltinModules(1)
# keep the orig __import__ around so we can call it
import __builtin__
_orig__import__ = __builtin__.__import__
__builtin__.__import__ = __import__
if not os.environ.get('PYCHECKER_DISABLED') :
_init()
|
smspillaz/pychecker
|
pychecker/checker.py
|
Python
|
bsd-3-clause
| 7,420
|
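checker.py above exposes both a command-line entry point (main) and an import-hook mode: when imported as a module, _init() reads options from the PYCHECKER environment variable and replaces __builtin__.__import__ so that later imports are checked. A minimal sketch of the hook mode; the final import is left as a commented placeholder.

import os
os.environ.setdefault('PYCHECKER', '')  # any checker options are read from here
import pychecker.checker                # runs _init() and wraps __import__
# import mymodule                       # placeholder: modules imported after
                                        # this point are checked on import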
from django.test import TestCase, RequestFactory
from unittest import skip
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User, Permission
from django.contrib.sessions.middleware import SessionMiddleware
from collections import OrderedDict
from urllib import urlencode
from django.utils import timezone
from datetime import timedelta
import os
import StringIO
import re
import logging
from ims.models import Site, ProductInformation, InventoryItem, ProductCategory
from ims.views import (inventory_delete_all, site_delete_all, product_delete_all,
site_delete, product_delete, product_add, site_add, site_detail,
site_add_inventory, products_add_to_site_inventory, product_detail,
product_select_add_site)
from ims.settings import PAGE_SIZE, APP_DIR
import zipfile
logging.disable(logging.CRITICAL)
# test helper functions
def create_inventory_item_for_site(site=None,
product=None,
quantity=1,
deleted=0,
modifier='none'):
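    # Create (or reuse) a Site and a ProductInformation, then record an
    # InventoryItem change with the given quantity/deleted flag.
    # Returns the (site, product, inventoryItem) triple.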
if not site:
site=Site(name="test site 1",
modifier=modifier)
site.save()
if not product:
product=ProductInformation(name="test product 1",
code="pdt1",
modifier=modifier,)
product.save()
inventoryItem=site.add_inventory(product=product,
quantity=quantity,
deleted=deleted,
modifier=modifier,)
return site, product, inventoryItem
def create_products_with_inventory_items_for_sites(numSites=1,
numProducts=1,
numItems=1,
modifier='none',
uniqueCategories=False):
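    # For each of numSites sites, get-or-create numProducts products (products
    # and categories are shared across sites; one category per product when
    # uniqueCategories is set) and record numItems successive inventory changes
    # per product with quantities 1..numItems.
    # Returns (sitesList, productList, inventoryItemList, categoryList).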
sitesList=[]
productList=[]
inventoryItemList=[]
categoryList=[]
for s in range(numSites):
siteName="test site "+str(s+1)
site=Site(name=siteName,)
site.save()
sitesList.append(site)
for p in range(numProducts):
productName="test product "+str(p+1)
productCode="pdt"+str(p+1)
if uniqueCategories:
categoryName="category-" + str(p+1)
else:
categoryName="category-1"
category, created = ProductCategory.objects.get_or_create(category = categoryName)
if created:
category.save()
categoryList.append(category)
product, created=ProductInformation.objects.get_or_create(name=productName,
code=productCode,
category=category)
if created:
product.save()
productList.append(product)
for i in range(numItems):
# increment the quantity for each addition of a new item for
# the same product code, so we can distinguish them
site,product,inventoryItem=create_inventory_item_for_site(
site=site,
product=product,
quantity=i+1,
deleted=0,
modifier=modifier)
inventoryItemList.append(inventoryItem)
return sitesList,productList,inventoryItemList,categoryList
def get_announcement_from_response(response=None, cls=None):
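    # Scrape the rendered page for the announcement <div> and return the text
    # of the <p> carrying the given CSS class; returns '' if nothing matches.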
if response and cls:
m=re.search(('^.*<div\s*id="announcement".*?<p.*?class="' +
cls + '">\s*<i .*?</i>\s*<i .*?</i>\s*(.*?)\s*</p>.*?</div>'),
response.content, re.S)
if m and len(m.groups()) > 0:
return m.groups()[0].replace('\n','')
return ''
def add_session_to_request(request):
"""Annotate a request object with a session"""
middleware = SessionMiddleware()
middleware.process_request(request)
request.session.save()
class SiteMethodTests(TestCase):
"""
ims_tests for Site instance methods
"""
#Site inventory ims_tests
def test_latest_inventory_after_initial_creation(self):
"""
site.latest_inventory should only return the latest change
"""
print 'running SiteMethodTests.test_latest_inventory_after_initial_creation... '
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
#latest_inventory is a queryset of all the most recent changes to the
#site's inventory.
latestInventory=[]
for site in createdSites:
latestInventory += site.latest_inventory()
sortedCreatedInventory=[]
for site in createdSites:
for item in site.inventoryitem_set.all():
sortedCreatedInventory.append(item.create_key())
sortedCreatedInventory.sort()
sortedLatestInventory=[]
for item in latestInventory:
sortedLatestInventory.append(item.create_key())
# make sure we return only one thing, since we only added one thing
self.assertListEqual(sortedLatestInventory,
sortedCreatedInventory,
'created inventory in database doesn''t match created inventory')
def test_latest_inventory_after_deletion(self):
"""
site.latest_inventory should only return the latest change, and should
not return any deleted items
"""
print 'running SiteMethodTests.test_latest_inventory_after_deletion... '
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
# indicate that the just added item is deleted
create_inventory_item_for_site(site=createdSites[0],
product=createdProducts[0],
deleted=1)
#latest_inventory is a queryset of all the most recent changes to the
#site's inventory
latestInventory=createdSites[0].latest_inventory()
# latest_inventory is a queryset of all the most recent changes to the
# site's inventory. Check that a deleted item doesn't show up in
# inventory
with self.assertRaises(InventoryItem.DoesNotExist):
latestInventory.get(information_id=createdProducts[0].pk)
def test_latest_inventory_after_3_quantity_change(self):
"""
site.latest_inventory should only return the latest change
"""
print 'running SiteMethodTests.test_latest_inventory_after_3_quantity_change... '
(createdSites,
createdProducts,
createdInventoryItems,
createdCategories)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
# latest_inventory is a queryset of all the most recent changes to the
# site's inventory.
latestInventory=createdSites[0].latest_inventory()
# check that the inventoryItem that we just added
# and then changed several times has the appropriate final quantity
self.assertEqual(latestInventory.get(
information_id=createdProducts[0].pk).create_key(),
createdInventoryItems.pop().create_key())
self.assertEqual(latestInventory.get(
information_id=createdProducts[0].pk).information.category.pk,
createdCategories.pop().pk)
def test_latest_inventory_after_3_quantity_change_and_deletion(self):
"""
site.latest_inventory should only return the latest change and not
return any deleted items.
"""
print 'running SiteMethodTests.test_latest_inventory_after_3_quantity_change_and_deletion... '
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
# indicate that the just added item is deleted
create_inventory_item_for_site(site=createdSites[0],
product=createdProducts[0],
deleted=1)
#latest_inventory is a queryset of all the most recent changes to the
#site's inventory
latestInventory=createdSites[0].latest_inventory()
# Check that a deleted InventoryItem doesn't show up
# in inventory
with self.assertRaises(InventoryItem.DoesNotExist):
latestInventory.get(information_id=createdProducts[0].pk)
def test_inventory_set_after_3_changes(self):
"""
InventoryItem history of changes should be retained in the database
"""
print 'running SiteMethodTests.test_inventory_set_after_3_changes... '
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
self.assertEqual(createdSites[0].inventoryitem_set.all().count(),3)
def test_latest_inventory_after_deletion_and_re_addition(self):
"""
site.latest_inventory should only return the latest change and not
return any deleted items. If an item is deleted and then re-added, we
should always see the last change
"""
print 'running SiteMethodTests.test_latest_inventory_after_deletion_and_re_addition... '
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
# indicate that the just added item is deleted
create_inventory_item_for_site(site=createdSites[0],
product=createdProducts[0],
deleted=1)
#latest_inventory is a queryset of all the most recent changes to the
#site's inventory
(__,
__,
lastItemChange)=create_inventory_item_for_site(
site=createdSites[0],
product=createdProducts[0],
quantity=100)
# latest_inventory is a queryset of all the most recent changes to the
# site's inventory.
latestInventory=createdSites[0].latest_inventory()
# Check that we still have inventory after a deletion
# and re-addition
self.assertEqual(
latestInventory.get(
information_id=createdProducts[0].pk).create_key(),
lastItemChange.create_key())
def test_latest_inventory_3_products_after_3_changes(self):
"""
site.latest_inventory should only return the latest changes
"""
print 'running SiteMethodTests.test_latest_inventory_3_products_after_3_changes... '
(createdSites,
createdProducts,
createdInventoryItems,
createdCategories)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=3,
numItems=3,
uniqueCategories=False,
)
# latest_inventory is a queryset of all the most recent changes to the
# site's inventory.
latestInventory=createdSites[0].latest_inventory()
self.assertEqual(
latestInventory.get(information_id=createdProducts[0].pk).create_key(),
createdInventoryItems[3*1-1].create_key())
self.assertEqual(
latestInventory.get(information_id=createdProducts[1].pk).create_key(),
createdInventoryItems[3*2-1].create_key())
self.assertEqual(
latestInventory.get(information_id=createdProducts[2].pk).create_key(),
createdInventoryItems[3*3-1].create_key())
self.assertEqual(
latestInventory.get(information_id=createdProducts[0].pk).information.category.pk,
createdCategories.pop().pk)
def test_parse_sites_from_xls_initial(self):
"""
import 3 sites from Excel
"""
print 'running SiteMethodTests.test_parse_sites_from_xls_initial... '
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
importedSites,__=Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedSites,
None,
'Failure to import sites from excel')
queriedSites=Site.objects.all()
# check that we saved 3 sites
self.assertEqual(
queriedSites.count(),
3,
'Number of imported sites mismatch. Some sites didn''t get stored.')
# check that the site modifiers are correctly stored
sortedImportedSites=[]
for site in importedSites:
sortedImportedSites.append(site.create_key())
sortedImportedSites.sort()
sortedQueriedSites=[]
for site in queriedSites:
sortedQueriedSites.append(site.create_key())
sortedQueriedSites.sort()
self.assertListEqual(sortedImportedSites,
sortedQueriedSites,
'Imported sites don''t match the stored sites')
def test_parse_sites_from_xls_with_dups(self):
"""
import 3 sites from Excel, plus one duplicate site
"""
print 'running SiteMethodTests.test_parse_sites_from_xls_with_dups... '
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3_site3.xls')
importedSites,__=Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedSites,
None,
'Failure to import sites from excel')
queriedSites=Site.objects.all()
# check that we only saved 3 sites
self.assertEqual(
queriedSites.count(),
3,
'You stored a duplicate site as a separate entity.')
def test_parse_sites_from_xls_with_bad_header(self):
"""
import 3 sites from Excel but use a file with invalid headers
"""
print 'running SiteMethodTests.test_parse_sites_from_xls_with_bad_header... '
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
__, siteMessage=Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
self.assert_(
'Xlrdutils' in siteMessage,
('Failure to recognize a file with bad headers.\nSite.parse_sites_from_xls returned: %s'
% siteMessage))
    def test_parse_sites_from_xls_with_bad_date(self):
"""
import 3 sites from Excel but use a file with a bad date format
"""
print 'running SiteMethodTests.test_parse_sites_from_xls_with_bad_date... '
filename=os.path.join(
APP_DIR,
'testData/sites_add_site1_site2_site3_bad_date.xls')
__, siteMessage=Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
self.assert_('Xlrdutils' in siteMessage,
('Failure to recognize a file with bad date format.\nSite.parse_sites_from_xls returned: %s'
% siteMessage))
def test_parse_sites_from_xls_unicode(self):
print 'running SiteMethodTests.test_parse_sites_from_xls_unicode... '
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_unicode.xls')
try:
(__,
siteMessage) = Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
except UnicodeEncodeError as e:
self.fail("Import of spreadsheet containing unicode caused UnicodeEncodeError: %s" % e)
self.assertEqual(siteMessage,
'',
('Import of spreadsheet containing unicode generated warnings %s'
% siteMessage))
class ProductInformationMethodTests(TestCase):
"""
ProductInformation class method ims_tests
"""
def test_num_sites_containing_with_3_sites(self):
print 'running ProductInformationMethodTests.test_num_sites_containing_with_3_sites... '
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=1,
numItems=1)
product = createdProducts[0]
self.assertEqual(product.num_sites_containing(), 3)
def test_num_sites_containing_with_3_sites_after_inventory_change(self):
print 'running ProductInformationMethodTests.test_num_sites_containing_with_3_sites_after_inventory_change... '
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=1,
numItems=2)
product = createdProducts[0]
self.assertEqual(product.num_sites_containing(), 3)
def test_parse_product_information_from_xls_initial(self):
"""
import 3 products from Excel
"""
print 'running ProductInformationMethodTests.test_parse_product_information_from_xls_initial... '
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
(importedProducts,
__)=ProductInformation.parse_product_information_from_xls(
filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedProducts,
None,
'Failure to import products from Excel')
queriedProducts=ProductInformation.objects.all()
        # check that we saved 3 products
self.assertEqual(queriedProducts.count(),
3,
'Number of imported products mismatch. \
                         Some products didn''t get stored.')
# check that the product modifiers are correctly stored
sortedImportedProducts=[]
for product in importedProducts:
sortedImportedProducts.append(product.create_key())
sortedImportedProducts.sort()
sortedQueriedProducts=[]
for product in queriedProducts:
sortedQueriedProducts.append(product.create_key())
sortedQueriedProducts.sort()
self.assertListEqual(sortedImportedProducts, sortedQueriedProducts)
def test_parse_product_information_from_xls_with_dups(self):
"""
import 3 products from Excel, plus one duplicate product
"""
print 'running ProductInformationMethodTests.test_parse_product_information_from_xls_with_dups... '
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3_prod3.xls')
(importedProducts,
__)=ProductInformation.parse_product_information_from_xls(
filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedProducts,
None,
'Failure to import products from excel')
queriedProducts=ProductInformation.objects.all()
# check that we only saved 3 products
self.assertTrue(
queriedProducts.count() < 4,
'You stored a duplicate product as a separate entity.')
def test_parse_product_information_from_xls_with_bad_header(self):
"""
import 3 products from Excel but use a file with invalid headers
"""
print 'running ProductInformationMethodTests.test_parse_product_information_from_xls_with_bad_header... '
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
(__,
productMessage)=ProductInformation.parse_product_information_from_xls(
filename=filename,
modifier='none',
save=True)
self.assert_(
'Xlrdutils' in productMessage,
('Failure to recognize a file with bad headers.\nProductInformation.parse_product_information_from_xls returned: %s'
% productMessage))
def test_parse_product_information_from_xls_with_bad_date(self):
"""
import 3 products from Excel but use a file with a bad date format
"""
print 'running ProductInformationMethodTests.test_parse_product_information_from_xls_with_bad_date... '
filename=os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3_bad_date.xls')
(__,
productMessage)=ProductInformation.parse_product_information_from_xls(
filename=filename,
modifier='none',
save=True)
self.assert_('Xlrdutils' in productMessage,
('Failure to recognize a file with bad date format.\nProductInformation.parse_product_information_from_xls returned: %s'
% productMessage))
def test_parse_product_information_from_xls_with_unicode(self):
print 'running ProductInformationMethodTests.test_parse_product_information_from_xls_with_unicode... '
filename=os.path.join(
APP_DIR,
'testData/products_add_prod1_unicode.xls')
try:
(__,
productMessage)=ProductInformation.parse_product_information_from_xls(
filename=filename,
modifier='none',
save=True)
except UnicodeEncodeError as e:
self.fail("Import of spreadsheet containing unicode caused UnicodeEncodeError: %s" % e)
self.assertEqual(productMessage,
'',
('Import of spreadsheet containing unicode generated warnings %s'
% productMessage))
class ProductCategoryMethodTests(TestCase):
"""
ProductCategory class method ims_tests
"""
def test_parse_product_category_from_xls_initial(self):
        print 'running ProductCategoryMethodTests.test_parse_product_category_from_xls_initial... '
filename=os.path.join(APP_DIR,
'testData/category_add_3.xls')
(importedCategories,
__)=ProductCategory.parse_product_categories_from_xls(
filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedCategories,
None,
'Failure to import categories from Excel')
queriedCategories=ProductCategory.objects.all()
        # check that we saved 3 categories
self.assertEqual(queriedCategories.count(),
3,
'Number of imported categories mismatch. \
Some categories didn''t get stored.')
def test_parse_product_category_from_xls_with_unicode(self):
        print 'running ProductCategoryMethodTests.test_parse_product_category_from_xls_with_unicode... '
filename=os.path.join(
APP_DIR,
'testData/category_add_3_unicode.xls')
try:
(__,
categoryMessage)=ProductCategory.parse_product_categories_from_xls(
filename=filename,
modifier='none',
save=True)
except UnicodeEncodeError as e:
self.fail("Import of spreadsheet containing unicode caused UnicodeEncodeError: %s" % e)
self.assertEqual(categoryMessage,
'',
('Import of spreadsheet containing unicode generated warnings %s'
% categoryMessage))
class InventoryItemMethodTests(TestCase):
"""
InventoryItem class method ims_tests
"""
def test_parse_inventory_from_xls_initial(self):
"""
import 3 inventory items to 3 sites from Excel
"""
print 'running InventoryItemMethodTests.test_parse_inventory_from_xls_initial... '
for number in range(3):
#create three sites
siteName = 'test site %d' % (number + 1)
siteNumber = number + 1
site=Site(name = siteName,
number = siteNumber,
modifier = 'none')
site.save()
for number in range(3):
#create three products
productName="test product %d" % (number+1)
productCode="pdt%d" % (number+1)
product=ProductInformation(name=productName,
code=productCode,
modifier='none')
product.save()
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
ProductInformation.parse_product_information_from_xls(filename=filename,
modifier='none',
save=True)
filename=os.path.join(
APP_DIR,
'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3.xls')
(importedInventoryItems,
__)=InventoryItem.parse_inventory_from_xls(
filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedInventoryItems,
None,
'Failure to import inventory from Excel')
self.assertEqual(len(importedInventoryItems),
9,
'Failure to create one or more inventoryItems. Missing associated Site or ProductInformation?')
queriedInventoryItems=InventoryItem.objects.all()
        # check that we saved 9 inventory items (3 products x 3 sites)
self.assertEqual(queriedInventoryItems.count(),
3*3,
'Total inventory mismatch. Some InventoryItems didn''t get stored.')
# check that the inventory IDs are correctly stored
sortedImportedInventoryItems=[]
for item in importedInventoryItems:
sortedImportedInventoryItems.append(item.create_key())
sortedImportedInventoryItems.sort()
sortedQueriedInventoryItems=[]
for item in queriedInventoryItems:
sortedQueriedInventoryItems.append(item.create_key())
sortedQueriedInventoryItems.sort()
self.assertListEqual(sortedImportedInventoryItems,
sortedQueriedInventoryItems,
'Imported inventory doesn''t match stored inventory')
def test_parse_inventory_from_xls_with_dups(self):
"""
import 3 inventory items to 3 sites from Excel
"""
        print 'running InventoryItemMethodTests.test_parse_inventory_from_xls_with_dups... '
for number in range(3):
#create three sites
siteName = 'test site %d' % (number + 1)
siteNumber = number + 1
site=Site(name = siteName,
number = siteNumber,
modifier = 'none')
site.save()
for number in range(3):
#create three products
productName="test product %d" % (number+1)
productCode="pdt%d" % (number+1)
product=ProductInformation(name=productName,
code=productCode,
modifier='none')
product.save()
filename=os.path.join(
APP_DIR,
'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3_dups.xls')
(importedInventoryItems,
__)=InventoryItem.parse_inventory_from_xls(
filename=filename,
modifier='none',
save=True)
self.assertNotEqual(importedInventoryItems,
None,
'Failure to import inventory from Excel')
queriedInventory=InventoryItem.objects.all()
        # check that all 10 inventory rows from the spreadsheet (duplicates included) were stored
self.assertEqual(
queriedInventory.count(), 10,
            'You didn''t store all the inventory items')
def test_parse_inventory_from_xls_with_bad_header(self):
"""
import 3 inventory items to 3 sites from Excel file with a bad header
"""
print 'running InventoryItemMethodTests.test_parse_inventory_from_xls_with_bad_header... '
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
(__,
inventoryMessage)=InventoryItem.parse_inventory_from_xls(
filename=filename,
modifier='none',
save=True)
self.assert_('Xlrdutils' in inventoryMessage,
('Failure to recognize a file with bad header format.\nInventoryItem.parse_inventory_from_xl returned: %s'
% inventoryMessage))
def test_parse_inventory_from_xls_with_bad_date(self):
"""
import 3 inventory items to 3 sites from Excel file with a bad header
"""
print 'running InventoryItemMethodTests.test_parse_inventory_from_xls_with_bad_date... '
filename=os.path.join(
APP_DIR,
'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3_bad_date.xls')
(__,
inventoryMessage)=InventoryItem.parse_inventory_from_xls(
filename=filename,
modifier='none',
save=True)
self.assert_('Xlrdutils' in inventoryMessage,
('Failure to recognize a file with bad date format.\nInventoryItem.parse_inventory_from_xl returned: %s'
% inventoryMessage))
@skip('No longer using IMS page view')
class HomeViewTests(TestCase):
"""
ims_tests for Home view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_home_for_latest_changes_1(self):
"""
The home view should display sites with recently edited inventory with
the latest changes at the top and latest inventory changes with the
latest changes at the top as well
"""
print 'running HomeViewTests.test_home_for_latest_changes_1... '
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
createdInventoryItems,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
response=self.client.get(reverse('ims:home'))
sitesResponseList=[]
itemsResponseList=[]
for site in response.context['sitesList']:
sitesResponseList.append(site.create_key())
for item in response.context['inventoryList']:
# include the timestamp to ensure uniqueness when comparing
itemsResponseList.append(item.create_key())
sortedCreatedSites=[]
for site in createdSites:
sortedCreatedSites.append(site.create_key())
# compare the latest changed sites only
sortedCreatedSites.reverse()
# just retain the latest inventory changes to compare to the response
latestInventoryItems=OrderedDict()
sortedCreatedInventoryItems=[]
createdInventoryItems.reverse()
for item in createdInventoryItems:
if not latestInventoryItems.has_key(item.information):
latestInventoryItems[item.information]=item
for item in latestInventoryItems.values():
# include the timestamp to ensure uniqueness when comparing
sortedCreatedInventoryItems.append(item.create_key())
self.assertListEqual(sitesResponseList, sortedCreatedSites[:PAGE_SIZE])
self.assertListEqual(itemsResponseList,
sortedCreatedInventoryItems[:PAGE_SIZE])
def test_home_for_latest_changes_2(self):
"""
The home view should display sites with recently edited inventory with
the latest changes at the top and latest inventory changes with the
latest changes at the top as well
"""
print 'running HomeViewTests.test_home_for_latest_changes_2... '
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
createdInventoryItems,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
response=self.client.get(reverse('ims:home'))
sitesResponseList=[]
itemsResponseList=[]
for site in response.context['sitesList']:
sitesResponseList.append(site.create_key())
for item in response.context['inventoryList']:
# include the timestamp to ensure uniqueness when comparing
itemsResponseList.append(item.create_key())
sortedCreatedSites=[]
for site in createdSites:
sortedCreatedSites.append(site.create_key())
# compare the latest changed sites only
sortedCreatedSites.reverse()
# just retain the latest inventory changes to compare to the response
latestInventoryItems=OrderedDict()
sortedCreatedInventoryItems=[]
createdInventoryItems.reverse()
for item in createdInventoryItems:
if not latestInventoryItems.has_key(item.information):
latestInventoryItems[item.information]=item
for item in latestInventoryItems.values():
# include the timestamp to ensure uniqueness when comparing
sortedCreatedInventoryItems.append(item.create_key())
self.assertListEqual(sitesResponseList, sortedCreatedSites[:PAGE_SIZE])
self.assertListEqual(itemsResponseList,
sortedCreatedInventoryItems[:PAGE_SIZE])
def test_home_for_no_inventory(self):
"""
If there is no inventory, ims:home should display nothing
"""
print 'running HomeViewTests.test_home_for_no_inventory... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:home'))
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No inventory found', resultWarning,
'IMS Home view didn''t generate the correct warning when there is no inventory.\nactual warning message = %s'
% resultWarning)
class InventoryHistoryViewTests(TestCase):
"""
ims_tests for inventory_history view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_inventory_history_with_invalid_site(self):
print 'running InventoryHistoryViewTests.test_inventory_history_with_invalid_site... '
self.client.login(username='testUser', password='12345678')
siteId = 1
code="D11"
response=self.client.get(reverse('ims:inventory_history',
kwargs =
{'siteId':siteId,
'code':code,}),
follow=True)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('Unable to check inventory history.<br />Site %d does not exist' %
siteId, resultError,
'IMS inventory_history view didn''t generate the correct warning when an invalid site was requested.\nactual message = %s' %
resultError)
def test_inventory_history_with_invalid_code(self):
print 'running InventoryHistoryViewTests.test_inventory_history_with_invalid_code... '
self.client.login(username='testUser', password='12345678')
siteId = 1
code="D11"
site=Site(number = siteId)
site.save()
response=self.client.get(reverse('ims:inventory_history',
kwargs =
{'siteId':siteId,
'code':code,}),
follow=True)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('Unable to check inventory history.<br />Item %s does not exist' %
code, resultError,
'IMS inventory_history view didn''t generate the correct warning when an invalid code was requested.\nactual message = %s' %
resultError)
def test_inventory_history_with_valid_history(self):
print 'running InventoryHistoryViewTests.test_inventory_history_with_valid_history... '
self.client.login(username='testUser', password='12345678')
# create initial inventory item
site, product, __ = create_inventory_item_for_site(quantity=1)
# change it to create a history
site, product, __ = create_inventory_item_for_site(
site = site,
product = product,
quantity=2)
response=self.client.get(reverse('ims:inventory_history',
kwargs =
{'siteId':site.number,
'code':product.code,}),
follow=True)
self.assertEqual(response.status_code, 200,
'Inventory History generated a non-200 response code')
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultError, '',
'IMS inventory_history view generated an error with a valid request.\nactual message = %s' %
resultError)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual(resultWarning, '',
'IMS inventory_history view generated a warning with a valid request.\nactual message = %s' %
resultWarning)
resultInfo = get_announcement_from_response(response=response,
cls="infonote")
self.assertEqual(resultInfo, '',
'IMS inventory_history view generated info with a valid request.\nactual message = %s' %
resultInfo)
class SitesViewTests(TestCase):
"""
ims_tests for sites view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_sites_get_with_no_sites(self):
print 'running SitesViewTests.test_sites_get_with_no_sites... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:sites'),
follow=True)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
        self.assertIn('No sites found',
                      resultWarning,
                      'IMS sites view didn''t generate the correct warning when no sites were found.\nactual message = %s' %
                      resultWarning)
def test_sites_get_with_filter_and_no_sites(self):
        print 'running SitesViewTests.test_sites_get_with_filter_and_no_sites... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:sites',) +
'?searchField=name&searchValue=blah',
follow = False,)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
        self.assertIn('No sites found',
                      resultWarning,
                      'IMS sites view didn''t generate the correct warning when no sites were found.\nactual message = %s' %
                      resultWarning)
def test_sites_get_with_sites(self):
print 'running SitesViewTests.test_sites_get_with_sites... '
self.client.login(username='testUser', password='12345678')
site = Site(name='test site',)
site.save()
response=self.client.get(reverse('ims:sites',),
follow = False,)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual('', resultWarning)
def test_sites_get_with_filter(self):
print 'running SitesViewTests.test_sites_get_with_filter... '
self.client.login(username='testUser', password='12345678')
site = Site(name='test site',)
site.save()
response=self.client.get(reverse('ims:sites',) +
'?searchField=name&searchValue=test',
follow = False,)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual('', resultWarning)
def test_sites_get_with_bad_filter(self):
print 'running SitesViewTests.test_sites_get_with_bad_filter... '
self.client.login(username='testUser', password='12345678')
site = Site(name='test site',)
site.save()
response=self.client.get(reverse('ims:sites',) +
'?searchField=name&searchValue=blah',
follow = False,)
self.assertRedirects(response, reverse('ims:sites',) +
'?page=1&pageSize=%d' % PAGE_SIZE,
status_code = 302,
target_status_code = 200)
def test_sites_post_add(self):
print 'running SitesViewTests.test_sites_post_add... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:sites'),
{'Add':''},
follow=False)
self.assertRedirects(response, reverse('ims:site_add'),
status_code = 302,
target_status_code = 200)
def test_sites_post_add_without_add_site_perm(self):
print 'running SitesViewTests.test_sites_post_add_without_add_site_perm... '
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['0'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['0'],
'Add':'Add',}
response=self.client.post(reverse('ims:sites',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add sites',
resultError,
'IMS sites view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
def test_sites_post_delete(self):
print 'running SitesViewTests.test_sites_post_delete... '
perms = ['delete_site', 'delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-Delete': ['on'],
'form-0-number': [site.number],
'Delete': ['Delete']}
response=self.client.post(reverse('ims:sites'),
postData,
follow=False)
self.assertRedirects(response,
(reverse('ims:site_delete') +
'?site=' + str(site.number) + '&'),
status_code = 302,
target_status_code = 200)
def test_sites_post_delete_without_delete_site_perms(self):
        print 'running SitesViewTests.test_sites_post_delete_without_delete_site_perms... '
perms = [ 'delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-Delete': ['on'],
'form-0-number': [site.number],
'Delete': ['Delete']}
response=self.client.post(reverse('ims:sites'),
postData,
follow=False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS sites view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
class ProductDeleteViewTests(TestCase):
"""
ims_tests for product_delete view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_delete_get_with_no_get_parms(self):
print 'running ProductDeleteViewTests.test_product_delete_get_with_no_get_parms... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:product_delete'),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No products requested for deletion',
resultWarning,
                      'IMS product_delete view didn''t generate the correct warning when no products requested found.\nactual message = %s' %
resultWarning)
def test_product_delete_get(self):
print 'running ProductDeleteViewTests.test_product_delete_get... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':code}),
follow = False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('Are you sure?',
resultWarning,
'IMS product_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_delete_get_with_inventory(self):
print 'running ProductDeleteViewTests.test_product_delete_get_with_inventory... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':createdProducts[0].pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('One or more products contain inventory. Deleting the products will delete all inventory in all sites containing this product as well. Delete anyway?',
resultWarning,
'IMS product_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_delete_get_after_deleting_inventory_from_site(self):
print 'running ProductDeleteViewTests.test_product_delete_get_after_deleting_inventory_from_site... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
product = createdProducts[0]
site =createdSites[0]
site.add_inventory(product=product,
deleted=True,
                           modifier='testUser',)
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':createdProducts[0].pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('Are you sure?',
resultWarning,
'IMS product_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_delete_get_without_delete_productinformation_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_get_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':code}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_get_without_delete_inventoryitem_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_get_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
response=self.client.get(reverse('ims:product_delete') + '?' +
urlencode({'code':code}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_post_with_no_post_parms(self):
print 'running ProductDeleteViewTests.test_product_delete_post_with_no_post_parms... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Delete':'Delete'}
request = self.factory.post(reverse('ims:product_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_delete(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('No products requested for deletion',
resultError,
'IMS product_delete view didn''t generate the correct warning when no products requested found.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
def test_product_delete_post(self):
print 'running ProductDeleteViewTests.test_product_delete_post... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Delete':'Delete',
'products':[product.code,]}
request = self.factory.post(reverse('ims:product_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn(('Successfully deleted product and associated inventory for product code %s with name "%s"<br/>' %
(product.meaningful_code(), product.name)),
resultInfo,
'IMS product_delete view didn''t generate the correct info when product deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(ProductInformation.objects.all().count(),
0,
'Product still in database after deleting.')
def test_product_delete_post_with_inventory(self):
print 'running ProductDeleteViewTests.test_product_delete_post_with_inventory... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
postData = {'Delete':'Delete',
'products':[createdProducts[0].code,]}
request = self.factory.post(reverse('ims:product_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn(('Successfully deleted product and associated inventory for product code %s with name "%s"<br/>' %
(createdProducts[0].meaningful_code(), createdProducts[0].name)),
resultInfo,
'IMS product_delete view didn''t generate the correct info when product deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(ProductInformation.objects.all().count(),
0,
'Product still in database after deleting.')
def test_product_delete_post_without_delete_productinformation_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_post_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Delete':'Delete',
'products':[product.code,]}
response = self.client.post(reverse('ims:product_delete'),
postData,
follow = False)
self.assertEqual(response.status_code,200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_post_without_delete_inventoryitem_perm(self):
print 'running ProductDeleteViewTests.test_product_delete_post_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Delete':'Delete',
'products':[product.code,]}
response = self.client.post(reverse('ims:product_delete'),
postData,
follow = False)
self.assertEqual(response.status_code,200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
'IMS product_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_delete_post_cancel(self):
print 'running ProductDeleteViewTests.test_product_delete_post_cancel... '
perms = [ 'delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name = 'test product',
code = code)
product.save()
postData = {'Cancel':'Cancel',
'products':[code,]}
response = self.client.post(reverse('ims:product_delete'),
postData,
follow = False)
self.assertRedirects(response, reverse('ims:products') + '?' +
urlencode({'page':1,
'pageSize':1,}),
status_code = 302,
target_status_code = 200)
class SiteDeleteViewTests(TestCase):
"""
ims_tests for site_delete view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_delete_get_with_no_get_parms(self):
print 'running SiteDeleteViewTests.test_site_delete_get_with_no_get_parms... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:site_delete'),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No sites requested for deletion',
resultWarning,
'IMS site_delete view didn''t generate the correct warning when no sites requested found.\nactual message = %s' %
resultWarning)
def test_site_delete_get(self):
print 'running SiteDeleteViewTests.test_site_delete_get... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':site.pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('Are you sure?',
resultWarning,
'IMS site_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_delete_get_with_inventory(self):
print 'running SiteDeleteViewTests.test_site_delete_get_with_inventory... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':createdSites[0].pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('One or more sites contain inventory. Deleting the sites will delete all inventory as well. Delete anyway?',
resultWarning,
'IMS site_delete view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_delete_get_without_delete_site_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_get_without_delete_site_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':site.pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_delete_get_without_delete_inventoryitem_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_get_without_delete_inventoryitem_perm... '
perms = ['delete_site',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_delete') + '?' +
urlencode({'site':site.pk}),
follow = False)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_delete_post_with_no_post_parms(self):
print 'running SiteDeleteViewTests.test_site_delete_post_with_no_post_parms... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Delete':'Delete'}
request = self.factory.post(reverse('ims:site_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_delete(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('No sites requested for deletion',
resultError,
'IMS site_delete view didn''t generate the correct warning when no sites requested found.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
def test_site_delete_post(self):
print 'running SiteDeleteViewTests.test_site_delete_post... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Delete':'Delete',
'sites':[site.number,]}
request = self.factory.post(reverse('ims:site_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn('Successfully deleted site %s<br />' % site.name,
resultInfo,
'IMS site_delete view didn''t generate the correct info site deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(Site.objects.all().count(),
0,
'Site still in database after deleting.')
def test_site_delete_post_with_inventory(self):
print 'running SiteDeleteViewTests.test_site_delete_post_with_inventory... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=3)
postData = {'Delete':'Delete',
'sites':[createdSites[0].number,]}
request = self.factory.post(reverse('ims:site_delete'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_delete(request)
response.client = self.client
resultInfo = request.session['infoMessage']
self.assertIn('Successfully deleted site %s<br />' % createdSites[0].name,
resultInfo,
'IMS site_delete view didn''t generate the correct info site deleted.\nactual message = %s' %
resultInfo)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':PAGE_SIZE,}),
status_code = 302,
target_status_code = 200)
self.assertEqual(Site.objects.all().count(),
0,
'Site still in database after deleting.')
self.assertEqual(InventoryItem.objects.all().count(),
0,
'Inventory still in database after deleting.')
def test_site_delete_post_without_delete_site_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_post_without_delete_site_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Delete':'Delete',
'sites':[site.number,]}
response = self.client.post(reverse('ims:site_delete'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error with incorrect user permissions.\nactual message = %s' %
resultError)
def test_site_delete_post_without_delete_inventoryitem_perm(self):
print 'running SiteDeleteViewTests.test_site_delete_post_without_delete_inventoryitem_perm... '
perms = ['delete_site',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Delete':'Delete',
'sites':[site.number,]}
response = self.client.post(reverse('ims:site_delete'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete sites',
resultError,
'IMS site_delete view didn''t generate the correct error with incorrect user permissions.\nactual message = %s' %
resultError)
def test_site_delete_post_cancel(self):
print 'running SiteDeleteViewTests.test_site_delete_post_cancel... '
perms = [ 'delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
postData = {'Cancel':'Cancel',
'sites':[site.number,]}
response = self.client.post(reverse('ims:site_delete'),
postData,
follow = False)
self.assertRedirects(response, reverse('ims:sites') + '?' +
urlencode({'page':1,
'pageSize':1,}),
status_code = 302,
target_status_code = 200)
class SiteAddViewTests(TestCase):
"""
ims_tests for site_add view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_add_get(self):
print 'running SiteAddViewTests.test_site_add_get... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response = self.client.get(reverse('ims:site_add'))
self.assertEquals(response.status_code, 200)
def test_site_add_get_without_add_site_perm(self):
print 'running SiteAddViewTests.test_site_add_get_without_add_site_perm... '
self.client.login(username='testUser', password='12345678')
request = self.factory.get(reverse('ims:site_add'),
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_add(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('You don''t have permission to add sites',
resultError,
'IMS site_add view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:sites',) +
'?' + urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_site_add_post(self):
print 'running SiteAddViewTests.test_site_add_post... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save': 'Save', }
request = self.factory.post(reverse('ims:site_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_add(request)
self.assertEqual(Site.objects.count(), 1)
site = Site.objects.all()[0]
resultInfo = request.session['infoMessage']
self.assertIn('Successfully added site', resultInfo,
'IMS site_add view didn''t generate the correct info when saving.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},),
status_code = 302,
target_status_code = 200)
def test_site_add_post_no_change(self):
print 'running SiteAddViewTests.test_site_add_post_no_change... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Save':'Save'}
response = self.client.post(reverse('ims:site_add'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before site can be added',
resultWarning,
'IMS site_add view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_add_post_without_add_site_perm(self):
print 'running SiteAddViewTests.test_site_add_post_without_add_site_perm... '
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save': 'Save', }
request = self.factory.post(reverse('ims:site_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = site_add(request)
resultInfo = request.session['errorMessage']
self.assertIn('You don''t have permission to add sites', resultInfo,
'IMS site_add view didn''t generate the correct error when saving.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:sites',) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
class SiteDetailViewTests(TestCase):
"""
ims_tests for site_detail view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_detail_get(self):
print 'running SiteDetailViewTests.test_site_detail_get... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
response=self.client.get(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
follow=False)
self.assertEqual(response.status_code, 200)
def test_site_detail_get_with_invalid_site(self):
print 'running SiteDetailViewTests.test_site_detail_get_with_invalid_site... '
self.client.login(username='testUser', password='12345678')
siteId = 1
request=self.factory.get(reverse('ims:site_detail',
kwargs =
{'siteId':siteId,}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = siteId)
resultError = request.session['errorMessage']
self.assertIn('Site %d does not exist' % siteId,
resultError,
'IMS site detail view didn''t generate the correct error when an invalid site was requested.\nactual message = %s' %
resultError)
response.client = self.client
self.assertRedirects(response,
reverse('ims:sites',) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_site_detail_get_with_filter(self):
print 'running SiteDetailViewTests.test_site_detail_get_with_filter... '
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=2,
numItems=1)
site = createdSites[0]
response=self.client.get(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}) +
'?searchField=information__name&searchValue=test product 1',
follow=False)
self.assertEqual(response.status_code, 200)
def test_site_detail_get_with_bad_inventory_filter(self):
print 'running SiteDetailViewTests.test_site_detail_get_with_bad_inventory_filter... '
self.client.login(username='testUser', password='12345678')
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=2,
numItems=1)
site = createdSites[0]
request=self.factory.get(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}) +
'?searchField=information__name&searchValue=blah',
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultWarning = request.session['warningMessage']
self.assertIn('No inventory found using filter criteria.<br/>Showing all inventory.',
resultWarning,
'IMS site detail view didn''t generate the correct error with a bad inventory filter.\nactual message = %s' %
resultWarning)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_site_detail_post_save_site(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
perms = ['change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save Site': 'Save Site', }
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site information',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'False'}),
302,
200)
def test_site_detail_post_save_site_invalid_fields(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site_invalid_fields... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
perms = ['change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Save Site': 'Save Site', }
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before the site can be saved',
resultWarning,
'IMS site detail view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_detail_post_save_site_no_change(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site_no_change... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',
county = '',
address1 = '11 main st.',
contactName = 'John Smith',
contactPhone = '555-1212',
modifier = self.user.username,)
site.save()
perms = ['change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save Site': 'Save Site', }
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No changes made to the site information',
resultWarning,
'IMS site detail view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_detail_post_save_site_without_change_site_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_site_without_change_site_perm... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
self.client.login(username='testUser', password='12345678')
postData = {'name': 'test site',
'county': '',
'address1': '11 main st.',
'contactName' : 'John Smith',
'contactPhone' : '555-1212',
'modifier' : self.user.username,
'Save Site': 'Save Site', }
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change site information',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_detail_post_save_adjust_changes_quantity(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_quantity... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
perms = ['change_inventoryitem', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
newQuantity = 5
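        # Django formsets validate against the management form, so TOTAL_FORMS,
        # INITIAL_FORMS, etc. must accompany the per-row fields in the POST data.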
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-quantity':[newQuantity],
'Save Adjust Changes':'Save Adjust Changes',}
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site inventory',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'True'}),
302,
200)
newInventory = site.latest_inventory_for_product(code = product.pk)
self.assertEqual(newInventory.quantity,
5,
'site_detail view didn''t show the correct inventory quantity after changing to %d\n Quantity = %d' % (newQuantity, newInventory.quantity))
def test_site_detail_post_save_adjust_changes_delete(self):
        print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_delete... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=2,
numItems=1)
site = createdSites[0]
numInventory = site.latest_inventory().count()
self.assertEqual(numInventory,
2,
'site_detail view didn''t show the correct inventory after adding 2. Quantity = %d' % numInventory)
perms = ['change_inventoryitem', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': [len(createdProducts)],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'Save Adjust Changes':'Save Adjust Changes',}
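        # Build one formset row per created inventory item, flagging the row at
        # deleteIndex for deletion.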
addItemDict = {}
deleteIndex = 1
for index in range(len(createdInventory)):
addItemDict['form-%d-id' % index] = createdInventory[index].pk
addItemDict['form-%d-site' % index] = createdInventory[index].site.pk
addItemDict['form-%d-quantity' % index] = createdInventory[index].quantity
addItemDict['form-%d-information' % index] = createdInventory[index].information.pk
if index == deleteIndex:
addItemDict['form-%d-deleteItem' % index] = 'on'
postData.update(addItemDict)
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site inventory',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'True'}),
302,
200)
numInventory = site.latest_inventory().count()
self.assertEqual(numInventory,
1,
'site_detail view didn''t show the correct inventory after deleting 1. Quantity = %d' % numInventory)
def test_site_detail_post_save_adjust_changes_without_change_inventoryitem_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_without_change_inventoryitem_perm... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
newQuantity = 5
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-quantity':[newQuantity],
'Save Adjust Changes':'Save Adjust Changes',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change or delete inventory',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_detail_post_save_adjust_changes_without_delete_inventoryitem_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_adjust_changes_without_delete_inventoryitem_perm... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
        perms = ['change_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
newQuantity = 5
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-quantity':[newQuantity],
'Save Adjust Changes':'Save Adjust Changes',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change or delete inventory',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_detail_post_save_add_subtract_changes_quantity(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_add_subtract_changes_quantity... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
perms = ['change_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
quantityAdd = 5
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-addSubtract':[quantityAdd],
'Save Add Subtract Changes':'Save Add Subtract Changes',}
request=self.factory.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_detail(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully changed site inventory',
resultInfo,
'IMS site detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs={'siteId':site.pk,},) +
'?' + urlencode({'page':1,
'pageSize':PAGE_SIZE,
'adjust':'False'}),
302,
200)
newInventory = site.latest_inventory_for_product(code = product.pk)
self.assertEqual(newInventory.quantity,
1 + quantityAdd,
'site_detail view didn''t show the correct inventory quantity after changing to %d\n Quantity = %d' % (1 + quantityAdd, newInventory.quantity))
def test_site_detail_post_save_add_subtract_changes_without_change_inventoryitem_perm(self):
print 'running SiteDetailViewTests.test_site_detail_post_save_add_subtract_changes_without_change_inventoryitem_perm... '
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
product = createdProducts[0]
inventory = createdInventory[0]
self.client.login(username='testUser', password='12345678')
quantityAdd = 5
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-id':[inventory.pk],
'form-0-site':[site.pk],
'form-0-information':[product.pk],
'form-0-addSubtract':[quantityAdd],
'Save Add Subtract Changes':'Save Add Subtract Changes',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change inventory',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_detail_post_add_new_inventory(self):
print 'running SiteDetailViewTests.test_site_detail_post_add_new_inventory... '
site = Site(name = 'test site')
site.save()
product = ProductInformation(name = 'test product',
code= 'D11',)
product.save()
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Add New Inventory':'Add New Inventory',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
self.assertRedirects(response,
reverse('ims:site_add_inventory',kwargs={'siteId':site.pk}),
302,
200)
def test_site_detail_post_add_new_inventory_without_add_inventory_perm(self):
        print 'running SiteDetailViewTests.test_site_detail_post_add_new_inventory_without_add_inventory_perm... '
site = Site(name = 'test site')
site.save()
self.client.login(username='testUser', password='12345678')
postData = {'Add New Inventory':'Add New Inventory',}
response=self.client.post(reverse('ims:site_detail',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add inventory',
resultError,
'IMS site detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
class SiteAddInventoryViewTests(TestCase):
"""
ims_tests for site_add_inventory view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_add_inventory_get(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_get... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
product = ProductInformation(name = 'test product',
code = 'D11')
product.save()
response=self.client.get(reverse('ims:site_add_inventory',
kwargs = {'siteId':site.pk,}),
follow=False)
self.assertEqual(response.status_code, 200)
def test_site_add_inventory_get_without_add_inventoryitem_perm(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_get_without_add_inventoryitem_perm... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
product = ProductInformation(name = 'test product',
code = 'D11')
product.save()
response=self.client.get(reverse('ims:site_add_inventory',
kwargs = {'siteId':site.pk,}),
follow=False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add site inventory',
resultError,
'IMS site_add_inventory view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_site_add_inventory_with_invalid_site(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_with_invalid_site... '
self.client.login(username='testUser', password='12345678')
siteId = 1
request = self.factory.get(reverse('ims:site_add_inventory',
kwargs = {'siteId':siteId,}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_add_inventory(request, siteId = siteId)
resultError = request.session['errorMessage']
self.assertIn('Site %d does not exist' %
siteId, resultError,
'IMS site_add_inventory view didn''t generate the correct error when an invalid site was requested.\nactual message = %s' %
resultError)
response.client = self.client
self.assertRedirects(response,
reverse('ims:sites'),
302,
200)
def test_site_add_inventory_with_no_products(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_with_no_products... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
request = self.factory.get(reverse('ims:site_add_inventory',
kwargs = {'siteId':site.pk,}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = site_add_inventory(request, siteId = site.pk)
resultWarning = request.session['warningMessage']
self.assertIn('No products found to add',
resultWarning,
'IMS site_add_inventory view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
response.client = self.client
self.assertRedirects(response,
reverse('ims:site_detail',
kwargs = {'siteId':site.pk}),
302,
200)
def test_site_add_inventory_post(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_post... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
product = ProductInformation(name = 'test product',
code = 'D11')
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[product.pk],
'form-0-Add':['on'],
'Add Products':'Add Products',}
response=self.client.post(reverse('ims:site_add_inventory',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
productsToAdd = '?code=D11&'
self.assertRedirects(response,
reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId': site.pk}) +
productsToAdd,
302,
200)
def test_site_add_inventory_post_no_products(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_post_no_products... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
product = ProductInformation(name = 'test product',
code = 'D11')
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[product.pk],
'Add Products':'Add Products',}
response=self.client.post(reverse('ims:site_add_inventory',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('No products selected to add',
resultWarning,
'IMS site_add_inventory didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_site_add_inventory_post_without_add_inventoryitem_perm(self):
print 'running SiteAddInventoryViewTests.test_site_add_inventory_post_without_add_inventoryitem_perm... '
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site',)
site.save()
product = ProductInformation(name = 'test product',
code = 'D11')
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[product.pk],
'form-0-Add':['on'],
'Add Products':'Add Products',}
response=self.client.post(reverse('ims:site_add_inventory',
kwargs =
{'siteId':site.pk,}),
postData,
follow=False)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add site inventory',
resultError,
'IMS site_add_inventory didn''t generate the correct error.\nactual message = %s' %
resultError)
class ProductsViewTests(TestCase):
"""
ims_tests for products view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_products_get_with_no_products(self):
print 'running ProductsViewTests.test_products_get_with_no_products... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:products'),
follow=True)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
        self.assertIn('No products found',
                      resultWarning,
                      'IMS products view didn''t generate the correct warning when no products were found.\nactual message = %s' %
                      resultWarning)
def test_products_get_with_filter_and_no_products(self):
print 'running ProductsViewTests.test_products_get_with_filter_and_no_products... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:products',) +
'?searchField=name&searchValue=blah',
follow = False,)
self.assertEquals(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
        self.assertIn('No products found',
                      resultWarning,
                      'IMS products view didn''t generate the correct warning when no products were found.\nactual message = %s' %
                      resultWarning)
def test_products_get_with_products(self):
print 'running ProductsViewTests.test_products_get_with_products... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name='test product',
code = code)
product.save()
response=self.client.get(reverse('ims:products',),
follow = False,)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual('', resultWarning)
def test_products_get_with_filter(self):
print 'running ProductsViewTests.test_products_get_with_filter... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name='test product',
code = code)
product.save()
response=self.client.get(reverse('ims:products',) +
'?searchField=name&searchValue=test',
follow = False,)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual('', resultWarning)
def test_products_get_with_bad_filter(self):
print 'running ProductsViewTests.test_products_get_with_bad_filter... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
product = ProductInformation(name='test product',
code = code)
product.save()
response=self.client.get(reverse('ims:products',) +
'?searchField=name&searchValue=blah',
follow = False,)
self.assertRedirects(response, reverse('ims:products',) +
'?page=1&pageSize=%d' % PAGE_SIZE,
status_code = 302,
target_status_code = 200)
def test_products_post_add(self):
print 'running ProductsViewTests.test_products_post_add... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['0'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['0'],
'Add':'Add',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertRedirects(response, reverse('ims:product_add',),
status_code = 302,
target_status_code = 200)
def test_products_post_add_without_add_productinformation_perm(self):
print 'running ProductsViewTests.test_products_post_add_without_add_productinformation_perm... '
self.client.login(username='testUser', password='12345678')
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['0'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['0'],
'Add':'Add',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to add new products',
resultError,
'IMS products view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
def test_products_post_delete(self):
print 'running ProductsViewTests.test_products_post_delete... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[code],
'form-0-Delete':['on'],
'Delete':'Delete',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
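        # The selected product codes should be forwarded to the product_delete
        # confirmation view as a query string.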
self.assertRedirects(response, reverse('ims:product_delete',) +
'?code=D11&',
status_code = 302,
target_status_code = 200)
def test_products_post_delete_without_delete_inventoryitem_perms(self):
print 'running ProductsViewTests.test_products_post_delete_without_delete_inventoryitem_perms... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[code],
'form-0-Delete':['on'],
'Delete':'Delete',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
                      'IMS products view didn''t generate the correct error when an unauthorized user tried to delete.\nactual message = %s' %
resultError)
def test_products_post_delete_without_delete_productinformation_perms(self):
print 'running ProductsViewTests.test_products_post_delete_without_delete_productinformation_perms... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': ['1'],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'form-0-code':[code],
'form-0-Delete':['on'],
'Delete':'Delete',}
response=self.client.post(reverse('ims:products',),
postData,
follow = False,)
self.assertEquals(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to delete products',
resultError,
                      'IMS products view didn''t generate the correct error when an unauthorized user tried to delete.\nactual message = %s' %
resultError)
class ProductAddViewTests(TestCase):
"""
ims_tests for product_add view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_add_get(self):
print 'running ProductAddViewTests.test_product_add_get... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response = self.client.get(reverse('ims:product_add'))
self.assertEquals(response.status_code, 200)
def test_product_add_get_without_add_productinformation_perm(self):
print 'running ProductAddViewTests.test_product_add_get_without_add_productinformation_perm... '
self.client.login(username='testUser', password='12345678')
request = self.factory.get(reverse('ims:product_add'),
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_add(request)
response.client = self.client
resultError = request.session['errorMessage']
self.assertIn('You don''t have permission to add new products',
resultError,
'IMS product_add view didn''t generate the correct error when an unauthorized user tried to add.\nactual message = %s' %
resultError)
self.assertRedirects(response, reverse('ims:products',) +
'?' + urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_product_add_post(self):
print 'running ProductAddViewTests.test_product_add_post... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'quantityOfMeasure': 1,
'unitOfMeasure': 'EACH',
'code': 'D11',
'Save': 'Save',
'name': 'test product'}
request = self.factory.post(reverse('ims:product_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_add(request)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully saved product.', resultInfo,
'IMS product_add view didn''t generate the correct info when saving.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:product_detail',
kwargs={'code':'D11'}) + '?' +
urlencode({'page':1,
'picture':'False'}),
status_code = 302,
target_status_code = 200)
def test_product_add_post_no_change(self):
print 'running ProductAddViewTests.test_product_add_post_no_change... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'Save':'Save'}
response = self.client.post(reverse('ims:product_add'),
postData,
follow = False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before product can be added',
resultWarning,
'IMS product_add view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
def test_product_add_post_with_error_message(self):
print 'running ProductAddViewTests.test_product_add_post_with_error_message... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
postData = {'quantityOfMeasure': 1,
'unitOfMeasure': 'EACH',
'code': 'D11',
'Save': 'Save',
'name': 'test product'}
request = self.factory.post(reverse('ims:product_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
request.session['errorMessage'] = 'Error'
response = product_add(request)
response.client = self.client
self.assertRedirects(response, reverse('ims:products',) +
'?' + urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
def test_product_add_post_without_add_productinformation_perm(self):
print 'running ProductAddViewTests.test_product_add_post_without_add_productinformation_perm... '
self.client.login(username='testUser', password='12345678')
postData = {'quantityOfMeasure': 1,
'unitOfMeasure': 'EACH',
'code': 'D11',
'Save': 'Save',
'name': 'test product'}
request = self.factory.post(reverse('ims:product_add'),
postData,
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_add(request)
resultInfo = request.session['errorMessage']
self.assertIn('You don''t have permission to add new products', resultInfo,
'IMS product_add view didn''t generate the correct error when saving.\nactual message = %s' %
resultInfo)
response.client = self.client
self.assertRedirects(response,
reverse('ims:products',) + '?' +
urlencode({'page':1,}),
status_code = 302,
target_status_code = 200)
class ProductDetailViewTests(TestCase):
"""
ims_tests for product_detail view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_detail_get(self):
print 'running ProductDetailViewTests.test_product_detail_get... '
self.client.login(username='testUser', password='12345678')
product = ProductInformation(code='D11')
product.save()
code="D11"
response=self.client.get(reverse('ims:product_detail',
kwargs =
{'code':code,}),
follow=True)
self.assertEqual(response.status_code, 200,
"Product Detail View didn't return status code 200 with a valid product code.")
def test_product_detail_get_with_filter_and_no_sites(self):
print 'running ProductDetailViewTests.test_product_detail_get_with_filter_and_no_sites... '
self.client.login(username='testUser', password='12345678')
product = ProductInformation(code='D11')
product.save()
code="D11"
response=self.client.get(reverse('ims:product_detail',
kwargs =
{'code':code,}) +
'?searchField=site__name&searchValue=blah',
follow = False,)
self.assertEqual(response.status_code, 200,)
def test_product_detail_get_with_bad_filter(self):
print 'running ProductDetailViewTests.test_product_detail_get_with_bad_filter... '
self.client.login(username='testUser', password='12345678')
code="D11"
product = ProductInformation(code=code)
product.save()
site = Site(name='test site')
site.save()
site.add_inventory(product = product,
quantity = 1,
modifier = self.user.username)
request=self.factory.get(reverse('ims:product_detail',
kwargs =
{'code':code,}) +
'?searchField=site__name&searchValue=blah',
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_detail(request, code = code)
resultWarning = request.session['warningMessage']
self.assertIn('No sites found using filter criteria.<br/>Showing all sites.',
resultWarning,
'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
response.client = self.client
self.assertRedirects(response, reverse('ims:product_detail',
kwargs =
{'code':code,}) +
'?page=1&picture=False',
status_code = 302,
target_status_code = 200)
def test_product_detail_get_with_filter(self):
print 'running ProductDetailViewTests.test_product_detail_get_with_filter... '
self.client.login(username='testUser', password='12345678')
code="D11"
product = ProductInformation(code=code)
product.save()
site = Site(name='test site')
site.save()
site.add_inventory(product = product,
quantity = 1,
modifier = self.user.username)
response=self.client.get(reverse('ims:product_detail',
kwargs =
{'code':code,}) +
'?searchField=site__name&searchValue=test',
follow = False)
self.assertEqual(response.status_code, 200,)
def test_product_detail_get_with_invalid_product(self):
print 'running ProductDetailViewTests.test_product_detail_get_with_invalid_product... '
self.client.login(username='testUser', password='12345678')
code="D11"
request=self.factory.get(reverse('ims:product_detail',
kwargs =
{'code':code,}),
follow = False)
request.user = self.user
add_session_to_request(request)
response = product_detail(request, code = code)
resultError = request.session['errorMessage']
self.assertIn('Product %s does not exist.' % code,
resultError,
'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
resultError)
response.client = self.client
self.assertRedirects(response, reverse('ims:products',),
status_code = 302,
target_status_code = 200)
def test_product_detail_get_when_sites_have_inventory(self):
print 'running ProductDetailViewTests.test_product_detail_get_when_sites_have_inventory... '
self.client.login(username='testUser', password='12345678')
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=1,
numItems=1)
product = createdProducts[0]
response=self.client.get(reverse('ims:product_detail',
kwargs =
{'code':product.code,}) +
'?searchField=site__name&searchValue=test',
follow = False)
self.assertEqual(response.status_code, 200,)
self.assertEqual(len(response.context['paginatedItems']),
len(createdSites))
def test_product_detail_get_after_deleting_inventory_from_site(self):
print 'running ProductDetailViewTests.test_product_detail_get_after_deleting_inventory_from_site... '
self.client.login(username='testUser', password='12345678')
(createdSites,
createdProducts,
createdInventory,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=1,
numItems=1)
product = createdProducts[0]
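        # Mark one inventory item as deleted; the detail view should then list
        # one fewer site than were created.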
createdInventory[0].deleted = True
createdInventory[0].save()
response=self.client.get(reverse('ims:product_detail',
kwargs =
{'code':product.code,}) +
'?searchField=site__name&searchValue=test',
follow = False)
self.assertEqual(response.status_code, 200,)
self.assertEqual(len(response.context['paginatedItems']),
len(createdSites) - 1)
def test_product_detail_post_save(self):
print 'running ProductDetailViewTests.test_product_detail_post_save... '
perms = ['change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code)
product.save()
postData = {'quantityOfMeasure': 1,
'unitOfMeasure': 'EACH',
'code': code,
'Save': 'Save',
'name': productName}
request=self.factory.post(reverse('ims:product_detail',
kwargs =
{'code':code,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = product_detail(request, code = code)
resultInfo = request.session['infoMessage']
self.assertIn('Successfully saved product information changes.',
resultInfo,
'IMS product detail view didn''t generate the correct info.\nactual message = %s' %
resultInfo)
response.client = self.client
picture = 'picture=False'
filterQuery = ''
self.assertRedirects(response,
reverse('ims:product_detail',
kwargs={'code':code,})
+ '?' + picture + '&' + filterQuery,
302,
200)
def test_product_detail_post_save_invalid_fields(self):
print 'running ProductDetailViewTests.test_product_detail_post_save_invalid_fields... '
perms = ['change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code)
product.save()
postData = {'quantityOfMeasure': 1,
'unitOfMeasure': 'EACH',
'code': '',
'Save': 'Save',
'name': productName}
response=self.client.post(reverse('ims:product_detail',
kwargs =
{'code':code,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before the product can be saved',
resultWarning,
'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
#TODO: figure out why this sets productForm.has_changed() = True
# def test_product_detail_post_no_change(self):
# print 'running ProductDetailViewTests.test_product_detail_post_no_change... '
# perms = ['change_productinformation']
# permissions = Permission.objects.filter(codename__in = perms)
# self.user.user_permissions=permissions
# self.client.login(username='testUser', password='12345678')
# code = 'D11'
# productName = 'test product'
# product = ProductInformation(name = productName,
# code = code,)
# product.save()
# postData = {'Save':'Save',}
# response=self.client.post(reverse('ims:product_detail',
# kwargs =
# {'code':code,}),
# postData,
# follow=False)
# self.assertEqual(response.status_code, 200)
# resultWarning = get_announcement_from_response(response=response,
# cls="warningnote")
# self.assertIn('No changes made to the product information.',
# resultWarning,
# 'IMS product detail view didn''t generate the correct warning.\nactual message = %s' %
# resultWarning)
def test_product_detail_post_without_change_productinformation_perm(self):
print 'running ProductDetailViewTests.test_product_detail_post_without_change_productinformation_perm... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code,)
product.save()
postData = {'Save':'Save',}
response=self.client.post(reverse('ims:product_detail',
kwargs =
{'code':code,}),
postData,
follow=False)
self.assertEqual(response.status_code, 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('You don''t have permission to change product information.',
resultError,
'IMS product detail view didn''t generate the correct error.\nactual message = %s' %
resultError)
def test_product_detail_post_save_check_modification_date(self):
print 'running ProductDetailViewTests.test_product_detail_post_save_check_modification_date... '
perms = ['change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code)
# back date the modified field
product.modified = timezone.now() - timedelta(days = 1)
creationDate = product.modified.date()
product.save()
        # now we change the product and see if the modified date changes
postData = {'quantityOfMeasure': 1,
'unitOfMeasure': 'EACH',
'code': code,
'Save': 'Save',
'name': productName}
request=self.factory.post(reverse('ims:product_detail',
kwargs =
{'code':code,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
product_detail(request, code = code)
product = ProductInformation.objects.get(pk = code)
changeDate = product.modified.date()
deltaDays = (changeDate - creationDate).days
self.assertEqual(deltaDays,
1,
'IMS product detail view didn''t change the modification date after change')
class ProductSelectAddSiteViewTests(TestCase):
"""
ims_tests for product_select_add_site view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_select_add_site_get(self):
print 'running ProductSelectAddSiteViewTests.test_product_select_add_site_get... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code,)
product.save()
site1 = Site(name = 'test site 1')
site1.save()
site2 = Site(name = 'test site 2')
site2.save()
response=self.client.get(reverse('ims:product_select_add_site',
kwargs={'code':code}),
follow=False)
self.assertEquals(response.status_code, 200)
def test_product_select_add_site_get_bad_product(self):
print 'running ProductSelectAddSiteViewTests.test_product_select_add_site_get_bad_product... '
self.client.login(username='testUser', password='12345678')
code = 'D11'
request=self.factory.get(reverse('ims:product_select_add_site',
kwargs={'code':code}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = product_select_add_site(request, code = code)
resultError = request.session['errorMessage']
self.assertIn('Product %s does not exist.' % code,
resultError,
'IMS product_select_add_site view didn''t generate the correct error.\nactual message = %s' %
resultError)
response.client = self.client
self.assertRedirects(response,reverse('ims:products'),
status_code = 302,
target_status_code = 200)
def test_product_select_add_site_single_site(self):
print 'running ProductSelectAddSiteViewTests.test_product_select_add_site_single_site... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code,)
product.save()
site = Site(name = 'test site 1')
site.save()
response=self.client.get(reverse('ims:product_select_add_site',
kwargs={'code':code}),
follow=False)
self.assertRedirects(response,reverse('ims:products_add_to_site_inventory',
kwargs={'siteId':site.pk}) + '?' +
urlencode({'code':product.pk}),
status_code = 302,
target_status_code = 200)
def test_product_select_add_site_no_sites(self):
print 'running ProductSelectAddSiteViewTests.test_product_select_add_site_no_sites... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
code = 'D11'
productName = 'test product'
product = ProductInformation(name = productName,
code = code,)
product.save()
request=self.factory.get(reverse('ims:product_select_add_site',
kwargs={'code':code}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = product_select_add_site(request, code = code)
resultWarning = request.session['warningMessage']
self.assertIn('No sites found.',
resultWarning,
'IMS product_select_add_site view didn''t generate the correct warning.\nactual message = %s' %
resultWarning)
response.client = self.client
self.assertRedirects(response,reverse('ims:product_detail',
kwargs={'code':product.code,}),
status_code = 302,
target_status_code = 200)
class ProductsAddToSiteInventoryViewTests(TestCase):
"""
ims_tests for products_add_to_site_inventory view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_products_add_to_site_inventory_get(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
site = Site(name = 'test site')
site.save()
productName = 'test product'
code = 'D11'
product = ProductInformation(name = productName, code = code)
product.save()
response=self.client.get(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.pk,}) +
'?' + urlencode({'code':code}),
follow=True)
self.assertEqual(response.status_code, 200)
def test_products_add_to_site_inventory_get_without_add_inventoryitem_perm(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get_without_add_inventoryitem_perm... '
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
site = Site(name = 'test site')
site.save()
product = ProductInformation(name = productName, code = code)
product.save()
request=self.factory.get(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.pk,}) +
'?' + urlencode({'code':code}),
follow=False)
request.user = self.user
add_session_to_request(request)
response = products_add_to_site_inventory(request, siteId = site.pk)
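        # the view was invoked directly, so the error message is read back from
        # the session rather than from a rendered page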
resultError = request.session['errorMessage']
self.assertIn('You don''t have permission to add to site inventory',
resultError,
'IMS products_add_to_site_inventory view didn''t generate the correct error.\nactual message = %s' %
resultError)
response.client = self.client
self.assertRedirects(response, reverse('ims:site_detail',
kwargs={'siteId':site.pk,}),
status_code = 302,
target_status_code = 200)
def test_products_add_to_site_inventory_get_with_invalid_site(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get_with_invalid_site... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
productName = 'test product'
code = 'D11'
siteNumber = 1
product = ProductInformation(name = productName, code = code)
product.save()
response=self.client.get(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':siteNumber,}) +
'?' + urlencode({'code':code}),
follow=True)
self.assertRedirects(response, reverse('ims:sites'),
status_code = 302,
target_status_code = 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('Site %d does not exist' %
siteNumber, resultError,
'IMS products_add_to_site_inventory view didn''t generate the correct error when an invalid site was requested.\nactual message = %s' %
resultError)
def test_products_add_to_site_inventory_get_with_invalid_product(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_get_with_invalid_product... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
siteName = 'test site'
code = 'D11'
site = Site(name = siteName)
site.save()
response=self.client.get(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.number,}) +
'?' + urlencode({'code':code}),
follow=True)
self.assertRedirects(response, reverse('ims:products'),
status_code = 302,
target_status_code = 200)
resultError = get_announcement_from_response(response=response,
cls="errornote")
self.assertIn('No valid products selected',
resultError,
'IMS products_add_to_site_inventory view didn''t generate the correct error when an invalid product was requested.\nactual message = %s' %
resultError)
def test_products_add_to_site_inventory_post(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
# create another product that has not been added to a site yet
productName = 'another product'
code = 'D11'
product = ProductInformation(name = productName,
code = code)
product.save()
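        # the POST mimics a Django formset submission: the management form
        # fields (TOTAL_FORMS, INITIAL_FORMS, ...) below are required, plus a
        # code and Quantity entry for every item in the site's latest inventory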
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': [len(createdProducts)],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'Save Inventory':'Save Inventory',}
addItemDict = {}
addItemDict['codes'] = []
siteInventory = site.latest_inventory()
for index in range(len(siteInventory)):
addItemDict['codes'].append(siteInventory[index].information.pk)
addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
addItemDict['form-%d-Quantity' % index] = [siteInventory[index].quantity]
postData.update(addItemDict)
request=self.factory.post(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = products_add_to_site_inventory(request, siteId = site.pk)
resultInfo = request.session['infoMessage']
        successfulAdditions = re.findall('Successfully added product',
                                         resultInfo,
                                         re.M | re.DOTALL)
        self.assertEqual(len(successfulAdditions), len(createdProducts))
response.client = self.client
self.assertRedirects(response, reverse('ims:site_detail',
kwargs={'siteId':site.pk,}),
status_code = 302,
target_status_code = 200)
def test_products_add_to_site_inventory_post_invalid_data(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post_invalid_data... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
# create another product that has not been added to a site yet
productName = 'another product'
code = 'D11'
product = ProductInformation(name = productName,
code = code)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': [len(createdProducts)],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'Save Inventory':'Save Inventory',}
addItemDict = {}
addItemDict['codes'] = []
siteInventory = site.latest_inventory()
for index in range(len(siteInventory)):
addItemDict['codes'].append(siteInventory[index].information.pk)
addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
addItemDict['form-%d-Quantity' % index] = ''
postData.update(addItemDict)
response=self.client.post(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.pk,}),
postData,
follow=False)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertIn('More information required before the inventory can be saved',
resultWarning,
'IMS products_add_to_site_inventory view didn''t generate the correct warning.\nactual message = %s'
% resultWarning)
def test_products_add_to_site_inventory_post_without_add_inventoryitem_perm(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post_without_add_inventoryitem_perm... '
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
# create another product that has not been added to a site yet
productName = 'another product'
code = 'D11'
product = ProductInformation(name = productName,
code = code)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': [len(createdProducts)],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'Save Inventory':'Save Inventory',}
addItemDict = {}
addItemDict['codes'] = []
siteInventory = site.latest_inventory()
for index in range(len(siteInventory)):
addItemDict['codes'].append(siteInventory[index].information.pk)
addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
addItemDict['form-%d-Quantity' % index] = [siteInventory[index].quantity]
postData.update(addItemDict)
request=self.factory.post(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.pk,}),
postData,
follow=False)
request.user = self.user
add_session_to_request(request)
response = products_add_to_site_inventory(request, siteId = site.pk)
resultError = request.session['errorMessage']
self.assertIn('You don''t have permission to add to site inventory',
resultError,
'IMS products_add_to_site_inventory view didn''t generate the correct error.\nactual message = %s'
% resultError)
response.client = self.client
self.assertRedirects(response, reverse('ims:site_detail',
kwargs={'siteId':site.pk,}),
status_code = 302,
target_status_code = 200)
def test_products_add_to_site_inventory_post_cancel(self):
print 'running ProductsAddToSiteInventoryViewTests.test_products_add_to_site_inventory_post_cancel... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=1,
numProducts=1,
numItems=1)
site = createdSites[0]
# create another product that has not been added to a site yet
productName = 'another product'
code = 'D11'
product = ProductInformation(name = productName,
code = code)
product.save()
postData = {'form-MAX_NUM_FORMS': ['1000'],
'form-TOTAL_FORMS': [len(createdProducts)],
'form-MIN_NUM_FORMS': ['0'],
'form-INITIAL_FORMS': ['1'],
'Cancel':'Cancel',}
addItemDict = {}
addItemDict['codes'] = []
siteInventory = site.latest_inventory()
for index in range(len(siteInventory)):
addItemDict['codes'].append(siteInventory[index].information.pk)
addItemDict['form-%d-code' % index] = [siteInventory[index].information.pk]
addItemDict['form-%d-Quantity' % index] = [siteInventory[index].quantity]
postData.update(addItemDict)
response=self.client.post(reverse('ims:products_add_to_site_inventory',
kwargs = {'siteId':site.pk,}),
postData,
follow=False)
self.assertRedirects(response, reverse('ims:site_detail',
kwargs={'siteId':site.pk,}),
status_code = 302,
target_status_code = 200)
class ImportSitesViewTests(TestCase):
"""
ims_tests for import_sites view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_import_sites_warning_with_file_and_perms(self):
print 'running ImportSitesViewTests.test_import_sites_warning_with_file_and_perms... '
perms = ['add_site', 'change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
                'testData/sites_add_site1_site2_site3.xls')) as fp:
response=self.client.post(reverse('ims:import_sites'),
{'Import':'Import','file':fp},
follow=True)
queriedSites=Site.objects.all()
# check that we saved 3 sites
self.assertEqual(
queriedSites.count(),
3,
'Number of imported sites mismatch. Some sites didn''t get stored.')
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning, '',
'import_sites view generated a warning with a valid file and user.\nactual warning message = %s'
% resultWarning)
def test_import_sites_warning_file_with_dups(self):
print 'running ImportSitesViewTests.test_import_sites_warning_file_with_dups... '
perms = ['add_site', 'change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(
os.path.join(
APP_DIR,
'testData/sites_add_site1_site2_site3_site3.xls')) as fp:
response=self.client.post(reverse('ims:import_sites'),
{'Import':'Import','file':fp},
follow=True)
warningRe = '^.*Found duplicate site numbers.*$'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(re.match(warningRe,resultWarning),
'import_sites view generated incorrect warning when import contained duplicates.\nRE for part of desired Warning Message = %s\n\nactual warning message = %s'
% (warningRe, resultWarning))
def test_import_sites_warning_with_no_file_and_perms(self):
print 'running ImportSitesViewTests.test_import_sites_warning_with_no_file_and_perms... '
perms = ['add_site', 'change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:import_sites'),
{'Import':'Import'},
follow=True)
warning='No file selected'
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual(resultWarning, warning,
'import_sites view generated incorrect warning when no file was selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_import_sites_error_with_file_and_without_add_site_perm(self):
print 'running ImportSitesViewTests.test_import_sites_error_with_file_and_without_add_site_perm... '
perms = ['change_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(
os.path.join(
APP_DIR,
'testData/sites_add_site1_site2_site3.xls')) as fp:
response=self.client.post(reverse('ims:import_sites'),
{'Import Sites':'Import','file':fp},
follow=True)
warning='You don''t have permission to import sites'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning, warning,
'import_sites view generated incorrect warning when user didn''t have add_site perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_import_sites_error_with_file_and_without_change_site_perm(self):
print 'running ImportSitesViewTests.test_import_sites_error_with_file_and_without_change_site_perm... '
perms = ['add_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/sites_add_site1_site2_site3.xls')) as fp:
response=self.client.post(reverse('ims:import_sites'),
{'Import Sites':'Import','file':fp},
follow=True)
warning='You don''t have permission to import sites'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning, warning,
'import_sites view generated incorrect warning when user didn''t have change_site perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
class ImportProductsViewTests(TestCase):
"""
ims_tests for import_products view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_import_products_error_with_file_and_perms(self):
print 'running ImportProductsViewTests.test_import_products_error_with_file_and_perms... '
perms = ['add_productinformation', 'change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import','file':fp},
follow=True)
queriedProducts=ProductInformation.objects.all()
        # check that we saved 3 products
self.assertEqual(queriedProducts.count(),
3,
'Number of imported products mismatch. Some products didn''t get stored. Found %d expected 3' % queriedProducts.count())
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning,
'',
'import_products view generated a warning with a valid file and user.\nactual warning message = %s'
% resultWarning)
def test_import_products_error_file_with_dups(self):
print 'running ImportProductsViewTests.test_import_products_error_file_with_dups... '
perms = ['add_productinformation', 'change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(
os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import','file':fp},
follow=True)
warningRe = '^.*Found duplicate product codes.*$'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(re.match(warningRe,resultWarning),
'import_products view generated incorrect warning when import contained duplicates.\nRE for part of desired Warning Message = %s\n\nactual warning message = %s'
% (warningRe, resultWarning))
def test_import_products_warning_with_no_file_and_perms(self):
print 'running ImportProductsViewTests.test_import_products_warning_with_no_file_and_perms... '
perms = ['add_productinformation', 'change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import'},
follow=True)
warning='No file selected'
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual(resultWarning,
warning,
'import_products view generated incorrect warning when no file was selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_import_products_error_with_file_and_without_add_productinformation_perm(self):
print 'running ImportProductsViewTests.test_import_products_error_with_file_and_without_add_productinformation_perm... '
perms = ['change_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import','file':fp},
follow=True)
warning='You don''t have permission to import products'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning,
warning,
'import_products view generated incorrect warning when user didn''t have add_productinformation perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_import_products_error_with_file_and_without_change_productinformation_perm(self):
print 'running ImportProductsViewTests.test_import_products_error_with_file_and_without_change_productinformation_perm... '
perms = ['add_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_products'),
{'Import':'Import','file':fp},
follow=True)
warning='You don''t have permission to import products'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning,
warning,
'import_products view generated incorrect warning when user didn''t have change_productinformation perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
class ImportInventoryViewTests(TestCase):
"""
ims_tests for import_inventory view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_import_inventory_error_with_file_and_perms(self):
print 'running ImportInventoryViewTests.test_import_inventory_error_with_file_and_perms... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with products and sites, so we can
# import inventory
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
ProductInformation.parse_product_information_from_xls(filename=filename,
modifier='none',
save=True)
with open(os.path.join(
APP_DIR,
'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_inventory'),
{'Import':'Import','file':fp},
follow=True)
queriedInventory=InventoryItem.objects.all()
        # check that we saved 9 inventory items (3 products at each of 3 sites)
self.assertEqual(queriedInventory.count(),
9,
'Number of imported inventory items mismatch. Some inventory didn''t get stored.')
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(resultWarning,
'',
'imports view generated a warning with a valid file and user.\nactual warning message = %s'
% resultWarning)
def test_import_inventory_error_file_with_dups(self):
print 'running ImportInventoryViewTests.test_import_inventory_error_file_with_dups... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with products and sites, so we can
# import inventory
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
ProductInformation.parse_product_information_from_xls(filename=filename,
modifier='none',
save=True)
with open(
os.path.join(
APP_DIR,
'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3_dups.xls')) as fp:
response=self.client.post(reverse('ims:import_inventory'),
{'Import':'Import','file':fp},
follow=True)
warningRe = '^.*Found duplicate inventory items.*$'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(re.match(warningRe,resultWarning),
'import_inventory view generated incorrect warning when import contained duplicates.\nRE for part of desired Warning Message = %s\n\nactual warning message = %s'
% (warningRe, resultWarning))
def test_import_inventory_warning_with_no_file_and_perms(self):
print 'running ImportInventoryViewTests.test_import_inventory_warning_with_no_file_and_perms... '
perms = ['add_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with products and sites, so we can
# import inventory
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
ProductInformation.parse_product_information_from_xls(filename=filename,
modifier='none',
save=True)
response=self.client.post(reverse('ims:import_inventory'),
{'Import':'Import',},
follow=True)
warning = 'No file selected'
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assertEqual(warning,
resultWarning,
'import_inventory view generated incorrect warning when no file was selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_import_inventory_error_with_file_and_without_add_inventoryitem_perm(self):
print 'running ImportInventoryViewTests.test_import_inventory_error_with_file_and_without_add_inventoryitem_perm...'
self.client.login(username='testUser', password='12345678')
# populate the database with products and sites, so we can
# import inventory
filename=os.path.join(APP_DIR,
'testData/sites_add_site1_site2_site3.xls')
Site.parse_sites_from_xls(filename=filename,
modifier='none',
save=True)
filename=os.path.join(APP_DIR,
'testData/products_add_prod1_prod2_prod3.xls')
ProductInformation.parse_product_information_from_xls(filename=filename,
modifier='none',
save=True)
with open(os.path.join(
APP_DIR,
'testData/inventory_add_10_to_site1_site2_site3_prod1_prod2_prod3.xls')) as fp:
response=self.client.post(reverse('ims:import_inventory'),
{'Import':'Import','file':fp},
follow=True)
warning = 'You don''t have permission to import inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assertEqual(warning,
resultWarning,
'import_inventory view generated incorrect warning when user didn''t have add_inventoryitem perms.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
class SiteDeleteAllViewTests(TestCase):
"""
ims_tests for site_delete_all view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_site_delete_all_confirmed_with_perms(self):
print 'running SiteDeleteAllViewTests.test_site_delete_all_confirmed_with_perms... '
perms = ['delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Sites':'Delete All Sites'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
site_delete_all(request)
self.assertEqual(Site.objects.all().count(),
0,
'Did not delete all sites')
self.assertEqual(InventoryItem.objects.all().count(),
0,
'Did not delete all inventory')
def test_site_delete_all_confirmed_without_delete_site_perm(self):
print 'running SiteDeleteAllViewTests.test_site_delete_all_confirmed_without_delete_site_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Sites':'Delete All Sites'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
response=site_delete_all(request)
warning='You don''t have permission to delete sites or inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
('site_delete_all view didn''t generate the appropriate warning when requested to delete all sites without delete_site perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning)))
def test_site_delete_all_confirmed_without_delete_inventoryitem_perm(self):
print 'running SiteDeleteAllViewTests.test_site_delete_all_confirmed_without_delete_inventoryitem_perm... '
perms = ['delete_site',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Sites':'Delete All Sites'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites( numSites=20,
numProducts=5,
numItems=1)
response=site_delete_all(request)
warning='You don''t have permission to delete sites or inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
('site_delete_all view didn''t generate the appropriate warning when requested to delete all sites without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning,resultWarning)))
def test_site_delete_all_canceled_with_perms(self):
print 'running SiteDeleteAllViewTests.test_site_delete_all_canceled_with_perms... '
perms = ['delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Cancel':'Cancel'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
(createdSites,
__,
createdInventoryItems,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
site_delete_all(request)
self.assertEqual(Site.objects.all().count(),
len(createdSites),
'Deleted sites, should have canceled')
self.assertEqual(InventoryItem.objects.all().count(),
len(createdInventoryItems),
'Deleted inventory, should have canceled')
class ProductDeleteAllViewTests(TestCase):
"""
ims_tests for product_delete_all view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_product_delete_all_confirmed_with_perms(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_confirmed_with_perms... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Products':'Delete All Products'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
product_delete_all(request)
self.assertEqual(ProductInformation.objects.all().count(),
0,
'Did not delete all products')
self.assertEqual(InventoryItem.objects.all().count(),
0,
'Did not delete all inventory')
def test_product_delete_all_confirmed_without_delete_productinformation_perm(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_confirmed_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Products':'Delete All Products'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
response=product_delete_all(request)
warning='You don''t have permission to delete products or inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'product_delete_all view didn''t generate the appropriate warning when requested to delete all products without delete_productinformation perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_product_delete_all_confirmed_without_delete_inventoryitem_perm(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_confirmed_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation',]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Products':'Delete All Products'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
response=product_delete_all(request)
warning='You don''t have permission to delete products or inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'product_delete_all view didn''t generate the appropriate warning when requested to delete all products without delete_inventoryitem perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_product_delete_all_canceled_with_perms(self):
print 'running ProductDeleteAllViewTests.test_product_delete_all_canceled_with_perms... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Cancel':'Cancel'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
        (__,
         createdProducts,
         createdInventoryItems,
         __)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
product_delete_all(request)
        self.assertEqual(ProductInformation.objects.all().count(),
                         len(createdProducts),
                         'Deleted products, should have canceled')
self.assertEqual(InventoryItem.objects.all().count(),
len(createdInventoryItems),
'Deleted inventory, should have canceled')
class InventoryDeleteAllViewTests(TestCase):
"""
ims_tests for product_delete_all view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_inventory_delete_all_confirmed_with_perms(self):
print 'running InventoryDeleteAllViewTests.test_inventory_delete_all_confirmed_with_perms... '
perms = ['delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Inventory':'Delete All Inventory'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
inventory_delete_all(request)
self.assertEqual(InventoryItem.objects.all().count(),
0,
'Did not delete all inventory')
def test_inventory_delete_all_confirmed_without_delete_inventoryitem_perm(self):
print 'running InventoryDeleteAllViewTests.test_inventory_delete_all_confirmed_without_delete_inventoryitem_perm... '
perms = []
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Delete All Inventory':'Delete All Inventory'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
response=inventory_delete_all(request)
warning='You don''t have permission to delete inventory'
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all inventory without delete_inventoryitem perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_inventory_delete_all_canceled_with_perms(self):
print 'running InventoryDeleteAllViewTests.test_inventory_delete_all_canceled_with_perms... '
perms = ['delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
request = self.factory.post(reverse('ims:imports'),
{'Cancel':'Cancel'},)
add_session_to_request(request)
request.user=self.user
# populate the database with some data
(__,
__,
createdInventoryItems,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
inventory_delete_all(request)
self.assertEqual(InventoryItem.objects.all().count(),
len(createdInventoryItems),
'Deleted inventory, should have canceled')
class ImportsViewTests(TestCase):
"""
ims_tests for Imports view
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_delete_sites_warning_with_perms(self):
print 'running ImportsViewTests.test_delete_sites_warning_with_perms... '
perms = ['delete_site', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
warning=('Delete all %d sites? This will delete all inventory as well.'
% len(createdSites))
response=self.client.post(reverse('ims:imports'),
{'Delete Sites':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assert_(warning in resultWarning,
"imports view didn't generate the appropriate warning when requested to delete all sites with appropriate perms.\ndesired warning message = %s\nactual warning message = "
% resultWarning)
def test_delete_sites_error_without_delete_site_perm(self):
print 'running ImportsViewTests.test_delete_sites_error_without_delete_site_perm... '
perms = ['delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
warning='You don''t have permission to delete sites or inventory'
response=self.client.post(reverse('ims:imports'), {'Delete Sites':'Delete'}, follow=True)
resultWarning = get_announcement_from_response(response=response, cls="errornote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all sites without delete_site perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_delete_sites_error_without_delete_inventoryitem_perm(self):
print 'running ImportsViewTests.test_delete_sites_error_without_delete_inventoryitem_perm... '
perms = ['delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
warning='You don''t have permission to delete sites or inventory'
response=self.client.post(reverse('ims:imports'),
{'Delete Sites':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all sites without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning,resultWarning))
def test_export_sites(self):
print 'running ImportsViewTests.test_export_sites... '
# populate the database with some data
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=1,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Export Sites':'All'},
follow=True)
parsedExportedSites,__=Site.parse_sites_from_xls(
file_contents=response.content,
save=False)
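        # compare exported and created sites by key; the keys omit microseconds
        # since the spreadsheet round trip is not expected to keep sub-second
        # timestamps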
sortedParsedExportedSites=[]
for site in parsedExportedSites:
sortedParsedExportedSites.append(site.create_key_no_microseconds())
sortedParsedExportedSites.sort()
sortedCreatedSites=[]
for site in createdSites:
sortedCreatedSites.append(site.create_key_no_microseconds())
sortedCreatedSites.sort()
self.assertListEqual(sortedParsedExportedSites,
sortedCreatedSites,
'Sites exported to Excel don''t match the sites in the database')
def test_delete_products_warning_with_perms(self):
        print 'running ImportsViewTests.test_delete_products_warning_with_perms... '
perms = ['delete_productinformation', 'delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
warning=('Delete all %d products? This will delete all inventory as well.'
% len(createdProducts))
response=self.client.post(reverse('ims:imports'),
{'Delete Products':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all products with appropriate perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_delete_products_error_without_delete_productinformation_perm(self):
print 'running ImportsViewTests.test_delete_products_error_without_delete_productinformation_perm... '
perms = ['delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
warning='You don''t have permission to delete products or inventory'
response=self.client.post(reverse('ims:imports'),
{'Delete Products':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all products without delete_productinformation perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning,resultWarning))
def test_delete_products_error_without_delete_inventoryitem_perm(self):
print 'running ImportsViewTests.test_delete_products_error_without_delete_inventoryitem_perm... '
perms = ['delete_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
warning='You don''t have permission to delete products or inventory'
response=self.client.post(reverse('ims:imports'),
{'Delete Products':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all products without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_export_products(self):
print 'running ImportsViewTests.test_export_products... '
# populate the database with some data
(__,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=1,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Export Products':'All'},
follow=True)
(parsedExportedProducts,
__)=ProductInformation.parse_product_information_from_xls(
file_contents=response.content,
save=True)
sortedParsedExportedProducts=[]
for product in parsedExportedProducts:
sortedParsedExportedProducts.append(product.create_key_no_microseconds())
sortedParsedExportedProducts.sort()
sortedCreatedProducts=[]
for product in createdProducts:
sortedCreatedProducts.append(product.create_key_no_microseconds())
sortedCreatedProducts.sort()
self.assertListEqual(sortedParsedExportedProducts,
sortedCreatedProducts,
'Products exported to Excel don''t match the products in the database')
def test_delete_inventory_warning_with_perms(self):
print 'running ImportsViewTests.test_delete_inventory_warning_with_perms... '
perms = ['delete_inventoryitem']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
(__,
__,
createdInventoryItems,
__)=create_products_with_inventory_items_for_sites(
numSites=20,
numProducts=5,
numItems=1)
warning='Delete all %d inventory items?' % len(createdInventoryItems)
response=self.client.post(reverse('ims:imports'),
{'Delete Inventory':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all inventory with appropriate perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_delete_inventory_error_without_delete_inventory_perm(self):
print 'running ImportsViewTests.test_delete_inventory_error_without_delete_inventory_perm... '
perms = ['delete_productinformation']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
# populate the database with some data
create_products_with_inventory_items_for_sites(numSites=20,
numProducts=5,
numItems=1)
warning='You don''t have permission to delete inventory'
response=self.client.post(reverse('ims:imports'),
{'Delete Inventory':'Delete'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
self.assert_(warning in resultWarning,
'imports view didn''t generate the appropriate warning when requested to delete all inventory without delete_inventory perms.\ndesired warning message = %s\nactual warning message = %s'
% (warning, resultWarning))
def test_export_all_inventory(self):
print 'running ImportsViewTests.test_export_all_inventory... '
# populate the database with some data
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=3,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Export All Inventory':'All'},
follow=True)
(parsedExportedInventory,
__)=InventoryItem.parse_inventory_from_xls(
file_contents=response.content,
save=False)
sortedParsedExportedInventory=[]
for item in parsedExportedInventory:
sortedParsedExportedInventory.append(item.create_key_no_pk_no_microseconds())
sortedParsedExportedInventory.sort()
sortedCreatedInventory=[]
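        # 'Export All Inventory' should include every historical inventory item,
        # so compare against each site's full inventoryitem_set rather than just
        # its latest inventory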
for site in createdSites:
for item in site.inventoryitem_set.all():
sortedCreatedInventory.append(item.create_key_no_pk_no_microseconds())
sortedCreatedInventory.sort()
self.assertListEqual(sortedParsedExportedInventory,
sortedCreatedInventory,
'Inventory exported to Excel doesn''t match the inventory in the database')
def test_export_current_inventory(self):
print 'running ImportsViewTests.test_export_current_inventory... '
# populate the database with some data
(createdSites,
__,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=3,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Export Latest Inventory':'Current'},
follow=True)
(parsedExportedInventory,
__)=InventoryItem.parse_inventory_from_xls(
file_contents=response.content,
save=False)
sortedParsedExportedInventory=[]
for item in parsedExportedInventory:
sortedParsedExportedInventory.append(item.create_key_no_pk_no_microseconds())
sortedParsedExportedInventory.sort()
sortedCreatedInventory=[]
for site in createdSites:
for item in site.latest_inventory():
sortedCreatedInventory.append(item.create_key_no_pk_no_microseconds())
sortedCreatedInventory.sort()
self.assertListEqual(sortedParsedExportedInventory,
sortedCreatedInventory,
'Inventory exported to Excel doesn''t match the inventory in the database')
def test_backup(self):
print 'running ImportsViewTests.test_backup... '
# populate the database with some data
(createdSites,
createdProducts,
__,
__)=create_products_with_inventory_items_for_sites(
numSites=3,
numProducts=5,
numItems=3,
modifier='testUser')
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Backup':'Backup'},
follow=True)
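        # the Backup response is a zip archive containing a Backup workbook;
        # unzip it and parse sites, products and inventory back out of it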
try:
f = StringIO.StringIO(response.content)
zipArchive = zipfile.ZipFile(f, 'r')
backups = [filename for filename in zipArchive.namelist() if 'Backup' in filename]
self.assertTrue(len(backups) > 0,'No Backup spreadsheet in the archive')
if backups:
fileContents=zipArchive.open(backups[0],'r').read()
zipArchive.close()
(parsedBackedUpInventory,
__)=InventoryItem.parse_inventory_from_xls(
file_contents=fileContents,
save=False)
parsedBackedUpSites,__=Site.parse_sites_from_xls(
file_contents=fileContents,
save=False)
parsedBackedUpProducts,__=ProductInformation.parse_product_information_from_xls(
file_contents=fileContents,
save=False)
finally:
zipArchive.close()
f.close()
# Compare inventory
sortedParsedBackedUpInventory=[]
for item in parsedBackedUpInventory:
sortedParsedBackedUpInventory.append(item.create_key_no_pk_no_microseconds())
sortedParsedBackedUpInventory.sort()
sortedCreatedInventory=[]
for site in createdSites:
for item in site.inventoryitem_set.all():
sortedCreatedInventory.append(item.create_key_no_pk_no_microseconds())
sortedCreatedInventory.sort()
self.assertListEqual(sortedParsedBackedUpInventory,
sortedCreatedInventory,
'Inventory exported to Excel backup doesn''t match the inventory in the database')
# compare sites
sortedParsedBackedUpSites=[]
for site in parsedBackedUpSites:
sortedParsedBackedUpSites.append(site.create_key_no_microseconds())
sortedParsedBackedUpSites.sort()
sortedCreatedSites=[]
for site in createdSites:
sortedCreatedSites.append(site.create_key_no_microseconds())
sortedCreatedSites.sort()
self.assertListEqual(sortedParsedBackedUpSites,
sortedCreatedSites,
'Sites exported to Excel backup don''t match the sites in the database')
# compare products
sortedParsedBackedUpProducts=[]
for product in parsedBackedUpProducts:
sortedParsedBackedUpProducts.append(product.create_key_no_microseconds())
sortedParsedBackedUpProducts.sort()
sortedCreatedProducts=[]
for product in createdProducts:
sortedCreatedProducts.append(product.create_key_no_microseconds())
sortedCreatedProducts.sort()
self.assertListEqual(sortedParsedBackedUpProducts,
sortedCreatedProducts,
'Products exported to Excel backup don''t match the products in the database')
def test_restore_error_without_add_inventoryitem_perm(self):
print 'running ImportsViewTests.test_restore_error_without_add_inventoryitem_perm... '
perms = [
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without add_inventoryitem perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_change_inventoryitem_perm(self):
print 'running ImportsViewTests.test_restore_error_without_change_inventoryitem_perm... '
perms = ['add_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without change_inventoryitem perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_delete_inventoryitem_perm(self):
print 'running ImportsViewTests.test_restore_error_without_delete_inventoryitem_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without delete_inventoryitem perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_add_productinformation_perm(self):
print 'running ImportsViewTests.test_restore_error_without_add_productinformation_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without add_productinformation perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_change_productinformation_perm(self):
print 'running ImportsViewTests.test_restore_error_without_change_productinformation_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without change_productinformation perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_delete_productinformation_perm(self):
print 'running ImportsViewTests.test_restore_error_without_delete_productinformation_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without delete_productinformation perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_add_site_perm(self):
print 'running ImportsViewTests.test_restore_error_without_add_site_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without add_site perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_change_site_perm(self):
print 'running ImportsViewTests.test_restore_error_without_change_site_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without change_site perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_without_delete_site_perm(self):
print 'running ImportsViewTests.test_restore_error_without_delete_site_perm... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
]
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:imports'),
{'Restore':'Restore'},
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'imports view generated incorrect warning when user without delete_site perm requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
class RestoreViewTests(TestCase):
"""
restore view ims_tests
"""
def setUp(self):
# Most ims_tests need access to the request factory and/or a user.
self.factory = RequestFactory()
self.user = User.objects.create_user(
username='testUser', password='12345678')
def test_restore_get_warning_with_perms(self):
print 'running RestoreViewTests.test_restore_get_warning_with_perms... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:restore'),
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
warning = 'Restoring the database will cause all current information to be replaced!!!'
self.assertEqual(warning,resultWarning,'restore view generated incorrect warning when user requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_get_error_without_perms(self):
        print 'running RestoreViewTests.test_restore_get_error_without_perms... '
self.client.login(username='testUser', password='12345678')
response=self.client.get(reverse('ims:restore'),
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="errornote")
warning = 'You don''t have permission to restore the database'
self.assertEqual(warning,resultWarning,'restore view generated incorrect warning when unauthorized user requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_info_with_perms(self):
print 'running RestoreViewTests.test_restore_info_with_perms... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site',
'add_productcategory',
'change_productcategory',
'delete_productcategory']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/Backup_3site_3prod_inventory10.zip')) as fp:
response=self.client.post(reverse('ims:restore'),
{'Restore':'Restore','file':fp},
format = 'multipart',
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="infonote")
warning = 'Successful restore of sites using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of categories using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of sites using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of products using "Backup_3site_3prod_inventory10.zip"<br/>Successful restore of inventory using "Backup_3site_3prod_inventory10.zip"<br/>'
self.assertEqual(warning,resultWarning,'restore view generated incorrect warning when user requested a database restore.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_warning_no_file_with_perms(self):
print 'running RestoreViewTests.test_restore_warning_no_file_with_perms... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site',
'add_productcategory',
'change_productcategory',
'delete_productcategory']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
response=self.client.post(reverse('ims:restore'),
{'Restore':'Restore'},
format = 'multipart',
follow=True)
resultWarning = get_announcement_from_response(response=response,
cls="warningnote")
warning = 'No file selected'
self.assertEqual(warning,resultWarning,'restore view generated incorrect warning when user requested a database restore with no file selected.\ndesired Warning Message = %s\n\nactual warning message = %s'
% (warning, resultWarning))
def test_restore_error_bad_file_with_perms(self):
print 'running RestoreViewTests.test_restore_error_bad_file_with_perms... '
perms = ['add_inventoryitem',
'change_inventoryitem',
'delete_inventoryitem',
'add_productinformation',
'change_productinformation',
'delete_productinformation',
'add_site',
'change_site',
'delete_site',
'add_productcategory',
'change_productcategory',
'delete_productcategory']
permissions = Permission.objects.filter(codename__in = perms)
self.user.user_permissions=permissions
self.client.login(username='testUser', password='12345678')
with open(os.path.join(
APP_DIR,
'testData/Backup_3site_3prod_inventory10.xls')) as fp:
response=self.client.post(reverse('ims:restore'),
{'Restore':'Restore','file':fp},
format = 'multipart',
follow=True)
resultError = get_announcement_from_response(response=response,
cls="errornote")
error = "Error while trying to restore database from backup archive:<br/>\"Backup_3site_3prod_inventory10.xls\".<br/><br/>Error Message:<br/> BadZipfile('File is not a zip file',)"
self.assertIn(error,resultError,'restore view generated incorrect error when user requested a database restore with an invalid file.\ndesired Error Message = %s\n\nactual error message = %s'
% (error, resultError))
|
grovesr/django-ims
|
ims_tests/tests.py
|
Python
|
bsd-3-clause
| 252,503
|
# -*- coding: utf-8 -*-
"""
logbook.queues
~~~~~~~~~~~~~~
This module implements queue backends.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import json
import threading
from threading import Thread, Lock
import platform
from logbook.base import NOTSET, LogRecord, dispatch_record
from logbook.handlers import Handler, WrapperHandler
from logbook.helpers import PY2
if PY2:
    from Queue import Empty, Full, Queue as ThreadQueue
else:
    from queue import Empty, Full, Queue as ThreadQueue
class RedisHandler(Handler):
"""A handler that sends log messages to a Redis instance.
It publishes each record as json dump. Requires redis module.
To receive such records you need to have a running instance of Redis.
Example setup::
handler = RedisHandler('http://localhost', port='9200', key='redis')
If your Redis instance is password protected, you can securely connect passing
your password when creating a RedisHandler object.
Example::
handler = RedisHandler(password='your_redis_password')
More info about the default buffer size: wp.me/p3tYJu-3b
"""
def __init__(self, host='localhost', port=6379, key='redis', extra_fields={},
flush_threshold=128, flush_time=1, level=NOTSET, filter=None,
password=False, bubble=True, context=None):
Handler.__init__(self, level, filter, bubble)
try:
import redis
from redis import ResponseError
except ImportError:
raise RuntimeError('The redis library is required for '
'the RedisHandler')
self.redis = redis.Redis(host=host, port=port, password=password, decode_responses=True)
try:
self.redis.ping()
except ResponseError:
raise ResponseError('The password provided is apparently incorrect')
self.key = key
self.extra_fields = extra_fields
self.flush_threshold = flush_threshold
self.queue = []
self.lock = Lock()
        # Set up a thread that flushes the queue at the specified interval
self._stop_event = threading.Event()
self._flushing_t = threading.Thread(target=self._flush_task,
args=(flush_time, self._stop_event))
self._flushing_t.daemon = True
self._flushing_t.start()
def _flush_task(self, time, stop_event):
"""Calls the method _flush_buffer every certain time.
"""
while not self._stop_event.isSet():
with self.lock:
self._flush_buffer()
self._stop_event.wait(time)
def _flush_buffer(self):
"""Flushes the messaging queue into Redis.
All values are pushed at once for the same key.
"""
if self.queue:
self.redis.rpush(self.key, *self.queue)
self.queue = []
def disable_buffering(self):
"""Disables buffering.
If called, every single message will be directly pushed to Redis.
"""
self._stop_event.set()
self.flush_threshold = 1
def emit(self, record):
"""Emits a pair (key, value) to redis.
The key is the one provided when creating the handler, or redis if none was
provided. The value contains both the message and the hostname. Extra values
are also appended to the message.
"""
with self.lock:
r = {"message": record.msg, "host": platform.node(), "level": record.level_name}
r.update(self.extra_fields)
r.update(record.kwargs)
self.queue.append(json.dumps(r))
if len(self.queue) == self.flush_threshold:
self._flush_buffer()
def close(self):
self._flush_buffer()
class RabbitMQHandler(Handler):
"""A handler that acts as a RabbitMQ publisher, which publishes each record
as json dump. Requires the kombu module.
The queue will be filled with JSON exported log records. To receive such
log records from a queue you can use the :class:`RabbitMQSubscriber`.
Example setup::
handler = RabbitMQHandler('amqp://guest:guest@localhost//', queue='my_log')
"""
def __init__(self, uri=None, queue='logging', level=NOTSET,
filter=None, bubble=False, context=None):
Handler.__init__(self, level, filter, bubble)
try:
import kombu
except ImportError:
raise RuntimeError('The kombu library is required for '
                               'the RabbitMQHandler.')
if uri:
connection = kombu.Connection(uri)
self.queue = connection.SimpleQueue(queue)
def export_record(self, record):
"""Exports the record into a dictionary ready for JSON dumping.
"""
return record.to_dict(json_safe=True)
def emit(self, record):
self.queue.put(self.export_record(record))
def close(self):
self.queue.close()
class ZeroMQHandler(Handler):
"""A handler that acts as a ZeroMQ publisher, which publishes each record
as json dump. Requires the pyzmq library.
The queue will be filled with JSON exported log records. To receive such
log records from a queue you can use the :class:`ZeroMQSubscriber`.
Example setup::
handler = ZeroMQHandler('tcp://127.0.0.1:5000')
"""
def __init__(self, uri=None, level=NOTSET, filter=None, bubble=False,
context=None):
Handler.__init__(self, level, filter, bubble)
try:
import zmq
except ImportError:
raise RuntimeError('The pyzmq library is required for '
'the ZeroMQHandler.')
#: the zero mq context
self.context = context or zmq.Context()
#: the zero mq socket.
self.socket = self.context.socket(zmq.PUB)
if uri is not None:
self.socket.bind(uri)
def export_record(self, record):
"""Exports the record into a dictionary ready for JSON dumping."""
return record.to_dict(json_safe=True)
def emit(self, record):
self.socket.send(json.dumps(self.export_record(record)).encode("utf-8"))
def close(self):
self.socket.close()
class ThreadController(object):
"""A helper class used by queue subscribers to control the background
thread. This is usually created and started in one go by
:meth:`~logbook.queues.ZeroMQSubscriber.dispatch_in_background` or
a comparable function.
"""
def __init__(self, subscriber, setup=None):
self.setup = setup
self.subscriber = subscriber
self.running = False
self._thread = None
def start(self):
"""Starts the task thread."""
self.running = True
self._thread = Thread(target=self._target)
self._thread.setDaemon(True)
self._thread.start()
def stop(self):
"""Stops the task thread."""
if self.running:
self.running = False
self._thread.join()
self._thread = None
def _target(self):
if self.setup is not None:
self.setup.push_thread()
try:
while self.running:
self.subscriber.dispatch_once(timeout=0.05)
finally:
if self.setup is not None:
self.setup.pop_thread()
class SubscriberBase(object):
"""Baseclass for all subscribers."""
def recv(self, timeout=None):
"""Receives a single record from the socket. Timeout of 0 means nonblocking,
`None` means blocking and otherwise it's a timeout in seconds after which
the function just returns with `None`.
Subclasses have to override this.
"""
raise NotImplementedError()
def dispatch_once(self, timeout=None):
"""Receives one record from the socket, loads it and dispatches it. Returns
`True` if something was dispatched or `False` if it timed out.
"""
rv = self.recv(timeout)
if rv is not None:
dispatch_record(rv)
return True
return False
def dispatch_forever(self):
"""Starts a loop that dispatches log records forever."""
while 1:
self.dispatch_once()
def dispatch_in_background(self, setup=None):
"""Starts a new daemonized thread that dispatches in the background.
        An optional handler setup can be provided that is pushed to the new
thread (can be any :class:`logbook.base.StackedObject`).
Returns a :class:`ThreadController` object for shutting down
the background thread. The background thread will already be
running when this function returns.
"""
controller = ThreadController(self, setup)
controller.start()
return controller
class RabbitMQSubscriber(SubscriberBase):
"""A helper that acts as RabbitMQ subscriber and will dispatch received
log records to the active handler setup. There are multiple ways to
use this class.
It can be used to receive log records from a queue::
subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//')
record = subscriber.recv()
But it can also be used to receive and dispatch these in one go::
with target_handler:
subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//')
subscriber.dispatch_forever()
This will take all the log records from that queue and dispatch them
over to `target_handler`. If you want you can also do that in the
background::
subscriber = RabbitMQSubscriber('amqp://guest:guest@localhost//')
controller = subscriber.dispatch_in_background(target_handler)
The controller returned can be used to shut down the background
thread::
controller.stop()
"""
def __init__(self, uri=None, queue='logging'):
try:
import kombu
except ImportError:
raise RuntimeError('The kombu library is required for '
'the RabbitMQSubscriber.')
if uri:
connection = kombu.Connection(uri)
self.queue = connection.SimpleQueue(queue)
def __del__(self):
try:
self.close()
except AttributeError:
# subscriber partially created
pass
def close(self):
self.queue.close()
def recv(self, timeout=None):
"""Receives a single record from the socket. Timeout of 0 means nonblocking,
`None` means blocking and otherwise it's a timeout in seconds after which
the function just returns with `None`.
"""
if timeout == 0:
try:
rv = self.queue.get(block=False)
except Exception:
return
else:
rv = self.queue.get(timeout=timeout)
log_record = rv.payload
rv.ack()
return LogRecord.from_dict(log_record)
class ZeroMQSubscriber(SubscriberBase):
"""A helper that acts as ZeroMQ subscriber and will dispatch received
log records to the active handler setup. There are multiple ways to
use this class.
It can be used to receive log records from a queue::
subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000')
record = subscriber.recv()
But it can also be used to receive and dispatch these in one go::
with target_handler:
subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000')
subscriber.dispatch_forever()
This will take all the log records from that queue and dispatch them
over to `target_handler`. If you want you can also do that in the
background::
subscriber = ZeroMQSubscriber('tcp://127.0.0.1:5000')
controller = subscriber.dispatch_in_background(target_handler)
The controller returned can be used to shut down the background
thread::
controller.stop()
"""
def __init__(self, uri=None, context=None):
try:
import zmq
except ImportError:
raise RuntimeError('The pyzmq library is required for '
'the ZeroMQSubscriber.')
self._zmq = zmq
#: the zero mq context
self.context = context or zmq.Context()
#: the zero mq socket.
self.socket = self.context.socket(zmq.SUB)
if uri is not None:
self.socket.connect(uri)
self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u'')
def __del__(self):
try:
self.close()
except AttributeError:
# subscriber partially created
pass
def close(self):
"""Closes the zero mq socket."""
self.socket.close()
def recv(self, timeout=None):
"""Receives a single record from the socket. Timeout of 0 means nonblocking,
`None` means blocking and otherwise it's a timeout in seconds after which
the function just returns with `None`.
"""
if timeout is None:
rv = self.socket.recv()
elif not timeout:
rv = self.socket.recv(self._zmq.NOBLOCK)
if rv is None:
return
else:
if not self._zmq.select([self.socket], [], [], timeout)[0]:
return
rv = self.socket.recv(self._zmq.NOBLOCK)
if not PY2:
rv = rv.decode("utf-8")
return LogRecord.from_dict(json.loads(rv))
def _fix_261_mplog():
"""necessary for older python's to disable a broken monkeypatch
in the logging module. See multiprocessing/util.py for the
hasattr() check. At least in Python 2.6.1 the multiprocessing
module is not imported by logging and as such the test in
the util fails.
"""
import logging
import multiprocessing
logging.multiprocessing = multiprocessing
class MultiProcessingHandler(Handler):
"""Implements a handler that dispatches over a queue to a different
process. It is connected to a subscriber with a
:class:`multiprocessing.Queue`::
from multiprocessing import Queue
from logbook.queues import MultiProcessingHandler
queue = Queue(-1)
handler = MultiProcessingHandler(queue)
"""
def __init__(self, queue, level=NOTSET, filter=None, bubble=False):
Handler.__init__(self, level, filter, bubble)
self.queue = queue
_fix_261_mplog()
def emit(self, record):
self.queue.put_nowait(record.to_dict(json_safe=True))
class MultiProcessingSubscriber(SubscriberBase):
"""Receives log records from the given multiprocessing queue and
dispatches them to the active handler setup. Make sure to use the same
    queue for both handler and subscriber. Ideally the queue is set
up with maximum size (``-1``)::
from multiprocessing import Queue
queue = Queue(-1)
It can be used to receive log records from a queue::
subscriber = MultiProcessingSubscriber(queue)
record = subscriber.recv()
But it can also be used to receive and dispatch these in one go::
with target_handler:
subscriber = MultiProcessingSubscriber(queue)
subscriber.dispatch_forever()
This will take all the log records from that queue and dispatch them
over to `target_handler`. If you want you can also do that in the
background::
subscriber = MultiProcessingSubscriber(queue)
controller = subscriber.dispatch_in_background(target_handler)
The controller returned can be used to shut down the background
thread::
controller.stop()
    If no queue is provided the subscriber will create one. This one can then
be used by handlers::
subscriber = MultiProcessingSubscriber()
handler = MultiProcessingHandler(subscriber.queue)
"""
def __init__(self, queue=None):
if queue is None:
from multiprocessing import Queue
queue = Queue(-1)
self.queue = queue
_fix_261_mplog()
def recv(self, timeout=None):
if timeout is None:
rv = self.queue.get()
else:
try:
rv = self.queue.get(block=False, timeout=timeout)
except Empty:
return None
return LogRecord.from_dict(rv)
class ExecnetChannelHandler(Handler):
"""Implements a handler that dispatches over a execnet channel
to a different process.
"""
def __init__(self, channel, level=NOTSET, filter=None, bubble=False):
Handler.__init__(self, level, filter, bubble)
self.channel = channel
def emit(self, record):
self.channel.send(record.to_dict(json_safe=True))
class ExecnetChannelSubscriber(SubscriberBase):
"""subscribes to a execnet channel"""
def __init__(self, channel):
self.channel = channel
def recv(self, timeout=-1):
try:
rv = self.channel.receive(timeout=timeout)
except self.channel.RemoteError:
#XXX: handle
return None
except (self.channel.TimeoutError, EOFError):
return None
else:
return LogRecord.from_dict(rv)
class TWHThreadController(object):
"""A very basic thread controller that pulls things in from a
queue and sends it to a handler. Both queue and handler are
taken from the passed :class:`ThreadedWrapperHandler`.
"""
_sentinel = object()
def __init__(self, wrapper_handler):
self.wrapper_handler = wrapper_handler
self.running = False
self._thread = None
def start(self):
"""Starts the task thread."""
self.running = True
self._thread = Thread(target=self._target)
self._thread.setDaemon(True)
self._thread.start()
def stop(self):
"""Stops the task thread."""
if self.running:
self.wrapper_handler.queue.put_nowait(self._sentinel)
self._thread.join()
self._thread = None
def _target(self):
while 1:
record = self.wrapper_handler.queue.get()
if record is self._sentinel:
self.running = False
break
self.wrapper_handler.handler.handle(record)
class ThreadedWrapperHandler(WrapperHandler):
"""This handled uses a single background thread to dispatch log records
to a specific other handler using an internal queue. The idea is that if
you are using a handler that requires some time to hand off the log records
(such as the mail handler) and would block your request, you can let
Logbook do that in a background thread.
The threaded wrapper handler will automatically adopt the methods and
properties of the wrapped handler. All the values will be reflected:
>>> twh = ThreadedWrapperHandler(TestHandler())
>>> from logbook import WARNING
>>> twh.level_name = 'WARNING'
>>> twh.handler.level_name
'WARNING'
"""
_direct_attrs = frozenset(['handler', 'queue', 'controller'])
def __init__(self, handler):
WrapperHandler.__init__(self, handler)
self.queue = ThreadQueue(-1)
self.controller = TWHThreadController(self)
self.controller.start()
def close(self):
self.controller.stop()
self.handler.close()
def emit(self, record):
self.queue.put_nowait(record)
class GroupMember(ThreadController):
def __init__(self, subscriber, queue):
ThreadController.__init__(self, subscriber, None)
self.queue = queue
def _target(self):
if self.setup is not None:
self.setup.push_thread()
try:
while self.running:
record = self.subscriber.recv()
if record:
try:
self.queue.put(record, timeout=0.05)
                    except Full:
pass
finally:
if self.setup is not None:
self.setup.pop_thread()
class SubscriberGroup(SubscriberBase):
"""This is a subscriber which represents a group of subscribers.
This is helpful if you are writing a server-like application which has
"slaves". This way a user is easily able to view every log record which
happened somewhere in the entire system without having to check every
single slave::
subscribers = SubscriberGroup([
MultiProcessingSubscriber(queue),
ZeroMQSubscriber('tcp://localhost:5000')
])
with target_handler:
subscribers.dispatch_forever()
"""
def __init__(self, subscribers=None, queue_limit=10):
self.members = []
self.queue = ThreadQueue(queue_limit)
for subscriber in subscribers or []:
self.add(subscriber)
def add(self, subscriber):
"""Adds the given `subscriber` to the group."""
member = GroupMember(subscriber, self.queue)
member.start()
self.members.append(member)
def recv(self, timeout=None):
try:
return self.queue.get(timeout=timeout)
except Empty:
return
def stop(self):
"""Stops the group from internally recieving any more messages, once the
internal queue is exhausted :meth:`recv` will always return `None`.
"""
for member in self.members:
            member.stop()
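if __name__ == '__main__':
    # Editor's note: a minimal illustrative sketch, not part of the original
    # module. It wraps logbook's TestHandler in a ThreadedWrapperHandler to
    # show the background-thread hand-off described in that class's docstring.
    # It assumes the logbook package (providing Logger and TestHandler) is
    # importable; the channel name 'demo' is only for demonstration.
    import logbook
    inner = logbook.TestHandler()
    threaded = ThreadedWrapperHandler(inner)
    with threaded.applicationbound():
        logbook.Logger('demo').warning('hello from the background thread')
    threaded.close()  # joins the worker thread, so the queued record is handled
    print(inner.formatted_records)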
|
alonho/logbook
|
logbook/queues.py
|
Python
|
bsd-3-clause
| 21,606
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import print_function, absolute_import, unicode_literals
import json
import logging
from osbs.utils import graceful_chain_get
logger = logging.getLogger(__name__)
class PodResponse(object):
"""
Wrapper for JSON describing build pod
"""
def __init__(self, pod):
"""
        :param pod: dict, json describing the build pod
"""
self._json = pod
@property
def json(self):
return self._json
def get_container_image_ids(self):
"""
Find the image IDs the containers use.
:return: dict, image tag to docker ID
"""
statuses = graceful_chain_get(self.json, "status", "containerStatuses")
if statuses is None:
return {}
def remove_prefix(image_id, prefix):
if image_id.startswith(prefix):
return image_id[len(prefix):]
return image_id
return dict([(status['image'], remove_prefix(status['imageID'],
'docker://'))
for status in statuses])
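if __name__ == "__main__":
    # Editor's note: an illustrative sketch, not part of the original module,
    # showing how get_container_image_ids() reads the containerStatuses
    # section. The pod dict below is a made-up minimal fragment, not real
    # OpenShift output; running this assumes the osbs package is importable
    # (for the graceful_chain_get import at the top of the file).
    sample_pod = {
        "status": {
            "containerStatuses": [
                {"image": "buildroot:latest",
                 "imageID": "docker://sha256:0123456789abcdef"},
            ]
        }
    }
    response = PodResponse(sample_pod)
    # prints {'buildroot:latest': 'sha256:0123456789abcdef'}
    print(response.get_container_image_ids())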
|
pombredanne/osbs-client
|
osbs/build/pod_response.py
|
Python
|
bsd-3-clause
| 1,266
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Spectrogram decomposition
=========================
.. autosummary::
:toctree: generated/
decompose
hpss
"""
import numpy as np
from scipy.ndimage import median_filter
import sklearn.decomposition
from . import core
from . import cache
from . import util
from .util.exceptions import ParameterError
__all__ = ['decompose', 'hpss']
def decompose(S, n_components=None, transformer=None, sort=False, fit=True, **kwargs):
"""Decompose a feature matrix.
Given a spectrogram `S`, produce a decomposition into `components`
and `activations` such that `S ~= components.dot(activations)`.
    By default, this is done with non-negative matrix factorization (NMF),
but any `sklearn.decomposition`-type object will work.
Parameters
----------
S : np.ndarray [shape=(n_features, n_samples), dtype=float]
The input feature matrix (e.g., magnitude spectrogram)
n_components : int > 0 [scalar] or None
number of desired components
if None, then `n_features` components are used
transformer : None or object
If None, use `sklearn.decomposition.NMF`
Otherwise, any object with a similar interface to NMF should work.
`transformer` must follow the scikit-learn convention, where
input data is `(n_samples, n_features)`.
`transformer.fit_transform()` will be run on `S.T` (not `S`),
the return value of which is stored (transposed) as `activations`
The components will be retrieved as `transformer.components_.T`
`S ~= np.dot(activations, transformer.components_).T`
or equivalently:
`S ~= np.dot(transformer.components_.T, activations.T)`
sort : bool
If `True`, components are sorted by ascending peak frequency.
.. note:: If used with `transformer`, sorting is applied to copies
of the decomposition parameters, and not to `transformer`'s
internal parameters.
fit : bool
If `True`, components are estimated from the input ``S``.
If `False`, components are assumed to be pre-computed and stored
in ``transformer``, and are not changed.
kwargs : Additional keyword arguments to the default transformer
`sklearn.decomposition.NMF`
Returns
-------
components: np.ndarray [shape=(n_features, n_components)]
matrix of components (basis elements).
activations: np.ndarray [shape=(n_components, n_samples)]
transformed matrix/activation matrix
Raises
------
ParameterError
if `fit` is False and no `transformer` object is provided.
See Also
--------
sklearn.decomposition : SciKit-Learn matrix decomposition modules
Examples
--------
Decompose a magnitude spectrogram into 32 components with NMF
>>> y, sr = librosa.load(librosa.util.example_audio_file())
>>> S = np.abs(librosa.stft(y))
>>> comps, acts = librosa.decompose.decompose(S, n_components=8)
>>> comps
array([[ 1.876e-01, 5.559e-02, ..., 1.687e-01, 4.907e-02],
[ 3.148e-01, 1.719e-01, ..., 2.314e-01, 9.493e-02],
...,
[ 1.561e-07, 8.564e-08, ..., 7.167e-08, 4.997e-08],
[ 1.531e-07, 7.880e-08, ..., 5.632e-08, 4.028e-08]])
>>> acts
array([[ 4.197e-05, 8.512e-03, ..., 3.056e-05, 9.159e-06],
[ 9.568e-06, 1.718e-02, ..., 3.322e-05, 7.869e-06],
...,
[ 5.982e-05, 1.311e-02, ..., -0.000e+00, 6.323e-06],
[ 3.782e-05, 7.056e-03, ..., 3.290e-05, -0.000e+00]])
Sort components by ascending peak frequency
>>> comps, acts = librosa.decompose.decompose(S, n_components=8,
... sort=True)
Or with sparse dictionary learning
>>> import sklearn.decomposition
>>> T = sklearn.decomposition.MiniBatchDictionaryLearning(n_components=8)
>>> comps, acts = librosa.decompose.decompose(S, transformer=T, sort=True)
>>> import matplotlib.pyplot as plt
>>> plt.figure(figsize=(10,8))
>>> plt.subplot(3, 1, 1)
>>> librosa.display.specshow(librosa.logamplitude(S**2,
... ref_power=np.max),
... y_axis='log', x_axis='time')
>>> plt.title('Input spectrogram')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.subplot(3, 2, 3)
>>> librosa.display.specshow(comps, y_axis='log')
>>> plt.title('Components')
>>> plt.subplot(3, 2, 4)
>>> librosa.display.specshow(acts, x_axis='time')
>>> plt.ylabel('Components')
>>> plt.title('Activations')
>>> plt.subplot(3, 1, 3)
>>> S_approx = comps.dot(acts)
>>> librosa.display.specshow(librosa.logamplitude(S_approx**2,
... ref_power=np.max),
... y_axis='log', x_axis='time')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Reconstructed spectrogram')
>>> plt.tight_layout()
"""
if transformer is None:
if fit is False:
raise ParameterError('fit must be True if transformer is None')
transformer = sklearn.decomposition.NMF(n_components=n_components,
**kwargs)
if n_components is None:
n_components = S.shape[0]
if fit:
activations = transformer.fit_transform(S.T).T
else:
activations = transformer.transform(S.T).T
components = transformer.components_.T
if sort:
components, idx = util.axis_sort(components, index=True)
activations = activations[idx]
return components, activations
@cache
def hpss(S, kernel_size=31, power=2.0, mask=False):
"""Median-filtering harmonic percussive source separation (HPSS).
Decomposes an input spectrogram `S = H + P`
where `H` contains the harmonic components,
and `P` contains the percussive components.
This implementation is based upon the algorithm described by [1]_.
.. [1] Fitzgerald, Derry.
"Harmonic/percussive separation using median filtering."
13th International Conference on Digital Audio Effects (DAFX10),
Graz, Austria, 2010.
Parameters
----------
S : np.ndarray [shape=(d, n)]
input spectrogram. May be real (magnitude) or complex.
kernel_size : int or tuple (kernel_harmonic, kernel_percussive)
kernel size(s) for the median filters.
- If scalar, the same size is used for both harmonic and percussive.
- If iterable, the first value specifies the width of the
harmonic filter, and the second value specifies the width
of the percussive filter.
power : float >= 0 [scalar]
Exponent for the Wiener filter when constructing mask matrices.
Mask matrices are defined by
`mask_H = (r_H ** power) / (r_H ** power + r_P ** power)`
where `r_H` and `r_P` are the median-filter responses for
harmonic and percussive components.
mask : bool
Return the masking matrices instead of components
Returns
-------
harmonic : np.ndarray [shape=(d, n)]
harmonic component (or mask)
percussive : np.ndarray [shape=(d, n)]
percussive component (or mask)
Examples
--------
Separate into harmonic and percussive
>>> y, sr = librosa.load(librosa.util.example_audio_file(), duration=15)
>>> D = librosa.stft(y)
>>> H, P = librosa.decompose.hpss(D)
>>> import matplotlib.pyplot as plt
>>> plt.figure()
>>> plt.subplot(3, 1, 1)
>>> librosa.display.specshow(librosa.logamplitude(np.abs(D)**2,
... ref_power=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Full power spectrogram')
>>> plt.subplot(3, 1, 2)
>>> librosa.display.specshow(librosa.logamplitude(np.abs(H)**2,
... ref_power=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Harmonic power spectrogram')
>>> plt.subplot(3, 1, 3)
>>> librosa.display.specshow(librosa.logamplitude(np.abs(P)**2,
... ref_power=np.max),
... y_axis='log')
>>> plt.colorbar(format='%+2.0f dB')
>>> plt.title('Percussive power spectrogram')
>>> plt.tight_layout()
Or with a narrower horizontal filter
>>> H, P = librosa.decompose.hpss(D, kernel_size=(13, 31))
Just get harmonic/percussive masks, not the spectra
>>> mask_H, mask_P = librosa.decompose.hpss(D, mask=True)
>>> mask_H
array([[ 1., 0., ..., 0., 0.],
[ 1., 0., ..., 0., 0.],
...,
[ 0., 0., ..., 0., 0.],
[ 0., 0., ..., 0., 0.]])
>>> mask_P
array([[ 0., 1., ..., 1., 1.],
[ 0., 1., ..., 1., 1.],
...,
[ 1., 1., ..., 1., 1.],
[ 1., 1., ..., 1., 1.]])
"""
if np.iscomplexobj(S):
S, phase = core.magphase(S)
else:
phase = 1
if np.isscalar(kernel_size):
win_harm = kernel_size
win_perc = kernel_size
else:
win_harm = kernel_size[0]
win_perc = kernel_size[1]
# Compute median filters. Pre-allocation here preserves memory layout.
harm = np.empty_like(S)
harm[:] = median_filter(S, size=(1, win_harm), mode='reflect')
perc = np.empty_like(S)
perc[:] = median_filter(S, size=(win_perc, 1), mode='reflect')
if mask or power < util.SMALL_FLOAT:
mask_harm = (harm > perc).astype(float)
mask_perc = 1 - mask_harm
if mask:
return mask_harm, mask_perc
else:
perc = perc ** power
zero_perc = (perc < util.SMALL_FLOAT)
perc[zero_perc] = 0.0
harm = harm ** power
zero_harm = (harm < util.SMALL_FLOAT)
harm[zero_harm] = 0.0
# Find points where both are zero, equalize
harm[zero_harm & zero_perc] = 0.5
perc[zero_harm & zero_perc] = 0.5
# Compute harmonic mask
mask_harm = harm / (harm + perc)
mask_perc = perc / (harm + perc)
return ((S * mask_harm) * phase, (S * mask_perc) * phase)
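if __name__ == "__main__":
    # Editor's note: a tiny numpy-only sketch of the Wiener-style soft masks
    # described in the hpss() docstring; it is not part of the original module
    # and does not call librosa itself. The arrays are made-up stand-ins for
    # the harmonic/percussive median-filter responses r_H and r_P. Because
    # this module uses relative imports, run it in package context (e.g.
    # `python -m librosa.decompose`) if you want to execute the block.
    r_h = np.array([[4.0, 1.0], [3.0, 0.5]])
    r_p = np.array([[1.0, 4.0], [0.5, 3.0]])
    power = 2.0
    mask_h = r_h ** power / (r_h ** power + r_p ** power)
    mask_p = r_p ** power / (r_h ** power + r_p ** power)
    print(mask_h + mask_p)  # numerically 1 everywhere: the masks partition S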
|
decebel/librosa
|
librosa/decompose.py
|
Python
|
isc
| 10,427
|
#!/usr/bin/env python
from __future__ import print_function
from random import randint
from time import sleep
from progress.bar import (Bar, ChargingBar, FillingSquaresBar,
FillingCirclesBar, IncrementalBar, ShadyBar)
from progress.spinner import Spinner, PieSpinner, MoonSpinner, LineSpinner
from progress.counter import Counter, Countdown, Stack, Pie
for bar_cls in (Bar, ChargingBar, FillingSquaresBar, FillingCirclesBar):
suffix = '%(index)d/%(max)d [%(elapsed)d / %(eta)d]'
bar = bar_cls(bar_cls.__name__, suffix=suffix)
for i in bar.iter(range(100)):
sleep(0.04)
for bar_cls in (IncrementalBar, ShadyBar):
suffix = '%(percent)d%% [%(elapsed_td)s / %(eta_td)s]'
bar = bar_cls(bar_cls.__name__, suffix=suffix)
for i in bar.iter(range(200)):
sleep(0.02)
for spin in (Spinner, PieSpinner, MoonSpinner, LineSpinner):
for i in spin(spin.__name__ + ' ').iter(range(30)):
sleep(0.1)
print()
for singleton in (Counter, Countdown, Stack, Pie):
for i in singleton(singleton.__name__ + ' ').iter(range(100)):
sleep(0.03)
print()
bar = IncrementalBar('Random', suffix='%(index)d')
for i in range(100):
bar.goto(randint(0, 100))
sleep(0.1)
bar.finish()
|
RazerM/progress
|
test_progress.py
|
Python
|
isc
| 1,259
|
from django import template
register = template.Library()
def get_seconds_ts(timestamp):
    # seconds component of the timestamp, zero-padded ('00'-'59')
    return timestamp.strftime("%S")
def get_miliseconds_ts(timestamp):
    # note: %f is the microsecond component (six digits), despite the filter name
    return timestamp.strftime("%f")
register.filter(get_seconds_ts)
register.filter(get_miliseconds_ts)
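if __name__ == "__main__":
    # Editor's note: an illustrative sketch, not part of the original file,
    # assuming Django is importable. The filters simply format a datetime, so
    # they can be exercised directly; in a template they would be used roughly
    # as {{ event.timestamp|get_seconds_ts }} after {% load handlers %}
    # (the template variable name here is hypothetical).
    from datetime import datetime
    ts = datetime(2024, 1, 1, 12, 30, 45, 123456)
    print(get_seconds_ts(ts))      # '45'
    print(get_miliseconds_ts(ts))  # '123456'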
|
MGautier/security-sensor
|
trunk/version-1-0/webapp/secproject/secapp/templatetags/handlers.py
|
Python
|
mit
| 273
|
import sqlite3
import time
import urllib
import zlib
import string
conn = sqlite3.connect('index.sqlite')
conn.text_factory = str
cur = conn.cursor()
cur.execute('''SELECT subject_id,subject FROM Messages
JOIN Subjects ON Messages.subject_id = Subjects.id''')
counts = dict()
for message_row in cur :
text = message_row[1]
text = text.translate(None, string.punctuation)
text = text.translate(None, '1234567890')
text = text.strip()
text = text.lower()
words = text.split()
for word in words:
if len(word) < 4 : continue
counts[word] = counts.get(word,0) + 1
# Find the top 100 words
words = sorted(counts, key=counts.get, reverse=True)
highest = None
lowest = None
for w in words[:100]:
if highest is None or highest < counts[w] :
highest = counts[w]
if lowest is None or lowest > counts[w] :
lowest = counts[w]
print 'Range of counts:',highest,lowest
# Spread the font sizes across 20-100 based on the count
bigsize = 80
smallsize = 20
fhand = open('gword.js','w')
fhand.write("gword = [")
first = True
for k in words[:100]:
if not first : fhand.write( ",\n")
first = False
size = counts[k]
size = (size - lowest) / float(highest - lowest)
size = int((size * bigsize) + smallsize)
fhand.write("{text: '"+k+"', size: "+str(size)+"}")
fhand.write( "\n];\n")
print "Output written to gword.js"
print "Open gword.htm in a browser to view"
|
johanfrisk/Python_at_web
|
notebooks/code/gmane/gword.py
|
Python
|
mit
| 1,440
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module contains some math utils that are used in the chemenv package.
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__credits__ = "Geoffroy Hautier"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "david.waroquiers@gmail.com"
__date__ = "Feb 20, 2016"
from functools import reduce
from math import sqrt
import numpy as np
from scipy.special import erf
##############################################################
# cartesian product of lists ##################################
##############################################################
def _append_es2sequences(sequences, es):
result = []
if not sequences:
for e in es:
result.append([e])
else:
for e in es:
result += [seq + [e] for seq in sequences]
return result
def _cartesian_product(lists):
"""
given a list of lists,
returns all the possible combinations taking one element from each list
    The lists do not have to be of equal length
"""
return reduce(_append_es2sequences, lists, [])
def prime_factors(n):
"""Lists prime factors of a given natural integer, from greatest to smallest
:param n: Natural integer
:rtype : list of all prime factors of the given natural n
"""
i = 2
while i <= sqrt(n):
if n % i == 0:
            l = prime_factors(n // i)  # integer division keeps the factors as ints
l.append(i)
return l
i += 1
return [n] # n is prime
def _factor_generator(n):
"""
From a given natural integer, returns the prime factors and their multiplicity
:param n: Natural integer
:return:
"""
p = prime_factors(n)
factors = {}
for p1 in p:
try:
factors[p1] += 1
except KeyError:
factors[p1] = 1
return factors
def divisors(n):
"""
From a given natural integer, returns the list of divisors in ascending order
:param n: Natural integer
:return: List of divisors of n in ascending order
"""
factors = _factor_generator(n)
_divisors = []
listexponents = [[k ** x for x in range(0, factors[k] + 1)] for k in list(factors.keys())]
listfactors = _cartesian_product(listexponents)
for f in listfactors:
_divisors.append(reduce(lambda x, y: x * y, f, 1))
_divisors.sort()
return _divisors
def get_center_of_arc(p1, p2, radius):
"""
:param p1:
:param p2:
:param radius:
:return:
"""
dx = p2[0] - p1[0]
dy = p2[1] - p1[1]
dd = np.sqrt(dx * dx + dy * dy)
radical = np.power((radius / dd), 2) - 0.25
if radical < 0:
raise ValueError("Impossible to find center of arc because the arc is ill-defined")
tt = np.sqrt(radical)
if radius > 0:
tt = -tt
return (p1[0] + p2[0]) / 2 - tt * dy, (p1[1] + p2[1]) / 2 + tt * dx
def get_linearly_independent_vectors(vectors_list):
"""
:param vectors_list:
:return:
"""
independent_vectors_list = []
for vector in vectors_list:
if np.any(vector != 0):
if len(independent_vectors_list) == 0:
independent_vectors_list.append(np.array(vector))
elif len(independent_vectors_list) == 1:
rank = np.linalg.matrix_rank(np.array([independent_vectors_list[0], vector, [0, 0, 0]]))
if rank == 2:
independent_vectors_list.append(np.array(vector))
elif len(independent_vectors_list) == 2:
mm = np.array([independent_vectors_list[0], independent_vectors_list[1], vector])
if np.linalg.det(mm) != 0:
independent_vectors_list.append(np.array(vector))
if len(independent_vectors_list) == 3:
break
return independent_vectors_list
def scale_and_clamp(xx, edge0, edge1, clamp0, clamp1):
"""
:param xx:
:param edge0:
:param edge1:
:param clamp0:
:param clamp1:
:return:
"""
return np.clip((xx - edge0) / (edge1 - edge0), clamp0, clamp1)
# Step function based on the cumulative distribution function of the normal law
def normal_cdf_step(xx, mean, scale):
"""
:param xx:
:param mean:
:param scale:
:return:
"""
return 0.5 * (1.0 + erf((xx - mean) / (np.sqrt(2.0) * scale)))
# SMOOTH STEP FUNCTIONS
# Set of smooth step functions that allow to smoothly go from y = 0.0 (1.0) to y = 1.0 (0.0) by changing x
# from 0.0 to 1.0 respectively when inverse is False (True).
# (except if edges is given, in which case the values are first scaled and clamped to the interval given by edges)
# The derivative at x = 0.0 and x = 1.0 have to be 0.0
def smoothstep(xx, edges=None, inverse=False):
"""
:param xx:
:param edges:
:param inverse:
:return:
"""
if edges is None:
xx_clipped = np.clip(xx, 0.0, 1.0)
if inverse:
return 1.0 - xx_clipped * xx_clipped * (3.0 - 2.0 * xx_clipped)
return xx_clipped * xx_clipped * (3.0 - 2.0 * xx_clipped)
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return smoothstep(xx_scaled_and_clamped, inverse=inverse)
def smootherstep(xx, edges=None, inverse=False):
"""
:param xx:
:param edges:
:param inverse:
:return:
"""
if edges is None:
xx_clipped = np.clip(xx, 0.0, 1.0)
if inverse:
return 1.0 - xx_clipped * xx_clipped * xx_clipped * (xx_clipped * (xx_clipped * 6 - 15) + 10)
return xx_clipped * xx_clipped * xx_clipped * (xx_clipped * (xx_clipped * 6 - 15) + 10)
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return smootherstep(xx_scaled_and_clamped, inverse=inverse)
def cosinus_step(xx, edges=None, inverse=False):
"""
:param xx:
:param edges:
:param inverse:
:return:
"""
if edges is None:
xx_clipped = np.clip(xx, 0.0, 1.0)
if inverse:
return (np.cos(xx_clipped * np.pi) + 1.0) / 2.0
return 1.0 - (np.cos(xx_clipped * np.pi) + 1.0) / 2.0
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return cosinus_step(xx_scaled_and_clamped, inverse=inverse)
def power3_step(xx, edges=None, inverse=False):
"""
:param xx:
:param edges:
:param inverse:
:return:
"""
return smoothstep(xx, edges=edges, inverse=inverse)
def powern_parts_step(xx, edges=None, inverse=False, nn=2):
"""
:param xx:
:param edges:
:param inverse:
:param nn:
:return:
"""
if edges is None:
aa = np.power(0.5, 1.0 - nn)
xx_clipped = np.clip(xx, 0.0, 1.0)
if np.mod(nn, 2) == 0:
if inverse:
return 1.0 - np.where(
xx_clipped < 0.5,
aa * np.power(xx_clipped, nn),
1.0 - aa * np.power(xx_clipped - 1.0, nn),
)
return np.where(
xx_clipped < 0.5,
aa * np.power(xx_clipped, nn),
1.0 - aa * np.power(xx_clipped - 1.0, nn),
)
if inverse:
return 1.0 - np.where(
xx_clipped < 0.5,
aa * np.power(xx_clipped, nn),
1.0 + aa * np.power(xx_clipped - 1.0, nn),
)
return np.where(
xx_clipped < 0.5,
aa * np.power(xx_clipped, nn),
1.0 + aa * np.power(xx_clipped - 1.0, nn),
)
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return powern_parts_step(xx_scaled_and_clamped, inverse=inverse, nn=nn)
# FINITE DECREASING FUNCTIONS
# Set of decreasing functions that allow to smoothly go from y = 1.0 to y = 0.0 by changing x from 0.0 to 1.0
# The derivative at x = 1.0 has to be 0.0
def powern_decreasing(xx, edges=None, nn=2):
"""
:param xx:
:param edges:
:param nn:
:return:
"""
if edges is None:
aa = 1.0 / np.power(-1.0, nn)
return aa * np.power(xx - 1.0, nn)
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return powern_decreasing(xx_scaled_and_clamped, nn=nn)
def power2_decreasing_exp(xx, edges=None, alpha=1.0):
"""
:param xx:
:param edges:
:param alpha:
:return:
"""
if edges is None:
aa = 1.0 / np.power(-1.0, 2)
return aa * np.power(xx - 1.0, 2) * np.exp(-alpha * xx)
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return power2_decreasing_exp(xx_scaled_and_clamped, alpha=alpha)
# INFINITE TO FINITE DECREASING FUNCTIONS
# Set of decreasing functions that allow to smoothly go from y = + Inf to y = 0.0 by changing x from 0.0 to 1.0
# The derivative at x = 1.0 has to be 0.0
def power2_tangent_decreasing(xx, edges=None, prefactor=None):
"""
:param xx:
:param edges:
:param prefactor:
:return:
"""
if edges is None:
if prefactor is None:
aa = 1.0 / np.power(-1.0, 2)
else:
aa = prefactor
return -aa * np.power(xx - 1.0, 2) * np.tan((xx - 1.0) * np.pi / 2.0) # pylint: disable=E1130
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return power2_tangent_decreasing(xx_scaled_and_clamped, prefactor=prefactor)
def power2_inverse_decreasing(xx, edges=None, prefactor=None):
"""
:param xx:
:param edges:
:param prefactor:
:return:
"""
if edges is None:
if prefactor is None:
aa = 1.0 / np.power(-1.0, 2)
else:
aa = prefactor
return np.where(np.isclose(xx, 0.0), aa * float("inf"), aa * np.power(xx - 1.0, 2) / xx)
# return aa * np.power(xx-1.0, 2) / xx if xx != 0 else aa * float("inf")
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return power2_inverse_decreasing(xx_scaled_and_clamped, prefactor=prefactor)
def power2_inverse_power2_decreasing(xx, edges=None, prefactor=None):
"""
:param xx:
:param edges:
:param prefactor:
:return:
"""
if edges is None:
if prefactor is None:
aa = 1.0 / np.power(-1.0, 2)
else:
aa = prefactor
return np.where(
np.isclose(xx, 0.0),
aa * float("inf"),
aa * np.power(xx - 1.0, 2) / xx ** 2.0,
)
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return power2_inverse_power2_decreasing(xx_scaled_and_clamped, prefactor=prefactor)
def power2_inverse_powern_decreasing(xx, edges=None, prefactor=None, powern=2.0):
"""
:param xx:
:param edges:
:param prefactor:
:param powern:
:return:
"""
if edges is None:
if prefactor is None:
aa = 1.0 / np.power(-1.0, 2)
else:
aa = prefactor
return aa * np.power(xx - 1.0, 2) / xx ** powern
xx_scaled_and_clamped = scale_and_clamp(xx, edges[0], edges[1], 0.0, 1.0)
return power2_inverse_powern_decreasing(xx_scaled_and_clamped, prefactor=prefactor, powern=powern)
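if __name__ == "__main__":
    # Editor's note: an illustrative sketch, not part of the original module.
    # divisors() enumerates every product of prime-power factors of n, and
    # smoothstep() ramps from 0 to 1 with zero derivative at both edges.
    print(divisors(12))  # divisors of 12 in ascending order
    xx = np.linspace(0.0, 1.0, 5)
    print(smoothstep(xx))  # -> 0.0, 0.15625, 0.5, 0.84375, 1.0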
|
gmatteo/pymatgen
|
pymatgen/analysis/chemenv/utils/math_utils.py
|
Python
|
mit
| 11,244
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
class TestCoreCLIOutput(unittest.TestCase):
def test_create_AzOutputProducer(self):
from azure.cli.core._output import AzOutputProducer
from azure.cli.core.mock import DummyCli
output_producer = AzOutputProducer(DummyCli())
        self.assertEqual(7, len(output_producer._FORMAT_DICT))  # seven formats: json, jsonc, table, tsv, yaml, yamlc, none
self.assertIn('yaml', output_producer._FORMAT_DICT)
self.assertIn('none', output_producer._FORMAT_DICT)
# regression test for https://github.com/Azure/azure-cli/issues/9263
def test_yaml_output_with_ordered_dict(self):
from azure.cli.core._output import AzOutputProducer
from azure.cli.core.mock import DummyCli
from knack.util import CommandResultItem
from collections import OrderedDict
import yaml
account_dict = {
"environmentName": "AzureCloud",
"id": "000000-000000",
"isDefault": True,
"name": "test_sub",
"state": "Enabled",
"tenantId": "000000-000000-000000",
"user": {
"name": "test@example.com",
"type": "user"
}
}
output_producer = AzOutputProducer(DummyCli())
yaml_output = output_producer.get_formatter('yaml')(CommandResultItem(result=OrderedDict(account_dict)))
self.assertEqual(account_dict, yaml.safe_load(yaml_output))
if __name__ == '__main__':
unittest.main()
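# Editor's note (illustrative only, not part of the original test file): the
# regression guarded by test_yaml_output_with_ordered_dict stems from PyYAML's
# safe dumper having no representer for OrderedDict, e.g.:
#
#     >>> import yaml
#     >>> from collections import OrderedDict
#     >>> yaml.safe_dump(OrderedDict(a=1))
#     Traceback (most recent call last):
#         ...
#     yaml.representer.RepresenterError: ('cannot represent an object', ...)
#
# which is presumably why the yaml formatter under test must accept
# OrderedDict results.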
|
yugangw-msft/azure-cli
|
src/azure-cli-core/azure/cli/core/tests/test_output.py
|
Python
|
mit
| 1,859
|
#!/usr/bin/env python
# @(#) $Jeannot: test2.py,v 1.16 2004/03/20 17:06:54 js Exp $
# This example is a PuLP rendition of the todd.mod problem included in the GLPK
# 4.4 distribution. It's a hard knapsack problem.
# Import PuLP modeler functions
from pulp import *
# Import math functions
from math import *
# A new LP problem
prob = LpProblem("test2", LpMaximize)
# Parameters
# Size of the problem
n = 15
k = floor(log(n) / log(2))
# A vector of n binary variables
x = LpVariable.matrix("x", list(range(n)), 0, 1, LpInteger)
# A vector of weights
a = [pow(2,k + n + 1) + pow(2,k + n + 1 - j) + 1 for j in range(1,n+1)]
# The maximum weight
b = 0.5 * floor(sum(a))
# The total weight
weight = lpDot(a, x)
# Objective
prob += weight
# Constraint
prob += weight <= b
# Resolution
prob.solve()
# Print the status of the solved LP
print("Status:", LpStatus[prob.status])
# Print the value of the variables at the optimum
for v in prob.variables():
print(v.name, "=", v.varValue)
# Print the value of the objective
print("objective=", value(prob.objective))
|
talitaof/pulp
|
examples/test2.py
|
Python
|
mit
| 1,067
|
import unittest
import numpy
import six
import chainer
from chainer import cuda
from chainer import functions as F
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
from chainer.utils import type_check
@testing.parameterize(*[
{'batched': True},
{'batched': False}
])
class DetFunctionTest(unittest.TestCase):
def setUp(self):
if self.batched:
self.x = numpy.random.uniform(
.5, 1, (6, 3, 3)).astype(numpy.float32)
self.y = numpy.random.uniform(
.5, 1, (6, 3, 3)).astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, (6,)).astype(numpy.float32)
self.ct = self.x.transpose(0, 2, 1)
self.det = F.batch_det
self.matmul = F.batch_matmul
else:
self.x = numpy.random.uniform(.5, 1, (5, 5)).astype(numpy.float32)
self.y = numpy.random.uniform(.5, 1, (5, 5)).astype(numpy.float32)
self.gy = numpy.random.uniform(-1, 1, ()).astype(numpy.float32)
self.ct = self.x.transpose()
self.det = F.det
self.matmul = F.matmul
def det_transpose(self, gpu=False):
if gpu:
cx = cuda.to_gpu(self.x)
ct = cuda.to_gpu(self.ct)
else:
cx = self.x
ct = self.ct
xn = chainer.Variable(cx)
xt = chainer.Variable(ct)
yn = self.det(xn)
yt = self.det(xt)
gradient_check.assert_allclose(yn.data, yt.data, rtol=1e-4, atol=1)
@attr.gpu
@condition.retry(3)
def test_det_transpose_gpu(self):
self.det_transpose(gpu=True)
@condition.retry(3)
def test_det_transpose_cpu(self):
self.det_transpose(gpu=False)
def det_scaling(self, gpu=False):
scaling = numpy.random.randn(1).astype('float32')
if gpu:
cx = cuda.to_gpu(self.x)
sx = cuda.to_gpu(scaling * self.x)
else:
cx = self.x
sx = scaling * self.x
c = float(scaling ** self.x.shape[1])
cxv = chainer.Variable(cx)
sxv = chainer.Variable(sx)
cxd = self.det(cxv)
sxd = self.det(sxv)
gradient_check.assert_allclose(cxd.data * c, sxd.data)
@attr.gpu
@condition.retry(3)
def test_det_scaling_gpu(self):
self.det_scaling(gpu=True)
@condition.retry(3)
def test_det_scaling_cpu(self):
self.det_scaling(gpu=False)
def det_identity(self, gpu=False):
if self.batched:
chk = numpy.ones(len(self.x), dtype=numpy.float32)
dt = numpy.identity(self.x.shape[1], dtype=numpy.float32)
idt = numpy.repeat(dt[None], len(self.x), axis=0)
else:
idt = numpy.identity(self.x.shape[1], dtype=numpy.float32)
chk = numpy.ones(1, dtype=numpy.float32)
if gpu:
chk = cuda.to_gpu(chk)
idt = cuda.to_gpu(idt)
idtv = chainer.Variable(idt)
idtd = self.det(idtv)
gradient_check.assert_allclose(idtd.data, chk, rtol=1e-4, atol=1e-4)
@attr.gpu
def test_det_identity_gpu(self):
self.det_identity(gpu=True)
def test_det_identity_cpu(self):
self.det_identity(gpu=False)
def det_product(self, gpu=False):
if gpu:
cx = cuda.to_gpu(self.x)
cy = cuda.to_gpu(self.y)
else:
cx = self.x
cy = self.y
vx = chainer.Variable(cx)
vy = chainer.Variable(cy)
dxy1 = self.det(self.matmul(vx, vy))
dxy2 = self.det(vx) * self.det(vy)
gradient_check.assert_allclose(dxy1.data, dxy2.data, rtol=1e-4,
atol=1e-4)
@condition.retry(3)
def test_det_product_cpu(self):
self.det_product(gpu=False)
@attr.gpu
@condition.retry(3)
def test_det_product_gpu(self):
self.det_product(gpu=True)
@attr.gpu
@condition.retry(3)
def test_batch_backward_gpu(self):
x_data = cuda.to_gpu(self.x)
y_grad = cuda.to_gpu(self.gy)
gradient_check.check_backward(self.det, x_data, y_grad)
@condition.retry(3)
def test_batch_backward_cpu(self):
x_data, y_grad = self.x, self.gy
gradient_check.check_backward(self.det, x_data, y_grad)
def check_single_matrix(self, x):
x = chainer.Variable(x)
y = self.det(x)
if self.batched:
self.assertEqual(y.data.ndim, 1)
else:
self.assertEqual(y.data.ndim, 0)
def test_single_matrix_cpu(self):
self.check_single_matrix(self.x)
@attr.gpu
def test_expect_scalar_gpu(self):
self.check_single_matrix(cuda.to_gpu(self.x))
def check_singular_matrix(self, x):
if self.batched:
x[0, ...] = 0.0
else:
x[...] = 0.0
x = chainer.Variable(x)
        # check that no error is raised for a singular matrix
self.det(x)
def test_singular_matrix_cpu(self):
self.check_singular_matrix(self.x)
@attr.gpu
def test_singular_matrix_gpu(self):
self.check_singular_matrix(cuda.to_gpu(self.x))
def check_zero_det(self, x, gy, err):
if self.batched:
x[0, ...] = 0.0
else:
x[...] = 0.0
with self.assertRaises(err):
gradient_check.check_backward(self.det, x, gy)
def test_zero_det_cpu(self):
self.check_zero_det(self.x, self.gy, ValueError)
@attr.gpu
def test_zero_det_gpu(self):
self.check_zero_det(
cuda.to_gpu(self.x), cuda.to_gpu(self.gy), ValueError)
class TestDetSmallCase(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(.5, 1, (2, 2)).astype(numpy.float32)
def check_by_definition(self, x):
ans = F.det(chainer.Variable(x)).data
y = x[0, 0] * x[1, 1] - x[0, 1] * x[1, 0]
gradient_check.assert_allclose(ans, y)
@condition.retry(3)
def test_answer_cpu(self):
self.check_by_definition(self.x)
@attr.gpu
@condition.retry(3)
def test_answer_gpu(self):
self.check_by_definition(cuda.to_gpu(self.x))
@testing.parameterize(
*testing.product({
'shape': [(s, s) for s in six.moves.range(1, 5)],
}))
class TestDetGPUCPUConsistency(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(.5, 1, self.shape).astype(numpy.float32)
@attr.gpu
@condition.retry(3)
def test_answer_gpu_cpu(self):
x = cuda.to_gpu(self.x)
y = F.det(chainer.Variable(x))
gpu = cuda.to_cpu(y.data)
cpu = numpy.linalg.det(self.x)
gradient_check.assert_allclose(gpu, cpu)
@testing.parameterize(
*testing.product({
'shape': [(w, s, s) for s in six.moves.range(1, 5)
for w in six.moves.range(1, 5)],
}))
class TestBatchDetGPUCPUConsistency(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(.5, 1, self.shape).astype(numpy.float32)
@attr.gpu
@condition.retry(3)
def test_answer_gpu_cpu(self):
x = cuda.to_gpu(self.x)
y = F.batch_det(chainer.Variable(x))
gpu = cuda.to_cpu(y.data)
cpu = numpy.linalg.det(self.x)
gradient_check.assert_allclose(gpu, cpu)
class DetFunctionRaiseTest(unittest.TestCase):
def test_invalid_ndim(self):
with self.assertRaises(type_check.InvalidType):
F.batch_det(chainer.Variable(numpy.zeros((2, 2))))
def test_invalid_shape(self):
with self.assertRaises(type_check.InvalidType):
F.batch_det(chainer.Variable(numpy.zeros((1, 2))))
testing.run_module(__name__, __file__)
|
AlpacaDB/chainer
|
tests/chainer_tests/functions_tests/math_tests/test_det.py
|
Python
|
mit
| 7,745
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS (Oracle)
Date : Aug 27, 2014
copyright : (C) 2014 by Médéric RIBREUX
email : mederic.ribreux@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias <wonder.sk@gmail.com> (GPLv2 license)
- DB Manager by Giuseppe Sucameli <brush.tyler@gmail.com> (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from qgis.PyQt.QtCore import QTime
from ..data_model import TableDataModel, SqlResultModel, BaseTableModel
from ..plugin import DbError
class ORTableDataModel(TableDataModel):
def __init__(self, table, parent=None):
self.cursor = None
TableDataModel.__init__(self, table, parent)
if not self.table.rowCount:
self.table.refreshRowCount()
self.table.aboutToChange.connect(self._deleteCursor)
self._createCursor()
def _createCursor(self):
fields_txt = u", ".join(self.fields)
table_txt = self.db.quoteId(
(self.table.schemaName(), self.table.name))
self.cursor = self.db._get_cursor()
sql = u"SELECT {0} FROM {1}".format(fields_txt, table_txt)
self.db._execute(self.cursor, sql)
def _sanitizeTableField(self, field):
# get fields, ignore geometry columns
if field.dataType.upper() == u"SDO_GEOMETRY":
return (u"CASE WHEN {0} IS NULL THEN NULL ELSE 'GEOMETRY'"
u"END AS {0}".format(
self.db.quoteId(field.name)))
if field.dataType.upper() == u"DATE":
return u"CAST({} AS VARCHAR2(8))".format(
self.db.quoteId(field.name))
if u"TIMESTAMP" in field.dataType.upper():
return u"TO_CHAR({}, 'YYYY-MM-DD HH:MI:SS.FF')".format(
self.db.quoteId(field.name))
if field.dataType.upper() == u"NUMBER":
if not field.charMaxLen:
return u"CAST({} AS VARCHAR2(135))".format(
self.db.quoteId(field.name))
elif field.modifier:
nbChars = 2 + int(field.charMaxLen) + \
int(field.modifier)
return u"CAST({} AS VARCHAR2({}))".format(
self.db.quoteId(field.name),
unicode(nbChars))
return u"CAST({0} As VARCHAR2({1}))".format(
self.db.quoteId(field.name), field.charMaxLen)
def _deleteCursor(self):
self.db._close_cursor(self.cursor)
self.cursor = None
def __del__(self):
self.table.aboutToChange.disconnect(self._deleteCursor)
self._deleteCursor()
def getData(self, row, col):
if (row < self.fetchedFrom
or row >= self.fetchedFrom + self.fetchedCount):
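            # The requested row is outside the cached window, so refetch a new
            # window that starts roughly fetchedCount/2 rows before it.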
margin = self.fetchedCount / 2
if row + margin >= self.rowCount():
start = self.rowCount() - margin
else:
start = row - margin
if start < 0:
start = 0
self.fetchMoreData(start)
        # Guard against the unlikely case that the row is still outside the fetched data
if row - self.fetchedFrom >= len(self.resdata):
return None
return self.resdata[row - self.fetchedFrom][col]
def fetchMoreData(self, row_start):
if not self.cursor:
self._createCursor()
self.cursor.scroll(row_start - 1)
self.resdata = self.cursor.fetchmany(self.fetchedCount)
self.fetchedFrom = row_start
class ORSqlResultModel(SqlResultModel):
def __init__(self, db, sql, parent=None):
self.db = db.connector
t = QTime()
t.start()
c = self.db._execute(None, unicode(sql))
self._affectedRows = 0
data = []
header = self.db._get_cursor_columns(c)
if not header:
header = []
try:
if len(header) > 0:
data = self.db._fetchall(c)
self._affectedRows = len(data)
except DbError:
# nothing to fetch!
data = []
header = []
self._secs = t.elapsed() / 1000.0
del t
BaseTableModel.__init__(self, header, data, parent)
# commit before closing the cursor to make sure that the
# changes are stored
self.db._commit()
c.close()
del c
|
AsgerPetersen/QGIS
|
python/plugins/db_manager/db_plugins/oracle/data_model.py
|
Python
|
gpl-2.0
| 5,139
|
import os
import logging
import time
from autotest.client.shared import utils_memory
from autotest.client.shared import error
from autotest.client.shared import ssh_key
from virttest import libvirt_vm
from virttest import utils_test
from virttest import remote
from virttest import data_dir
from virttest.utils_test import libvirt as utlv
from virttest.libvirt_xml import vm_xml
def set_cpu_memory(vm_name, cpu, memory):
"""
Change vms' cpu and memory.
"""
vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
vmxml.vcpu = cpu
    # Set max_mem as well, to avoid current memory exceeding the maximum
vmxml.max_mem = memory
vmxml.current_mem = memory
logging.debug("VMXML info:\n%s", vmxml.get('xml'))
vmxml.undefine()
vmxml.define()
def check_dest_vm_network(vm, ip, remote_host, username, password,
shell_prompt):
"""
Ping migrated vms on remote host.
"""
session = remote.remote_login("ssh", remote_host, 22, username,
password, shell_prompt)
    # Timeout for waiting until the vm's network is reachable
logging.debug("verifying VM's IP...")
timeout = 60
ping_failed = True
ping_cmd = "ping -c 4 %s" % ip
while timeout > 0:
ps, po = session.cmd_status_output(ping_cmd)
if ps:
time.sleep(5)
timeout -= 5
continue
logging.error(po)
ping_failed = False
break
if ping_failed:
raise error.TestFail("Check %s IP failed." % vm.name)
def do_stress_migration(vms, srcuri, desturi, stress_type,
migration_type, params, thread_timeout=60):
"""
Migrate vms with stress.
:param vms: migrated vms.
"""
fail_info = utils_test.load_stress(stress_type, vms, params)
migtest = utlv.MigrationTest()
options = ''
if migration_type == "compressed":
options = "--compressed"
migration_type = "orderly"
shared_dir = os.path.dirname(data_dir.get_data_dir())
src_file = os.path.join(shared_dir, "scripts", "duplicate_pages.py")
dest_dir = "/tmp"
for vm in vms:
session = vm.wait_for_login()
vm.copy_files_to(src_file, dest_dir)
status = session.cmd_status("cd /tmp;python duplicate_pages.py")
if status:
fail_info.append("Set duplicated pages for vm failed.")
if len(fail_info):
logging.warning("Add stress for migration failed:%s", fail_info)
logging.debug("Starting migration...")
migrate_options = ("--live --unsafe %s --timeout %s"
% (options, params.get("virsh_migrate_timeout", 60)))
migtest.do_migration(vms, srcuri, desturi, migration_type, options=migrate_options,
thread_timeout=thread_timeout)
    # vms will be shut down, so no need to do this cleanup
    # And the migrated vms may not be reachable for login if the network is a local LAN
if stress_type == "stress_on_host":
utils_test.unload_stress(stress_type, vms)
if not migtest.RET_MIGRATION:
raise error.TestFail()
def run(test, params, env):
"""
Test migration under stress.
"""
vm_names = params.get("migration_vms").split()
if len(vm_names) < 2:
raise error.TestNAError("Provide enough vms for migration first.")
src_uri = params.get("migrate_src_uri", "qemu+ssh://EXAMPLE/system")
if src_uri.count('///') or src_uri.count('EXAMPLE'):
raise error.TestNAError("The src_uri '%s' is invalid" % src_uri)
dest_uri = params.get("migrate_dest_uri", "qemu+ssh://EXAMPLE/system")
if dest_uri.count('///') or dest_uri.count('EXAMPLE'):
raise error.TestNAError("The dest_uri '%s' is invalid" % dest_uri)
# Migrated vms' instance
vms = []
for vm_name in vm_names:
vms.append(libvirt_vm.VM(vm_name, params, test.bindir,
env.get("address_cache")))
load_vm_names = params.get("load_vms").split()
# vms for load
load_vms = []
for vm_name in load_vm_names:
load_vms.append(libvirt_vm.VM(vm_name, params, test.bindir,
env.get("address_cache")))
params['load_vms'] = load_vms
cpu = int(params.get("smp", 1))
memory = int(params.get("mem")) * 1024
stress_type = params.get("migration_stress_type")
vm_bytes = params.get("stress_vm_bytes")
stress_args = params.get("stress_args")
migration_type = params.get("migration_type")
start_migration_vms = "yes" == params.get("start_migration_vms", "yes")
thread_timeout = int(params.get("thread_timeout", 120))
remote_host = params.get("migrate_dest_host")
username = params.get("migrate_dest_user", "root")
password = params.get("migrate_dest_pwd")
prompt = params.get("shell_prompt", r"[\#\$]")
# Set vm_bytes for start_cmd
mem_total = utils_memory.memtotal()
vm_reserved = len(vms) * memory
if vm_bytes == "half":
vm_bytes = (mem_total - vm_reserved) / 2
elif vm_bytes == "shortage":
vm_bytes = mem_total - vm_reserved + 524288
if vm_bytes is not None:
params["stress_args"] = stress_args % vm_bytes
for vm in vms:
# Keep vm dead for edit
if vm.is_alive():
vm.destroy()
set_cpu_memory(vm.name, cpu, memory)
try:
vm_ipaddr = {}
if start_migration_vms:
for vm in vms:
vm.start()
vm.wait_for_login()
vm_ipaddr[vm.name] = vm.get_address()
# TODO: recover vm if start failed?
# Config ssh autologin for remote host
ssh_key.setup_ssh_key(remote_host, username, password, port=22)
do_stress_migration(vms, src_uri, dest_uri, stress_type,
migration_type, params, thread_timeout)
# Check network of vms on destination
if start_migration_vms and migration_type != "cross":
for vm in vms:
check_dest_vm_network(vm, vm_ipaddr[vm.name], remote_host,
username, password, prompt)
finally:
logging.debug("Cleanup vms...")
for vm_name in vm_names:
vm = libvirt_vm.VM(vm_name, params, test.bindir,
env.get("address_cache"))
utlv.MigrationTest().cleanup_dest_vm(vm, None, dest_uri)
if vm.is_alive():
vm.destroy(gracefully=False)
env.clean_objects()
|
waynesun09/tp-libvirt
|
libvirt/tests/src/virsh_cmd/domain/virsh_migrate_stress.py
|
Python
|
gpl-2.0
| 6,495
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# Copyright (C) 2012-2019 British Crown (Met Office) & Contributors.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# ----------------------------------------------------------------------------
"""Builtin application: rose_arch: transform and archive suite files."""
import errno
from glob import glob
import os
import re
from metomi.rose.app_run import (
BuiltinApp,
ConfigValueError,
CompulsoryConfigValueError)
from metomi.rose.checksum import get_checksum, get_checksum_func
from metomi.rose.env import env_var_process, UnboundEnvironmentVariableError
from metomi.rose.popen import RosePopenError
from metomi.rose.reporter import Event, Reporter
from metomi.rose.scheme_handler import SchemeHandlersManager
import shlex
import sqlite3
import sys
from tempfile import mkdtemp
from time import gmtime, strftime, time
class RoseArchDuplicateError(ConfigValueError):
"""An exception raised if duplicate archive targets are provided."""
ERROR_FORMAT = '%s: duplicate archive target%s: "%s"'
class RoseArchValueError(KeyError):
"""An error raised on a bad value."""
ERROR_FORMAT = "%s: bad %s: %s: %s: %s"
def __str__(self):
return self.ERROR_FORMAT % self.args
class RoseArchEvent(Event):
"""Event raised on an archiving target."""
def __str__(self):
target = self.args[0]
t_info = ""
if len(self.args) > 1:
times = self.args[1]
t_init, t_tran, t_arch = times
t_info = ", t(init)=%s, dt(tran)=%ds, dt(arch)=%ds" % (
strftime("%Y-%m-%dT%H:%M:%SZ", gmtime(t_init)),
t_tran - t_init,
t_arch - t_tran
)
ret_code_str = ""
if len(self.args) > 2 and self.args[2] is not None:
ret_code_str = ", ret-code=%d" % self.args[2]
ret = "%s %s [compress=%s%s%s]" % (
target.status,
target.name,
target.compress_scheme,
t_info,
ret_code_str)
if target.status != target.ST_OLD:
for source in sorted(target.sources.values(),
key=lambda s: s.name):
ret += "\n%s\t%s (%s)" % (
target.status, source.name, source.orig_name)
return ret
class RoseArchApp(BuiltinApp):
"""Transform and archive files generated by suite tasks."""
SCHEME = "rose_arch"
SECTION = "arch"
def run(self, app_runner, conf_tree, opts, args, uuid, work_files):
"""Transform and archive suite files.
This application is designed to work under "rose task-run" in a suite.
"""
dao = RoseArchDAO()
suite_name = os.getenv("ROSE_SUITE_NAME")
if not suite_name:
app_runner.handle_event(
'rose_arch must be run under rose task-run.',
level=Reporter.FAIL, kind=Reporter.KIND_ERR)
return 1
suite_dir = app_runner.suite_engine_proc.get_suite_dir(suite_name)
cwd = os.getcwd()
app_runner.fs_util.chdir(suite_dir)
try:
return self._run(dao, app_runner, conf_tree.node)
finally:
app_runner.fs_util.chdir(cwd)
dao.close()
def _run(self, dao, app_runner, config):
"""Transform and archive suite files.
This application is designed to work under "rose task-run" in a suite.
"""
compress_manager = SchemeHandlersManager(
[os.path.dirname(os.path.dirname(sys.modules["rose"].__file__))],
"rose.apps.rose_arch_compressions",
["compress_sources"],
None, app_runner)
# Set up the targets
s_key_tails = set()
targets = []
for t_key, t_node in sorted(config.value.items()):
if t_node.is_ignored() or ":" not in t_key:
continue
s_key_head, s_key_tail = t_key.split(":", 1)
if s_key_head != self.SECTION or not s_key_tail:
continue
# Determine target path.
s_key_tail = t_key.split(":", 1)[1]
try:
s_key_tail = env_var_process(s_key_tail)
except UnboundEnvironmentVariableError as exc:
raise ConfigValueError([t_key, ""], "", exc)
            # A target wrapped in parentheses is optional.
is_compulsory_target = True
if s_key_tail.startswith("(") and s_key_tail.endswith(")"):
s_key_tail = s_key_tail[1:-1]
is_compulsory_target = False
# Don't permit duplicate targets.
if s_key_tail in s_key_tails:
raise RoseArchDuplicateError([t_key], '', s_key_tail)
else:
s_key_tails.add(s_key_tail)
target = self._run_target_setup(
app_runner, compress_manager, config, t_key, s_key_tail,
t_node, is_compulsory_target)
old_target = dao.select(target.name)
if old_target is None or old_target != target:
dao.delete(target)
else:
target.status = target.ST_OLD
targets.append(target)
targets.sort(key=lambda target: target.name)
# Delete from database items that are no longer relevant
dao.delete_all(filter_targets=targets)
# Update the targets
for target in targets:
self._run_target_update(dao, app_runner, compress_manager, target)
return [target.status for target in targets].count(
RoseArchTarget.ST_BAD)
def _run_target_setup(
self, app_runner, compress_manager, config, t_key, s_key_tail,
t_node, is_compulsory_target=True):
"""Helper for _run. Set up a target."""
target_prefix = self._get_conf(
config, t_node, "target-prefix", default="")
target = RoseArchTarget(target_prefix + s_key_tail)
target.command_format = self._get_conf(
config, t_node, "command-format", compulsory=True)
try:
target.command_format % {"sources": "", "target": ""}
except KeyError as exc:
target.status = target.ST_BAD
app_runner.handle_event(
RoseArchValueError(
target.name,
"command-format",
target.command_format,
type(exc).__name__,
exc
)
)
target.source_edit_format = self._get_conf(
config, t_node, "source-edit-format", default="")
try:
target.source_edit_format % {"in": "", "out": ""}
except KeyError as exc:
target.status = target.ST_BAD
app_runner.handle_event(
RoseArchValueError(
target.name,
"source-edit-format",
target.source_edit_format,
type(exc).__name__,
exc
)
)
update_check_str = self._get_conf(config, t_node, "update-check")
try:
checksum_func = get_checksum_func(update_check_str)
except ValueError as exc:
raise RoseArchValueError(
target.name,
"update-check",
update_check_str,
type(exc).__name__,
exc)
source_prefix = self._get_conf(
config, t_node, "source-prefix", default="")
for source_glob in shlex.split(
self._get_conf(config, t_node, "source", compulsory=True)):
is_compulsory_source = is_compulsory_target
if source_glob.startswith("(") and source_glob.endswith(")"):
source_glob = source_glob[1:-1]
is_compulsory_source = False
paths = glob(source_prefix + source_glob)
if not paths:
exc = OSError(errno.ENOENT, os.strerror(errno.ENOENT),
source_prefix + source_glob)
app_runner.handle_event(ConfigValueError(
[t_key, "source"], source_glob, exc))
if is_compulsory_source:
target.status = target.ST_BAD
continue
for path in paths:
# N.B. source_prefix may not be a directory
name = path[len(source_prefix):]
for path_, checksum, _ in get_checksum(path, checksum_func):
if checksum is None: # is directory
continue
if path_:
target.sources[checksum] = RoseArchSource(
checksum,
os.path.join(name, path_),
os.path.join(path, path_))
else: # path is a file
target.sources[checksum] = RoseArchSource(
checksum, name, path)
if not target.sources:
if is_compulsory_target:
target.status = target.ST_BAD
else:
target.status = target.ST_NULL
target.compress_scheme = self._get_conf(config, t_node, "compress")
if not target.compress_scheme:
target_base = target.name
if "/" in target.name:
target_base = target.name.rsplit("/", 1)[1]
if "." in target_base:
tail = target_base.split(".", 1)[1]
if compress_manager.get_handler(tail):
target.compress_scheme = tail
elif compress_manager.get_handler(target.compress_scheme) is None:
app_runner.handle_event(ConfigValueError(
[t_key, "compress"],
target.compress_scheme,
KeyError(target.compress_scheme)))
target.status = target.ST_BAD
rename_format = self._get_conf(config, t_node, "rename-format")
if rename_format:
rename_parser_str = self._get_conf(config, t_node, "rename-parser")
if rename_parser_str:
try:
rename_parser = re.compile(rename_parser_str)
except re.error as exc:
raise RoseArchValueError(
target.name,
"rename-parser",
rename_parser_str,
type(exc).__name__,
exc)
else:
rename_parser = None
for source in target.sources.values():
dict_ = {
"cycle": os.getenv("ROSE_TASK_CYCLE_TIME"),
"name": source.name}
if rename_parser:
match = rename_parser.match(source.name)
if match:
dict_.update(match.groupdict())
try:
source.name = rename_format % dict_
except (KeyError, ValueError) as exc:
raise RoseArchValueError(
target.name,
"rename-format",
rename_format,
type(exc).__name__,
exc)
return target
@classmethod
def _run_target_update(cls, dao, app_runner, compress_manager, target):
"""Helper for _run. Update a target."""
if target.status == target.ST_OLD:
app_runner.handle_event(RoseArchEvent(target))
return
if target.status in (target.ST_BAD, target.ST_NULL):
# boolean to int
target.command_rc = int(target.status == target.ST_BAD)
if target.status == target.ST_BAD:
level = Event.FAIL
else:
level = Event.DEFAULT
event = RoseArchEvent(target)
app_runner.handle_event(event)
app_runner.handle_event(event, kind=Event.KIND_ERR, level=level)
return
target.command_rc = 1
dao.insert(target)
work_dir = mkdtemp()
times = [time()] * 3 # init, transformed, archived
ret_code = None
try:
# Rename/edit sources
target.status = target.ST_BAD
rename_required = False
for source in target.sources.values():
if source.name != source.orig_name:
rename_required = True
break
if rename_required or target.source_edit_format:
for source in target.sources.values():
source.path = os.path.join(work_dir, source.name)
app_runner.fs_util.makedirs(
os.path.dirname(source.path))
if target.source_edit_format:
command = target.source_edit_format % {
"in": source.orig_path,
"out": source.path}
app_runner.popen.run_ok(command, shell=True)
else:
app_runner.fs_util.symlink(source.orig_path,
source.path)
# Compress sources
if target.compress_scheme:
handler = compress_manager.get_handler(
target.compress_scheme)
handler.compress_sources(target, work_dir)
times[1] = time() # transformed time
# Run archive command
sources = []
if target.work_source_path:
sources = [target.work_source_path]
else:
for source in target.sources.values():
sources.append(source.path)
command = target.command_format % {
"sources": app_runner.popen.list_to_shell_str(sources),
"target": app_runner.popen.list_to_shell_str([target.name])}
ret_code, out, err = app_runner.popen.run(command, shell=True)
if isinstance(out, bytes):
out, err = out.decode(), err.decode()
times[2] = time() # archived time
if ret_code:
app_runner.handle_event(
RosePopenError([command], ret_code, out, err))
else:
target.status = target.ST_NEW
app_runner.handle_event(err, kind=Event.KIND_ERR)
app_runner.handle_event(out)
target.command_rc = ret_code
dao.update_command_rc(target)
finally:
app_runner.fs_util.delete(work_dir)
event = RoseArchEvent(target, times, ret_code)
app_runner.handle_event(event)
if target.status in (target.ST_BAD, target.ST_NULL):
app_runner.handle_event(
event, kind=Event.KIND_ERR, level=Event.FAIL)
def _get_conf(self, r_node, t_node, key, compulsory=False, default=None):
"""Return the value of a configuration."""
value = t_node.get_value(
[key],
r_node.get_value([self.SECTION, key], default=default))
if compulsory and not value:
raise CompulsoryConfigValueError([key], None,
KeyError(key))
if value:
try:
value = env_var_process(value)
except UnboundEnvironmentVariableError as exc:
raise ConfigValueError([key], value, exc)
return value
class RoseArchTarget(object):
"""An archive target."""
ST_OLD = "="
ST_NEW = "+"
ST_BAD = "!"
ST_NULL = "0"
def __init__(self, name):
self.name = name
self.compress_scheme = None
self.command_format = None
self.command_rc = 0
self.sources = {} # checksum: RoseArchSource
self.source_edit_format = None
self.status = None
self.work_source_path = None
def __eq__(self, other):
if id(self) != id(other):
for key in ["name", "compress_scheme", "command_format",
"command_rc", "sources", "source_edit_format"]:
if getattr(self, key) != getattr(other, key, None):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
class RoseArchSource(object):
"""An archive source."""
def __init__(self, checksum, orig_name, orig_path=None):
self.checksum = checksum
self.orig_name = orig_name
self.orig_path = orig_path
self.name = self.orig_name
self.path = self.orig_path
def __eq__(self, other):
if id(self) != id(other):
for key in ["checksum", "name"]:
if getattr(self, key) != getattr(other, key, None):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
class RoseArchDAO(object):
"""Data access object for incremental mode."""
FILE_NAME = ".rose-arch.db"
T_SOURCES = "sources"
T_TARGETS = "targets"
def __init__(self):
self.file_name = os.path.abspath(self.FILE_NAME)
self.conn = None
self.create()
def close(self):
"""Close connection to the SQLite database file."""
if self.conn is not None:
self.conn.close()
self.conn = None
def get_conn(self):
"""Connect to the SQLite database file."""
if self.conn is None:
self.conn = sqlite3.connect(self.file_name)
return self.conn
def create(self):
"""Create the database file if it does not exist."""
if not os.path.exists(self.file_name):
conn = self.get_conn()
conn.execute("""CREATE TABLE """ + self.T_TARGETS + """ (
target_name TEXT,
compress_scheme TEXT,
command_format TEXT,
command_rc INT,
source_edit_format TEXT,
PRIMARY KEY(target_name))""")
conn.execute("""CREATE TABLE """ + self.T_SOURCES + """ (
target_name TEXT,
source_name TEXT,
checksum TEXT,
UNIQUE(target_name, checksum))""")
conn.commit()
def delete(self, target):
"""Remove target from the database."""
conn = self.get_conn()
for name in [self.T_TARGETS, self.T_SOURCES]:
conn.execute("DELETE FROM " + name + " WHERE target_name==?",
[target.name])
conn.commit()
def delete_all(self, filter_targets):
"""Remove all but those matching filter_targets from the database."""
conn = self.get_conn()
where = ""
stmt_args = []
if filter_targets:
stmt_fragments = []
for filter_target in filter_targets:
stmt_fragments.append("target_name != ?")
stmt_args.append(filter_target.name)
where += " WHERE " + " AND ".join(stmt_fragments)
for name in [self.T_TARGETS, self.T_SOURCES]:
conn.execute("DELETE FROM " + name + where, stmt_args)
conn.commit()
def insert(self, target):
"""Insert a target in the database."""
conn = self.get_conn()
t_stmt = "INSERT INTO " + self.T_TARGETS + " VALUES (?, ?, ?, ?, ?)"
t_stmt_args = [target.name, target.compress_scheme,
target.command_format, target.command_rc,
target.source_edit_format]
conn.execute(t_stmt, t_stmt_args)
sh_stmt = r"INSERT INTO " + self.T_SOURCES + " VALUES (?, ?, ?)"
sh_stmt_args = [target.name]
for checksum, source in target.sources.items():
conn.execute(sh_stmt, sh_stmt_args + [source.name, checksum])
conn.commit()
def select(self, target_name):
"""Query database for target_name.
On success, reconstruct the target as an instance of RoseArchTarget
and return it.
Return None on failure.
"""
conn = self.get_conn()
t_stmt = (
"SELECT " +
"compress_scheme,command_format,command_rc,source_edit_format " +
"FROM " +
self.T_TARGETS +
" WHERE target_name==?"
)
t_stmt_args = [target_name]
for row in conn.execute(t_stmt, t_stmt_args):
target = RoseArchTarget(target_name)
(target.compress_scheme,
target.command_format,
target.command_rc,
target.source_edit_format) = row
break
else:
return None
s_stmt = ("SELECT source_name,checksum FROM " + self.T_SOURCES +
" WHERE target_name==?")
s_stmt_args = [target_name]
for s_row in conn.execute(s_stmt, s_stmt_args):
source_name, checksum = s_row
target.sources[checksum] = RoseArchSource(checksum, source_name)
return target
def update_command_rc(self, target):
"""Update the command return code of a target in the database."""
conn = self.get_conn()
conn.execute("UPDATE " + self.T_TARGETS + " SET command_rc=?" +
" WHERE target_name==?", [target.command_rc, target.name])
conn.commit()
|
benfitzpatrick/rose
|
metomi/rose/apps/rose_arch.py
|
Python
|
gpl-3.0
| 22,301
|
import asyncio
from datetime import datetime
from Core.Util import UtilBot
import traceback
''' To use this, either add on to the ExtraCommands.py file or create your own Python file. Import the DispatcherSingleton
and annotate any function that you wish to be a command with the @DispatcherSingleton.register annotation, and it will
appear in the bot's help menu and be available to use.
For commands that should be hidden, use the @DispatcherSingleton.register_hidden annotation instead, and it won't
appear in the /help menu. It should be noted that hidden commands' primary purpose are to be used with autoreplies, and
won't be able to be ran by anyone other than the Bot itself.
To choose what happens when a command isn't found, register a function with @DispatcherSingleton.register_unknown, and
that function will run whenever the Bot can't find a command that suits what the user entered.'''
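# A hedged example (not in the original file) of the registration pattern the
# notes above describe; bot.send_message is assumed here and may not match the
# real HangoutsBot API, so the sketch is left commented out.
#
#   from Core.Commands.Dispatcher import DispatcherSingleton
#
#   @DispatcherSingleton.register
#   def ping(bot, event, *args):
#       """Usage: /ping -- replies with "pong"."""
#       bot.send_message(event.conv, "pong")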
class NoCommandFoundError(Exception):
pass
class CommandDispatcher(object):
def __init__(self):
self.commands = {}
self.hidden_commands = {}
self.unknown_command = None
self.on_connect_listeners = []
@asyncio.coroutine
def run(self, bot, event, bot_command_char, *args, **kwds):
bot_command_char = bot_command_char.strip() # For cases like "/bot " or " / "
if args[0] == bot_command_char: # Either the command char is like "/bot" or the user did "/ ping"
args = list(args[1:])
if args[0].startswith(bot_command_char):
command = args[0][len(bot_command_char):]
else:
command = args[0]
try:
func = self.commands[command]
except KeyError:
try:
if event.user.is_self:
func = self.hidden_commands[command]
else:
raise KeyError
except KeyError:
if self.unknown_command:
func = self.unknown_command
else:
raise NoCommandFoundError(
"Command {} is not registered. Furthermore, no command found to handle unknown commands.".format
(command))
func = asyncio.coroutine(func)
args = list(args[1:])
        # If the first argument is '?', reply with the command's docstring as help.
if len(args) > 0 and args[0] == '?':
if func.__doc__:
bot.send_message_segments(event.conv, UtilBot.text_to_segments(func.__doc__))
return
try:
asyncio.async(func(bot, event, *args, **kwds))
except Exception as e:
log = open('log.txt', 'a+')
log.writelines(str(datetime.now()) + ":\n " + traceback.format_exc() + "\n\n")
log.close()
print(traceback.format_exc())
def register_aliases(self, aliases=None):
"""Registers a command under the function name & any names specified in aliases.
"""
def func_wrapper(func):
self.commands[func.__name__] = func
for alias in aliases:
self.commands[alias] = func
return func
return func_wrapper
def register_extras(self, is_hidden=False, aliases=None, on_connect_listener=None):
"""Registers a function as hidden with aliases, or any combination of that."""
def func_wrapper(func):
if is_hidden and aliases:
self.hidden_commands[func.__name__] = func
for alias in aliases:
self.hidden_commands[alias] = func
elif aliases:
self.commands[func.__name__] = func
for alias in aliases:
self.commands[alias] = func
elif is_hidden:
self.hidden_commands[func.__name__] = func
else:
self.commands[func.__name__] = func
return func
self.on_connect_listeners.append(on_connect_listener)
return func_wrapper
def register(self, func):
"""Decorator for registering command"""
self.commands[func.__name__] = func
return func
def register_hidden(self, func):
"""Registers a command as hidden (This makes it only runnable by the Bot and it won't appear in the help menu)"""
self.hidden_commands[func.__name__] = func
return func
def register_unknown(self, func):
self.unknown_command = func
return func
def register_on_connect_listener(self, func):
self.on_connect_listeners.append(func)
# CommandDispatcher singleton
DispatcherSingleton = CommandDispatcher()
|
johnwiseheart/HangoutsBot
|
Core/Commands/Dispatcher.py
|
Python
|
gpl-3.0
| 4,632
|
#!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import operator, os, json, re, time
from binascii import hexlify, unhexlify
from collections import OrderedDict
import cherrypy
from calibre.constants import filesystem_encoding, config_dir
from calibre import (isbytestring, force_unicode, fit_image,
prepare_string_for_xml, sanitize_file_name2)
from calibre.utils.filenames import ascii_filename
from calibre.utils.config import prefs, JSONConfig
from calibre.utils.icu import sort_key
from calibre.utils.magick import Image
from calibre.library.comments import comments_to_html
from calibre.library.server import custom_fields_to_display
from calibre.library.field_metadata import category_icon_map
from calibre.library.server.utils import quote, unquote
from calibre.ebooks.metadata.sources.identify import urls_from_identifiers
def xml(*args, **kwargs):
ans = prepare_string_for_xml(*args, **kwargs)
    return ans.replace(''', ''')
def render_book_list(ids, prefix, suffix=''): # {{{
pages = []
num = len(ids)
pos = 0
delta = 25
while ids:
page = list(ids[:delta])
pages.append((page, pos))
ids = ids[delta:]
pos += len(page)
page_template = u'''\
<div class="page" id="page{0}">
<div class="load_data" title="{1}">
<span class="url" title="{prefix}/browse/booklist_page"></span>
<span class="start" title="{start}"></span>
<span class="end" title="{end}"></span>
</div>
<div class="loading"><img src="{prefix}/static/loading.gif" /> {2}</div>
<div class="loaded"></div>
</div>
'''
pagelist_template = u'''\
<div class="pagelist">
<ul>
{pages}
</ul>
</div>
'''
rpages, lpages = [], []
for i, x in enumerate(pages):
pg, pos = x
ld = xml(json.dumps(pg), True)
start, end = pos+1, pos+len(pg)
rpages.append(page_template.format(i, ld,
xml(_('Loading, please wait')) + '…',
start=start, end=end, prefix=prefix))
lpages.append(' '*20 + (u'<li><a href="#" title="Books {start} to {end}"'
' onclick="gp_internal(\'{id}\'); return false;"> '
'{start} to {end}</a></li>').format(start=start, end=end,
id='page%d'%i))
rpages = u'\n\n'.join(rpages)
lpages = u'\n'.join(lpages)
pagelist = pagelist_template.format(pages=lpages)
templ = u'''\
<h3>{0} {suffix}</h3>
<div id="booklist">
<div id="pagelist" title="{goto}">{pagelist}</div>
<div class="listnav topnav">
{navbar}
</div>
{pages}
<div class="listnav bottomnav">
{navbar}
</div>
</div>
'''
gp_start = gp_end = ''
if len(pages) > 1:
gp_start = '<a href="#" onclick="goto_page(); return false;" title="%s">' % \
(_('Go to') + '…')
gp_end = '</a>'
navbar = u'''\
<div class="navleft">
<a href="#" onclick="first_page(); return false;">{first}</a>
<a href="#" onclick="previous_page(); return false;">{previous}</a>
</div>
<div class="navmiddle">
{gp_start}
<span class="start">0</span> to <span class="end">0</span>
{gp_end}of {num}
</div>
<div class="navright">
<a href="#" onclick="next_page(); return false;">{next}</a>
<a href="#" onclick="last_page(); return false;">{last}</a>
</div>
'''.format(first=_('First'), last=_('Last'), previous=_('Previous'),
next=_('Next'), num=num, gp_start=gp_start, gp_end=gp_end)
return templ.format(_('Browsing %d books')%num, suffix=suffix,
pages=rpages, navbar=navbar, pagelist=pagelist,
goto=xml(_('Go to'), True) + '…')
# }}}
def utf8(x): # {{{
if isinstance(x, unicode):
x = x.encode('utf-8')
return x
# }}}
def render_rating(rating, url_prefix, container='span', prefix=None): # {{{
if rating < 0.1:
return '', ''
added = 0
if prefix is None:
prefix = _('Average rating')
rstring = xml(_('%(prefix)s: %(rating).1f stars')%dict(
prefix=prefix, rating=rating if rating else 0.0),
True)
ans = ['<%s class="rating">' % (container)]
for i in range(5):
n = rating - added
x = 'half'
if n <= 0.1:
x = 'off'
elif n >= 0.9:
x = 'on'
ans.append(
u'<img alt="{0}" title="{0}" src="{2}/static/star-{1}.png" />'.format(
rstring, x, url_prefix))
added += 1
ans.append('</%s>'%container)
return u''.join(ans), rstring
# }}}
def get_category_items(category, items, datatype, prefix): # {{{
def item(i):
templ = (u'<div title="{4}" class="category-item">'
'<div class="category-name">'
'<a href="{5}{3}" title="{4}">{0}</a></div>'
'<div>{1}</div>'
'<div>{2}</div></div>')
rating, rstring = render_rating(i.avg_rating, prefix)
if i.use_sort_as_name:
name = xml(i.sort)
else:
name = xml(i.name)
if datatype == 'rating':
name = xml(_('%d stars')%int(i.avg_rating))
id_ = i.id
if id_ is None:
id_ = hexlify(force_unicode(name).encode('utf-8'))
id_ = xml(str(id_))
desc = ''
if i.count > 0:
desc += '[' + _('%d books')%i.count + ']'
q = i.category
if not q:
q = category
href = '/browse/matches/%s/%s'%(quote(q), quote(id_))
return templ.format(xml(name), rating,
xml(desc), xml(href, True), rstring, prefix)
items = list(map(item, items))
return '\n'.join(['<div class="category-container">'] + items + ['</div>'])
# }}}
class Endpoint(object): # {{{
'Manage encoding, mime-type, last modified, cookies, etc.'
def __init__(self, mimetype='text/html; charset=utf-8', sort_type='category'):
self.mimetype = mimetype
self.sort_type = sort_type
self.sort_kwarg = sort_type + '_sort'
self.sort_cookie_name = 'calibre_browse_server_sort_'+self.sort_type
def __call__(eself, func):
def do(self, *args, **kwargs):
if 'json' not in eself.mimetype:
sort_val = None
cookie = cherrypy.request.cookie
if cookie.has_key(eself.sort_cookie_name):
sort_val = cookie[eself.sort_cookie_name].value
kwargs[eself.sort_kwarg] = sort_val
# Remove AJAX caching disabling jquery workaround arg
kwargs.pop('_', None)
ans = func(self, *args, **kwargs)
cherrypy.response.headers['Content-Type'] = eself.mimetype
updated = self.db.last_modified()
cherrypy.response.headers['Last-Modified'] = \
self.last_modified(max(updated, self.build_time))
ans = utf8(ans)
return ans
do.__name__ = func.__name__
return do
# }}}
class BrowseServer(object):
def add_routes(self, connect):
base_href = '/browse'
connect('browse', base_href, self.browse_catalog)
connect('browse_catalog', base_href+'/category/{category}',
self.browse_catalog)
connect('browse_category_group',
base_href+'/category_group/{category}/{group}',
self.browse_category_group)
connect('browse_matches',
base_href+'/matches/{category}/{cid}',
self.browse_matches)
connect('browse_booklist_page',
base_href+'/booklist_page',
self.browse_booklist_page)
connect('browse_search', base_href+'/search',
self.browse_search)
connect('browse_details', base_href+'/details/{id}',
self.browse_details)
connect('browse_book', base_href+'/book/{id}',
self.browse_book)
connect('browse_random', base_href+'/random',
self.browse_random)
connect('browse_category_icon', base_href+'/icon/{name}',
self.browse_icon)
self.icon_map = JSONConfig('gui').get('tags_browser_category_icons', {})
# Templates {{{
def browse_template(self, sort, category=True, initial_search=''):
if not hasattr(self, '__browse_template__') or \
self.opts.develop:
self.__browse_template__ = \
P('content_server/browse/browse.html', data=True).decode('utf-8')
ans = self.__browse_template__
scn = 'calibre_browse_server_sort_'
if category:
sort_opts = [('rating', _('Average rating')), ('name',
_('Name')), ('popularity', _('Popularity'))]
scn += 'category'
else:
scn += 'list'
fm = self.db.field_metadata
sort_opts, added = [], set([])
displayed_custom_fields = custom_fields_to_display(self.db)
for x in fm.sortable_field_keys():
if x in ('ondevice', 'formats', 'sort'):
continue
if fm.is_ignorable_field(x) and x not in displayed_custom_fields:
continue
if x == 'comments' or fm[x]['datatype'] == 'comments':
continue
n = fm[x]['name']
if n not in added:
added.add(n)
sort_opts.append((x, n))
ans = ans.replace('{sort_select_label}', xml(_('Sort by')+':'))
ans = ans.replace('{sort_cookie_name}', scn)
ans = ans.replace('{prefix}', self.opts.url_prefix)
ans = ans.replace('{library}', _('library'))
ans = ans.replace('{home}', _('home'))
ans = ans.replace('{Search}', _('Search'))
opts = ['<option %svalue="%s">%s</option>' % (
'selected="selected" ' if k==sort else '',
xml(k), xml(n), ) for k, n in
sorted(sort_opts, key=lambda x: sort_key(operator.itemgetter(1)(x))) if k and n]
ans = ans.replace('{sort_select_options}', ('\n'+' '*20).join(opts))
lp = self.db.library_path
if isbytestring(lp):
lp = force_unicode(lp, filesystem_encoding)
ans = ans.replace('{library_name}', xml(os.path.basename(lp)))
ans = ans.replace('{library_path}', xml(lp, True))
ans = ans.replace('{initial_search}', initial_search)
return ans
@property
def browse_summary_template(self):
if not hasattr(self, '__browse_summary_template__') or \
self.opts.develop:
self.__browse_summary_template__ = \
P('content_server/browse/summary.html', data=True).decode('utf-8')
return self.__browse_summary_template__.replace('{prefix}',
self.opts.url_prefix)
@property
def browse_details_template(self):
if not hasattr(self, '__browse_details_template__') or \
self.opts.develop:
self.__browse_details_template__ = \
P('content_server/browse/details.html', data=True).decode('utf-8')
return self.__browse_details_template__.replace('{prefix}',
self.opts.url_prefix)
# }}}
# Catalogs {{{
def browse_icon(self, name='blank.png'):
cherrypy.response.headers['Content-Type'] = 'image/png'
cherrypy.response.headers['Last-Modified'] = self.last_modified(self.build_time)
if not hasattr(self, '__browse_icon_cache__'):
self.__browse_icon_cache__ = {}
if name not in self.__browse_icon_cache__:
if name.startswith('_'):
name = sanitize_file_name2(name[1:])
try:
with open(os.path.join(config_dir, 'tb_icons', name), 'rb') as f:
data = f.read()
except:
raise cherrypy.HTTPError(404, 'no icon named: %r'%name)
else:
try:
data = I(name, data=True)
except:
raise cherrypy.HTTPError(404, 'no icon named: %r'%name)
img = Image()
img.load(data)
width, height = img.size
scaled, width, height = fit_image(width, height, 48, 48)
if scaled:
img.size = (width, height)
self.__browse_icon_cache__[name] = img.export('png')
return self.__browse_icon_cache__[name]
def browse_toplevel(self):
categories = self.categories_cache()
category_meta = self.db.field_metadata
cats = [
(_('Newest'), 'newest', 'forward.png'),
(_('All books'), 'allbooks', 'book.png'),
(_('Random book'), 'randombook', 'random.png'),
]
def getter(x):
return category_meta[x]['name'].lower()
displayed_custom_fields = custom_fields_to_display(self.db)
uc_displayed = set()
for category in sorted(categories, key=lambda x: sort_key(getter(x))):
if len(categories[category]) == 0:
continue
if category in ('formats', 'identifiers'):
continue
meta = category_meta.get(category, None)
if meta is None:
continue
if self.db.field_metadata.is_ignorable_field(category) and \
category not in displayed_custom_fields:
continue
# get the icon files
main_cat = (category.partition('.')[0]) if hasattr(category,
'partition') else category
if main_cat in self.icon_map:
icon = '_'+quote(self.icon_map[main_cat])
elif category in category_icon_map:
icon = category_icon_map[category]
elif meta['is_custom']:
icon = category_icon_map['custom:']
elif meta['kind'] == 'user':
icon = category_icon_map['user:']
else:
icon = 'blank.png'
if meta['kind'] == 'user':
dot = category.find('.')
if dot > 0:
cat = category[:dot]
if cat not in uc_displayed:
cats.append((meta['name'][:dot-1], cat, icon))
uc_displayed.add(cat)
else:
cats.append((meta['name'], category, icon))
uc_displayed.add(category)
else:
cats.append((meta['name'], category, icon))
cats = [(u'<li><a title="{2} {0}" href="{3}/browse/category/{1}"> </a>'
u'<img src="{3}{src}" alt="{0}" />'
u'<span class="label">{0}</span>'
u'</li>')
.format(xml(x, True), xml(quote(y)), xml(_('Browse books by')),
self.opts.url_prefix, src='/browse/icon/'+z)
for x, y, z in cats]
main = u'<div class="toplevel"><h3>{0}</h3><ul>{1}</ul></div>'\
.format(_('Choose a category to browse by:'), u'\n\n'.join(cats))
return self.browse_template('name').format(title='',
script='toplevel();', main=main)
def browse_sort_categories(self, items, sort):
if sort not in ('rating', 'name', 'popularity'):
sort = 'name'
items.sort(key=lambda x: sort_key(getattr(x, 'sort', x.name)))
if sort == 'popularity':
items.sort(key=operator.attrgetter('count'), reverse=True)
elif sort == 'rating':
items.sort(key=operator.attrgetter('avg_rating'), reverse=True)
return sort
def browse_category(self, category, sort):
categories = self.categories_cache()
if category not in categories:
raise cherrypy.HTTPError(404, 'category not found')
category_meta = self.db.field_metadata
category_name = category_meta[category]['name']
datatype = category_meta[category]['datatype']
# See if we have any sub-categories to display. As we find them, add
# them to the displayed set to avoid showing the same item twice
uc_displayed = set()
cats = []
for ucat in sorted(categories.keys(), key=sort_key):
if len(categories[ucat]) == 0:
continue
if category == 'formats':
continue
meta = category_meta.get(ucat, None)
if meta is None:
continue
if meta['kind'] != 'user':
continue
cat_len = len(category)
if not (len(ucat) > cat_len and ucat.startswith(category+'.')):
continue
if ucat in self.icon_map:
icon = '_'+quote(self.icon_map[ucat])
else:
icon = category_icon_map['user:']
# we have a subcategory. Find any further dots (further subcats)
cat_len += 1
cat = ucat[cat_len:]
dot = cat.find('.')
if dot > 0:
# More subcats
cat = cat[:dot]
if cat not in uc_displayed:
cats.append((cat, ucat[:cat_len+dot], icon))
uc_displayed.add(cat)
else:
# This is the end of the chain
cats.append((cat, ucat, icon))
uc_displayed.add(cat)
cats = u'\n\n'.join(
[(u'<li><a title="{2} {0}" href="{3}/browse/category/{1}"> </a>'
u'<img src="{3}{src}" alt="{0}" />'
u'<span class="label">{0}</span>'
u'</li>')
.format(xml(x, True), xml(quote(y)), xml(_('Browse books by')),
self.opts.url_prefix, src='/browse/icon/'+z)
for x, y, z in cats])
if cats:
cats = (u'\n<div class="toplevel">\n'
'{0}</div>').format(cats)
script = 'toplevel();'
else:
script = 'true'
# Now do the category items
items = categories[category]
sort = self.browse_sort_categories(items, sort)
if not cats and len(items) == 1:
# Only one item in category, go directly to book list
html = get_category_items(category, items,
datatype, self.opts.url_prefix)
href = re.search(r'<a href="([^"]+)"', html)
if href is not None:
raise cherrypy.HTTPRedirect(href.group(1))
if len(items) <= self.opts.max_opds_ungrouped_items:
script = 'false'
items = get_category_items(category, items,
datatype, self.opts.url_prefix)
else:
getter = lambda x: unicode(getattr(x, 'sort', x.name))
starts = set([])
for x in items:
val = getter(x)
if not val:
val = u'A'
starts.add(val[0].upper())
category_groups = OrderedDict()
for x in sorted(starts):
category_groups[x] = len([y for y in items if
getter(y).upper().startswith(x)])
items = [(u'<h3 title="{0}"><a class="load_href" title="{0}"'
u' href="{4}{3}"><strong>{0}</strong> [{2}]</a></h3><div>'
u'<div class="loaded" style="display:none"></div>'
u'<div class="loading"><img alt="{1}" src="{4}/static/loading.gif" /><em>{1}</em></div>'
u'</div>').format(
xml(s, True),
xml(_('Loading, please wait'))+'…',
unicode(c),
xml(u'/browse/category_group/%s/%s'%(
hexlify(category.encode('utf-8')),
hexlify(s.encode('utf-8'))), True),
self.opts.url_prefix)
for s, c in category_groups.items()]
items = '\n\n'.join(items)
items = u'<div id="groups">\n{0}</div>'.format(items)
if cats:
script = 'toplevel();category(%s);'%script
else:
script = 'category(%s);'%script
main = u'''
<div class="category">
<h3>{0}</h3>
<a class="navlink" href="{3}/browse"
title="{2}">{2} ↑</a>
{1}
</div>
'''.format(
xml(_('Browsing by')+': ' + category_name), cats + items,
xml(_('Up'), True), self.opts.url_prefix)
return self.browse_template(sort).format(title=category_name,
script=script, main=main)
@Endpoint(mimetype='application/json; charset=utf-8')
def browse_category_group(self, category=None, group=None, sort=None):
if sort == 'null':
sort = None
if sort not in ('rating', 'name', 'popularity'):
sort = 'name'
try:
category = unhexlify(category)
if isbytestring(category):
category = category.decode('utf-8')
except:
raise cherrypy.HTTPError(404, 'invalid category')
categories = self.categories_cache()
if category not in categories:
raise cherrypy.HTTPError(404, 'category not found')
category_meta = self.db.field_metadata
datatype = category_meta[category]['datatype']
try:
group = unhexlify(group)
if isbytestring(group):
group = group.decode('utf-8')
except:
raise cherrypy.HTTPError(404, 'invalid group')
items = categories[category]
entries = []
getter = lambda x: unicode(getattr(x, 'sort', x.name))
for x in items:
val = getter(x)
if not val:
val = u'A'
if val.upper().startswith(group):
entries.append(x)
sort = self.browse_sort_categories(entries, sort)
entries = get_category_items(category, entries,
datatype, self.opts.url_prefix)
return json.dumps(entries, ensure_ascii=True)
@Endpoint()
def browse_catalog(self, category=None, category_sort=None):
'Entry point for top-level, categories and sub-categories'
prefix = '' if self.is_wsgi else self.opts.url_prefix
if category == None:
ans = self.browse_toplevel()
elif category == 'newest':
raise cherrypy.InternalRedirect(prefix +
'/browse/matches/newest/dummy')
elif category == 'allbooks':
raise cherrypy.InternalRedirect(prefix +
'/browse/matches/allbooks/dummy')
elif category == 'randombook':
raise cherrypy.InternalRedirect(prefix +
'/browse/random')
else:
ans = self.browse_category(category, category_sort)
return ans
# }}}
# Book Lists {{{
def browse_sort_book_list(self, items, sort):
fm = self.db.field_metadata
keys = frozenset(fm.sortable_field_keys())
if sort not in keys:
sort = 'title'
self.sort(items, 'title', True)
if sort != 'title':
ascending = fm[sort]['datatype'] not in ('rating', 'datetime',
'series')
self.sort(items, sort, ascending)
return sort
@Endpoint(sort_type='list')
def browse_matches(self, category=None, cid=None, list_sort=None):
if list_sort:
list_sort = unquote(list_sort)
if not cid:
raise cherrypy.HTTPError(404, 'invalid category id: %r'%cid)
categories = self.categories_cache()
if category not in categories and \
category not in ('newest', 'allbooks'):
raise cherrypy.HTTPError(404, 'category not found')
fm = self.db.field_metadata
try:
category_name = fm[category]['name']
dt = fm[category]['datatype']
except:
if category not in ('newest', 'allbooks'):
raise
category_name = {
'newest' : _('Newest'),
'allbooks' : _('All books'),
}[category]
dt = None
hide_sort = 'true' if dt == 'series' else 'false'
if category == 'search':
which = unhexlify(cid).decode('utf-8')
try:
ids = self.search_cache('search:"%s"'%which)
except:
raise cherrypy.HTTPError(404, 'Search: %r not understood'%which)
else:
all_ids = self.search_cache('')
if category == 'newest':
ids = all_ids
hide_sort = 'true'
elif category == 'allbooks':
ids = all_ids
else:
if fm.get(category, {'datatype':None})['datatype'] == 'composite':
cid = cid.decode('utf-8')
q = category
if q == 'news':
q = 'tags'
ids = self.db.get_books_for_category(q, cid)
ids = [x for x in ids if x in all_ids]
items = [self.db.data._data[x] for x in ids]
if category == 'newest':
list_sort = 'timestamp'
if dt == 'series':
list_sort = category
sort = self.browse_sort_book_list(items, list_sort)
ids = [x[0] for x in items]
html = render_book_list(ids, self.opts.url_prefix,
suffix=_('in') + ' ' + category_name)
return self.browse_template(sort, category=False).format(
title=_('Books in') + " " +category_name,
script='booklist(%s);'%hide_sort, main=html)
def browse_get_book_args(self, mi, id_, add_category_links=False):
fmts = self.db.formats(id_, index_is_id=True)
if not fmts:
fmts = ''
fmts = [x.lower() for x in fmts.split(',') if x]
pf = prefs['output_format'].lower()
try:
fmt = pf if pf in fmts else fmts[0]
except:
fmt = None
args = {'id':id_, 'mi':mi,
}
ccache = self.categories_cache() if add_category_links else {}
ftitle = fauthors = ''
for key in mi.all_field_keys():
val = mi.format_field(key)[1]
if not val:
val = ''
if key == 'title':
ftitle = xml(val, True)
elif key == 'authors':
fauthors = xml(val, True)
if add_category_links:
added_key = False
fm = mi.metadata_for_field(key)
if val and fm and fm['is_category'] and not fm['is_csp'] and\
key != 'formats' and fm['datatype'] not in ['rating']:
categories = mi.get(key)
if isinstance(categories, basestring):
categories = [categories]
dbtags = []
for category in categories:
dbtag = None
for tag in ccache[key]:
if tag.name == category:
dbtag = tag
break
dbtags.append(dbtag)
if None not in dbtags:
vals = []
for tag in dbtags:
tval = ('<a title="Browse books by {3}: {0}"'
' href="{1}" class="details_category_link">{2}</a>')
href='%s/browse/matches/%s/%s' % \
(self.opts.url_prefix, quote(tag.category), quote(str(tag.id)))
vals.append(tval.format(xml(tag.name, True),
xml(href, True),
xml(val if len(dbtags) == 1 else tag.name),
xml(key, True)))
join = ' & ' if key == 'authors' or \
(fm['is_custom'] and
fm['display'].get('is_names', False)) \
else ', '
args[key] = join.join(vals)
added_key = True
if not added_key:
args[key] = xml(val, True)
else:
args[key] = xml(val, True)
fname = quote(ascii_filename(ftitle) + ' - ' +
ascii_filename(fauthors))
return args, fmt, fmts, fname
@Endpoint(mimetype='application/json; charset=utf-8')
def browse_booklist_page(self, ids=None, sort=None):
if sort == 'null':
sort = None
if ids is None:
ids = json.dumps('[]')
try:
ids = json.loads(ids)
except:
raise cherrypy.HTTPError(404, 'invalid ids')
summs = []
for id_ in ids:
try:
id_ = int(id_)
mi = self.db.get_metadata(id_, index_is_id=True)
except:
continue
args, fmt, fmts, fname = self.browse_get_book_args(mi, id_)
args['other_formats'] = ''
args['fmt'] = fmt
if fmts and fmt:
other_fmts = [x for x in fmts if x.lower() != fmt.lower()]
if other_fmts:
ofmts = [u'<a href="{4}/get/{0}/{1}_{2}.{0}" title="{3}">{3}</a>'\
.format(f, fname, id_, f.upper(),
self.opts.url_prefix) for f in
other_fmts]
ofmts = ', '.join(ofmts)
args['other_formats'] = u'<strong>%s: </strong>' % \
_('Other formats') + ofmts
args['details_href'] = self.opts.url_prefix + '/browse/details/'+str(id_)
if fmt:
href = self.opts.url_prefix + '/get/%s/%s_%d.%s'%(
fmt, fname, id_, fmt)
rt = xml(_('Read %(title)s in the %(fmt)s format')% \
{'title':args['title'], 'fmt':fmt.upper()}, True)
args['get_button'] = \
'<a href="%s" class="read" title="%s">%s</a>' % \
(xml(href, True), rt, xml(_('Get')))
args['get_url'] = xml(href, True)
else:
args['get_button'] = args['get_url'] = ''
args['comments'] = comments_to_html(mi.comments)
args['stars'] = ''
if mi.rating:
args['stars'] = render_rating(mi.rating/2.0,
self.opts.url_prefix, prefix=_('Rating'))[0]
if args['tags']:
args['tags'] = u'<strong>%s: </strong>'%xml(_('Tags')) + \
args['tags']
if args['series']:
args['series'] = args['series']
args['details'] = xml(_('Details'), True)
args['details_tt'] = xml(_('Show book details'), True)
args['permalink'] = xml(_('Permalink'), True)
args['permalink_tt'] = xml(_('A permanent link to this book'), True)
summs.append(self.browse_summary_template.format(**args))
raw = json.dumps('\n'.join(summs), ensure_ascii=True)
return raw
def browse_render_details(self, id_, add_random_button=False):
try:
mi = self.db.get_metadata(id_, index_is_id=True)
except:
return _('This book has been deleted')
else:
args, fmt, fmts, fname = self.browse_get_book_args(mi, id_,
add_category_links=True)
args['fmt'] = fmt
if fmt:
args['get_url'] = xml(self.opts.url_prefix + '/get/%s/%s_%d.%s'%(
fmt, fname, id_, fmt), True)
else:
args['get_url'] = ''
args['formats'] = ''
if fmts:
ofmts = [u'<a href="{4}/get/{0}/{1}_{2}.{0}" title="{3}">{3}</a>'\
.format(xfmt, fname, id_, xfmt.upper(),
self.opts.url_prefix) for xfmt in fmts]
ofmts = ', '.join(ofmts)
args['formats'] = ofmts
fields, comments = [], []
displayed_custom_fields = custom_fields_to_display(self.db)
for field, m in list(mi.get_all_standard_metadata(False).items()) + \
list(mi.get_all_user_metadata(False).items()):
if self.db.field_metadata.is_ignorable_field(field) and \
field not in displayed_custom_fields:
continue
if m['datatype'] == 'comments' or field == 'comments' or (
m['datatype'] == 'composite' and \
m['display'].get('contains_html', False)):
val = mi.get(field, '')
if val and val.strip():
comments.append((m['name'], comments_to_html(val)))
continue
if field in ('title', 'formats') or not args.get(field, False) \
or not m['name']:
continue
if field == 'identifiers':
urls = urls_from_identifiers(mi.get(field, {}))
links = [u'<a class="details_category_link" target="_new" href="%s" title="%s:%s">%s</a>' % (url, id_typ, id_val, name)
for name, id_typ, id_val, url in urls]
links = u', '.join(links)
if links:
fields.append((m['name'], u'<strong>%s: </strong>%s'%(
_('Ids'), links)))
continue
if m['datatype'] == 'rating':
r = u'<strong>%s: </strong>'%xml(m['name']) + \
render_rating(mi.get(field)/2.0, self.opts.url_prefix,
prefix=m['name'])[0]
else:
r = u'<strong>%s: </strong>'%xml(m['name']) + \
args[field]
fields.append((m['name'], r))
fields.sort(key=lambda x: sort_key(x[0]))
fields = [u'<div class="field">{0}</div>'.format(f[1]) for f in
fields]
fields = u'<div class="fields">%s</div>'%('\n\n'.join(fields))
comments.sort(key=lambda x: x[0].lower())
comments = [(u'<div class="field"><strong>%s: </strong>'
u'<div class="comment">%s</div></div>') % (xml(c[0]),
c[1]) for c in comments]
comments = u'<div class="comments">%s</div>'%('\n\n'.join(comments))
random = ''
if add_random_button:
href = '%s/browse/random?v=%s'%(
self.opts.url_prefix, time.time())
random = '<a href="%s" id="random_button" title="%s">%s</a>' % (
xml(href, True), xml(_('Choose another random book'), True),
xml(_('Another random book')))
return self.browse_details_template.format(
id=id_, title=xml(mi.title, True), fields=fields,
get_url=args['get_url'], fmt=args['fmt'],
formats=args['formats'], comments=comments, random=random)
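    # JSON endpoint: thin wrapper around browse_render_details() for a single id.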
@Endpoint(mimetype='application/json; charset=utf-8')
def browse_details(self, id=None):
try:
id_ = int(id)
except:
raise cherrypy.HTTPError(404, 'invalid id: %r'%id)
ans = self.browse_render_details(id_)
return json.dumps(ans, ensure_ascii=True)
@Endpoint()
def browse_random(self, *args, **kwargs):
import random
book_id = random.choice(self.db.search_getting_ids(
'', self.search_restriction))
ans = self.browse_render_details(book_id, add_random_button=True)
return self.browse_template('').format(
title='', script='book();', main=ans)
@Endpoint()
def browse_book(self, id=None, category_sort=None):
try:
id_ = int(id)
except:
raise cherrypy.HTTPError(404, 'invalid id: %r'%id)
ans = self.browse_render_details(id_)
return self.browse_template('').format(
title='', script='book();', main=ans)
# }}}
# Search {{{
@Endpoint(sort_type='list')
def browse_search(self, query='', list_sort=None):
if isbytestring(query):
query = query.decode('UTF-8')
ids = self.db.search_getting_ids(query.strip(), self.search_restriction)
items = [self.db.data._data[x] for x in ids]
sort = self.browse_sort_book_list(items, list_sort)
ids = [x[0] for x in items]
html = render_book_list(ids, self.opts.url_prefix,
suffix=_('in search')+': '+xml(query))
return self.browse_template(sort, category=False, initial_search=query).format(
title=_('Matching books'),
script='search_result();', main=html)
# }}}
|
sss/calibre-at-bzr
|
src/calibre/library/server/browse.py
|
Python
|
gpl-3.0
| 37,993
|
# -*- test-case-name: twisted.words.test.test_xishutil -*-
#
# Copyright (c) 2001-2005 Twisted Matrix Laboratories.
# See LICENSE for details.
def _isStr(s):
""" Internal method to determine if an object is a string """
return isinstance(s, type('')) or isinstance(s, type(u''))
class _MethodWrapper(object):
""" Internal class for tracking method calls """
def __init__(self, method, *args, **kwargs):
self.method = method
self.args = args
self.kwargs = kwargs
def __call__(self, *args, **kwargs):
nargs = self.args + args
nkwargs = self.kwargs.copy()
nkwargs.update(kwargs)
self.method(*nargs, **nkwargs)
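# Registry mapping a callable to a (_MethodWrapper, onetime) pair; one-time
# callbacks are removed again after their first invocation in callback().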
class CallbackList:
def __init__(self):
self.callbacks = {}
def addCallback(self, onetime, method, *args, **kwargs):
        if method not in self.callbacks:
self.callbacks[method] = (_MethodWrapper(method, *args, **kwargs), onetime)
def removeCallback(self, method):
if method in self.callbacks:
del self.callbacks[method]
def callback(self, *args, **kwargs):
for key, (methodwrapper, onetime) in self.callbacks.items():
methodwrapper(*args, **kwargs)
if onetime:
del self.callbacks[key]
from twisted.words.xish import xpath, domish
class EventDispatcher:
""" Event dispatching service.
The C{EventDispatcher} allows observers to be registered for certain events
that are dispatched. There are two types of events: XPath events and Named
events.
Every dispatch is triggered by calling L{dispatch} with a data object and,
for named events, the name of the event.
When an XPath type event is dispatched, the associated object is assumed
to be a L{domish.Element} object, which is matched against all registered
XPath queries. For every match, the respective observer will be called with
the data object.
A named event will simply call each registered observer for that particular
event name, with the data object. Unlike XPath type events, the data object
is not restricted to L{domish.Element}, but can be anything.
When registering observers, the event that is to be observed is specified
using an L{xpath.XPathQuery} object or a string. In the latter case, the
string can also contain the string representation of an XPath expression.
To distinguish these from named events, each named event should start with
a special prefix that is stored in C{self.prefix}. It defaults to
C{//event/}.
Observers registered using L{addObserver} are persistent: after the
observer has been triggered by a dispatch, it remains registered for a
possible next dispatch. If instead L{addOnetimeObserver} was used to
observe an event, the observer is removed from the list of observers after
the first observed event.
    Observers can also be prioritized, by providing an optional C{priority}
parameter to the L{addObserver} and L{addOnetimeObserver} methods. Higher
priority observers are then called before lower priority observers.
Finally, observers can be unregistered by using L{removeObserver}.
"""
def __init__(self, eventprefix = "//event/"):
self.prefix = eventprefix
self._eventObservers = {}
self._xpathObservers = {}
self._orderedEventObserverKeys = []
self._orderedXpathObserverKeys = []
self._dispatchDepth = 0 # Flag indicating levels of dispatching in progress
self._updateQueue = [] # Queued updates for observer ops
def _isEvent(self, event):
return _isStr(event) and self.prefix == event[0:len(self.prefix)]
def addOnetimeObserver(self, event, observerfn, priority=0, *args, **kwargs):
""" Register a one-time observer for an event.
Like L{addObserver}, but is only triggered at most once. See there
for a description of the parameters.
"""
self._addObserver(True, event, observerfn, priority, *args, **kwargs)
def addObserver(self, event, observerfn, priority=0, *args, **kwargs):
""" Register an observer for an event.
Each observer will be registered with a certain priority. Higher
priority observers get called before lower priority observers.
@param event: Name or XPath query for the event to be monitored.
@type event: L{str} or L{xpath.XPathQuery}.
@param observerfn: Function to be called when the specified event
has been triggered. This function takes
one parameter: the data object that triggered
the event. When specified, the C{*args} and
C{**kwargs} parameters to addObserver are being used
as additional parameters to the registered observer
function.
@param priority: (Optional) priority of this observer in relation to
other observer that match the same event. Defaults to
C{0}.
@type priority: L{int}
"""
self._addObserver(False, event, observerfn, priority, *args, **kwargs)
def _addObserver(self, onetime, event, observerfn, priority, *args, **kwargs):
# If this is happening in the middle of the dispatch, queue
# it up for processing after the dispatch completes
if self._dispatchDepth > 0:
            self._updateQueue.append(lambda: self._addObserver(onetime, event, observerfn, priority, *args, **kwargs))  # defer, preserving the onetime flag
return
observers = None
if _isStr(event):
if self.prefix == event[0:len(self.prefix)]:
# Treat as event
observers = self._eventObservers
else:
# Treat as xpath
event = xpath.internQuery(event)
observers = self._xpathObservers
else:
# Treat as xpath
observers = self._xpathObservers
key = (priority, event)
if not key in observers:
cbl = CallbackList()
cbl.addCallback(onetime, observerfn, *args, **kwargs)
observers[key] = cbl
else:
observers[key].addCallback(onetime, observerfn, *args, **kwargs)
# Update the priority ordered list of xpath keys --
# This really oughta be rethought for efficiency
self._orderedEventObserverKeys = self._eventObservers.keys()
self._orderedEventObserverKeys.sort()
self._orderedEventObserverKeys.reverse()
self._orderedXpathObserverKeys = self._xpathObservers.keys()
self._orderedXpathObserverKeys.sort()
self._orderedXpathObserverKeys.reverse()
def removeObserver(self, event, observerfn):
""" Remove function as observer for an event.
The observer function is removed for all priority levels for the
specified event.
@param event: Event for which the observer function was registered.
@type event: L{str} or L{xpath.XPathQuery}
@param observerfn: Observer function to be unregistered.
"""
# If this is happening in the middle of the dispatch, queue
# it up for processing after the dispatch completes
if self._dispatchDepth > 0:
self._updateQueue.append(lambda:self.removeObserver(event, observerfn))
return
observers = None
if _isStr(event):
if self.prefix == event[0:len(self.prefix)]:
observers = self._eventObservers
else:
event = xpath.internQuery(event)
observers = self._xpathObservers
else:
observers = self._xpathObservers
for priority, query in observers:
if event == query:
observers[(priority, query)].removeCallback(observerfn)
# Update the priority ordered list of xpath keys --
# This really oughta be rethought for efficiency
self._orderedEventObserverKeys = self._eventObservers.keys()
self._orderedEventObserverKeys.sort()
self._orderedEventObserverKeys.reverse()
self._orderedXpathObserverKeys = self._xpathObservers.keys()
self._orderedXpathObserverKeys.sort()
self._orderedXpathObserverKeys.reverse()
def dispatch(self, object, event = None):
""" Dispatch an event.
When C{event} is C{None}, an XPath type event is triggered, and
C{object} is assumed to be an instance of {domish.Element}. Otherwise,
C{event} holds the name of the named event being triggered. In the
latter case, C{object} can be anything.
@param object: The object to be dispatched.
@param event: Optional event name.
@type event: L{str}
"""
foundTarget = False
# Aiyiyi! If this dispatch occurs within a dispatch
# we need to preserve the original dispatching flag
# and not mess up the updateQueue
self._dispatchDepth = self._dispatchDepth + 1
        if event is not None:
for priority, query in self._orderedEventObserverKeys:
if event == query:
self._eventObservers[(priority, event)].callback(object)
foundTarget = True
else:
for priority, query in self._orderedXpathObserverKeys:
callbacklist = self._xpathObservers[(priority, query)]
if query.matches(object):
callbacklist.callback(object)
foundTarget = True
self._dispatchDepth = self._dispatchDepth -1
# If this is a dispatch within a dispatch, don't
# do anything with the updateQueue -- it needs to
# wait until we've back all the way out of the stack
if self._dispatchDepth == 0:
# Deal with pending update operations
for f in self._updateQueue:
f()
self._updateQueue = []
return foundTarget
|
kenorb-contrib/BitTorrent
|
twisted/words/xish/utility.py
|
Python
|
gpl-3.0
| 10,134
|
from .main import PublicHD
def start():
return PublicHD()
config = [{
'name': 'publichd',
'groups': [
{
'tab': 'searcher',
'subtab': 'providers',
'list': 'torrent_providers',
'name': 'PublicHD',
'description': 'Public Torrent site with only HD content. See <a href="https://publichd.se/">PublicHD</a>',
'wizard': True,
'options': [
{
'name': 'enabled',
'type': 'enabler',
'default': False,
},
{
'name': 'extra_score',
'advanced': True,
'label': 'Extra Score',
'type': 'int',
'default': 0,
'description': 'Starting score for each release found via this provider.',
}
],
},
],
}]
|
nabsboss/CouchPotatoServer
|
couchpotato/core/providers/torrent/publichd/__init__.py
|
Python
|
gpl-3.0
| 952
|
# (C) British Crown Copyright 2015 - 2016, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <https://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import unittest
import numpy as np
from numpy.testing import assert_almost_equal
from nose.tools import assert_equal
import cartopy.crs as ccrs
class TestLambertAzimuthalEqualArea(unittest.TestCase):
def test_default(self):
crs = ccrs.LambertAzimuthalEqualArea()
expected = ('+ellps=WGS84 +proj=laea +lon_0=0.0 '
'+lat_0=0.0 +x_0=0.0 +y_0=0.0 +no_defs')
assert_equal(crs.proj4_init, expected)
assert_almost_equal(np.array(crs.x_limits),
[-12755636.1863, 12755636.1863],
decimal=4)
assert_almost_equal(np.array(crs.y_limits),
[-12727770.598700099, 12727770.598700099],
decimal=4)
def test_eccentric_globe(self):
globe = ccrs.Globe(semimajor_axis=1000, semiminor_axis=500,
ellipse=None)
crs = ccrs.LambertAzimuthalEqualArea(globe=globe)
expected = ('+a=1000 +b=500 +proj=laea +lon_0=0.0 +lat_0=0.0 '
'+x_0=0.0 +y_0=0.0 +no_defs')
assert_equal(crs.proj4_init, expected)
assert_almost_equal(np.array(crs.x_limits),
[-1999.9, 1999.9], decimal=1)
assert_almost_equal(np.array(crs.y_limits),
[-1380.17298647, 1380.17298647], decimal=4)
def test_offset(self):
crs = ccrs.LambertAzimuthalEqualArea()
crs_offset = ccrs.LambertAzimuthalEqualArea(false_easting=1234,
false_northing=-4321)
expected = ('+ellps=WGS84 +proj=laea +lon_0=0.0 +lat_0=0.0 '
'+x_0=1234 +y_0=-4321 +no_defs')
assert_equal(crs_offset.proj4_init, expected)
assert_equal(tuple(np.array(crs.x_limits) + 1234),
crs_offset.x_limits)
assert_equal(tuple(np.array(crs.y_limits) - 4321),
crs_offset.y_limits)
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
|
belteshassar/cartopy
|
lib/cartopy/tests/crs/test_lambert_azimuthal_equal_area.py
|
Python
|
gpl-3.0
| 2,873
|
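# Connection and job settings for a SciWonC-Dataflow MongoDB task (purpose inferred
# from the field names): read 'task_events_info' documents from the 'googlew'
# database and group them by the 'event type' column over the listed attributes.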
HOST = "wfSciwoncGW:enw1989@172.31.25.253:27001,172.31.25.251:27001,172.31.2.76:27001/?authSource=admin"
PORT = ""
USER = ""
PASSWORD = ""
DATABASE = "googlew"
READ_PREFERENCE = "secondary"
COLLECTION_INPUT = "task_events_info"
PREFIX_COLUMN = "g_"
ATTRIBUTES = ["event type", "standard deviation memory", "standard deviation cpu", "standard deviation ratio", "average memory", "average cpu", "average ratio"]
SORT = ["_id.filepath", "_id.numline"]
OPERATION_TYPE = "GROUP_BY_COLUMN"
COLUMN = "event type"
VALUE = ["0","1","2","3","4","5","6","7","8"]
INPUT_FILE = "task_events_info.dat"
|
elainenaomi/sciwonc-dataflow-examples
|
dissertation2017/Experiment 1B/instances/8_1_workflow_full_10files_secondary_w1_1sh_3rs_with_annot_with_proj_3s/calculateratio_2/ConfigDB_Calc_TEInfo_2.py
|
Python
|
gpl-3.0
| 588
|
#!/usr/bin/env python
"""Make sure the data in BOTMETA.yml is valid"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import os
import re
import sys
import yaml
from voluptuous import All, Any, Match, MultipleInvalid, Required, Schema
from voluptuous.humanize import humanize_error
from ansible.module_utils.six import string_types
def main():
"""Validate BOTMETA"""
path = '.github/BOTMETA.yml'
try:
with open(path, 'r') as f_path:
botmeta = yaml.safe_load(f_path)
except yaml.error.MarkedYAMLError as ex:
print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line + 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex))))
sys.exit()
except Exception as ex: # pylint: disable=broad-except
print('%s:%d:%d: YAML load failed: %s' % (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
sys.exit()
list_string_types = list(string_types)
files_schema = Any(
Schema(*string_types),
Schema({
'ignored': Any(list_string_types, *string_types),
'keywords': Any(list_string_types, *string_types),
'labels': Any(list_string_types, *string_types),
'maintainers': Any(list_string_types, *string_types),
'migrated_to': All(
Any(*string_types),
Match(r'^https://galaxy.ansible.com/'),
),
'notified': Any(list_string_types, *string_types),
'supershipit': Any(list_string_types, *string_types),
'support': Any("core", "network", "community"),
})
)
list_dict_file_schema = [{str_type: files_schema}
for str_type in string_types]
schema = Schema({
Required('automerge'): bool,
Required('files'): Any(None, *list_dict_file_schema),
Required('macros'): dict, # Any(*list_macros_schema),
})
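    # For reference (illustrative example, not taken from this repository), a
    # 'files' entry accepted by the schema above looks like:
    #
    #   files:
    #     lib/ansible/modules/foo.py:
    #       maintainers: someuser
    #       labels: [foo]
    #       support: community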
# Ensure schema is valid
try:
schema(botmeta)
except MultipleInvalid as ex:
for error in ex.errors:
# No way to get line numbers
print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(botmeta, error)))
# Ensure botmeta is always support:core
botmeta_support = botmeta.get('files', {}).get('.github/BOTMETA.yml', {}).get('support', '')
if botmeta_support != 'core':
print('%s:%d:%d: .github/BOTMETA.yml MUST be support: core' % (path, 0, 0))
# Find all path (none-team) macros so we can substitute them
macros = botmeta.get('macros', {})
path_macros = []
for macro in macros:
if macro.startswith('team_'):
continue
path_macros.append(macro)
# Ensure all `files` correspond to a file
for file, file_meta in botmeta['files'].items():
migrated = isinstance(file_meta, dict) and file_meta.get('migrated_to') is not None
for macro in path_macros:
file = file.replace('$' + macro, botmeta.get('macros', {}).get(macro, ''))
if not os.path.exists(file) and not migrated:
# Not a file or directory, though maybe the prefix to one?
# https://github.com/ansible/ansibullbot/pull/1023
if not glob.glob('%s*' % file):
print("%s:%d:%d: Can't find '%s.*' in this branch" % (path, 0, 0, file))
if __name__ == '__main__':
main()
# Possible future work
# * Schema for `macros:` - currently ignored due to team_ansible
# * Ensure that all $teams mention in `files:` exist in `$macros`
# * Validate GitHub names - possibly expensive lookup needed - No should be validated when module is added - gundalow
|
tumbl3w33d/ansible
|
test/sanity/code-smell/botmeta.py
|
Python
|
gpl-3.0
| 3,657
|