repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
| prefix
stringlengths 0
8.16k
| middle
stringlengths 3
512
| suffix
stringlengths 0
8.17k
|
|---|---|---|---|---|---|---|---|---|
NelleV/pyconfr-test
|
symposion/proposals/templatetags/proposal_tags.py
|
Python
|
bsd-3-clause
| 2,245
| 0.0049
|
from django import template
from symposion.proposals.models import AdditionalSpeaker
register = template.Library()
class AssociatedProposalsNode(template.Node):
@classmethod
def handle_token(cls, parser, token):
bits = token.split_contents()
if len(bits) == 3 and bits[1] == "as":
return cls(bits[2])
else:
raise template.TemplateSyntaxError("%r takes 'as var'" % bits[0])
def __init__(self, context_var):
self.context_var = context_var
def render(self, context):
request = context["request"]
if request.user.speaker_profile:
pending = AdditionalSpeaker.SPEAKING_STATUS_ACCEPTED
speaker = request.user.speaker_profile
queryset = AdditionalSpeaker.objects.filter(speaker=speaker, status=pending)
context[self.context_var] = [item.proposalbase for item in queryset]
else:
context[self.context_var] = None
return u""
class PendingProposalsNode(template.Node):
@classmethod
def handle_token(cls, parser, token):
bits = token.split_contents()
if len(bits) == 3 and bits[1] == "as":
return cls(bits[2])
else:
raise template.TemplateSyntaxError("%r takes 'as var'" % bits[0])
def __init__(self, context_var):
self.context_var = context_var
def render(self, context):
request = context["request"]
if request.user.speaker_profile:
pending = AdditionalSpeaker.SPEAKING_STATUS_PENDING
speaker = request.user.speaker_profile
queryset = AdditionalSpeaker.objects.filter(speaker=speaker, status=pending)
context[self.context_var] = [item.proposalbase for item in queryset]
else:
|
context[self.context_var] = None
return u""
@register.tag
def pending_proposals(parser, token):
"""
{% pending_proposals as pending_proposals %}
"""
return PendingProposalsNode.handle_token(parser, token)
@register.tag
def associated_proposals(parser, token):
"""
{% associated_proposals as associated_proposals %}
"""
return AssociatedProposalsNode.handl
|
e_token(parser, token)
|
nis-sdn/odenos
|
src/main/python/org/o3project/odenos/core/manager/component_manager.py
|
Python
|
apache-2.0
| 5,084
| 0.00059
|
# -*- coding:utf-8 -*-
# Copyright 2015 NEC Corporation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
import logging
import copy
from urlparse import urlparse
from org.o3project.odenos.core.util.request_parser import RequestParser
from org.o3project.odenos.remoteobject.message.request import Request
from org.o3project.odenos.remoteobject.message.response import Response
from org.o3project.odenos.remoteobject.object_property import ObjectProperty
from org.o3project.odenos.remoteobject.remote_object_manager import RemoteObjectManager
from org.o3project.odenos.remoteobject.manager.component.component_type\
import ComponentType
class ComponentManager(RemoteObjectManager):
DESCRIPTION = "python's ComponentManager"
COMPONENT_TYPES = "component_types"
def __init__(self, object_id, dispatcher):
RemoteObjectManager.__init__(self, object_id, dispatcher)
self._object_property.set_property(ComponentManager.COMPONENT_TYPES, "")
def regist
|
er_components(self, components):
self.register_remote_objects(components)
types = ",".join(self.remote_object_classes.keys())
self._object_property.set_property(C
|
omponentManager.COMPONENT_TYPES,
types)
def _add_rules(self):
rules = []
rules.append({RequestParser.PATTERN: r"^component_types/?$",
RequestParser.METHOD: Request.Method.GET,
RequestParser.FUNC: self._do_get_component_types,
RequestParser.PARAMS: 0})
rules.append({RequestParser.PATTERN: r"^components/?$",
RequestParser.METHOD: Request.Method.GET,
RequestParser.FUNC: self._do_get_remote_objects,
RequestParser.PARAMS: 0})
rules.append({RequestParser.PATTERN: r"^components/"
+ "([a-zA-Z0-9_-]+)/?$",
RequestParser.METHOD: Request.Method.PUT,
RequestParser.FUNC: self._do_put_remote_object,
RequestParser.PARAMS: 2})
rules.append({RequestParser.PATTERN: r"^components/"
+ "([a-zA-Z0-9_-]+)/?$",
RequestParser.METHOD: Request.Method.GET,
RequestParser.FUNC: self._do_get_remote_object,
RequestParser.PARAMS: 1})
rules.append({RequestParser.PATTERN: r"^components/"
+ "([a-zA-Z0-9_-]+)/?$",
RequestParser.METHOD: Request.Method.DELETE,
RequestParser.FUNC: self._do_delete_remote_object,
RequestParser.PARAMS: 1})
self._parser.add_rule(rules)
def _do_get_component_types(self):
comp_types = {}
tmp = None
try:
for type_name, clazz in self.remote_object_classes.items():
comp_id = "%s_%s" % (self.object_id, type_name)
component = clazz(comp_id, None)
obj_prop = component.object_property
component = None
type = obj_prop.get_property(ObjectProperty.OBJECT_TYPE)
super_type = obj_prop.get_property(ObjectProperty.OBJECT_SUPER_TYPE)
connection_types = {}
connection_types_str = obj_prop.get_property(
ObjectProperty.CONNECTION_TYPES)
conn_type_list = connection_types_str.split(",")
for type_elem in conn_type_list:
type_elem_list = type_elem.split(":")
if len(type_elem_list) == 2:
connection_types[type_elem_list[0]] = type_elem_list[1]
description = obj_prop.get_property(ObjectProperty.DESCRIPTION)
target = ComponentType(type, super_type,
connection_types, description)
comp_types[type_name] = target.packed_object()
except Exception, e:
return Response(Response.StatusCode.INTERNAL_SERVER_ERROR,
str(e))
return Response(Response.StatusCode.OK, comp_types)
|
darth-dodo/what_2_watch
|
test.py
|
Python
|
mit
| 759
| 0.02108
|
import re
# cat_list = ['Programming','Trending on Reddit','Trailers','Stand-up']
# def urlify(ip):
# # Remove all non-word characters (everything except numbers and letters)
# only_num_and_letters = re.sub(r'[^\w\d\s]','',ip)
# # Replace all runs of whitespace with a single dash
# output = re.sub(r'\s+','-',only_num_and_letters).lower()
# return output
# b = map(urlify,cat_list)
# print b
from random import choice
a = [1,
|
2,3,4,4,5,5,6,6,7,7,8,9]
amt = 4
def randomizer(amt,zipped_list):
amt = int(amt)
op_list = []
while len(op_list) < amt:
|
pass
rand_value = choice(zipped_list)
if rand_value not in op_list:
op_list.append(rand_value)
return op_list
print randomizer(5,a)
|
indrz/indrz
|
indrz/users/urls.py
|
Python
|
gpl-3.0
| 1,054
| 0.001898
|
from django.urls import url, include, path
from rest_framework import routers
from users import views
# router = routers.DefaultRouter()
# router.register(r'users', views.UserViewSet)
# router.register(r'groups', views.GroupViewSet)
#
# # Wire up our API using automatic URL routing.
# # Additionally, we include login URLs for the browsable API.
# urlpatterns = [
# path('', include(router.urls)),
# path('api-auth/
|
', include('rest_framework.urls', namespace='rest_framework'))
# ]
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
urlpatterns = [
url(
regex=r'^$',
view=views.UserListView.as_view(),
name='list'
),
url(
regex=r'^~redirect/$',
view=views.UserRedirectView.as_view(),
name='redirect'
),
url(
regex=r'^(?P<username>[\w.@+-]+)/$',
view=views.UserDetailView.as_view(),
name='detail
|
'
),
url(
regex=r'^~update/$',
view=views.UserUpdateView.as_view(),
name='update'
),
]
|
brandicted/nefertari-es
|
tests/test_documents.py
|
Python
|
apache-2.0
| 35,116
| 0.000057
|
import pytest
from mock import patch, Mock, call
from nefertari.json_httpexceptions import (
JHTTPBadRequest,
JHTTPNotFound,
)
from .fixtures import (
simple_model, id_model, story_model, person_model,
tag_model, parent_model)
from nefertari_es import documents as docs
from nefertari_es import fields
class TestBaseDocument(object):
def test_comparison(self, simple_model):
item1 = simple_model(name=None)
item2 = simple_model(name=None)
assert item1 != item2
item2.name = '2'
assert item1 != item2
item1.name = '1'
assert item1 != item2
item1.name = '2'
assert item1 == item2
def test_hash(self, simple_model):
items = set()
item1 = simple_model(name='1')
items.add(item1)
assert item1 in items
item2 = simple_model(name='asd')
assert item2 not in items
item2.name = '1'
assert item2 in items
def test_sync_id_field(self, id_model):
item = id_model()
assert item.id is None
item._id = 123
item._sync_id_field()
assert item.id == '123'
def test_setattr_readme_id(self, id_model):
item = id_model()
with pytest.raises(AttributeError) as ex:
item.id = 123
assert 'id is read-only' in str(ex
|
.value)
def test_getattr_id_none(self, id_model):
item = id_model()
assert item._id is None
item.meta['id'] = 123
assert item._id == 123
@patch('nefertari_es.documents.BaseDocument._load_related')
def
|
test_getattr_load_rel(self, mock_load, story_model):
story = story_model()
story.author
mock_load.assert_called_once_with('author')
@patch('nefertari_es.documents.BaseDocument._load_related')
def test_getattr_raw(self, mock_load, story_model):
story = story_model(author=1)
assert mock_load.call_count == 1
assert story._getattr_raw('author') == 1
assert mock_load.call_count == 1
@patch('nefertari_es.documents.BaseDocument._load_related')
def test_unload_related(self, mock_load, parent_model, person_model):
parent = parent_model()
parent.children = [person_model(name='123')]
assert isinstance(parent.children[0], person_model)
parent._unload_related('children')
assert parent.children == ['123']
@patch('nefertari_es.documents.BaseDocument._load_related')
def test_unload_related_no_ids(self, mock_load, parent_model,
person_model):
parent = parent_model()
person = person_model(name=None)
parent._d_['children'] = [person]
assert isinstance(parent.children[0], person_model)
parent._unload_related('children')
assert parent.children == [person]
@patch('nefertari_es.documents.BaseDocument._load_related')
def test_unload_related_no_curr_value(
self, mock_load, parent_model, person_model):
parent = parent_model()
parent._d_['children'] = []
parent._unload_related('children')
assert parent.children == []
def test_load_related(self, parent_model, person_model):
parent = parent_model()
parent.children = ['123']
with patch.object(person_model, 'get_collection') as mock_get:
mock_get.return_value = ['foo']
parent._load_related('children')
mock_get.assert_called_once_with(name=['123'])
assert parent.children == ['foo']
def test_load_related_no_items(self, parent_model, person_model):
parent = parent_model()
parent.children = ['123']
with patch.object(person_model, 'get_collection') as mock_get:
mock_get.return_value = []
parent._load_related('children')
mock_get.assert_called_once_with(name=['123'])
assert parent.children == ['123']
def test_load_related_no_curr_value(
self, parent_model, person_model):
parent = parent_model()
parent.children = []
with patch.object(person_model, 'get_collection') as mock_get:
mock_get.return_value = ['foo']
parent._load_related('children')
assert not mock_get.called
assert parent.children == []
def test_save(self, person_model):
person = person_model(name='foo')
person._sync_id_field = Mock()
person.save()
person._sync_id_field.assert_called_once_with()
def test_update(self, simple_model):
item = simple_model(name='foo', price=123)
assert item.name == 'foo'
assert item.price == 123
item.save = Mock()
item.update({'name': 'bar', 'price': 321}, zoo=1)
assert item.name == 'foo'
assert item.price == 321
item.save.assert_called_once_with(zoo=1)
def test_update(self):
class MyModel(docs.BaseDocument):
id = fields.IdField(primary_key=True)
name = fields.StringField()
settings = fields.DictField()
MyModel.save = Mock()
myobj = MyModel(id=4, name='foo')
myobj.update({'name': 'bar', 'settings': {'sett1': 'val1'}})
assert myobj.name == 'bar'
assert myobj.settings == {'sett1': 'val1'}
def test_to_dict(self, simple_model):
item = simple_model(name='joe', price=42)
assert item.to_dict() == {'name': 'joe', 'price': 42}
assert item.to_dict(include_meta=True) == {
'_source': {'name': 'joe', 'price': 42}, '_type': 'Item'}
def test_to_dict_simple_request(self, simple_model):
item = simple_model(name='joe', price=42)
assert item.to_dict(request=True) == {
'name': 'joe', 'price': 42,
'_type': 'Item', '_pk': 'joe'}
def test_to_dict_nest_depth_not_reached(
self, person_model, tag_model, story_model):
sking = person_model(name='Stephen King')
novel = tag_model(name='novel')
story = story_model(name='11/22/63', author=sking, tags=[novel])
story._unload_related = Mock()
story._load_related = Mock()
story._nested_relationships = ['author', 'tags']
data = story.to_dict(request=True, _depth=1)
assert data == {
'_pk': '11/22/63',
'_type': 'Story',
'author': {'_pk': 'Stephen King', '_type': 'Person', 'name': 'Stephen King'},
'name': '11/22/63',
'tags': [{'_pk': 'novel', '_type': 'Tag', 'name': 'novel'}]
}
assert not story._unload_related.called
story._load_related.assert_has_calls([
call('author'), call('tags')], any_order=True)
assert sking._nesting_depth == 0
assert sking._request
assert novel._nesting_depth == 0
assert novel._request
def test_to_dict_nest_depth_reached(
self, person_model, tag_model, story_model):
sking = person_model(name='Stephen King')
novel = tag_model(name='novel')
story = story_model(name='11/22/63', author=sking, tags=[novel])
story._load_related = Mock()
story._nested_relationships = ['author', 'tags']
data = story.to_dict(request=True, _depth=0)
assert data == {
'_pk': '11/22/63',
'_type': 'Story',
'author': 'Stephen King',
'name': '11/22/63',
'tags': ['novel']
}
assert not story._load_related.called
assert sking._nesting_depth == 1
assert sking._request is None
assert novel._nesting_depth == 1
assert novel._request is None
def test_to_dict_nest_not_nested(
self, person_model, tag_model, story_model):
sking = person_model(name='Stephen King')
novel = tag_model(name='novel')
story = story_model(name='11/22/63', author=sking, tags=[novel])
story._load_related = Mock()
story._nested_relationships = ['tags']
data = story.to_dict(request=True, _depth=1)
assert data == {
'_pk': '11/22/63',
'_type': 'Story',
|
dolaCmeo/quick_flask
|
flask_site/user/__init__.py
|
Python
|
mit
| 49
| 0
|
# -
|
*- coding: utf-8 -*-#
__
|
author__ = 'dolacmeo'
|
MithileshCParab/HackerRank-10DaysOfStatistics
|
Problem Solving/Data Structure/Trie/no_prefix_set.py
|
Python
|
apache-2.0
| 2,190
| 0.010959
|
# Enter your code here. Read input from STDIN. Print output to STDOUT
class Node:
def __init__(self, letter):
self.letter = letter
|
self.children = {}
self.isWord = False
class Trie:
def __init__(self):
self.root = Node("*")
def buildTrie(self, word):
curr_node = self.root
for idx, char in enumerate(word):
if curr_node.isWord:
return word
elif idx == len(word)-1 and char in curr_node.children:
return wo
|
rd
elif char not in curr_node.children:
curr_node.children[char] = Node(char)
curr_node = curr_node.children[char]
curr_node.isWord = True
if __name__ == "__main__":
trie = Trie()
wordsDict = {}
isGoodSet = True
n = int(input())
for i in range(n):
word = input()
badword = trie.buildTrie(word)
if badword != None:
print("BAD SET")
print(badword)
isGoodSet = False
break
if isGoodSet:
print("GOOD SET")
# Enter your code here. Read input from STDIN. Print output to STDOUT
class Node:
def __init__(self, letter):
self.letter = letter
self.children = {}
self.isWord = False
class Trie:
def __init__(self):
self.root = Node("*")
def buildTrie(self, word):
curr_node = self.root
for idx, char in enumerate(word):
if curr_node.isWord:
return word
elif idx == len(word)-1 and char in curr_node.children:
return word
elif char not in curr_node.children:
curr_node.children[char] = Node(char)
curr_node = curr_node.children[char]
curr_node.isWord = True
if __name__ == "__main__":
trie = Trie()
wordsDict = {}
isGoodSet = True
n = int(input())
for i in range(n):
word = input()
badword = trie.buildTrie(word)
if badword != None:
print("BAD SET")
print(badword)
isGoodSet = False
break
if isGoodSet:
print("GOOD SET")
|
eduNEXT/edx-platform
|
openedx/core/djangoapps/course_live/migrations/0001_initial.py
|
Python
|
agpl-3.0
| 3,720
| 0.004839
|
# Generated by Django 3.2.12 on 2022-02-23 08:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
import opaque_keys.edx.django.models
import simple_history.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('lti_consumer', '0013_auto_20210712_1352'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='HistoricalCourseLiveConfiguration',
fields=[
('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('course_key', opaque_keys.edx.django.models.CourseKeyField(db_index=True, max_length=255)),
('enabled', models.BooleanField(default=True, help_text='If disabled, the LTI in the associated course will be disabled.')),
('provider_type', models.CharField(help_text="The LTI provider's id", max_length=50, verbose_name='LTI provid
|
er')),
('history_id', models.AutoField(primary_key=True, serialize=False)),
('history_date', models.DateTimeField()),
('his
|
tory_change_reason', models.CharField(max_length=100, null=True)),
('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
('lti_configuration', models.ForeignKey(blank=True, db_constraint=False, help_text='The LTI configuration data for this course/provider.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='lti_consumer.lticonfiguration')),
],
options={
'verbose_name': 'historical course live configuration',
'ordering': ('-history_date', '-history_id'),
'get_latest_by': 'history_date',
},
bases=(simple_history.models.HistoricalChanges, models.Model),
),
migrations.CreateModel(
name='CourseLiveConfiguration',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('course_key', opaque_keys.edx.django.models.CourseKeyField(db_index=True, max_length=255)),
('enabled', models.BooleanField(default=True, help_text='If disabled, the LTI in the associated course will be disabled.')),
('provider_type', models.CharField(help_text="The LTI provider's id", max_length=50, verbose_name='LTI provider')),
('lti_configuration', models.ForeignKey(blank=True, help_text='The LTI configuration data for this course/provider.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='lti_consumer.lticonfiguration')),
],
options={
'abstract': False,
},
),
]
|
Gustavo6046/ChatterBot
|
docs/conf.py
|
Python
|
bsd-3-clause
| 6,225
| 0.00241
|
# -*- coding: utf-8 -*-
#
# ChatterBot documentation build configuration file, created by
# sphinx-quickstart on Mon May 9 14:38:54 2016.
import sys
import os
import sphinx_rtd_theme
from datetime import datetime
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its version is used.
current_directory = os.path.dirname(os.path.abspath(__file__))
parent_directory = os.path.abspath(os.path.join(current_directory, os.pardir))
sys.path.insert(0, parent_directory)
import chatterbot
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosectionlabel',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.todo',
'sphinx.ext.viewcode'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# The encoding of source files
#source_encoding = 'utf-8-sig'
# The master toctree document
master_doc = 'index'
# General information about the project
project = 'ChatterBot'
copyright = '{}, {}'.format(datetime.now().year, chatterbot.__author__)
author = chatterbot.__author__
# The short X.Y version
version = chatterbot.__version__
# The full version, including alpha/beta/rc tags
release = chatterbot.__version__
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# If true, '()' will be appended to :func: etc. cross-reference text
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::)
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use
pygments_sty
|
le = 'sphinx'
# -- Options for HTML output ----------------------------------------------
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'logo
|
_only': True
}
html_show_sourcelink = False
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '../graphics/banner.png'
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
html_search_language = 'en'
# Output file base name for HTML help builder
htmlhelp_basename = 'ChatterBotdoc'
# Read the docs theme modifications
html_context = {
'extra_css_files': [
'_static/style.css'
]
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class])
latex_documents = [
(master_doc, 'ChatterBot.tex', u'ChatterBot Documentation',
u'Gunther Cox', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section)
man_pages = [
(master_doc, 'chatterbot', u'ChatterBot Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ChatterBot', u'ChatterBot Documentation',
author, 'ChatterBot', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# A list of files that should not be packed into the epub file
epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library
intersphinx_mapping = {'https://docs.python.org/': None}
|
Matusf/django-konfera
|
runtests.py
|
Python
|
mit
| 2,158
| 0
|
import sys
try:
from django.conf import settings
from django.test.utils import get_runner
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['konfera.templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'konfera.utils.collect_view_data',
],
},
},
],
ROOT_URLCONF="runtests_urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"konfera",
"payments",
],
SITE_ID=1,
PAYMENT_ERROR_RATE=0,
MIDDLEWARE_CLASSES=(
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
),
)
try:
import django
setup = django.setup
except Attribu
|
teError:
pass
else:
setup()
except ImportError:
import traceback
traceback.print_exc()
msg = "To fix this error, run: pip install -r requirements.txt"
raise ImportError(msg)
def run_tests(*test_args):
if not test_args:
test_args = ['konfera.tests', 'payments.tests']
# Run tests
TestRunner = get_runner(settings)
|
test_runner = TestRunner()
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(bool(failures))
if __name__ == '__main__':
run_tests(*sys.argv[1:])
|
kbrebanov/ansible-modules-extras
|
packaging/os/homebrew.py
|
Python
|
gpl-3.0
| 28,076
| 0.000712
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Andrew Dunham <andrew@du.nham.ca>
# (c) 2013, Daniel Jaouen <dcj24@cornell.edu>
# (c) 2015, Indrajit Raychaudhuri <irc+code@indrajit.com>
#
# Based on macports (Jimmy Tang <jcftang@gmail.com>)
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: homebrew
author:
- "Indrajit Raychaudhuri (@indrajitr)"
- "Daniel Jaouen (@danieljaouen)"
- "Andrew Dunham (@andrew-d)"
requirements:
- "python >= 2.6"
short_description: Package manager for Homebrew
description:
- Manages Homebrew packages
version_added: "1.1"
options:
name:
description:
- name of package to install/remove
required: false
default: None
aliases: ['pkg', 'package', 'formula']
path:
description:
- "':' separated list of paths to search for 'brew' executable. Since A package (I(formula) in homebrew parlance) location is prefixed relative to the actual path of I(brew) command, providing an alternative I(brew) path enables managing different set of packages in an alternative location in the system."
required: false
default: '/usr/local/bin'
state:
description:
- state of the package
choices: [ 'head', 'latest', 'present', 'absent', 'linked', 'unlinked' ]
required: false
default: present
update_homebrew:
description:
- update homebrew itself first
required: false
default: no
choices: [ "yes", "no" ]
aliases: ['update-brew']
upgrade_all:
description:
- upgrade all homebrew packages
required: false
default: no
choices: [ "yes", "no" ]
aliases: ['upgrade']
install_options:
description:
- options flags to install a package
required: false
default: null
aliases: ['options']
version_added: "1.4"
notes: []
'''
EXAMPLES = '''
# Install formula foo with 'brew' in default path (C(/usr/local/bin))
- homebrew:
name: foo
state: present
# Install formula foo with 'brew' in alternate path C(/my/other/location/bin)
- homebrew:
name: foo
path: /my/other/location/bin
state: present
# Update homebrew first and install formula foo with 'brew' in default path
- homebrew:
name: foo
state: present
update_homebrew: yes
# Update homebrew first and upgrade formula foo to latest available with 'brew' in default path
- homebrew:
name: foo
state: latest
update_homebrew: yes
# Update homebrew and upgrade all packages
- homebrew:
update_homebrew: yes
upgrade_all: yes
# Miscellaneous other examples
- homebrew:
name: foo
state: head
- homebrew:
name: foo
state: linked
- homebrew:
name: foo
state: absent
- homebrew:
name: foo,bar
state: absent
- homebrew:
name: foo
state: present
install_options: with-baz,enable-debug
'''
import o
|
s.path
import re
from ansible.module_utils.six import iteritems
# exceptions -------------------------------------------------------------- {{{
class HomebrewException(Exception):
pass
# /exceptions ------------------------------------------------------------- }}}
# utils ------------------------------------------------------------------- {{{
def _create_regex_group(s):
lines = (line.strip() for line in s.split('\n') if line.strip())
|
chars = filter(None, (line.split('#')[0].strip() for line in lines))
group = r'[^' + r''.join(chars) + r']'
return re.compile(group)
# /utils ------------------------------------------------------------------ }}}
class Homebrew(object):
'''A class to manage Homebrew packages.'''
# class regexes ------------------------------------------------ {{{
VALID_PATH_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
\s # spaces
: # colons
{sep} # the OS-specific path separator
. # dots
- # dashes
'''.format(sep=os.path.sep)
VALID_BREW_PATH_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
\s # spaces
{sep} # the OS-specific path separator
. # dots
- # dashes
'''.format(sep=os.path.sep)
VALID_PACKAGE_CHARS = r'''
\w # alphanumeric characters (i.e., [a-zA-Z0-9_])
. # dots
/ # slash (for taps)
\+ # plusses
- # dashes
: # colons (for URLs)
'''
INVALID_PATH_REGEX = _create_regex_group(VALID_PATH_CHARS)
INVALID_BREW_PATH_REGEX = _create_regex_group(VALID_BREW_PATH_CHARS)
INVALID_PACKAGE_REGEX = _create_regex_group(VALID_PACKAGE_CHARS)
# /class regexes ----------------------------------------------- }}}
# class validations -------------------------------------------- {{{
    @classmethod
    def valid_path(cls, path):
        '''
        `path` must be one of:
         - list of paths
         - a string containing only:
             - alphanumeric characters
             - dashes
             - dots
             - spaces
             - colons
             - os.path.sep
        '''
        # NOTE(review): Python 2 `basestring`; covers both str and unicode.
        if isinstance(path, basestring):
            # A single string may be a colon-separated PATH-style value.
            return not cls.INVALID_PATH_REGEX.search(path)
        # Otherwise accept any iterable of individual directories; each
        # entry is held to the stricter brew-path rules (no colons).
        try:
            iter(path)
        except TypeError:
            return False
        else:
            paths = path
            return all(cls.valid_brew_path(path_) for path_ in paths)
    @classmethod
    def valid_brew_path(cls, brew_path):
        '''
        `brew_path` must be one of:
         - None
         - a string containing only:
             - alphanumeric characters
             - dashes
             - dots
             - spaces
             - os.path.sep
        '''
        # None means "use the default brew lookup" and is always acceptable.
        if brew_path is None:
            return True
        # NOTE(review): Python 2 `basestring`.
        return (
            isinstance(brew_path, basestring)
            and not cls.INVALID_BREW_PATH_REGEX.search(brew_path)
        )
    @classmethod
    def valid_package(cls, package):
        '''A valid package is either None or alphanumeric.'''
        # (Despite the docstring, dots, slashes, plusses, dashes and colons
        # are also allowed -- see VALID_PACKAGE_CHARS above.)
        if package is None:
            return True
        return (
            isinstance(package, basestring)
            and not cls.INVALID_PACKAGE_REGEX.search(package)
        )
    @classmethod
    def valid_state(cls, state):
        '''
        A valid state is one of:
         - None
         - installed
         - upgraded
         - head
         - linked
         - unlinked
         - absent
        '''
        # None means "state not specified" and is accepted as-is.
        if state is None:
            return True
        else:
            # Comparison is case-insensitive (state is lower-cased first).
            return (
                isinstance(state, basestring)
                and state.lower() in (
                    'installed',
                    'upgraded',
                    'head',
                    'linked',
                    'unlinked',
                    'absent',
                )
            )
    @classmethod
    def valid_module(cls, module):
        '''A valid module is an instance of AnsibleModule.'''
        # AnsibleModule is imported elsewhere in this file (not in view here).
        return isinstance(module, AnsibleModule)
# /class validations ------------------------------------------- }}}
# class properties --------------------------------------------- {{{
@property
def module(self):
retu
|
zerothi/sisl
|
sisl/physics/tests/test_spin.py
|
Python
|
mpl-2.0
| 4,252
| 0.002352
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import pytest
import math as m
import numpy as np
from sisl import Spin
pytestmark = [pytest.mark.physics, pytest.mark.spin]
def test_spin1():
    """Every accepted constructor alias yields a printable, copyable Spin."""
    aliases = ['unpolarized', '', Spin.UNPOLARIZED,
               'polarized', 'p', Spin.POLARIZED,
               'non-collinear', 'nc', Spin.NONCOLINEAR,
               'spin-orbit', 'so', Spin.SPINORBIT]
    for alias in aliases:
        spin = Spin(alias)
        str(spin)
        assert spin == spin.copy()
def test_spin2():
    """Kinds, copies, total ordering and predicate flags of the four spins."""
    spins = [Spin(), Spin('p'), Spin('nc'), Spin('so')]
    kinds = [Spin.UNPOLARIZED, Spin.POLARIZED, Spin.NONCOLINEAR, Spin.SPINORBIT]

    # Each spin reports its kind and equals its own copy.
    for spin, kind in zip(spins, kinds):
        assert spin.kind == kind
        assert spin == spin.copy()

    # Strict total ordering between consecutive kinds, in both directions.
    for lo, hi in zip(spins, spins[1:]):
        assert lo < hi
        assert lo <= hi
        assert hi > lo
        assert hi >= lo

    # Exactly the matching predicate is true for each kind.
    names = ['is_unpolarized', 'is_polarized', 'is_noncolinear', 'is_spinorbit']
    for i, spin in enumerate(spins):
        for j, name in enumerate(names):
            if i == j:
                assert getattr(spin, name)
            else:
                assert not getattr(spin, name)
def test_spin3():
    """An unknown spin specification is rejected with ValueError."""
    with pytest.raises(ValueError):
        Spin('satoehus')
def test_spin4():
    """Real and complex Spins of equal kind compare equal; ordering is
    governed by kind alone, independent of dtype."""
    kinds = [Spin.UNPOLARIZED, Spin.POLARIZED, Spin.NONCOLINEAR, Spin.SPINORBIT]
    reals = [Spin(kind) for kind in kinds]
    cplxs = [Spin(kind, np.complex64) for kind in kinds]

    # Same kind, different dtype: still equal.
    for real, cplx in zip(reals, cplxs):
        assert real == cplx

    # The original exercised these four left/right combinations (the last
    # one duplicated the third); every cross-kind pair must order by kind.
    combos = ((reals, cplxs), (cplxs, cplxs), (cplxs, reals), (cplxs, reals))
    for lefts, rights in combos:
        for i, a in enumerate(lefts):
            for j, b in enumerate(rights):
                if i < j:
                    assert a < b
                elif i > j:
                    assert a > b
def test_pauli():
    """Pauli-matrix expectation values on hand-built two-component spinors.

    Each row of W is a normalized spinor constructed to have magnetization
    +/-1 along exactly one axis; <W| sigma_axis |W> must reproduce it.
    """
    # just grab the default spin
    S = Spin()
    # Create a fictituous wave-function
    sq2 = 2 ** .5
    W = np.array([
        [1/sq2, 1/sq2], # M_x = 1
        [1/sq2, -1/sq2], # M_x = -1
        [0.5 + 0.5j, 0.5 + 0.5j], # M_x = 1
        [0.5 - 0.5j, -0.5 + 0.5j], # M_x = -1
        [1/sq2, 1j/sq2], # M_y = 1
        [1/sq2, -1j/sq2], # M_y = -1
        [0.5 - 0.5j, 0.5 + 0.5j], # M_y = 1
        [0.5 + 0.5j, 0.5 - 0.5j], # M_y = -1
        [1, 0], # M_z = 1
        [0, 1], # M_z = -1
    ])
    # Expectation value per row: sum_i conj(W)_i (sigma W^T)^T_i
    x = np.array([1, -1, 1, -1, 0, 0, 0, 0, 0, 0])
    assert np.allclose(x, (np.conj(W)*S.X.dot(W.T).T).sum(1).real)
    y = np.array([0, 0, 0, 0, 1, -1, 1, -1, 0, 0])
    assert np.allclose(y, (np.conj(W)*np.dot(S.Y, W.T).T).sum(1).real)
    z = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, -1])
    assert np.allclose(z, (np.conj(W)*np.dot(S.Z, W.T).T).sum(1).real)
def test_pickle():
    """A Spin survives a pickle round-trip unchanged."""
    import pickle
    original = Spin('nc')
    restored = pickle.loads(pickle.dumps(original))
    assert original == restored
|
mitsei/dlkit
|
tests/resource/test_managers.py
|
Python
|
mit
| 26,397
| 0.002652
|
"""Unit tests of resource managers."""
import pytest
from ..utilities.general import is_never_authz, is_no_authz, uses_cataloging, uses_filesystem_only
from dlkit.abstract_osid.osid import errors
from dlkit.abstract_osid.type.objects import TypeList as abc_type_list
from dlkit.primordium.id.primitives import Id
from dlkit.primordium.type.primitives import Type
from dlkit.runtime import PROXY_SESSION, proxy_example
from dlkit.runtime.managers import Runtime
REQUEST = proxy_example.SimpleRequest()
CONDITION = PROXY_SESSION.get_proxy_condition()
CONDITION.set_http_request(REQUEST)
PROXY = PROXY_SESSION.get_proxy(CONDITION)
DEFAULT_TYPE = Type(**{'identifier': 'DEFAULT', 'namespace': 'DEFAULT', 'authority': 'DEFAULT'})
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_profile_class_fixture(request):
    """Class-scoped fixture: build a RESOURCE service manager per back end."""
    request.cls.service_config = request.param
    request.cls.mgr = Runtime().get_service_manager(
        'RESOURCE',
        proxy=PROXY,
        implementation=request.cls.service_config)
@pytest.fixture(scope="function")
def resource_profile_test_fixture(request):
    """Per-test fixture placeholder; no per-test setup is required."""
    pass
@pytest.mark.usefixtures("resource_profile_class_fixture", "resource_profile_test_fixture")
class TestResourceProfile(object):
    """Tests for ResourceProfile.

    Every ``supports_*`` probe must answer with a bool and every
    ``get_*_record_types`` accessor must return an OSID TypeList,
    for each service implementation the class fixture parametrizes.
    """
    def test_supports_resource_lookup(self):
        """Tests supports_resource_lookup"""
        assert isinstance(self.mgr.supports_resource_lookup(), bool)
    def test_supports_resource_query(self):
        """Tests supports_resource_query"""
        assert isinstance(self.mgr.supports_resource_query(), bool)
    def test_supports_resource_search(self):
        """Tests supports_resource_search"""
        assert isinstance(self.mgr.supports_resource_search(), bool)
    def test_supports_resource_admin(self):
        """Tests supports_resource_admin"""
        assert isinstance(self.mgr.supports_resource_admin(), bool)
    def test_supports_resource_notification(self):
        """Tests supports_resource_notification"""
        assert isinstance(self.mgr.supports_resource_notification(), bool)
    def test_supports_resource_bin(self):
        """Tests supports_resource_bin"""
        assert isinstance(self.mgr.supports_resource_bin(), bool)
    def test_supports_resource_bin_assignment(self):
        """Tests supports_resource_bin_assignment"""
        assert isinstance(self.mgr.supports_resource_bin_assignment(), bool)
    def test_supports_resource_agent(self):
        """Tests supports_resource_agent"""
        assert isinstance(self.mgr.supports_resource_agent(), bool)
    def test_supports_resource_agent_assignment(self):
        """Tests supports_resource_agent_assignment"""
        assert isinstance(self.mgr.supports_resource_agent_assignment(), bool)
    def test_supports_bin_lookup(self):
        """Tests supports_bin_lookup"""
        assert isinstance(self.mgr.supports_bin_lookup(), bool)
    def test_supports_bin_query(self):
        """Tests supports_bin_query"""
        assert isinstance(self.mgr.supports_bin_query(), bool)
    def test_supports_bin_admin(self):
        """Tests supports_bin_admin"""
        assert isinstance(self.mgr.supports_bin_admin(), bool)
    def test_supports_bin_hierarchy(self):
        """Tests supports_bin_hierarchy"""
        assert isinstance(self.mgr.supports_bin_hierarchy(), bool)
    def test_supports_bin_hierarchy_design(self):
        """Tests supports_bin_hierarchy_design"""
        assert isinstance(self.mgr.supports_bin_hierarchy_design(), bool)
    def test_get_resource_record_types(self):
        """Tests get_resource_record_types"""
        assert isinstance(self.mgr.get_resource_record_types(), abc_type_list)
    def test_get_resource_search_record_types(self):
        """Tests get_resource_search_record_types"""
        assert isinstance(self.mgr.get_resource_search_record_types(), abc_type_list)
    def test_get_resource_relationship_record_types(self):
        """Tests get_resource_relationship_record_types"""
        assert isinstance(self.mgr.get_resource_relationship_record_types(), abc_type_list)
    def test_get_resource_relationship_search_record_types(self):
        """Tests get_resource_relationship_search_record_types"""
        assert isinstance(self.mgr.get_resource_relationship_search_record_types(), abc_type_list)
    def test_get_bin_record_types(self):
        """Tests get_bin_record_types"""
        assert isinstance(self.mgr.get_bin_record_types(), abc_type_list)
    def test_get_bin_search_record_types(self):
        """Tests get_bin_search_record_types"""
        assert isinstance(self.mgr.get_bin_search_record_types(), abc_type_list)
class NotificationReceiver(object):
    """Stub receiver handed to notification sessions; no behavior needed."""
    # Implemented from resource.ResourceManager
    pass
@pytest.fixture(scope="class",
                params=['TEST_SERVICE', 'TEST_SERVICE_ALWAYS_AUTHZ', 'TEST_SERVICE_NEVER_AUTHZ', 'TEST_SERVICE_CATALOGING', 'TEST_SERVICE_FILESYSTEM', 'TEST_SERVICE_MEMCACHE'])
def resource_manager_class_fixture(request):
    """Class-scoped fixture: build a service manager plus a scratch Bin.

    The Bin is created (and finalized away) only for back ends that allow
    catalog creation; the never-authz back end gets a phony catalog id.
    """
    # Implemented from resource.ResourceManager
    request.cls.service_config = request.param
    request.cls.svc_mgr = Runtime().get_service_manager(
        'RESOURCE',
        implementation=request.cls.service_config)
    if not is_never_authz(request.cls.service_config):
        create_form = request.cls.svc_mgr.get_bin_form_for_create([])
        create_form.display_name = 'Test Bin'
        create_form.description = 'Test Bin for resource manager tests'
        catalog = request.cls.svc_mgr.create_bin(create_form)
        request.cls.catalog_id = catalog.get_id()
        request.cls.receiver = NotificationReceiver()
    else:
        request.cls.catalog_id = Id('resource.Resource%3A000000000000000000000000%40DLKIT.MIT.EDU')
    def class_tear_down():
        if not is_never_authz(request.cls.service_config):
            request.cls.svc_mgr.delete_bin(request.cls.catalog_id)
    request.addfinalizer(class_tear_down)
@pytest.fixture(scope="function")
def resource_manager_test_fixture(request):
    """Per-test fixture placeholder; no per-test setup is required."""
    # Implemented from resource.ResourceManager
    pass
@pytest.mark.usefixtures("resource_manager_class_fixture", "resource_manager_test_fixture")
class TestResourceManager(object):
"""Tests for ResourceManager"""
def test_get_resource_lookup_session(self):
"""Tests get_resource_lookup_session"""
# From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_template
if self.svc_mgr.supports_resource_lookup():
self.svc_mgr.get_resource_lookup_session()
def test_get_resource_lookup_session_for_bin(self):
"""Tests get_resource_lookup_session_for_bin"""
# From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_for_bin_template
if self.svc_mgr.supports_resource_lookup():
self.svc_mgr.get_resource_lookup_session_for_bin(self.catalog_id)
with pytest.raises(errors.NullArgument):
self.svc_mgr.get_resource_lookup_session_for_bin()
def test_get_resource_query_session(self):
"""Tests get_resource_query_session"""
# From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_template
if self.svc_mgr.supports_resource_query():
self.svc_mgr.get_resource_query_session()
def test_get_resource_query_session_for_bin(self):
"""Tests get_resource_query_session_for_bin"""
# From tests_templates/resource.py::ResourceManager::get_resource_lookup_session_for_bin_template
if self.svc_mgr.supports_resource_query():
self.svc_mgr.get_resource_query_session_for_bin(self.catalog_id)
with pytest.raises(errors.NullArgument):
self.svc_mgr.get_resource_query_session_for_bin()
def test_get_resource_search_session(self):
"""Tests get_resource_search_session"""
# From tests_templates/resource.py::ResourceManager::get_resource_admin_session_template
if self.svc_mgr.supports_resource_search():
|
marcelometal/python-semanticversion
|
tests/django_test_app/__init__.py
|
Python
|
bsd-2-clause
| 941
| 0
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2014 The python-semanticversion project
# This code is distributed under the two-clause BSD License.
# Import Django lazily so the test app degrades gracefully without it.
try: # pragma: no cover
    import django
    from django.conf import settings
    django_loaded = True
except ImportError: # pragma: no cover
    django_loaded = False

if django_loaded: # pragma: no cover
    if not settings.configured:
        # Minimal in-repo settings: a sqlite test DB and this app only.
        settings.configure(
            DATABASES={
                'default': {
                    'ENGINE': 'django.db.backends.sqlite3',
                    'NAME': 'tests/db/test.sqlite',
                }
            },
            INSTALLED_APPS=[
                'tests.django_test_app',
            ],
            MIDDLEWARE_CLASSES=[],
        )
    # Django >= 1.7 requires explicit app registry population:
    # https://docs.djangoproject.com/en/dev/releases/1.7/#app-loading-changes
    if django.VERSION >= (1, 7):
        from django.apps import apps
        apps.populate(settings.INSTALLED_APPS)
|
gurch101/portfolio-manager
|
setup.py
|
Python
|
mit
| 973
| 0.001028
|
"""stockretriever"""
from setuptools import setup
setup(
name='portfolio-manager',
version='1.0',
description='a web app that keeps track of your investment portfolio',
url='https://github.com/gurch101/portfolio-manager',
author='Gurchet Rai',
author_email='gurch101@gmail.com',
license='MIT',
classifiers=[
'Intended Audience :: Financial and Insurance Industry',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Licen
|
se :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7'
],
keywords='investme
|
nt portfolio',
dependency_links=['https://github.com/gurch101/StockScraper/tarball/master#egg=stockretriever-1.0'],
zip_safe=True,
setup_requires=[
'stockretriever==1.0',
'Flask==0.10.1',
'passlib==1.6.2',
'schedule==0.3.2',
'requests==2.2.1'
]
)
|
leowa/django_informixdb
|
django_informixdb/compiler.py
|
Python
|
apache-2.0
| 1,808
| 0.001106
|
from django.db.models.sql import compiler
class SQLCompiler(compiler.SQLCompiler):
    def as_sql(self, with_limits=True, with_col_aliases=False, subquery=False):
        """Render the query as Informix-flavored SQL.

        Django emits LIMIT/OFFSET; Informix wants ``SELECT [SKIP n] FIRST m``
        and ``?`` (qmark) placeholders instead of ``%s``.
        """
        # Empty slice (low == high): nothing can match, emit no SQL at all.
        if with_limits and self.query.low_mark == self.query.high_mark:
            return '', ()
        # Ask the base compiler for the SQL *without* LIMIT/OFFSET ...
        raw_sql, fields = super(SQLCompiler, self).as_sql(False, with_col_aliases, subquery)
        # ... then splice the SKIP/FIRST dialect into the first SELECT.
        if with_limits:
            if self.query.high_mark is not None:
                _select = "SELECT"
                _first = self.query.high_mark
                if self.query.low_mark:
                    _select += " SKIP %s" % self.query.low_mark
                    # FIRST counts rows *after* SKIP, so shrink it.
                    _first -= self.query.low_mark
                _select += " FIRST %s" % _first
                raw_sql = raw_sql.replace("SELECT", _select, 1)
        # Informix drivers take qmark-style parameters.
        return raw_sql.replace(r'%s', '?'), fields
def _list2tuple(arg):
return tuple(arg) if isinstance(arg, list) else arg
class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
    def as_sql(self):
        """Rewrite each (sql, params) pair to qmark style with tuple params."""
        result = super(SQLInsertCompiler, self).as_sql()
        return [(ret[0].replace(r'%s', '?'), _list2tuple(ret[1])) for ret in result]
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
    def as_sql(self):
        """Delegate to Django, then switch placeholders to qmark style."""
        result = super(SQLAggregateCompiler, self).as_sql()
        return result[0].replace(r'%s', '?'), result[1]


class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
    def as_sql(self):
        """Delegate to Django, then switch placeholders to qmark style."""
        result = super(SQLDeleteCompiler, self).as_sql()
        return result[0].replace(r'%s', '?'), result[1]


class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
    def as_sql(self):
        """Delegate to Django, then switch placeholders to qmark style."""
        result = super(SQLUpdateCompiler, self).as_sql()
        return result[0].replace(r'%s', '?'), result[1]
|
neilbrown/susman
|
dnotify.py
|
Python
|
gpl-2.0
| 3,660
| 0.003552
|
#!/usr/bin/env python
# class to allow watching multiple files and
# calling a callback when any change (size or mtime)
#
# We take exclusive use of SIGIO and maintain a global list of
# watched files.
# As we cannot get siginfo in python, we check every file
# every time we get a signal.
# we report change is size, mtime, or ino of the file (given by name)
# Copyright (C) 2011 Neil Brown <neilb@suse.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os, fcntl, signal
# Global registry of watched directories; consulted by the SIGIO handler.
dirlist = []


def notified(sig, stack):
    """SIGIO handler: re-arm F_NOTIFY on every watched directory, then poll.

    Python gives no siginfo here, so we cannot tell which directory fired
    and must check them all.  The fcntl call re-registers interest —
    presumably because dnotify events are one-shot (TODO confirm).
    """
    for d in dirlist:
        fcntl.fcntl(d.fd, fcntl.F_NOTIFY, (fcntl.DN_MODIFY|fcntl.DN_RENAME|
                                           fcntl.DN_CREATE|fcntl.DN_DELETE))
        d.check()
class dir():
    """Watch one directory for changes via fcntl F_NOTIFY (Linux dnotify).

    NOTE(review): the class name shadows the builtin ``dir``; Python 2 era
    code, kept as-is.
    """
    def __init__(self, dname):
        self.dname = dname
        self.fd = os.open(dname, 0)   # read-only fd used only for F_NOTIFY
        self.files = []               # watched `file` objects in this directory
        self.callbacks = []           # whole-directory callbacks
        fcntl.fcntl(self.fd, fcntl.F_NOTIFY, (fcntl.DN_MODIFY|fcntl.DN_RENAME|
                                              fcntl.DN_CREATE|fcntl.DN_DELETE))
        if not dirlist:
            # First watcher installs the global SIGIO handler.
            signal.signal(signal.SIGIO, notified)
        dirlist.append(self)
    def watch(self, fname, callback):
        """Watch one file (by name) inside this directory; return the watch."""
        # `file` is the watcher class defined below, not the builtin.
        f = file(os.path.join(self.dname, fname), callback)
        self.files.append(f)
        return f
    def watchall(self, callback):
        """Register a directory-wide callback; it is kept only while it
        returns a truthy value from check()."""
        self.callbacks.append(callback)
    def check(self):
        """Run directory-wide callbacks (dropping those that return falsy),
        then poll every watched file for changes."""
        newlist = []
        for c in self.callbacks:
            if c():
                newlist.append(c)
        self.callbacks = newlist
        for f in self.files:
            f.check()
    def cancel(self, victim):
        """Stop watching `victim` (a file watch object) if present."""
        if victim in self.files:
            self.files.remove(victim)
class file():
    """State for one watched file; a change is any difference in size,
    mtime or inode of the path.

    NOTE(review): shadows the Python 2 builtin ``file``; kept as-is.
    """
    def __init__(self, fname, callback):
        self.name = fname
        try:
            stat = os.stat(self.name)
        except OSError:
            # A missing file is represented by all-zero state.
            self.ino = 0
            self.size = 0
            self.mtime = 0
        else:
            self.ino = stat.st_ino
            self.size = stat.st_size
            self.mtime = stat.st_mtime
        self.callback = callback
    def check(self):
        """Re-stat the file; on any change update state, fire the callback
        and return True, otherwise return False."""
        try:
            stat = os.stat(self.name)
        except OSError:
            if self.ino == 0:
                return False  # was already missing: no change
            self.size = 0
            self.mtime = 0
            self.ino = 0
        else:
            if stat.st_size == self.size and stat.st_mtime == self.mtime \
               and stat.st_ino == self.ino:
                return False
            self.size = stat.st_size
            self.mtime = stat.st_mtime
            self.ino = stat.st_ino
        self.callback(self)
        return True
    def cancel(self):
        """Detach this watch from every directory that holds it."""
        global dirlist
        for d in dirlist:
            d.cancel(self)
if __name__ == "__main__" :
import signal
##
def ping(f): print "got ", f.name
d = dir("/tmp/n")
a = d.watch("a", ping)
b = d.watch("b", ping)
c = d.watch("c", ping)
while True:
signal.pause()
|
marcellodesales/svnedge-console
|
svn-server/lib/suds/xsd/query.py
|
Python
|
agpl-3.0
| 6,451
| 0.002945
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{query} module defines a class for performing schema queries.
"""
from logging import getLogger
from suds import *
from suds.sudsobject import *
from suds.xsd import qualify, isqref
from suds.xsd.sxbuiltin import Factory
log = getLogger(__name__)
class Query(Object):
    """
    Schema query base class.

    Holds the queried reference, a history of already-returned results
    (used for cycle avoidance), and the ``resolved`` post-processing flag.
    """
    def __init__(self, ref=None):
        """
        @param ref: The schema reference being queried.
        @type ref: qref
        """
        Object.__init__(self)
        self.id = objid(self)
        self.ref = ref
        self.history = []
        self.resolved = False
        if not isqref(self.ref):
            raise Exception('%s, must be qref' % tostr(self.ref))
    def execute(self, schema):
        """
        Execute this query using the specified schema.
        @param schema: The schema associated with the query. The schema
            is used by the query to search for items.
        @type schema: L{schema.Schema}
        @return: The item matching the search criteria.
        @rtype: L{sxbase.SchemaObject}
        """
        # FIX: the old `raise Exception, msg` form is Python-2-only syntax;
        # the call form below is equivalent and also valid on Python 3.
        raise Exception('not-implemented by subclass')
    def filter(self, result):
        """
        Filter the specified result based on query criteria.
        @param result: A potential result.
        @type result: L{sxbase.SchemaObject}
        @return: True if result should be excluded.
        @rtype: boolean
        """
        if result is None:
            return True
        # Reject anything already returned once, to avoid reference cycles.
        reject = result in self.history
        if reject:
            log.debug('result %s, rejected by\n%s', Repr(result), self)
        return reject
    def result(self, result):
        """
        Query result post processing.
        @param result: A query result.
        @type result: L{sxbase.SchemaObject}
        """
        if result is None:
            log.debug('%s, not-found', self.ref)
            return
        if self.resolved:
            result = result.resolve()
        log.debug('%s, found as: %s', self.ref, Repr(result))
        self.history.append(result)
        return result
class BlindQuery(Query):
    """
    Schema query class that I{blindly} searches for a reference in
    the specified schema. It may be used to find Elements and Types but
    will match on an Element first. This query will also find builtins.
    """
    def execute(self, schema):
        # Builtin XSD types short-circuit the search entirely.
        if schema.builtin(self.ref):
            name = self.ref[0]
            b = Factory.create(schema, name)
            log.debug('%s, found builtin (%s)', self.id, name)
            return b
        result = None
        # Root elements are tried before root types (element-first match).
        for d in (schema.elements, schema.types):
            result = d.get(self.ref)
            if self.filter(result):
                result = None
            else:
                break
        if result is None:
            # Fall back to a deep element search; share our history so
            # cycle detection still applies.
            eq = ElementQuery(self.ref)
            eq.history = self.history
            result = eq.execute(schema)
        return self.result(result)
class TypeQuery(Query):
    """
    Schema query class that searches for Type references in
    the specified schema. Matches on root types only.
    """
    def execute(self, schema):
        # Builtin XSD types are fabricated on the fly via the Factory.
        if schema.builtin(self.ref):
            name = self.ref[0]
            b = Factory.create(schema, name)
            log.debug('%s, found builtin (%s)', self.id, name)
            return b
        result = schema.types.get(self.ref)
        if self.filter(result):
            result = None
        return self.result(result)
class GroupQuery(Query):
    """
    Schema query class that searches for Group references in
    the specified schema.
    """
    def execute(self, schema):
        """Look the reference up among the schema's root groups."""
        candidate = schema.groups.get(self.ref)
        return self.result(None if self.filter(candidate) else candidate)
class AttrQuery(Query):
    """
    Schema query class that searches for Attribute references in
    the specified schema. Matches on root Attribute by qname first, then searches
    deep into the document.
    """
    def execute(self, schema):
        result = schema.attributes.get(self.ref)
        if self.filter(result):
            # Not a (fresh) root attribute: search every schema node.
            result = self.__deepsearch(schema)
        return self.result(result)
    def __deepsearch(self, schema):
        # NOTE(review): imported locally, presumably to avoid an import
        # cycle with sxbasic — confirm before moving to module level.
        from suds.xsd.sxbasic import Attribute
        result = None
        for e in schema.all:
            result = e.find(self.ref, (Attribute,))
            if self.filter(result):
                result = None
            else:
                break
        return result
class AttrGroupQuery(Query):
    """
    Schema query class that searches for attributeGroup references in
    the specified schema.
    """
    def execute(self, schema):
        """Look the reference up among the schema's root attributeGroups."""
        candidate = schema.agrps.get(self.ref)
        return self.result(None if self.filter(candidate) else candidate)
class ElementQuery(Query):
    """
    Schema query class that searches for Element references in
    the specified schema. Matches on root Elements by qname first, then searches
    deep into the document.
    """
    def execute(self, schema):
        result = schema.elements.get(self.ref)
        if self.filter(result):
            # Not a (fresh) root element: search every schema node.
            result = self.__deepsearch(schema)
        return self.result(result)
    def __deepsearch(self, schema):
        # NOTE(review): imported locally, presumably to avoid an import
        # cycle with sxbasic — confirm before moving to module level.
        from suds.xsd.sxbasic import Element
        result = None
        for e in schema.all:
            result = e.find(self.ref, (Element,))
            if self.filter(result):
                result = None
            else:
                break
        return result
|
esc/pybuilder
|
build.py
|
Python
|
apache-2.0
| 6,011
| 0.001497
|
#!/usr/bin/env python
#
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

sys.path.insert(0, 'src/main/python')  # This is only necessary in PyBuilder sources for bootstrap

from pybuilder import bootstrap
from pybuilder.core import Author, init, use_plugin

bootstrap()

# --- plugin activation ---------------------------------------------------
use_plugin("python.core")
use_plugin("python.pytddmon")
use_plugin("python.distutils")
use_plugin("python.install_dependencies")
use_plugin("copy_resources")
use_plugin("filter_resources")
use_plugin("source_distribution")
use_plugin("python.unittest")
if sys.platform != "win32":
    # cram is only activated off Windows
    use_plugin("python.cram")
use_plugin("python.integrationtest")
use_plugin("python.coverage")
use_plugin("python.flake8")
# NOTE(review): 'filter_resources' is activated a second time here (also above).
use_plugin('filter_resources')
if not sys.version_info[0] == 3:
    # snakefood is only activated on Python 2
    use_plugin("python.snakefood")
use_plugin("python.pydev")
use_plugin("python.pycharm")
use_plugin("python.pytddmon")
use_plugin("python.sphinx")
if sys.platform != "win32":
    use_plugin("python.pdoc")

# --- project metadata ----------------------------------------------------
name = "pybuilder"
summary = "An extensible, easy to use continuous build tool for Python"
description = """PyBuilder is a build automation tool for python.
PyBuilder is a software build tool written in pure Python which mainly targets Python applications.
It is based on the concept of dependency based programming but also comes along with powerful plugin mechanism that
allows the construction of build life cycles similar to those known from other famous build tools like Apache Maven.
"""
authors = [Author("Alexander Metzner", "alexander.metzner@gmail.com"),
           Author("Maximilien Riehl", "max@riehl.io"),
           Author("Michael Gruber", "aelgru@gmail.com"),
           Author("Udo Juettner", "udo.juettner@gmail.com"),
           Author("Marcel Wolf", "marcel.wolf@me.com"),
           Author("Arcadiy Ivanov", "arcadiy@ivanov.biz"),
           Author("Valentin Haenel", "valentin@haenel.co"),
           ]
url = "http://pybuilder.github.io"
license = "Apache License"
version = "0.12.0.dev"
requires_python = ">=2.7,!=3.0,!=3.1,!=3.2,!=3.3,<3.8"
default_task = ["install_dependencies", "analyze", "publish"]
@init
def initialize(project):
    """Configure dependencies, quality gates and distribution metadata."""
    # mock is stdlib (unittest.mock) on Python 3; only a dependency on 2.
    if sys.version_info[0] == 2:
        project.build_depends_on("mock")
    project.build_depends_on("pyfix")  # required test framework
    project.build_depends_on("pyassert")
    project.build_depends_on("pygments")
    project.depends_on("tblib")
    project.depends_on("pip", "~=9.0")
    project.depends_on("setuptools", "~=39.0")
    project.depends_on("wheel", "~=0.31")
    project.depends_on("tailer", "~=0.4")
    project.set_property("verbose", True)
    # Coverage is reported but does not fail the build.
    project.set_property("coverage_break_build", False)
    project.set_property("coverage_reset_modules", True)
    project.get_property("coverage_exceptions").append("pybuilder.cli")
    project.get_property("coverage_exceptions").append("pybuilder.plugins.core_plugin")
    # Issue #284
    project.set_property("integrationtest_inherit_environment", True)
    # flake8 *does* fail the build, including tests and scripts.
    project.set_property('flake8_break_build', True)
    project.set_property('flake8_include_test_sources', True)
    project.set_property('flake8_include_scripts', True)
    project.set_property('flake8_max_line_length', 130)
    project.set_property('frosted_include_test_sources', True)
    project.set_property('frosted_include_scripts', True)
    project.set_property("copy_resources_target", "$dir_dist/pybuilder")
    project.get_property("copy_resources_glob").append("LICENSE")
    project.get_property("filter_resources_glob").append("**/pybuilder/__init__.py")
    project.include_file("pybuilder", "LICENSE")
    project.set_property("sphinx_doc_author", "PyBuilder Team")
    project.set_property("sphinx_doc_builder", "html")
    project.set_property("sphinx_project_name", project.name)
    project.set_property("sphinx_project_version", project.version)
    project.set_property("pdoc_module_name", "pybuilder")
    # Keep IDE metadata out of the source distribution.
    project.get_property("source_dist_ignore_patterns").append(".project")
    project.get_property("source_dist_ignore_patterns").append(".pydevproject")
    project.get_property("source_dist_ignore_patterns").append(".settings")
    # enable this to build a bdist on vagrant
    # project.set_property("distutils_issue8876_workaround_enabled", True)
    project.set_property("distutils_readme_description", True)
    project.set_property("distutils_description_overwrite", True)
    project.set_property("distutils_console_scripts", ["pyb_ = pybuilder.cli:main"])
    project.set_property("distutils_classifiers", [
        'Programming Language :: Python',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Software Development :: Testing'])
|
sizzlelab/pysmsd
|
extras/webob/__init__.py
|
Python
|
mit
| 82,534
| 0.001648
|
from cStringIO import StringIO
import sys
import cgi
import urllib
import urlparse
import re
import textwrap
from Cookie import BaseCookie
from rfc822 import parsedate_tz, mktime_tz, formatdate
from datetime import datetime, date, timedelta, tzinfo
import time
import calendar
import tempfile
import warnings
from webob.datastruct import EnvironHeaders
from webob.multidict import MultiDict, UnicodeMultiDict, NestedMultiDict, NoVars
from webob.etag import AnyETag, NoETag, ETagMatcher, IfRange, NoIfRange
from webob.headerdict import HeaderDict
from webob.statusreasons import status_reasons
from webob.cachecontrol import CacheControl, serialize_cache_control
from webob.acceptparse import Accept, MIMEAccept, NilAccept, MIMENilAccept, NoAccept
from webob.byterange import Range, ContentRange
try:
sorted
except NameError:
from webob.compat import sorted
_CHARSET_RE = re.compile(r';\s*charset=([^;]*)', re.I)
_SCHEME_RE = re.compile(r'^[a-z]+:', re.I)
_PARAM_RE = re.compile(r'([a-z0-9]+)=(?:"([^"]*)"|([a-z0-9_.-]*))', re.I)
_OK_PARAM_RE = re.compile(r'^[a-z0-9_.-]+$', re.I)
__all__ = ['Request', 'Response', 'UTC', 'day', 'week', 'hour', 'minute', 'second', 'month', 'year', 'html_escape']
class _UTC(tzinfo):
def dst(self, dt):
return timedelta(0)
def utcoffset(self, dt):
|
return timedelta(0)
def tzname(self, dt):
return 'UTC'
def __repr__(self):
return 'UTC'
UTC = _UTC()
def html_escape(s):
    """HTML-escape a string or object

    This converts any non-string objects passed into it to strings
    (actually, using ``unicode()``). All values returned are
    non-unicode strings (using ``&#num;`` entities for all non-ASCII
    characters).

    None is treated specially, and returns the empty string.
    """
    # NOTE(review): Python 2 only (basestring/unicode); cgi.escape was
    # removed in Python 3.8 -- html.escape is the modern equivalent.
    if s is None:
        return ''
    if not isinstance(s, basestring):
        if hasattr(s, '__unicode__'):
            s = unicode(s)
        else:
            s = str(s)
    # quote=True also escapes double quotes, so output is attribute-safe.
    s = cgi.escape(s, True)
    if isinstance(s, unicode):
        s = s.encode('ascii', 'xmlcharrefreplace')
    return s
def timedelta_to_seconds(td):
    """
    Converts a timedelta instance to whole seconds.

    Microseconds are ignored, matching the original contract.
    """
    return td.days * 24 * 60 * 60 + td.seconds
day = timedelta(days=1)
week = timedelta(weeks=1)
hour = timedelta(hours=1)
minute = timedelta(minutes=1)
second = timedelta(seconds=1)
# Estimate, I know; good enough for expirations
month = timedelta(days=30)
year = timedelta(days=365)
class _NoDefault:
    # Sentinel class: its single instance (``NoDefault`` below) marks
    # "no default value was supplied", distinct from a legitimate None.
    def __repr__(self):
        return '(No Default)'
# Module-level singleton sentinel.
NoDefault = _NoDefault()
class environ_getter(object):
    """For delegating an attribute to a key in self.environ."""

    def __init__(self, key, default='', default_factory=None,
                 settable=True, deletable=True, doc=None,
                 rfc_section=None):
        self.key = key
        self.default = default
        self.default_factory = default_factory
        self.settable = settable
        self.deletable = deletable
        # Build a __doc__ string describing the permitted operations.
        verbs = "Gets"
        if self.settable:
            verbs += " and sets"
        if self.deletable:
            verbs += " and deletes"
        docstring = verbs + (" the %r key from the environment." % self.key)
        docstring += _rfc_reference(self.key, rfc_section)
        if doc:
            docstring += '\n\n' + textwrap.dedent(doc)
        self.__doc__ = docstring

    def __get__(self, obj, type=None):
        # Accessed on the class itself: return the descriptor object.
        if obj is None:
            return self
        environ = obj.environ
        if self.key in environ:
            return environ[self.key]
        if self.default_factory:
            # Lazily create the default, store it in the environ, return it.
            val = environ[self.key] = self.default_factory()
            return val
        return self.default

    def __set__(self, obj, value):
        if not self.settable:
            raise AttributeError("Read-only attribute (key %r)" % self.key)
        if value is None:
            # Assigning None removes the key entirely (if present).
            obj.environ.pop(self.key, None)
        else:
            obj.environ[self.key] = value

    def __delete__(self, obj):
        if not self.deletable:
            raise AttributeError("You cannot delete the key %r" % self.key)
        del obj.environ[self.key]

    def __repr__(self):
        return '<Proxy for WSGI environ %r key>' % self.key
class header_getter(object):
    """For delegating an attribute to a header in self.headers"""

    def __init__(self, header, default=None,
                 settable=True, deletable=True, doc=None, rfc_section=None):
        self.header = header
        self.default = default
        self.settable = settable
        self.deletable = deletable
        # Build a __doc__ string describing the permitted operations.
        docstring = "Gets"
        if self.settable:
            docstring += " and sets"
        if self.deletable:
            docstring += " and deletes"
        # Fixed typo: was "they header %s" in the original.
        docstring += " the header %s from the headers" % self.header
        docstring += _rfc_reference(self.header, rfc_section)
        if doc:
            docstring += '\n\n' + textwrap.dedent(doc)
        self.__doc__ = docstring

    def __get__(self, obj, type=None):
        # Accessed on the class itself: return the descriptor object.
        if obj is None:
            return self
        if self.header not in obj.headers:
            return self.default
        else:
            return obj.headers[self.header]

    def __set__(self, obj, value):
        if not self.settable:
            raise AttributeError("Read-only attribute (header %s)" % self.header)
        if value is None:
            # Assigning None removes the header entirely (if present).
            if self.header in obj.headers:
                del obj.headers[self.header]
        else:
            if isinstance(value, unicode):
                # This is the standard encoding for headers:
                value = value.encode('ISO-8859-1')
            obj.headers[self.header] = value

    def __delete__(self, obj):
        if not self.deletable:
            raise AttributeError("You cannot delete the header %s" % self.header)
        del obj.headers[self.header]

    def __repr__(self):
        return '<Proxy for header %s>' % self.header
class converter(object):
    """
    Wraps a decorator, and applies conversion for that decorator
    """

    def __init__(self, decorator, getter_converter, setter_converter,
                 convert_name=None, doc=None, converter_args=()):
        self.decorator = decorator
        self.getter_converter = getter_converter
        self.setter_converter = setter_converter
        self.convert_name = convert_name
        self.converter_args = converter_args
        # Extend the wrapped descriptor's docstring with conversion info.
        docstring = decorator.__doc__ or ''
        docstring += " Converts it as a "
        if convert_name:
            docstring += convert_name + '.'
        else:
            docstring += "%r and %r." % (getter_converter, setter_converter)
        if doc:
            docstring += '\n\n' + textwrap.dedent(doc)
        self.__doc__ = docstring

    def __get__(self, obj, type=None):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        raw = self.decorator.__get__(obj, type)
        return self.getter_converter(raw, *self.converter_args)

    def __set__(self, obj, value):
        converted = self.setter_converter(value, *self.converter_args)
        self.decorator.__set__(obj, converted)

    def __delete__(self, obj):
        # Deletion is delegated untouched to the wrapped descriptor.
        self.decorator.__delete__(obj)

    def __repr__(self):
        suffix = ' %s' % self.convert_name if self.convert_name else ''
        return '<Converted %r%s>' % (self.decorator, suffix)
def _rfc_reference(header, section):
if not section:
return ''
major_section = section.split('.')[0]
link = 'http://www.w3.org/Protocols/rfc2616/rfc2616-sec%s.html#sec%s' % (
major_section, section)
if header.startswith('HTTP_'):
header = header[5:].title().replace('_', '-')
return " For more information on %s see `section %s <%s>`_." % (
header, section, link)
class deprecated_property(object):
"""
Wraps a decorator, with a deprecation warning or error
"""
def __init__(self, decorator, attr, message, warning=True):
self.decorator = decorator
self.attr = attr
self.message = message
self.warning = warning
def __get__(self, obj, type=None):
if obj is None:
|
aaxelb/osf.io
|
osf/migrations/0055_update_metaschema_active.py
|
Python
|
apache-2.0
| 594
| 0.001684
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-14 14:32
from __future__ import unicode_literals
from django.db import migrati
|
ons
from osf.models import MetaSchema
from website.project.metadata.schemas import LATEST_SCHEMA_VERSION
def update_metaschema_active(*args, **kwargs):
    # Data-migration helper: deactivate every MetaSchema whose version is
    # older than LATEST_SCHEMA_VERSION.  *args/**kwargs absorb the
    # (apps, schema_editor) arguments Django passes to RunPython callables.
    MetaSchema.objects.filter(schema_version__lt=LATEST_SCHEMA_VERSION).update(active=False)
class Migration(migrations.Migration):
dependencies = [
('osf', '0054_add_file_version_indices'),
]
operations = [
migrations.RunPython(update
|
_metaschema_active, ),
]
|
alexryndin/ambari
|
ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/scripts/setup_infra_solr.py
|
Python
|
apache-2.0
| 5,041
| 0.004761
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.core.exceptions import Fail
from resource_management.core.source import InlineTemplate, Template
from resource_management.core.resources.system import Directory, File
from resource_management.libraries.functions.decorator import retry
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions import solr_cloud_util
def setup_infra_solr(name = None):
import params
if name == 'server':
Directory([params.infra_solr_log_dir, params.infra_solr_piddir,
params.infra_solr_datadir, params.infra_solr_data_resources_dir],
mode=0755,
cd_access='a',
create_parents=True,
owner=params.infra_solr_user,
group=params.user_group
)
Directory([params.solr_dir, params.infra_solr_conf],
mode=0755,
cd_access='a',
owner=params.infra_solr_user,
group=params.user_group,
|
create_parents=True,
recursive_ownership=True
)
File(params.infra_solr_log,
mode=0644,
owner=params.infra_solr_user,
|
group=params.user_group,
content=''
)
File(format("{infra_solr_conf}/infra-solr-env.sh"),
content=InlineTemplate(params.solr_env_content),
mode=0755,
owner=params.infra_solr_user,
group=params.user_group
)
File(format("{infra_solr_datadir}/solr.xml"),
content=InlineTemplate(params.solr_xml_content),
owner=params.infra_solr_user,
group=params.user_group
)
File(format("{infra_solr_conf}/log4j.properties"),
content=InlineTemplate(params.solr_log4j_content),
owner=params.infra_solr_user,
group=params.user_group
)
custom_security_json_location = format("{infra_solr_conf}/custom-security.json")
File(custom_security_json_location,
content=InlineTemplate(params.infra_solr_security_json_content),
owner=params.infra_solr_user,
group=params.user_group,
mode=0640
)
jaas_file = params.infra_solr_jaas_file if params.security_enabled else None
url_scheme = 'https' if params.infra_solr_ssl_enabled else 'http'
create_ambari_solr_znode()
security_json_file_location = custom_security_json_location \
if params.infra_solr_security_json_content and str(params.infra_solr_security_json_content).strip() \
else format("{infra_solr_conf}/security.json") # security.json file to upload
if params.security_enabled:
File(format("{infra_solr_jaas_file}"),
content=Template("infra_solr_jaas.conf.j2"),
owner=params.infra_solr_user)
File(format("{infra_solr_conf}/security.json"),
content=Template("infra-solr-security.json.j2"),
owner=params.infra_solr_user,
group=params.user_group,
mode=0640)
solr_cloud_util.set_cluster_prop(
zookeeper_quorum=params.zookeeper_quorum,
solr_znode=params.infra_solr_znode,
java64_home=params.java64_home,
prop_name="urlScheme",
prop_value=url_scheme,
jaas_file=jaas_file
)
solr_cloud_util.setup_kerberos_plugin(
zookeeper_quorum=params.zookeeper_quorum,
solr_znode=params.infra_solr_znode,
jaas_file=jaas_file,
java64_home=params.java64_home,
secure=params.security_enabled,
security_json_location=security_json_file_location
)
if params.security_enabled:
solr_cloud_util.secure_solr_znode(
zookeeper_quorum=params.zookeeper_quorum,
solr_znode=params.infra_solr_znode,
jaas_file=jaas_file,
java64_home=params.java64_home,
sasl_users_str=params.infra_solr_sasl_user
)
elif name == 'client':
solr_cloud_util.setup_solr_client(params.config)
else :
raise Fail('Nor client or server were selected to install.')
@retry(times=30, sleep_time=5, err_class=Fail)
def create_ambari_solr_znode():
  # Create the Infra Solr znode in ZooKeeper.  Retried up to 30 times,
  # 5 seconds apart, because ZooKeeper may not be reachable yet when the
  # component starts up.
  import params
  solr_cloud_util.create_znode(
    zookeeper_quorum=params.zookeeper_quorum,
    solr_znode=params.infra_solr_znode,
    java64_home=params.java64_home,
    retry=30, interval=5)
|
team-vigir/vigir_behaviors
|
vigir_flexbe_states/src/vigir_flexbe_states/moveit_predefined_pose_state.py
|
Python
|
bsd-3-clause
| 13,581
| 0.030705
|
#!/usr/bin/env python
import rospy
import actionlib
from flexbe_core import EventState, Logger
from vigir_flexbe_states.proxy import ProxyMoveitClient
"""
Created on 04/13/2014
@author: Philipp Schillinger
"""
class MoveitPredefinedPoseState(EventState):
"""
Uses moveit to go to one of the pre-defined poses.
-- target_pose int Identifier of the pre-defined pose to be used.
-- vel_scaling float Scales the velocity of the motion.
Lower values for slower trajectories.
-- ignore_collisions boolean Should collisions be ignored? Only pass True if you are sure that it is safe.
-- link_paddings dict link_name (str) : padding (float) pairs
-- is_cartesian boolean Execute as cartesian motion
># side string Arm side, turning direction, etc.
Possible values: {left, right, same}
<= done Successfully executed the motion.
<= failed Failed to execute the motion.
"""
# Arms
STAND_POSE = 0
SINGLE_ARM_STAND = 1 # for preventing unconstrained motion of the other
BOTH_ARMS_SIDES = 2 # for safely holding the tool at the robot's side
SINGLE_ARM_SIDE = 3
STAND_POSE_UP = 4
CALIBRATE_ARMS = 10 # for checking arm calibration with the head camera
WALK_POSE = 19
# Torso
TURN_TORSO_CENTER_POSE = 20
TURN_TORSO_SLIGHTLY = 21
TURN_TORSO_MORE = 22
TURN_TORSO_FULL = 23
# Head
HEAD_CENTER = 30
HEAD_SIDE = 31
# Task-specific poses
# Second digit is task number. Comment is pose in the Position widget.
CAR_ENTRY_ARMS_POSE = 111 # CarEntry
CAR_ENTRY_LEGS_POSE = 112 # CarEntry
CAR_ENTRY_FORE_POSE = 113 # CarEntryFore
CAR_PREDRIVE_LARM_POSE = 114 # pre_drive
CAR_DRIVE_LARM_POSE = 115 # drive
CAR_DRIVE_CAMERA_POSE = 116 # CarCamera
DOOR_READY_POSE = 131
DOOR_OPEN_POSE_TURNED = 132
DOOR_OPEN_POSE_STRAIGHT = 133
DOOR_OPEN_POSE_SIDE = 134 # push_door_3
DOOR_PUSH_SIDE = 136
DOOR_OPEN_TURN_MANIPULATE = 138
DOOR_OPEN_TURN_LIDAR = 139
POKE_READY_POSE = 151
PREP_CUT_WALL_1 = 155
PREP_CUT_WALL_2 = 156
PREP_CUT_WALL_3 = 157
LADDER_READY_POSE = 181
def __init__(self, target_pose,
vel_scaling = 0.1,
ignore_collisions = False,
link_paddings = {},
is_cartesian = False):
"""Constructor"""
super(MoveitPredefinedPoseState, self).__init__(outcomes=['done', 'failed'],
input_keys=['side'])
if not rospy.has_param("behavior/robot_namespace"):
Logger.logerr("Need to specify parameter behavior/robot_namespace at the parameter server")
return
self._robot = rospy.get_param("behavior/robot_namespace")
self._poses = dict()
self._poses['flor'] = dict()
# Position mode widget: src/vigir_ocs_eui/vigir_rqt/vigir_rqt_position_mode/launch
self._poses['flor']['left'] = {
# Joint names: l_arm_shz, l_arm_shx, l_arm_ely, l_arm_elx, l_arm_wry, l_arm_wrx, l_arm_wry2
1: {'group': 'l_arm_group', 'joints': [-0.25, -1.34, 1.88, 0.49, 0.0, 0.0, 0.0]},
3: {'group': 'l_arm_group', 'joints': [+0.72, -0.95, 2.7, 0.95, 0.0, -0.4, -0.50]},
10: {'group': 'l_arm_group', 'joints': [-1.0, 0.28, 1.2, 1.6, 0.3, 0.5 , 0.0]},
21: {'group': 'torso_group', 'joints': [+0.20, 0.00, 0.00]},
22: {'group': 'torso_group', 'joints': [+0.40, 0.00, 0.00]},
23: {'group': 'torso_group', 'joints': [+0.55, 0.00, 0.00]},
112: {'group': 'l_leg_group', 'joints': [+0.00, +0.00, -1.60, +1.40, -0.50, 0.00]}, # Safety pose
114: {'group': 'l_arm_group', 'joints': [+0.76, -0.94, 0.80, 2.00, +1.00, -0.20, -1.35]}, # pre_drive
115: {'group': 'l_arm_group', 'joints': [+0.11, -0.16, 1.75, 1.60, +1.00, -0.90, -1.00]}, # drive
116: {'group': 'l_arm_group', 'joints': []}, # We use the right hand for the camera!
131: {'group': 'l_arm_group', 'joints': [-0.29, -0.22, 1.87, 2.17, -0.17, 0.81, 0.12]},
132: {'group': 'l_arm_group', 'joints': [-0.70, -0.09, 1.93, 0.66, -0.15, 1.52, 0.12]},
133: {'group': 'l_arm_group', 'joints': [-1.38, -0.16, 1.82, 0.57, -0.19, 1.52, 0.12]},
134: {'group': 'l_arm_group', 'joints': []}, # Most probably will never be used
151: {'group': 'l_arm_group', 'joints': [-1.01, -0.43, +1.32, +1.67, -0.91, +1.46, +0.98]},
155: {'group': 'l_arm_group', 'joints': [0.0, -0.37, 2.65, 1.4, -0.2, 0.9 , -1.54]},
156: {'group': 'l_arm_group', 'joints': [-0.32, -0.9, 2.2, 1.3, 0.5, 1.0 , -1.8]},
157: {'group': 'l_arm_group', 'joints': [-0.45, -1.0, 2.1, 1.3, 0.5, 0.8 , -0.8]}
}
self._poses['flor']['same'] = {
0: {'group': 'both_arms_group', 'joints': [-0.25, -1.34, +1.88, +0.49, +0.00, +0.00, +0.00, \
+0.25, +1.34, +1.88, -0.49, +0.00, +0.00, +0.00]},
2: {'group': 'both_arms_group', 'joints': [+0.72, -0.95, 2.7, 0.95, 0.0, -0.4 , -0.5, \
-0.72, 0.95, 2.7, -0.95, 0.0, 0.4 , -0.5]},
19: {'group': 'both_arms_group', 'joints': [-1.5, -1.5, +0.30, +0.50, +0.0, +0.8, +0.00, \
+1.5, +1.5, +0.30, -0.50, +0.0, -0.8, +0.00]},
20: {'group': 'torso_group', 'joints': [+0.00, 0.00, 0.00]},
111: {'group': 'both_arms_group', 'joints': [+0.20, -1.50, +0.00, +1.72, 0.00, +0.00, 0.0, \
+0.00, +1.50, +0.00, -1.72, 0.00, +0.00, 0.0]},
113: {'group': 'both_arms_group', 'joints': [+0.20, -1.50, +0.00, +1.72, 0.00, -1.57, 0.0, \
+0.00, +1.50, +0.00, -1.72, 0.00, +1.57, 0.0]},
181: {'group': 'both_arms_group', 'joints': [-1.53, -0.69, +0.12, +1.47, 0.00, +0.88, 0.00, \
+1.53, +0.69, +0.12, -1.47, 0.00, -0.88, 0.00]}
}
self._poses['flor']['right'] = {
# Joint names: r_arm_shz, r_arm_shx, r_arm_ely, r_arm_elx, r_arm_wry, r_arm_wrx, r_arm_wry2
1: {'group': 'r_arm_group', 'joints': [+0.25, 1.34, 1.88, -0.49, 0.0, 0.0, 0.0]},
3: {'group': 'r_arm_group', 'joints': [-0.72, 0.95, 2.7, -0.95, 0.0, 0.4, -0.50]},
10: {'group': 'r_arm_group', 'joints': [+1.0, -0.28, 1.2, -1.6, 0.3, -0.5 , 0.0]},
21: {'group': 'torso_group', 'joints': [-0.20, 0.00, 0.00]},
22: {'group': 'torso_group', 'joints': [-0.40, 0.00, 0.00]},
23: {'group': 'torso_group', 'joints': [-0.55, 0.00, 0.00]},
112: {'group': 'r_leg_group', 'joints': [+0.00, +0.00, -1.34, +1.23, 0.00, 0.00]},
115: {'group': 'r_arm_group', 'joints': []}, # Driving is done with the left arm!
116: {'group': 'r_arm_group', 'joints': [+0.90, 0.17, 0.50, -1.58, -0.70, +1.50, -0.34]}, # CarCamera
131: {'group': 'r_arm_group', 'joints': [+0.29, 0.22, 1.87, -2.17, -0.17, -0.81, 0.12]},
132: {'group': 'r_arm_group', 'joints': [+0.70, 0.09, 1.93, -0.66, -0.15, -1.52, 0.12]},
133: {'group': 'r_arm_group', 'joints': [+1.38, 0.16, 1.82, -0.57, -0.19, -1.52, 0.12]},
134: {'group': 'r_arm_group', 'joints': [+0.00, +0.54, +0.94, -1.04, 0.80, 0.5, 0.7]},
151: {'group': 'r_arm_group', 'joints': [+1.01, +0.43, +1.32, -1.67, -0.91, -1.46, +0.98]},
155: {'group': 'r_arm_group', 'joints': [+0.00, +0.37,
|
+2.65, -1.40, -0.20, -0.90, -1.54]},
156: {'group': 'r_arm_group', 'joints': [+0.32, +0.90, +2.20, -1.30, +0.50, -1.00, -1.80]},
157: {'grou
|
p': 'r_arm_group', 'joints': [0.45, 1.0, 2.1, -1.3, 0.5, -0.8 , -0.8]}
}
self._poses['thor_mang'] = dict()
self._poses['thor_mang']['left'] = {
1: {'group': 'l_arm_group', 'joints': [0.785385646194622, -0.281153767716932, 0.000600782658167331, -1.57080884130538, -0.25205140042963, 0.01563815008, 0]},
3: {'group': 'l_arm_group', 'joints': [0.77, -0.27, 0.02, -1.21, -0.25, 0.02, 0]},
4: {'group': 'l_arm_group', 'joints': [0.785385646194622, -0.281153767716932, 0.000600782658167331, -1.97080884130538, -0.25205140042963, 0.01563815008, 0]},
21: {'group': 'torso_group', 'joints': [+0.30, 0.03]},
22: {'group': 'torso_group', 'joints': [+0.60, 0.03]},
23: {'group': 'torso_group', 'joints': [+1.02, 0.03]},
31: {'group': 'head_group', 'joints': [1.57, 0.00]},
131: {'group': 'l_arm_group', 'joints': [-1.73, -0.69, 1.75, -1.86, 0.04, -0.72, 1.63]},
132: {'group': 'l_arm_group', 'joints': [-1.76, -1.13, 1.68, -0.55, 0.02, -1.81, 1.63]},
|
apaku/jenkinstray
|
jenkinstray/jenkinsjob.py
|
Python
|
bsd-2-clause
| 3,238
| 0.005559
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Andreas Pakulat <apaku@gmx.de>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#
|
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PART
|
ICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from enum import IntEnum
def colorToJenkinsState(colorstr):
    """Map a Jenkins ball 'color' string to a JenkinsState value.

    Jenkins encodes job status as a color string, possibly suffixed
    (e.g. 'blue_anime' while building), hence the prefix matching below.
    Exactly one known color keyword must appear in *colorstr*.
    """
    known_colors = ("blue", "yellow", "red", "disabled", "notbuilt", "aborted")
    # The original used len(filter(...)), which raises TypeError on
    # Python 3 where filter() returns a lazy iterator; counting with a
    # generator expression behaves identically on Python 2 and 3.
    assert sum(1 for color in known_colors if color in colorstr) == 1
    if colorstr.startswith("blue"):
        return JenkinsState.Successful
    elif colorstr.startswith("yellow"):
        return JenkinsState.Unstable
    elif colorstr.startswith("disabled") or colorstr.startswith("notbuilt") or colorstr.startswith("aborted"):
        return JenkinsState.Disabled
    elif colorstr.startswith("red"):
        return JenkinsState.Failed
    else:
        return JenkinsState.Unknown
class JenkinsState(IntEnum):
    # Job states derived from the Jenkins ball color (see
    # colorToJenkinsState above).  IntEnum so a state round-trips through
    # int() and JenkinsState(value), as done by JenkinsJob.toDict/fromDict.
    Unstable = 0
    Failed = 1
    Successful = 2
    Unknown = 3
    Disabled = 4
class JenkinsJob(object):
    """A single Jenkins job: its identity (name/url), current state, and
    whether the tray is monitoring it."""

    def __init__(self, name, monitored, url, state):
        assert name is not None
        self.name = name
        assert url is not None
        self.url = url
        assert state in JenkinsState
        self.state = state
        assert monitored is not None
        self.monitored = monitored
        # State observed before the most recent update; starts unknown.
        self.lastState = JenkinsState.Unknown

    def __eq__(self, other):
        mine = (self.name, self.url, self.monitored, self.state)
        theirs = (other.name, other.url, other.monitored, other.state)
        return mine == theirs

    def __ne__(self, other):
        return not self.__eq__(other)

    def enableMonitoring(self):
        self.monitored = True

    def disableMonitoring(self):
        self.monitored = False

    def toDict(self):
        # Serialize with a plain int state so the dict is JSON-friendly.
        return {"name": self.name, "state": int(self.state),
                "url": self.url, "monitored": self.monitored}

    @classmethod
    def fromDict(clz, dictobj):
        return JenkinsJob(dictobj["name"], dictobj["monitored"],
                          dictobj["url"], JenkinsState(dictobj["state"]))
|
kermitfr/kermit-webui
|
src/webui/platforms/jboss/operations.py
|
Python
|
gpl-3.0
| 3,848
| 0.013773
|
'''
Created on Nov 8, 2011
@author: mmornati
'''
from webui.abstracts import ContextOperation
from webui import settings
from webui.core import kermit_modules
from guardian.shortcuts import get_objects_for_user
from webui.agent.models import Agent, Action
class JbossDeployContextMenu(ContextOperation):
def get_operations(self):
context_menu_ops = []
context_menu_ops.append(
{"name":"deploy_jboss",
"description":"Deploy Application",
"javascript":"getDeployForm('%s', 'jboss', 'deploy-dialog', 'deploy', '$$filterlist$$')" % settings.BASE_URL,
"server_operation":"",
})
return context_menu_ops
def get_type(self):
return 'JBoss'
def get_visible(self, server):
agent = server.agents.filter(name='jboss')
classes = server.puppet_classes.filter(name='jbs')
return len(agent)==1 and len(class
|
es)==1
def get_enabled(self, user):
if not user.is_superuser:
agents = get_objects_for_user(user, 'use_agent', Agent).filter(enabled=True, name="jboss")
if len(agents)==1:
action = get_objects_for_user(user, 'use
|
_action', Action).filter(agent=agents[0], name="deploy")
return action and len(action)==1
else:
return False
else:
return True
class JbossRedeployContextMenu(ContextOperation):
    """Context-menu operation that redeploys an application on JBoss."""

    def get_operations(self):
        # A single menu entry; the javascript opens the redeploy dialog.
        return [{
            "name": "redeploy_jboss",
            "description": "Redeploy Application",
            "javascript": "getDeployForm('%s', 'jboss', 'deploy-dialog', 'redeploy', '$$filterlist$$')" % settings.BASE_URL,
            "server_operation": "",
        }]

    def get_type(self):
        return 'JBoss'

    def get_visible(self, server):
        # Visible only on servers with both the 'jboss' agent and the
        # 'jbs' puppet class assigned.
        has_agent = len(server.agents.filter(name='jboss')) == 1
        has_class = len(server.puppet_classes.filter(name='jbs')) == 1
        return has_agent and has_class

    def get_enabled(self, user):
        # Superusers are always allowed.
        if user.is_superuser:
            return True
        agents = get_objects_for_user(user, 'use_agent', Agent).filter(enabled=True, name="jboss")
        if len(agents) != 1:
            return False
        action = get_objects_for_user(user, 'use_action', Action).filter(agent=agents[0], name="redeploy")
        return action and len(action) == 1
class JbossLogContextMenu(ContextOperation):
    """Context-menu operation that fetches the JBoss server log."""

    def get_operations(self):
        # A single menu entry; the javascript opens the log-retrieval dialog.
        return [{
            "name": "get_log_jboss",
            "description": "Get Server Log",
            "javascript": "getLogForm('%s', 'jboss', 'deploy-dialog', 'get_log', '$$filterlist$$')" % settings.BASE_URL,
            "server_operation": "",
        }]

    def get_type(self):
        return 'JBoss'

    def get_visible(self, server):
        # Visible only on servers with both the 'jboss' agent and the
        # 'jbs' puppet class assigned.
        has_agent = len(server.agents.filter(name='jboss')) == 1
        has_class = len(server.puppet_classes.filter(name='jbs')) == 1
        return has_agent and has_class

    def get_enabled(self, user):
        # Superusers are always allowed.
        if user.is_superuser:
            return True
        agents = get_objects_for_user(user, 'use_agent', Agent).filter(enabled=True, name="jboss")
        if len(agents) != 1:
            return False
        action = get_objects_for_user(user, 'use_action', Action).filter(agent=agents[0], name="get_log")
        return action and len(action) == 1
# Register each JBoss context-menu operation with the kermit module registry.
kermit_modules.register(JbossDeployContextMenu)
kermit_modules.register(JbossRedeployContextMenu)
kermit_modules.register(JbossLogContextMenu)
|
jolyonb/edx-platform
|
lms/djangoapps/grades/tests/test_services.py
|
Python
|
agpl-3.0
| 12,114
| 0.002311
|
"""
Grades Service Tests
"""
from datetime import datetime
import ddt
import pytz
from freezegun import freeze_time
from lms.djangoapps.grades.constants import GradeOverrideFeatureEnum
from lms.djangoapps.grades.models import (
PersistentSubsectionGrade,
PersistentSubsectionGradeOverride,
PersistentSubsectionGradeOverrideHistory,
)
from lms.djangoapps.grades.services import GradesService
from mock import patch, call
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from ..config.waffle import REJECTED_EXAM_OVERRIDES_GRADE
from ..constants import ScoreDatabaseTableEnum
class MockWaffleFlag(object):
    """Test double for a WaffleFlag that always reports a fixed state."""

    def __init__(self, state):
        # The canned value every is_enabled() call will return.
        self.state = state

    # pylint: disable=unused-argument
    def is_enabled(self, course_key):
        # course_key is accepted for signature compatibility but ignored.
        return self.state
@ddt.ddt
class GradesServiceTests(ModuleStoreTestCase):
"""
Tests for the Grades service
"""
def setUp(self):
super(GradesServiceTests, self).setUp()
self.service = GradesService()
self.course = CourseFactory.create(org='edX', number='DemoX', display_name='Demo_Course', run='Spring2019')
self.subsection = ItemFactory.create(parent=self.course, category="subsection", display_name="Subsection")
self.subsection_without_grade = ItemFactory.create(
parent=self.course,
category="subsection",
display_name="Subsection without grade"
)
self.user = UserFactory()
self.grade = PersistentSubsectionGrade.update_or_create_grade(
user_id=self.user.id,
course_id=self.course.id,
usage_key=self.subsection.location,
first_attempted=None,
visible_blocks=[],
earned_all=6.0,
possible_all=6.0,
earned_graded=5.0,
possible_graded=5.0
)
self.signal_patcher = patch('lms.djangoapps.grades.signals.signals.SUBSECTION_OVERRIDE_CHANGED.send')
self.mock_signal = self.signal_patcher.start()
self.id_patcher = patch('lms.djangoapps.grades.services.create_new_event_transaction_id')
self.mock_create_id = self.id_patcher.start()
self.mock_create_id.return_value = 1
self.type_patcher = patch('lms.djangoapps.grades.services.set_event_transaction_type')
self.mock_set_type = self.type_patcher.start()
self.flag_patcher = patch('lms.djangoapps.grades.services.waffle_flags')
self.mock_waffle_flags = self.flag_patcher.start()
self.mock_waffle_flags.return_value = {
REJECTED_EXAM_OVERRIDES_GRADE: MockWaffleFlag(True)
}
def tearDown(self):
super(GradesServiceTests, self).tearDown()
PersistentSubsectionGradeOverride.objects.all().delete() # clear out all previous overrides
self.signal_patcher.stop()
self.id_patcher.stop()
self.type_patcher.stop()
self.flag_patcher.stop()
def subsection_grade_to_dict(self, grade):
return {
'earned_all': grade.earned_all,
'earned_graded': grade.earned_graded
}
def subsection_grade_override_to_dict(self, grade):
return {
'earned_all_override': grade.earned_all_override,
'earned_graded_override': grade.earned_graded_override
}
def test_get_subsection_grade(self):
self.assertDictEqual(self.subsection_grade_to_dict(self.service.get_subsection_grade(
user_id=self.user.id,
course_key_or_id=self.course.id,
usage_key_or_id=self.subsection.location
)), {
'earned_all': 6.0,
'earned_graded': 5.0
})
# test with id strings as parameters instead
self.assertDictEqual(self.subsection_grade_to_dict(self.service.get_subsection_grade(
user_id=self.user.id,
course_key_or_id=unicode(self.course.id),
usage_key_or_id=unicode(self.subsection.location)
)), {
'earned_all': 6.0,
'earned_graded': 5.0
})
def test_get_subsection_grade_override(self):
override, _ = PersistentSubsectionGradeOverride.objects.update_or_create(grade=self.grade)
self.assertDictEqual(self.subsection_grade_override_to_dict(self.service.get_subsection_grade_override(
user_id=self.user.id,
course_key_or_id=self.course.id,
usage_key_or_id=self.subsection.location
)), {
'earned_all_override': override.earned_all_override,
'earned_graded_override': override.earned_graded_override
})
override, _ = PersistentSubsectionGradeOverride.objects.update_or_create(
grade=self.grade,
defaults={
'earned_all_override': 9.0
}
)
# test with course key parameter as string instead
self.assertDictEqual(self.subsection_grade_override_to_dict(self.service.get_subsection_grade_override(
user_id=self.user.id,
course_key_or_id=unicode(self.course.id),
usage_key_or_id=self.subsection.location
)), {
'earned_all_override': override.earned_all_override,
'earned_graded_override': override.earned_graded_override
})
def _verify_override_history(self, override_history, history_action):
self.assertIsNone(override_history.user)
self.assertIsNotNone(override_history.created)
self.assertEqual(override_history.feature, GradeOverrideFeatureEnum.proctoring)
self.assertEqual(override_history.action, history_action)
@ddt.data(
{
'earned_all': 0.0,
'earned_graded': 0.0
},
{
'earned_all': 0.0,
'earned_graded': None
},
{
'earned_all': None,
'earned_graded': None
},
{
'earned_all': 3.0,
'earned_graded': 2.0
},
)
def test_override_subsection_grade(self, override):
self.service.override_subsection_grade(
user_id=self.user.id,
course_key_or_id=self.course.id,
usage_key_or_id=self.subsection.location,
earned_all=override['earned_all'],
earned_graded=override['earned_graded']
)
override_obj = self.service.get_subsection_grade_override(
self.user.id,
self.course.id,
self.subsection.location
)
self.assertIsNotNone(override_obj)
self.assertEqual(override_obj.earned_all_override, override['earned_all'])
self.assertEqual(override_obj.earned_graded_override, override['earned_graded'])
self.assertEqual(
self.mock_signal.call_args,
call(
sender=None,
user_id=self.user.id,
course_id=unicode(self.course.id),
usage_id=unicode(self.subsection.location),
only_if_higher=False,
|
modified=override_obj.modified,
score_deleted=False,
score_db_table=ScoreDatabaseTableEnum.overrides
)
)
override_history = PersistentSubsectionGradeOverrideHistory.objects.filter(override_id=override_obj.id).first()
self._verify_override_history(override_history, PersistentSubsectionGradeOverrideHistory.CREATE_OR_UPDA
|
TE)
def test_override_subsection_grade_no_psg(self):
"""
When there is no PersistentSubsectionGrade associated with the learner
and subsection to override, one should be created.
"""
earned_all_override = 2
earned_graded_override = 0
self.service.override_subsection_grade(
user_id=self.user.id,
course_key_or_id=self.course.id,
usage_key_or_id=self.subsection_without_grade.location,
earned_all=earned_all_override,
earned_graded=earned_graded_override
|
tbursztyka/python-elf
|
elf/program.py
|
Python
|
lgpl-3.0
| 5,074
| 0.019511
|
"""
Copyright (C) 2008-2013 Tomasz Bursztyka
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
""" ProgramHeader and Program classes """
from elf.core.property import VALUE_FIXED, VALUE_BITWISE
from elf.core.header import Header
from elf.core.page import Page
from elf.utils import mirrorDict
phdr_type = {
'PT_NULL' : 0,
'PT_LOAD' : 1,
'PT_DYNAMIC' : 2,
'PT_INTERP' : 3,
'PT_NOTE' : 4,
'PT_SHLIB' : 5,
'PT_PHDR' : 6,
'PT_TLS' : 7,
'PT_NUM' : 8,
'PT_LOOS' : 0x60000000,
'PT_GNU_EH_FRAME' : 0x6474e550,
'PT_GNU_STACK' : 0x6474e551,
'PT_GNU_RELRO' : 0x6474e552,
'PT_PAX_FLAGS' : 0x65041580,
'PT_LOSUNW' : 0x6ffffffa,
'PT_SUNWBSS' : 0x6ffffffa,
'PT_SUNWSTACK' : 0x6ffffffb,
'PT_HISUNW' : 0x6fffffff,
'PT_HIOS' : 0x6fffffff,
'PT_LOPROC' : 0x70000000,
'PT_HIPROC' : 0x7fffffff,
'PT_MIPS_REGINFO' : 0x70000000,
'PT_MIPS_RTPROC' : 0x70000001,
'PT_MIPS_OPTIONS' : 0x70000002,
'PT_HP_TLS' : (0x60000000 + 0x0),
'PT_HP_CORE_NONE' : (0x60000000 + 0x1),
'PT_HP_CORE_VERSION' : (0x60000000 + 0x2),
'PT_HP_CORE_KERNEL' : (0x60000000 + 0x3),
'PT_HP_CORE_COMM'
|
: (0x60000000 + 0x4),
'PT_HP_CORE_PROC' : (0x60000000 + 0x5),
'PT_HP_CORE_LOADABLE' : (0x60000000 + 0x6),
'PT_HP_CORE_STACK' : (0x60000000 + 0x7),
'PT_HP_CORE_SHM' : (0x60000000 + 0x8),
'PT_HP_CORE_MMF' : (0x60000000 + 0x9),
'PT_HP_PARALLEL' : (0x60000000 + 0x10),
'PT_HP_F
|
ASTBIND' : (0x60000000 + 0x11),
'PT_HP_OPT_ANNOT' : (0x60000000 + 0x12),
'PT_HP_HSL_ANNOT' : (0x60000000 + 0x13),
'PT_HP_STACK' : (0x60000000 + 0x14),
'PT_PARISC_ARCHEXT' : 0x70000000,
'PT_PARISC_UNWIND' : 0x70000001,
'PT_ARM_EXIDX' : 0x70000001,
'PT_IA_64_ARCHEXT' : (0x70000000 + 0),
'PT_IA_64_UNWIND' : (0x70000000 + 1),
'PT_IA_64_HP_OPT_ANOT' : (0x60000000 + 0x12),
'PT_IA_64_HP_HSL_ANOT' : (0x60000000 + 0x13),
'PT_IA_64_HP_STACK' : (0x60000000 + 0x14),
}
phdr_type = mirrorDict(phdr_type)
phdr_flags = {
'PF_X' : (1 << 0),
'PF_W' : (1 << 1),
'PF_R' : (1 << 2),
'PF_PAGEEXEC' : (1 << 4),
'PF_NOPAGEEXEC' : (1 << 5),
'PF_SEGMEXEC' : (1 << 6),
'PF_NOSEGMEXEC' : (1 << 7),
'PF_MPROTECT' : (1 << 8),
'PF_NOMPROTECT' : (1 << 9),
'PF_RANDEXEC' : (1 << 10),
'PF_NORANDEXEC' : (1 << 11),
'PF_EMUTRAMP' : (1 << 12),
'PF_NOEMUTRAMP' : (1 << 13),
'PF_RANDMMAP' : (1 << 14),
'PF_NORANDMMAP' : (1 << 15),
'PF_MASKOS' : 0x0ff00000,
'PF_MASKPROC' : 0xf0000000,
'PF_MIPS_LOCAL' : 0x10000000,
'PF_PARISC_SBP' : 0x08000000,
'PF_HP_PAGE_SIZE' : 0x00100000,
'PF_HP_FAR_SHARED' : 0x00200000,
'PF_HP_NEAR_SHARED' : 0x00400000,
'PF_HP_CODE' : 0x01000000,
'PF_HP_MODIFY' : 0x02000000,
'PF_HP_LAZYSWAP' : 0x04000000,
'PF_HP_SBP' : 0x08000000,
'PF_ARM_SB' : 0x10000000,
'PF_IA_64_NORECOV' : 0x80000000,
}
phdr_flags = mirrorDict(phdr_flags)
class ProgramHeader( Header ):
    """ELF program (segment) header -- Elf32_Phdr / Elf64_Phdr.

    Field order differs between the 32-bit and 64-bit on-disk layouts:
    in the 64-bit header p_flags immediately follows p_type.
    """

    # Field names in on-disk order for the 32-bit layout.
    descriptions_32 = [ 'p_type', 'p_offset', 'p_vaddr', 'p_paddr',
                        'p_filesz', 'p_memsz', 'p_flags', 'p_align' ]

    # Field names in on-disk order for the 64-bit layout.
    descriptions_64 = [ 'p_type', 'p_flags', 'p_offset', 'p_vaddr',
                        'p_paddr', 'p_filesz', 'p_memsz', 'p_align' ]

    # Human-readable rendering rules: p_type maps to exactly one name,
    # p_flags is rendered as a bitwise OR of flag names.
    hr_values = {
        'p_type' : [ VALUE_FIXED, phdr_type ],
        'p_flags' : [ VALUE_BITWISE, phdr_flags ],
    }

    # struct format codes matching descriptions_32 / descriptions_64.
    format_32 = [ 'i', 'I', 'I', 'I', 'i', 'i', 'i', 'I' ]
    format_64 = [ 'i', 'i', 'Q', 'Q', 'Q', 'q', 'q', 'Q' ]

    def affect(self, program):
        """Rebase this header onto *program*'s current file location.

        Shifts the virtual address by the same delta as the file offset
        (keeping p_paddr in sync) and refreshes the file size. Failures
        are deliberately swallowed -- best-effort update, e.g. when
        fields are not yet populated.
        """
        try:
            self.p_vaddr -= self.p_offset - program.offset_start
            self.p_paddr = self.p_vaddr
            self.p_offset = program.offset_start
            self.p_filesz = program.size
        except Exception:
            pass
class Program( Page ):
    """The file bytes of one segment, described by a ProgramHeader."""

    def __init__(self, phdr):
        # The page covers p_filesz bytes starting at p_offset.
        Page.__init__(self, phdr, phdr.p_offset, phdr.p_filesz)
        # NOTE(review): presumably flags the page as not freely
        # resizable/removable in Page -- confirm against Page semantics.
        self.protected = True
#######
# EOF #
#######
|
rosspalmer/bitQuant
|
bitquant/sql/setup.py
|
Python
|
mit
| 857
| 0.002334
|
import clss
import os
def setup_sql():
    """Interactively collect SQL settings, then create all tables."""
    menu()  # writes the 'auth_sql' credentials file read by clss.sql()
    s = clss.sql()
    s.meta.create_all(s.eng)
def menu():
txt = open('auth_sql', 'w')
print
print '-----SQL Database setup-----'
print
print '=Select SQL type='
print ' (1) sqlite'
print ' (2) MySQL'
print
typ = int(raw_input(': '))
print
p
|
rint
|
if typ == 1:
file_path = str(raw_input('Location/Database Name: ')) + '\n'
txt.write(str(typ) + '\n')
txt.write(file_path)
if typ == 2:
host = str(raw_input('Host: ')) + '\n'
user = raw_input('Username: ') + '\n'
password = raw_input('Password: ') + '\n'
name = raw_input('Database Name: ') + '\n'
print
txt.write(str(typ) + '\n')
txt.write(host)
txt.write(user)
txt.write(password)
txt.write(name)
|
persandstrom/home-assistant
|
homeassistant/components/binary_sensor/bmw_connected_drive.py
|
Python
|
apache-2.0
| 8,080
| 0
|
"""
Reads vehicle status from BMW connected drive portal.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.bmw_connected_drive/
"""
import asyncio
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN
DEPENDENCIES = ['bmw_connected_drive']
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES = {
'lids': ['Doors', 'opening'],
'windows': ['Windows', 'opening'],
'door_lock_state': ['Door lock state', 'safety'],
'lights_parking': ['Parking lights', 'light'],
'condition_based_services': ['Condition based services', 'problem'],
'check_control_messages': ['Control messages', 'problem']
}
SENSOR_TYPES_ELEC = {
'charging_status': ['Charging status', 'power'],
'connection_status': ['Connection status', 'plug']
}
SENSOR_TYPES_ELEC.update(SENSOR_TYPES)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the BMW sensors."""
    accounts = hass.data[BMW_DOMAIN]
    _LOGGER.debug('Found BMW accounts: %s',
                  ', '.join(account.name for account in accounts))
    entities = []
    for account in accounts:
        for vehicle in account.account.vehicles:
            # Electrified vehicles get the charging/connection sensors
            # on top of the common set; pure combustion cars only the
            # common set. Anything else is skipped.
            if vehicle.has_hv_battery:
                _LOGGER.debug('BMW with a high voltage battery')
                sensor_types = SENSOR_TYPES_ELEC
            elif vehicle.has_internal_combustion_engine:
                _LOGGER.debug('BMW with an internal combustion engine')
                sensor_types = SENSOR_TYPES
            else:
                continue
            for attribute, (sensor_name, device_class) in \
                    sorted(sensor_types.items()):
                entities.append(
                    BMWConnectedDriveSensor(account, vehicle, attribute,
                                            sensor_name, device_class))
    add_entities(entities, True)
class BMWConnectedDriveSensor(BinarySensorDevice):
    """Representation of a BMW vehicle binary sensor."""

    def __init__(self, account, vehicle, attribute: str, sensor_name,
                 device_class):
        """Constructor.

        :param account: BMW connected drive account wrapper.
        :param vehicle: vehicle this sensor reports on.
        :param attribute: vehicle state attribute exposed by this sensor
            (a key of SENSOR_TYPES / SENSOR_TYPES_ELEC).
        :param sensor_name: human readable sensor name.
        :param device_class: Home Assistant binary sensor device class.
        """
        self._account = account
        self._vehicle = vehicle
        self._attribute = attribute
        self._name = '{} {}'.format(self._vehicle.name, self._attribute)
        self._unique_id = '{}-{}'.format(self._vehicle.vin, self._attribute)
        self._sensor_name = sensor_name
        self._device_class = device_class
        self._state = None  # set by update()

    @property
    def should_poll(self) -> bool:
        """Return False.

        Data update is triggered from BMWConnectedDriveEntity.
        """
        return False

    @property
    def unique_id(self):
        """Return the unique ID of the binary sensor."""
        return self._unique_id

    @property
    def name(self):
        """Return the name of the binary sensor."""
        return self._name

    @property
    def device_class(self):
        """Return the class of the binary sensor."""
        return self._device_class

    @property
    def is_on(self):
        """Return the state of the binary sensor."""
        return self._state

    @property
    def device_state_attributes(self):
        """Return the state attributes of the binary sensor.

        Returned as a sorted list of (key, value) pairs; the content
        depends on which vehicle attribute this sensor exposes.
        """
        vehicle_state = self._vehicle.state
        result = {
            'car': self._vehicle.name
        }

        if self._attribute == 'lids':
            # One entry per lid (doors/hood/trunk), e.g. 'hood': 'CLOSED'.
            for lid in vehicle_state.lids:
                result[lid.name] = lid.state.value
        elif self._attribute == 'windows':
            for window in vehicle_state.windows:
                result[window.name] = window.state.value
        elif self._attribute == 'door_lock_state':
            result['door_lock_state'] = vehicle_state.door_lock_state.value
            result['last_update_reason'] = vehicle_state.last_update_reason
        elif self._attribute == 'lights_parking':
            result['lights_parking'] = vehicle_state.parking_lights.value
        elif self._attribute == 'condition_based_services':
            for report in vehicle_state.condition_based_services:
                result.update(self._format_cbs_report(report))
        elif self._attribute == 'check_control_messages':
            check_control_messages = vehicle_state.check_control_messages
            if not check_control_messages:
                result['check_control_messages'] = 'OK'
            else:
                result['check_control_messages'] = check_control_messages
        elif self._attribute == 'charging_status':
            result['charging_status'] = vehicle_state.charging_status.value
            # pylint: disable=protected-access
            result['last_charging_end_result'] = \
                vehicle_state._attributes['lastChargingEndResult']
        if self._attribute == 'connection_status':
            # pylint: disable=protected-access
            result['connection_status'] = \
                vehicle_state._attributes['connectionStatus']

        return sorted(result.items())

    def update(self):
        """Read new state data from the library."""
        from bimmer_connected.state import LockState
        from bimmer_connected.state import ChargingState
        vehicle_state = self._vehicle.state

        # device class opening: On means open, Off means closed
        if self._attribute == 'lids':
            _LOGGER.debug("Status of lid: %s", vehicle_state.all_lids_closed)
            self._state = not vehicle_state.all_lids_closed
        if self._attribute == 'windows':
            self._state = not vehicle_state.all_windows_closed
        # device class safety: On means unsafe, Off means safe
        if self._attribute == 'door_lock_state':
            # Possible values: LOCKED, SECURED, SELECTIVE_LOCKED, UNLOCKED
            self._state = vehicle_state.door_lock_state not in \
                [LockState.LOCKED, LockState.SECURED]
        # device class light: On means light detected, Off means no light
        if self._attribute == 'lights_parking':
            self._state = vehicle_state.are_parking_lights_on
        # device class problem: On means problem detected, Off means no problem
        if self._attribute == 'condition_based_services':
            self._state = not vehicle_state.are_all_cbs_ok
        if self._attribute == 'check_control_messages':
            self._state = vehicle_state.has_check_control_messages
        # device class power: On means power detected, Off means no power
        if self._attribute == 'charging_status':
            self._state = vehicle_state.charging_status in \
                [ChargingState.CHARGING]
        # device class plug: On means device is plugged in,
        # Off means device is unplugged
        if self._attribute == 'connection_status':
            # pylint: disable=protected-access
            self._state = (vehicle_state._attributes['connectionStatus'] ==
                           'CONNECTED')

    @staticmethod
    def _format_cbs_report(report):
        """Flatten one condition-based-service report into dict entries."""
        result = {}
        service_type = report.service_type.lower().replace('_', ' ')
        result['{} status'.format(service_type)] = report.state.value
        if report.due_date is not None:
            result['{} date'.format(service_type)] = \
                report.due_date.strftime('%Y-%m-%d')
        if report.due_distance is not None:
            result['{} distance'.format(service_type)] = \
                '{} km'.format(report.due_distance)
        return result

    def update_callback(self):
        """Schedule a state update."""
        self.schedule_update_ha_state(True)

    @asyncio.coroutine
    def async_added_to_hass(self):
        """Add callback after being added to hass.

        Show latest data after startup.
        """
        self._account.add_update_listener(self.update_callback)
|
claudyus/LXC-Web-Panel
|
tests/utils.py
|
Python
|
mit
| 755
| 0.002649
|
import subprocess
import unittest
import os
class TestCmdLine(unittest.TestCase):
    """
    Those tests are against the lwp command lines.

    They assume a clean system (no /etc/lwp configuration) and mutate
    global state, so they are meant for throwaway CI environments.
    """

    def test_01_generate_secret(self):
        # Preconditions: lwp has never been configured on this host.
        assert not os.path.exists('/etc/lwp/session_secret')
        assert not os.path.exists('/etc/lwp/lwp.conf')

        subprocess.check_call('python bin/lwp --generate-session-secret', shell=True)
        # The command must have created the session secret file.
        assert os.path.exists('/etc/lwp/session_secret')

    def test_02_exit_if_no_config(self):
        assert not os.path.exists('/etc/lwp/lwp.conf')
        try:
            subprocess.check_call('python bin/lwp', shell=True)
        except subprocess.CalledProcessError as e:
            # lwp must exit with a non-zero status when unconfigured.
            assert e.returncode
if __name__ == '__main__':
unittest.main()
|
uw-it-aca/django-panopto-scheduler
|
scheduler/views/api/space.py
|
Python
|
apache-2.0
| 1,252
| 0
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from scheduler.views.rest_dispatch import RESTDispatch
from uw_r25.spaces import get_spaces, get_space_by_id
import logging
|
logger = logging.getLogger(__name__)
class Space(RESTDispatch):
    """REST endpoint exposing R25 spaces, singly or as a filtered list."""

    def __init__(self):
        # Timeout, in hours, for the cached space list.
        self._space_list_cache_timeout = 1

    def get(self, request, *args, **kwargs):
        """Return one space (when space_id is given) or a filtered list."""
        space_id = kwargs.get('space_id')
        if not space_id:
            query = {name: request.GET.get(name) for name in request.GET}
            return self._list_spaces(query)
        return self._get_space_details(space_id)

    def _get_space_details(self, space_id):
        space = get_space_by_id(space_id)
        return self.json_response({
            'space_id': space.space_id,
            'name': space.name,
            'formal_name': space.formal_name
        })

    def _list_spaces(self, args):
        return self.json_response([
            {
                'space_id': space.space_id,
                'name': space.name,
                'formal_name': space.formal_name
            }
            for space in get_spaces(**args)
        ])
|
bung87/django-html5-boilerplate
|
project_name/urls/base.py
|
Python
|
mit
| 417
| 0.004796
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic import TemplateView

# Populate admin.site with the ModelAdmin classes of installed apps.
admin.autodiscover()

urlpatterns = patterns('',
    # Home Page -- Replace as you prefer
    url(r'^$', TemplateView.as_view(template_name='home.html'), name='home'),

    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
|
zayfod/pyfranca
|
pyfranca/ast.py
|
Python
|
mit
| 15,502
| 0.000387
|
"""
Franca abstract syntax tree representation.
"""
from abc import ABCMeta
from collections import OrderedDict
class ASTException(Exception):
    """Error raised for inconsistencies found while building the AST."""

    def __init__(self, message):
        """Store *message* as the human-readable description."""
        super(ASTException, self).__init__()
        self.message = message

    def __str__(self):
        """Render the exception as its message."""
        return self.message
class Package(object):
    """
    AST representation of a Franca package.

    A package owns interfaces and type collections (both keyed by name)
    plus its import statements and source file names.
    """

    def __init__(self, name, file_name=None, imports=None,
                 interfaces=None, typecollections=None, comments=None):
        """
        Constructs a new Package.
        """
        self.name = name
        self.files = []
        if file_name:
            self.files.append(file_name)
        self.imports = imports or []
        self.interfaces = interfaces or OrderedDict()
        self.typecollections = typecollections or OrderedDict()
        self.comments = comments or OrderedDict()
        # Back-link every namespace to its owning package.
        for collection in (self.interfaces, self.typecollections):
            for member in collection.values():
                member.package = self

    def __contains__(self, namespace):
        """True when *namespace* names a type collection or interface."""
        if not isinstance(namespace, str):
            raise TypeError
        return namespace in self.typecollections or \
            namespace in self.interfaces

    def __getitem__(self, namespace):
        """Look up a namespace; type collections take precedence."""
        if not isinstance(namespace, str):
            raise TypeError
        for collection in (self.typecollections, self.interfaces):
            if namespace in collection:
                return collection[namespace]
        raise KeyError

    def __iadd__(self, package):
        """Merge *package* into this one (the merged name is ignored)."""
        if not isinstance(package, Package):
            raise TypeError
        self.files += package.files
        for item in package.imports:
            self.imports.append(item)
        for interface in package.interfaces.values():
            if interface.name in self:
                raise ASTException("Interface member defined more than"
                                   " once '{}'.".format(interface.name))
            self.interfaces[interface.name] = interface
            interface.package = self
        for typecollection in package.typecollections.values():
            if typecollection.name in self:
                raise ASTException("Type collection member defined more than"
                                   " once '{}'.".format(typecollection.name))
            self.typecollections[typecollection.name] = typecollection
            typecollection.package = self
        return self
class Import(object):
    """A Franca import statement, resolved to a package later on."""

    def __init__(self, file_name, namespace=None):
        self.file = file_name
        # A namespace of None represents an "import model" statement.
        self.namespace = namespace
        # Both references are filled in during model resolution.
        self.package_reference = None
        self.namespace_reference = None
class Namespace(object):
    """Base class for Franca namespaces (interfaces, type collections).

    A namespace groups named typedefs, enumerations, structs, arrays,
    maps and constants, plus an optional version.
    """
    __metaclass__ = ABCMeta

    def __init__(self, name, flags=None, members=None, comments=None):
        """Construct a namespace, registering the optional *members*."""
        self.package = None
        self.name = name
        self.flags = flags if flags else []             # Unused
        self.version = None
        self.typedefs = OrderedDict()
        self.enumerations = OrderedDict()
        self.structs = OrderedDict()
        self.arrays = OrderedDict()
        self.maps = OrderedDict()
        self.constants = OrderedDict()
        self.comments = comments if comments else OrderedDict()
        if members:
            for member in members:
                self._add_member(member)

    def __contains__(self, name):
        """Return whether *name* is defined in any member category."""
        if not isinstance(name, str):
            raise TypeError
        res = name in self.typedefs or \
            name in self.enumerations or \
            name in self.structs or \
            name in self.arrays or \
            name in self.maps or \
            name in self.constants
        return res

    def __getitem__(self, name):
        """Look *name* up across all member categories."""
        if not isinstance(name, str):
            raise TypeError
        elif name in self.typedefs:
            return self.typedefs[name]
        elif name in self.enumerations:
            return self.enumerations[name]
        elif name in self.structs:
            return self.structs[name]
        elif name in self.arrays:
            return self.arrays[name]
        elif name in self.maps:
            return self.maps[name]
        elif name in self.constants:
            # Fixed: was "name in self.constants[name]", which indexed
            # the dict instead of membership-testing it, so constant
            # lookups raised instead of returning the constant.
            return self.constants[name]
        else:
            raise KeyError

    def _add_member(self, member):
        """Register *member* in the category matching its AST type.

        Raises ASTException on duplicate names or a second Version.
        """
        if isinstance(member, Version):
            if not self.version:
                self.version = member
            else:
                raise ASTException("Multiple version definitions.")
        elif isinstance(member, Type):
            if member.name in self:
                raise ASTException(
                    "Duplicate namespace member '{}'.".format(member.name))
            if isinstance(member, Typedef):
                self.typedefs[member.name] = member
                # Handle anonymous array special case.
                if isinstance(member.type, Array):
                    member.type.namespace = self
            elif isinstance(member, Enumeration):
                self.enumerations[member.name] = member
            elif isinstance(member, Struct):
                self.structs[member.name] = member
                # Handle anonymous array special case.
                for field in member.fields.values():
                    if isinstance(field.type, Array):
                        field.type.namespace = self
            elif isinstance(member, Array):
                self.arrays[member.name] = member
                # Handle anonymous array special case.
                if isinstance(member.type, Array):
                    member.type.namespace = self
            elif isinstance(member, Map):
                self.maps[member.name] = member
                # Handle anonymous array special case.
                if isinstance(member.key_type, Array):
                    member.key_type.namespace = self
                if isinstance(member.value_type, Array):
                    member.value_type.namespace = self
            elif isinstance(member, Constant):
                self.constants[member.name] = member
            else:
                raise ASTException("Unexpected namespace member type.")
            member.namespace = self
        else:
            raise ValueError("Unexpected namespace member type.")
class TypeCollection(Namespace):
    """A Franca type collection: a namespace holding only types."""

    def __init__(self, name, flags=None, members=None, comments=None):
        super(TypeCollection, self).__init__(name, flags=flags,
                                             members=members, comments=comments)
class Type(object):
    """Base class for every Franca type."""
    __metaclass__ = ABCMeta

    def __init__(self, name=None, comments=None):
        # Owning namespace; assigned when the type is registered.
        self.namespace = None
        # Anonymous types fall back to their class name.
        self.name = name or self.__class__.__name__
        self.comments = comments or OrderedDict()
class Typedef(Type):
    """A named alias for another Franca type."""

    def __init__(self, name, base_type, comments=None):
        super(Typedef, self).__init__(name, comments)
        # The aliased type.
        self.type = base_type
class PrimitiveType(Type):
    """Abstract base for Franca's built-in (primitive) types."""
    __metaclass__ = ABCMeta

    def __init__(self):
        super(PrimitiveType, self).__init__()


# Concrete primitive types; each is identified purely by its class name
# (Type.__init__ defaults the name to the class name).
class Int8(PrimitiveType):
    def __init__(self):
        super(Int8, self).__init__()


class Int16(PrimitiveType):
    def __init__(self):
        super(Int16, self).__init__()


class Int32(PrimitiveType):
    def __init__(self):
        super(Int32, self).__init__()


class Int64(PrimitiveType):
    def __init__(self):
        super(Int64, self).__init__()


class UInt8(PrimitiveType):
    def __init__(self):
        super(UInt8, self).__init__()


class UInt16(PrimitiveType):
    def __init__(self):
        super(UInt16, self).__init__()


class UInt32(PrimitiveType):
    def __init__(self):
        super(UInt32, self).__init__()


class UInt64(PrimitiveType):
    def __init__(self):
        super(UInt64, self).__init__()


class Boolean(PrimitiveType):
    def __init__(self):
        super(Boolean, self).__init__()


class Float(PrimitiveType):
    def __init__(self):
        super(Float, self).__init__()
class Double(PrimitiveType):
def __
|
KnightHawk3/packr
|
packr/api/contact.py
|
Python
|
mit
| 4,011
| 0
|
import re
from datetime import datetime
from flask import current_app as app
from flask_jwt import current_identity
from flask_restplus import Namespace, Resource, fields, reqparse
from sqlalchemy.exc import IntegrityError
from packr.models import Message
api = Namespace('contact',
description='Operations related to the contact form')
message = api.model('Contact', {
'email': fields.String(required=True,
description='Contact email'),
'content': fields.String(required=True,
description='Message'),
})
message_id = api.model('ContactCompletion', {
'id': fields.Integer(required=True,
description='id')
})
@api.route('/')
class MessageItem(Resource):
    """Contact-form endpoint: submit a message or list unhandled ones."""

    @api.expect(message)
    @api.response(204, 'Message successfully received.')
    def post(self):
        """Store a new contact message.

        Returns 400 for a missing/invalid email or empty content,
        409 on a database integrity failure, 500 on any other error
        and 201 with the sender's email on success.
        """
        req_parse = reqparse.RequestParser(bundle_errors=True)
        req_parse.add_argument('email', type=str, required=True,
                               help='No email provided',
                               location='json')
        req_parse.add_argument('content', type=str, required=True,
                               help='No message provided',
                               location='json')
        args = req_parse.parse_args()

        email = args.get('email')
        content = args.get('content')

        if email == '':
            return {'message': {'email': 'No email provided'}}, 400
        elif not re.match(r"^[A-Za-z0-9.+_-]+@[A-Za-z0-9._-]+\.[a-zA-Z]*$",
                          email):
            return {'message': {'email': 'Invalid email provided'}}, 400

        if content == '':
            return {'message': {'content': 'No content provided'}}, 400

        new_message = Message(email=email,
                              content=content,
                              time=datetime.now())

        try:
            new_message.save()
        except IntegrityError as e:
            print(e)
            return {
                       'description': 'Failed to send message.'
                   }, 409
        except Exception as e:
            print(e)
            return {'description': 'Server encountered an error.'}, 500

        return {'email': new_message.email}, 201

    def get(self):
        """Return all unhandled messages keyed by id (admins only)."""
        if not current_identity and not app.config.get('TESTING'):
            return {'message': 'User not authenticated'}, 401
        if app.config.get('TESTING') \
                or current_identity.role.role_name == "ADMIN":
            messages = dict()
            for message_row in Message.query.filter_by(done=False).all():
                messages[message_row.id] = {
                    "email": message_row.email,
                    "time": message_row.time.isoformat(),
                    "content": message_row.content
                }
            return messages, 201
        else:
            return {'message': 'Not authorised'}, 401
@api.route('/complete')
class CompleteItem(Resource):
    """Marks a contact message as handled."""

    @api.expect(message_id)
    @api.response(204, 'Message successfully updated.')
    def post(self):
        """Flag the message with the given id as done.

        Returns 400 for a missing/zero id, 404 when no such message
        exists, 409 on a database integrity failure and 500 on any
        other persistence error.
        """
        req_parse = reqparse.RequestParser(bundle_errors=True)
        req_parse.add_argument('id', type=int, required=True,
                               help='No id provided',
                               location='json')
        args = req_parse.parse_args()

        id = args.get('id')

        if id == 0:
            return {'message': {'id': 'No id provided'}}, 400

        completed_message = Message.query.filter_by(id=id).first()
        if completed_message is None:
            # Previously an unknown id crashed with AttributeError on
            # the next line; report it to the caller instead.
            return {'message': {'id': 'Unknown message id'}}, 404
        completed_message.done = True

        try:
            completed_message.save()
        except IntegrityError as e:
            print(e)
            return {
                       'description': 'Failed to update message.'
                   }, 409
        except Exception as e:
            print(e)
            return {'description': 'Server encountered an error.'}, 500

        return {'message': "Message updated"}, 201
|
zhangf911/common
|
test/dev/system_resource_names_test.py
|
Python
|
mit
| 936
| 0.001068
|
import unittest
from biicode.common.dev.system_resource_names import SystemResourceNames
from biicode.common.dev.system_id import SystemID
class SystemResourceNamesTest(unittest.TestCase):
    """Unit tests for SystemResourceNames behaviour."""

    def setUp(self):
        # Fresh instance per test; SystemID identifies the system lib.
        self.sut = SystemResourceNames(SystemID("open_gl", "CPP"))

    def test_add_names(self):
        self.sut.add_names(["stdio"])
        self.assertListEqual(self.sut.names, ["stdio"])

    def test_serialize(self):
        self.assertIsInstance(self.sut.serialize(), dict)

    def test_eq_true(self):
        # Equal to itself and to an equivalent freshly-built instance.
        self.assertTrue(self.sut.__eq__(self.sut))
        self.assertTrue(self.sut.__eq__(SystemResourceNames(SystemID("open_gl", "CPP"))))

    def test_eq_false(self):
        # Differing names -- and non-SystemResourceNames values -- compare unequal.
        system_resource_names = SystemResourceNames(SystemID("open_gl", "CPP"))
        system_resource_names.add_names(["stdio"])
        self.assertFalse(self.sut.__eq__(system_resource_names))
        self.assertFalse(self.sut.__eq__(""))
|
claudelee/bilibili-api
|
danmu-Delay/danmu_delay.py
|
Python
|
mit
| 966
| 0.045894
|
# 对ass弹幕文件进行延时。。。
# 为什么会有这个需求呢?因为妈蛋ffmpeg剪切ts视频失败啊!!
# 只好弹幕来配合了。。。
# 如果以后经常遇到。。再整理得好用一些。。。
# 酱~
import re
def t_delay(h, m, s, delay):
    """Shift the time h:m:s forward by *delay* seconds; return [h, m, s].

    Fixed: the original performed a single-step carry (one `if` per
    unit), so any delay of 60 seconds or more could leave seconds or
    minutes >= 60. divmod propagates the carry for arbitrary delays
    while giving identical results for delays below one minute.
    """
    carry_m, s = divmod(s + delay, 60)
    carry_h, m = divmod(m + carry_m, 60)
    h += carry_h
    return [h, m, s]
# Input ASS subtitle file and the delay (in seconds) applied to every
# dialogue line's start and end timestamps.
filename = r'in.ass'
delay = 30;

fid = open('out.ass','w')
for line in open(filename):
    # Capture "Dialogue: 2,H:MM:SS.cc,H:MM:SS.cc,..." start/end fields.
    t = re.findall(r'^(Dialogue: 2,)(\d+):(\d+):(\d+)\.(\d+),(\d+):(\d+):(\d+)\.(.*)$',line)
    if len(t) == 0:
        # Non-dialogue lines (headers, styles) pass through unchanged.
        fid.write(line)
    else:
        t = t[0]
        # Shift the start timestamp and re-emit it.
        [h,m,s] = t_delay(int(t[1]),int(t[2]),int(t[3]),delay)
        fid.write('%s%d:%.2d:%.2d.%s,'%(t[0],h,m,s,t[4]))
        # Shift the end timestamp and re-emit the rest of the line.
        [h,m,s] = t_delay(int(t[5]),int(t[6]),int(t[7]),delay)
        fid.write('%d:%.2d:%.2d.%s\n'%(h,m,s,t[8]))
fid.close();
print "finished!!"
|
landlab/landlab
|
landlab/components/detachment_ltd_erosion/__init__.py
|
Python
|
mit
| 200
| 0
|
from .generate_detachment_ltd_erosion import DetachmentLt
|
dErosion
from .generate_erosion_by_depth_slope import DepthSlopeProductErosion
__all__ = ["DetachmentLtdErosion", "Dep
|
thSlopeProductErosion"]
|
mic4ael/indico
|
indico/core/db/sqlalchemy/custom/natsort.py
|
Python
|
mit
| 1,070
| 0.001869
|
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from sqlalchemy import DDL, text
SQL_FUNCTION_NATSORT = '''
CREATE FUNCTION indico.natsort(value TEXT)
RETURNS bytea
AS $$
SELECT string_agg(
convert_to(coalesce(r[2], length(length(r[1])::text) || length(r[1])::text || r[1]), 'SQL_ASCII'),
' '
)
FROM regexp_matches(value, '0*([0-9]+)|([^0-9]+)', 'g') r;
$$
LANGUAGE SQL IMMUTABLE STRICT;
'''
def _should_create_function(ddl, target, connection, **kw):
    """DDL guard: return True only if `indico.natsort` does not exist yet."""
    query = text("""
        SELECT COUNT(*)
        FROM information_schema.routines
        WHERE routine_schema = 'indico' AND routine_name = 'natsort'
    """)
    existing = connection.execute(query).scalar()
    return not existing
def create_natsort_function(conn):
    """Create the `indico.natsort` SQL function on *conn* if missing."""
    DDL(SQL_FUNCTION_NATSORT).execute_if(callable_=_should_create_function).execute(conn)
|
fernandog/osmc
|
package/mediacenter-addon-osmc/src/script.module.osmcsetting.updates/resources/lib/update_service.py
|
Python
|
gpl-2.0
| 47,883
| 0.03425
|
# Standard Modules
import apt
from datetime import datetime
import decimal
import json
import os
import Queue
import random
import socket
import subprocess
import sys
import traceback
# Kodi Modules
import xbmc
import xbmcaddon
import xbmcgui
# Custom modules
__libpath__ = xbmc.translatePath(os.path.join(xbmcaddon.Addon().getAddonInfo('path'), 'resources','lib'))
sys.path.append(__libpath__)
import comms
import simple_scheduler as sched
import OSMC_Backups
from CompLogger import comprehensive_logger as clog
__addon__ = xbmcaddon.Addon()
__addonid__ = __addon__.getAddonInfo('id')
__scriptPath__ = __addon__.getAddonInfo('path')
__setting__ = __addon__.getSetting
__image_file__ = os.path.join(__scriptPath__,'resources','media','update_available.png')
DIALOG = xbmcgui.Dialog()
def lang(id):
    """Return the addon-localized string *id*, UTF-8 encoded (lossy)."""
    return __addon__.getLocalizedString(id).encode('utf-8', 'ignore')
def log(message, label = ''):
    """Write *message* (tagged with the addon id and *label*) to Kodi's debug log."""
    entry = '%s : %s - %s ' % (__addonid__, str(label), str(message.encode('utf-8', 'ignore')))
    xbmc.log(msg=entry, level=xbmc.LOGDEBUG)
@clog(log)
def exit_osmc_settings_addon():
    """Ask the OSMC settings addon, via its unix domain socket, to exit."""
    address = '/var/tmp/osmc.settings.sockfile'
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(address)
    sock.sendall('exit')
    sock.close()

    return 'OSMC Settings addon called to exit'
def get_hardware_prefix():
    ''' Returns the prefix for the hardware type. rbp, rbp2, etc.

        Reads the kernel command line and extracts the value of the
        osmcdev= parameter; returns None when it is absent. '''

    with open('/proc/cmdline', 'r') as f:
        line = f.readline()

    # split() (rather than split(' ')) also strips the trailing newline,
    # which previously leaked into the returned prefix whenever osmcdev=
    # was the last kernel parameter. The unused `prefix` local is gone.
    for setting in line.split():
        if setting.startswith('osmcdev='):
            return setting[len('osmcdev='):]

    return None
class Monitah(xbmc.Monitor):
    """Kodi monitor forwarding abort/settings events to the parent queue."""

    def __init__(self, **kwargs):
        super(Monitah, self).__init__()
        # Queue shared with Main; events are posted as JSON-encoded tuples
        # of (action_name, payload) matching Main.action_dict keys.
        self.parent_queue = kwargs['parent_queue']

    @clog(log)
    def onAbortRequested(self):
        # Kodi is shutting down: ask the service to terminate itself.
        msg = json.dumps(('kill_yourself', {}))

        self.parent_queue.put(msg)

    @clog(log)
    def onSettingsChanged(self):
        # Addon settings changed: ask the service to re-read them.
        msg = json.dumps(('update_settings', {}))

        self.parent_queue.put(msg)
class Main(object):
''' This service allows for the checking for new updates, then:
- posts a notification on the home screen to say there is an update available, or
- calls for the download of the updates
- calls for the installation of the updates
- restarts Kodi to implement changes
The check for updates is done using the python-apt module. This module must be run as root, so is being called in
external scripts from the command line using sudo. The other script communicates with the update service using a socket file.
'''
# MAIN METHOD
    def __init__(self):
        """Wire up queues, sockets, scheduler and UI, then enter _daemon()."""

        self.first_run = True

        # set the hardware prefix
        self.hw_prefix = get_hardware_prefix()

        # list of packages that require an external update
        self.EXTERNAL_UPDATE_REQUIRED_LIST = [
                                            "mediacenter",
                                            "lirc-osmc",
                                            "eventlircd-osmc",
                                            "libcec-osmc",
                                            "dbus",
                                            "dbus-x11"
                                            ]

        # list of packages that may break compatibility with addons and databases.
        self.UPDATE_WARNING = False
        self.UPDATE_WARNING_LIST = [
                                    "-mediacenter-osmc",
                                    ]

        # Items that start with a hyphen should have the hardware prefix attached
        self.UPDATE_WARNING_LIST = [(str(self.hw_prefix) + x) if x[0] =='-' else x for x in self.UPDATE_WARNING_LIST]
        log('UPDATE_WARNING_LIST: %s' % self.UPDATE_WARNING_LIST)

        # the time that the service started
        self.service_start = datetime.now()

        # dictionary containing the permissible actions (communicated from the child apt scripts)
        # and the corresponding methods in the parent
        self.action_dict = {
                            'apt_cache update complete'         : self.apt_update_complete,
                            'apt_cache update_manual complete'  : self.apt_update_manual_complete,
                            'apt_cache commit complete'         : self.apt_commit_complete,
                            'apt_cache fetch complete'          : self.apt_fetch_complete,
                            'progress_bar'                      : self.progress_bar,
                            'update_settings'                   : self.update_settings,
                            'update_now'                        : self.update_now,
                            'user_update_now'                   : self.user_update_now,
                            'kill_yourself'                     : self.kill_yourself,
                            'settings_command'                  : self.settings_command,
                            'apt_error'                         : self.apt_error,
                            'apt_action_list_error'             : self.apt_action_list_error,
                            'action_list'                       : self.action_list,
                            'apt_cache action_list complete'    : self.action_list_complete,
                            'pre_backup_complete'               : self.pre_backup_complete,
                            }

        # queue for communication with the comm and Main
        self.parent_queue = Queue.Queue()

        self.randomid = random.randint(0,1000)

        self.EXTERNAL_UPDATE_REQUIRED = 1

        # create socket, listen for comms
        self.listener = comms.communicator(self.parent_queue, socket_file='/var/tmp/osmc.settings.update.sockfile')
        self.listener.start()

        # grab the settings, saves them into a dict called self.s
        self.update_settings()

        # a class to handle scheduling update checks
        self.scheduler = sched.SimpleScheduler(self.s)
        log(self.scheduler.trigger_time, 'trigger_time')

        # this holding pattern holds a function that represents the completion of a process that was put on hold
        # while the user was watching media or the system was active
        self.function_holding_pattern = False

        # monitor for identifying addon settings updates and kodi abort requests
        self.monitor = Monitah(parent_queue = self.parent_queue)

        # window onto which to paste the update notification
        self.window = xbmcgui.Window(10000)

        # property which determines whether the notification should be pasted to the window
        self.window.setProperty('OSMC_notification','false')

        # ControlImage(x, y, width, height, filename[, aspectRatio, colorDiffuse])
        self.update_image = xbmcgui.ControlImage(50, 1695, 175, 75, __image_file__)
        self.try_image_position_again = False
        self.try_count = 0
        self.position_icon()
        self.window.addControl(self.update_image)
        self.update_image.setVisibleCondition('[SubString(Window(Home).Property(OSMC_notification), true, left)]')

        # self.window.setProperty('OSMC_notification', 'true')  # USE THIS TO TEST THE UPDATE_ICON

        # this flag is present when updates have been downloaded but the user wants to choose when to install using
        # the manual control in the settings
        self.block_update_file = '/var/tmp/.suppress_osmc_update_checks'

        # if the file is present, then suppress further update checks and show the notification
        if os.path.isfile(self.block_update_file):
            self.skip_update_check = True

            # if the user has suppressed icon notification of updates and has chosen not to install the updates
            # its their own damned fault if osmc never get updated
            if not self.s['suppress_icon']:
                self.window.setProperty('OSMC_notification', 'true')

        else:

            self.skip_update_check = False

        # check for the external update failed
        fail_check_file = '/var/tmp/.osmc_failed_update'
        if os.path.isfile(fail_check_file):
            with open(fail_check_file, 'r') as f:
                package = f.readline()

            ok = DIALOG.ok(lang(32087), lang(32088) % package, '', lang(32089))

            try:
                os.remove(fail_check_file)
            except:
                pass

        # seconds between free-space checks; 'reboot' remedy just reboots,
        # 'apt' would offer the user an apt-cache cleanup instead
        self.freespace_supressor = 172200
        self.freespace_remedy = 'reboot'  # change this to 'apt' to give the user the option to clean the apt files

        # keep alive method
        self._daemon()
# MAIN METHOD
@clog(log, nowait=True)
def _daemon(self):
self.keep_alive = True
count = 0 # FOR TESTING ONLY
while self.keep_alive:
# periodic announcement to confirm the service is alive
# FOR TESTING ONLY
if not count % 100: # FOR TESTING ONLY
xml = xbmc.getInfoLabel('Window.Property(xmlfile)')
log('blurp %s - %s' % (self.randomid, xml)) # FOR TESTING ONLY
count += 1 # FOR TESTING ONLY
# FOR TESTING ONLY
# freespace checker, (runs 5 minutes after boot)
self.automatic_freespace_checker()
# check the scheduler for the update trigger
if self.scheduler.check_trigger():
self.update_now()
log(self.scheduler.trigger_time, 'trigger_time')
# check the action queue
|
florianfesti/boxes
|
boxes/generators/unevenheightbox.py
|
Python
|
gpl-3.0
| 4,609
| 0.003688
|
#!/usr/bin/env python3
# Copyright (C) 2013-2018 Florian Festi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from boxes import *
class UnevenHeightBox(Boxes):
    """Box with a different wall height in each of its four corners."""

    ui_group = "Box"

    def __init__(self):
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings)
        self.addSettingsArgs(edges.GroovedSettings)
        self.buildArgParser("x", "y", "outside", bottom_edge="F")
        self.argparser.add_argument(
            "--height0", action="store", type=float, default=50,
            help="height of the front left corner in mm")
        self.argparser.add_argument(
            "--height1", action="store", type=float, default=50,
            help="height of the front right corner in mm")
        self.argparser.add_argument(
            "--height2", action="store", type=float, default=100,
            help="height of the right back corner in mm")
        self.argparser.add_argument(
            "--height3", action="store", type=float, default=100,
            help="height of the left back corner in mm")
        self.argparser.add_argument(
            "--lid", action="store", type=boolarg, default=False,
            help="add a lid (works best with high corners opposing each other)")
        self.argparser.add_argument(
            "--lid_height", action="store", type=float, default=0,
            help="additional height of the lid")
        self.argparser.add_argument(
            "--edge_types", action="store", type=str, default="eeee",
            help="which edges are flat (e) or grooved (z,Z), counter-clockwise from the front")

    def render(self):
        """Draw the four trapezoid walls, the bottom and (optionally) the lid.

        Raises:
            ValueError: if ``edge_types`` is not exactly four characters,
                each one of "e", "z" or "Z".
        """
        x, y = self.x, self.y
        heights = [self.height0, self.height1, self.height2, self.height3]
        edge_types = self.edge_types
        if len(edge_types) != 4 or any(et not in "ezZ" for et in edge_types):
            # BUGFIX: the message previously ended with a stray ")".
            raise ValueError("Wrong edge_types style: %s" % edge_types)

        if self.outside:
            x = self.adjustSize(x)
            y = self.adjustSize(y)
            for i in range(4):
                heights[i] = self.adjustSize(heights[i], self.bottom_edge,
                                             self.lid)

        h0, h1, h2, h3 = heights
        b = self.bottom_edge

        # Four walls, counter-clockwise starting at the front; each wall
        # interpolates between the heights of its two corners.
        self.trapezoidWall(x, h0, h1, [b, "F", edge_types[0], "F"], move="right")
        self.trapezoidWall(y, h1, h2, [b, "f", edge_types[1], "f"], move="right")
        self.trapezoidWall(x, h2, h3, [b, "F", edge_types[2], "F"], move="right")
        self.trapezoidWall(y, h3, h0, [b, "f", edge_types[3], "f"], move="right")

        with self.saved_context():
            if b != "e":
                self.rectangularWall(x, y, "ffff", move="up")
            if self.lid:
                maxh = max(heights)
                # Lid walls make up the difference to the highest corner.
                lidheights = [maxh - h + self.lid_height for h in heights]
                h0, h1, h2, h3 = lidheights
                lidheights += lidheights
                # Zero-height lid sides get a plain edge ("E"), no fingers.
                edges = ["E" if (lidheights[i] == 0.0 and lidheights[i + 1] == 0.0) else "f"
                         for i in range(4)]
                self.rectangularWall(x, y, edges, move="up")

        if self.lid:
            self.moveTo(0, maxh + self.lid_height + self.edges["F"].spacing() +
                        self.edges[b].spacing() + 1 * self.spacing, 180)
            # Grooves on the lid must mate with the box: swap z <-> Z.
            edge_inverse = {"e": "e", "z": "Z", "Z": "z"}
            edge_types = [edge_inverse[et] for et in edge_types]

            self.trapezoidWall(y, h0, h3, "Ff" + edge_types[3] + "f", move="right" +
                               (" only" if h0 == h3 == 0.0 else ""))
            self.trapezoidWall(x, h3, h2, "FF" + edge_types[2] + "F", move="right" +
                               (" only" if h3 == h2 == 0.0 else ""))
            self.trapezoidWall(y, h2, h1, "Ff" + edge_types[1] + "f", move="right" +
                               (" only" if h2 == h1 == 0.0 else ""))
            self.trapezoidWall(x, h1, h0, "FF" + edge_types[0] + "F", move="right" +
                               (" only" if h1 == h0 == 0.0 else ""))
|
Serulab/Py4Bio
|
code/ch14/scatter.py
|
Python
|
mit
| 618
| 0.004854
|
from bokeh.charts import Scatter, output_file, show

# NOTE(review): bokeh.charts was deprecated and later removed from bokeh;
# this sample only runs with old bokeh versions -- verify before reuse.

# Sample data: eight observations over time.
x = [1, 2, 3, 4, 5, 6, 7, 8]
y = [2.1, 6.45, 3, 1.4, 4.55, 3.85, 5.2, 0.7]
z = [.5, 1.1, 1.9, 2.5, 3.1, 3.9, 4.85, 5.2]
# Categorical columns used for marker color and shape.
species = ['cat', 'cat', 'cat', 'dog', 'dog', 'dog', 'mouse', 'mouse']
country = ['US', 'US', 'US', 'US', 'UK', 'UK', 'BR', 'BR']
df = {'time': x, 'weight 1': y, 'weight 2': z, 'species':species, 'country': country}
# Color points by country, choose marker shape by species.
# NOTE(review): the title "Auto MPG" does not match this weight/time data.
scatter = Scatter(df, x='time', y='weight 1', color='country', marker='species',
                  title="Auto MPG", xlabel="Time in days",
                  ylabel="Weight in grams")
# Write the plot to scatter.html and open it in a browser.
output_file('scatter.html')
show(scatter)
|
croxis/SpaceDrive
|
spacedrive/renderpipeline/rplibs/colorama/ansitowin32.py
|
Python
|
mit
| 9,904
| 0.001918
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test
winterm = None
# Instantiate the Win32 terminal driver only when the win32 DLLs loaded
# (windll is None on non-Windows platforms); otherwise winterm stays None.
if windll is not None:
    winterm = WinTerm()
def is_stream_closed(stream):
    """Return the stream's ``closed`` flag; treat a missing attribute as closed."""
    return getattr(stream, 'closed', True)
def is_a_tty(stream):
    """Return True when *stream* reports being attached to a terminal."""
    if hasattr(stream, 'isatty'):
        return stream.isatty()
    return False
class StreamWrapper(object):
    '''
    Transparent proxy around a stream (such as stdout): every attribute
    lookup is served by the underlying stream, except 'write()', which is
    routed through the converter so ANSI sequences can be rewritten.
    '''

    def __init__(self, wrapped, converter):
        # Name-mangled (double underscore) so these can never collide with
        # attribute names of the wrapped stream object.
        self.__stream = wrapped
        self.__converter = converter

    def write(self, text):
        self.__converter.write(text)

    def __getattr__(self, name):
        return getattr(self.__stream, name)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command
    def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
        """Wrap *wrapped* (normally sys.stdout or sys.stderr).

        convert: force conversion of ANSI codes to win32 calls (None = auto).
        strip: force stripping of ANSI codes from output (None = auto).
        autoreset: reset colors/style to defaults after every write().
        """
        # The wrapped stream (normally sys.stdout or sys.stderr)
        self.wrapped = wrapped
        # should we reset colors to defaults after every .write()
        self.autoreset = autoreset
        # create the proxy wrapping our output stream
        self.stream = StreamWrapper(wrapped, self)
        on_windows = os.name == 'nt'
        # We test if the WinAPI works, because even if we are on Windows
        # we may be using a terminal that doesn't support the WinAPI
        # (e.g. Cygwin Terminal). In this case it's up to the terminal
        # to support the ANSI codes.
        conversion_supported = on_windows and winapi_test()
        # should we strip ANSI sequences from our output?
        if strip is None:
            strip = conversion_supported or (not is_stream_closed(wrapped) and not is_a_tty(wrapped))
        self.strip = strip
        # should we convert ANSI sequences into win32 calls?
        if convert is None:
            convert = conversion_supported and not is_stream_closed(wrapped) and is_a_tty(wrapped)
        self.convert = convert
        # dict of ansi codes to win32 functions and parameters
        self.win32_calls = self.get_win32_calls()
        # are we wrapping stderr?
        self.on_stderr = self.wrapped is sys.stderr
    def should_wrap(self):
        '''
        True if this class is actually needed. If false, then the output
        stream will not be affected, nor will win32 calls be issued, so
        wrapping stdout is not actually required. This will generally be
        False on non-Windows platforms, unless optional functionality like
        autoreset has been requested using kwargs to init()
        '''
        # Any one of the three features requires intercepting write().
        return self.convert or self.strip or self.autoreset
    def get_win32_calls(self):
        """Map ANSI style/color codes to ``(winterm_method, *args)`` tuples.

        Returns an empty dict when conversion is disabled or the win32
        terminal driver is unavailable (winterm is None off-Windows).
        """
        if self.convert and winterm:
            return {
                AnsiStyle.RESET_ALL: (winterm.reset_all, ),
                AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
                AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
                AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
                AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
                AnsiFore.RED: (winterm.fore, WinColor.RED),
                AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
                AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
                AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
                AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
                AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
                AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
                AnsiFore.RESET: (winterm.fore, ),
                AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
                AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
                AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
                AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
                AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
                AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
                AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
                AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
                AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
                AnsiBack.RED: (winterm.back, WinColor.RED),
                AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
                AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
                AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
                AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
                AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
                AnsiBack.WHITE: (winterm.back, WinColor.GREY),
                AnsiBack.RESET: (winterm.back, ),
                AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
                AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
                AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
                AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
                AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
                AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
                AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
                AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
            }
        return dict()
    def write(self, text):
        """Write *text*, stripping/converting ANSI sequences when enabled."""
        if self.strip or self.convert:
            self.write_and_convert(text)
        else:
            # Pass-through mode: emit unchanged and flush immediately.
            self.wrapped.write(text)
            self.wrapped.flush()
        if self.autoreset:
            self.reset_all()
    def reset_all(self):
        """Restore default colors/style on the underlying terminal."""
        if self.convert:
            # 'm' with parameter 0 is ANSI SGR "reset all".
            self.call_win32('m', (0,))
        elif not self.strip and not is_stream_closed(self.wrapped):
            self.wrapped.write(Style.RESET_ALL)
    def write_and_convert(self, text):
        '''
        Write the given text to our wrapped stream, stripping any ANSI
        sequences from the text, and optionally converting them into win32
        calls.
        '''
        cursor = 0
        text = self.convert_osc(text)
        for match in self.ANSI_CSI_RE.finditer(text):
            start, end = match.span()
            # Emit the plain text preceding this escape sequence, then
            # handle the sequence itself; the sequence is never echoed.
            self.write_plain_text(text, cursor, start)
            self.convert_ansi(*match.groups())
            cursor = end
        # Trailing plain text after the last escape sequence.
        self.write_plain_text(text, cursor, len(text))
    def write_plain_text(self, text, start, end):
        """Write ``text[start:end]`` verbatim; no-op for an empty slice."""
        if start < end:
            self.wrapped.write(text[start:end])
            self.wrapped.flush()
    def convert_ansi(self, paramstring, command):
        """Translate one CSI sequence into win32 calls (when converting)."""
        if self.convert:
            params = self.extract_params(command, paramstring)
            self.call_win32(command, params)
def extract_params(self, command, paramstring):
if command in 'Hf':
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
while len(params) < 2:
# defaults:
params = params + (1,)
else:
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
if len(params) == 0:
# defaults:
if command in 'JKm':
params = (0,)
elif com
|
eladhoffer/seq2seq.pytorch
|
seq2seq/models/modules/weight_drop.py
|
Python
|
mit
| 1,803
| 0.003882
|
# Taken from https://github.com/salesforce/awd-lstm-lm/blob/master/weight_drop.py
import torch
from torch.nn import Parameter
from functools import wraps
class WeightDrop(torch.nn.Module):
    """Apply DropConnect-style dropout to named weights of a wrapped module.

    At construction every listed parameter is re-registered under
    ``<name>_raw``; on each forward pass a (possibly dropped-out) copy is
    written back under the original name before delegating to the wrapped
    module's forward.
    """

    def __init__(self, module, weights, dropout=0, variational=False):
        super(WeightDrop, self).__init__()
        self.module = module
        self.weights = weights
        self.dropout = dropout
        self.variational = variational
        self._setup()

    def _dummy(*args, **kwargs):
        # Stand-in for flatten_parameters that deliberately does nothing.
        return

    def _setup(self):
        # Workaround for cuDNN RNN weight compaction: flatten_parameters
        # clashes with the per-forward re-registration below, so disable it.
        if issubclass(type(self.module), torch.nn.RNNBase):
            self.module.flatten_parameters = self._dummy
        for weight_name in self.weights:
            print('Applying weight drop of {} to {}'.format(self.dropout, weight_name))
            original = getattr(self.module, weight_name)
            del self.module._parameters[weight_name]
            self.module.register_parameter(weight_name + '_raw', Parameter(original.data))

    def _setweights(self):
        for weight_name in self.weights:
            raw = getattr(self.module, weight_name + '_raw')
            if self.variational:
                # Variational flavor: one mask entry per row, broadcast
                # across the row; note dropout here always runs in
                # training mode by design.
                mask = torch.autograd.Variable(torch.ones(raw.size(0), 1))
                if raw.is_cuda:
                    mask = mask.cuda()
                mask = torch.nn.functional.dropout(mask, p=self.dropout, training=True)
                dropped = mask.expand_as(raw) * raw
            else:
                dropped = torch.nn.functional.dropout(raw, p=self.dropout, training=self.training)
            setattr(self.module, weight_name, dropped)

    def forward(self, *args):
        self._setweights()
        return self.module.forward(*args)
|
jdanbrown/pydatalab
|
legacy_tests/kernel/sql_tests.py
|
Python
|
apache-2.0
| 7,834
| 0.003957
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
from __future__ import absolute_import
from __future__ import unicode_literals
import imp
import mock
from oauth2client.client import AccessTokenCredentials
import unittest
# import Python so we can mock the parts we need to here.
import IPython
import IPython.core.magic
import datalab.bigquery
import datalab.context
import datalab.data
import datalab.data.commands
import datalab.utils.commands
def noop_decorator(func):
    # Identity decorator: returns *func* unchanged. Used below to stub out
    # IPython's magic-registration decorators during the tests.
    return func

# Replace IPython's registration decorators with no-ops and stub out
# get_ipython() so the datalab magics import cleanly outside a live kernel.
IPython.core.magic.register_line_cell_magic = noop_decorator
IPython.core.magic.register_line_magic = noop_decorator
IPython.core.magic.register_cell_magic = noop_decorator
IPython.get_ipython = mock.Mock()
class TestCases(unittest.TestCase):
    """Tests for datalab.data.commands._sql cell splitting and argument binding."""

    # Module keys under which _split_cell stores the main/last query objects.
    _SQL_MODULE_MAIN = datalab.data._utils._SQL_MODULE_MAIN
    _SQL_MODULE_LAST = datalab.data._utils._SQL_MODULE_LAST

    def test_split_cell(self):
        # TODO(gram): add tests for argument parser.
        # Empty / whitespace-only cells produce no query and no module entries.
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell('', m)
        self.assertIsNone(query)
        self.assertNotIn(TestCases._SQL_MODULE_LAST, m.__dict__)
        self.assertNotIn(TestCases._SQL_MODULE_MAIN, m.__dict__)
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell('\n\n', m)
        self.assertIsNone(query)
        self.assertNotIn(TestCases._SQL_MODULE_LAST, m.__dict__)
        self.assertNotIn(TestCases._SQL_MODULE_MAIN, m.__dict__)
        # A plain statement becomes both the "main" and "last" query.
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell('# This is a comment\n\nSELECT 3 AS x', m)
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_LAST])
        self.assertEquals('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
        self.assertEquals('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_LAST].sql)
        # Leading variable assignments are split off from the SQL body.
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell(
            '# This is a comment\n\nfoo="bar"\nSELECT 3 AS x', m)
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_LAST])
        self.assertEquals('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
        self.assertEquals('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_LAST].sql)
        sql_string_list = ['SELECT 3 AS x',
                           'WITH q1 as (SELECT "1")\nSELECT * FROM q1',
                           'INSERT DataSet.Table (Id, Description)\nVALUES(100,"TestDesc")',
                           # NOTE(review): several of the entries below are not
                           # separated by commas -- adjacent string literals
                           # concatenate, so this list has fewer elements than it
                           # visually appears to. Verify whether that is intended.
                           'INSERT DataSet.Table (Id, Description)\n'
                           'SELECT * FROM UNNEST([(200,"TestDesc2"),(300,"TestDesc3")])'
                           'INSERT DataSet.Table (Id, Description)\n' +
                           'WITH w as (SELECT ARRAY<STRUCT<Id int64, Description string>>\n' +
                           '[(400, "TestDesc4"),(500, "TestDesc5")] col)\n' +
                           'SELECT Id, Description FROM w, UNNEST(w.col)'
                           'INSERT DataSet.Table (Id, Description)\n' +
                           'VALUES (600,\n' +
                           '(SELECT Description FROM DataSet.Table WHERE Id = 400))',
                           'DELETE FROM DataSet.Table WHERE DESCRIPTION IS NULL'
                           'DELETE FROM DataSet.Table\n' +
                           'WHERE Id NOT IN (100, 200, 300)'
                           ]
        # Each statement round-trips unchanged through _split_cell.
        for i in range(0, len(sql_string_list)):
            m = imp.new_module('m')
            query = datalab.data.commands._sql._split_cell(sql_string_list[i], m)
            self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
            self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_LAST])
            self.assertEquals(sql_string_list[i], m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
            self.assertEquals(sql_string_list[i], m.__dict__[TestCases._SQL_MODULE_LAST].sql)
        # DEFINE QUERY names the query in the module; no "main" query exists
        # unless a trailing unnamed statement follows.
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell('DEFINE QUERY q1\nSELECT 3 AS x', m)
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_LAST])
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_LAST])
        self.assertEquals('SELECT 3 AS x', m.q1.sql)
        self.assertNotIn(TestCases._SQL_MODULE_MAIN, m.__dict__)
        self.assertEquals('SELECT 3 AS x', m.__dict__[TestCases._SQL_MODULE_LAST].sql)
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell(
            'DEFINE QUERY q1\nSELECT 3 AS x\nSELECT * FROM $q1', m)
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_MAIN])
        self.assertEquals(query, m.__dict__[TestCases._SQL_MODULE_LAST])
        self.assertEquals('SELECT 3 AS x', m.q1.sql)
        self.assertEquals('SELECT * FROM $q1', m.__dict__[TestCases._SQL_MODULE_MAIN].sql)
        self.assertEquals('SELECT * FROM $q1', m.__dict__[TestCases._SQL_MODULE_LAST].sql)

    @mock.patch('datalab.context._context.Context.default')
    def test_arguments(self, mock_default_context):
        """$-variables in the cell are bound from assignments or overrides."""
        mock_default_context.return_value = TestCases._create_context()
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell("""
words = ('thus', 'forsooth')
limit = 10
SELECT * FROM [publicdata:samples.shakespeare]
WHERE word IN $words
LIMIT $limit
""", m)
        # Defaults come from the assignments in the cell.
        sql = datalab.bigquery.Query(query, values={}).sql
        self.assertEquals('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                          'WHERE word IN ("thus", "forsooth")\nLIMIT 10', sql)
        # As above but with overrides, using list
        sql = datalab.bigquery.Query(query, words=['eyeball'], limit=5).sql
        self.assertEquals('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                          'WHERE word IN ("eyeball")\nLIMIT 5', sql)
        # As above but with overrides, using tuple and values dict
        sql = datalab.bigquery.Query(query, values={'limit': 3, 'words': ('thus',)}).sql
        self.assertEquals('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                          'WHERE word IN ("thus")\nLIMIT 3', sql)
        # As above but with list argument
        m = imp.new_module('m')
        query = datalab.data.commands._sql._split_cell("""
words = ['thus', 'forsooth']
limit = 10
SELECT * FROM [publicdata:samples.shakespeare]
WHERE word IN $words
LIMIT $limit
""", m)
        sql = datalab.bigquery.Query(query, values={}).sql
        self.assertEquals('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                          'WHERE word IN ("thus", "forsooth")\nLIMIT 10', sql)
        # As above but with overrides, using list
        sql = datalab.bigquery.Query(query, values={'limit': 2, 'words': ['forsooth']}).sql
        self.assertEquals('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                          'WHERE word IN ("forsooth")\nLIMIT 2', sql)
        # As above but with overrides, using tuple
        sql = datalab.bigquery.Query(query, words=('eyeball',)).sql
        self.assertEquals('SELECT * FROM [publicdata:samples.shakespeare]\n' +
                          'WHERE word IN ("eyeball")\nLIMIT 10', sql)
        # TODO(gram): add some tests for source and datestring variables

    def test_date(self):
        # TODO(gram): complete this test
        pass

    def test_sql_cell(self):
        # TODO(gram): complete this test
        pass

    @staticmethod
    def _create_context():
        # Minimal Context with fake credentials for the mocked default().
        project_id = 'test'
        creds = AccessTokenCredentials('test_token', 'test_ua')
        return datalab.context.Context(project_id, creds)
|
Azure/azure-sdk-for-python
|
sdk/powerbiembedded/azure-mgmt-powerbiembedded/azure/mgmt/powerbiembedded/models/power_bi_embedded_management_client_enums.py
|
Python
|
mit
| 660
| 0
|
# coding=u
|
tf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# ---------------------------------------------
|
-----------------------------
from enum import Enum
# Names of the two regenerable access keys of a workspace collection; the
# str mixin makes each member compare equal to its wire-format value.
AccessKeyName = Enum(
    'AccessKeyName',
    [('key1', 'key1'), ('key2', 'key2')],
    type=str,
)
# Reason returned by a name-availability check; member values carry the
# exact casing used on the wire ("Unavailable" / "Invalid").
CheckNameReason = Enum(
    'CheckNameReason',
    [('unavailable', 'Unavailable'), ('invalid', 'Invalid')],
    type=str,
)
|
jteehan/cfme_tests
|
cfme/tests/configure/test_log_depot_operation.py
|
Python
|
gpl-2.0
| 12,788
| 0.00305
|
# -*- coding: utf-8 -*-
""" Tests used to check the operation of log collecting.
Author: Milan Falešník <mfalesni@redhat.com>
Since: 2013-02-20
"""
from datetime import datetime
import fauxfactory
import pytest
import re
from cfme import test_requirements
from cfme.configure import configuration as configure
from utils import conf, testgen
from utils.appliance.implementations.ui import navigate_to
from utils.blockers import BZ
from utils.ftp import FTPClient
from utils.providers import get_mgmt
from utils.version import current_version
from utils.virtual_machines import deploy_template
pytestmark = [test_requirements.log_depot]
class LogDepotType(object):
    """One log-depot endpoint: protocol, credentials and path information."""

    def __init__(self, protocol, credentials, access_dir=None, path=None):
        self.protocol = protocol
        self._param_name = self.protocol
        self.credentials = credentials
        self.access_dir = access_dir if access_dir else ""
        self.path = path
        # Filled in later, once the depot VM has been deployed.
        self.machine_ip = None

    @property
    def ftp(self):
        """FTPClient for this depot's machine.

        Anonymous FTP logs in as "anonymous" and is forced into the hardcoded
        "incoming" directory (CFME only works with that folder for anonymous
        connections, https://bugzilla.redhat.com/show_bug.cgi?id=1307019);
        any other protocol uses the stored credentials at "/".
        """
        if self.protocol == "anon_ftp":
            user_name = "anonymous"
            password = ""
            upload_dir = "incoming"
        else:
            user_name = self.credentials["username"]
            password = self.credentials["password"]
            upload_dir = "/"
        return FTPClient(self.machine_ip,
                         user_name,
                         password,
                         upload_dir)
def pytest_generate_tests(metafunc):
    """ Parametrizes the logdepot tests according to cfme_data YAML file.

    YAML structure (shared with db backup tests) is as follows:

        log_db_depot:
            credentials: credentials_key
            protocols:
                smb:
                    path_on_host: /path/on/host
                    use_for_log_collection: True
                    use_for_db_backups: False
                nfs:
                    hostname: nfs.example.com/path/on/host
                    use_for_log_collection: False
                    use_for_db_backups: True
                ftp:
                    hostname: ftp.example.com
                    use_for_log_collection: True
    """
    # test_collect_unconfigured needs no depot parametrization at all.
    if metafunc.function.__name__ == 'test_collect_unconfigured':
        return
    fixtures = ['log_depot']
    data = conf.cfme_data.get("log_db_operations", {})
    depots = []
    ids = []
    creds = conf.credentials[data['credentials']]
    # NOTE: .iteritems() is Python 2 only, consistent with the rest of this file.
    for protocol, proto_data in data['protocols'].iteritems():
        if proto_data['use_for_log_collection']:
            depots.append([LogDepotType(
                protocol, creds,
                proto_data.get('sub_folder'), proto_data.get('path_on_host'))])
            ids.append(protocol)
    # The multi/single-server tests only need one depot flavor.
    if metafunc.function.__name__ in ['test_collect_multiple_servers',
                                      "test_collect_single_servers"]:
        ids = ids[:1]
        depots = depots[:1]
    testgen.parametrize(metafunc, fixtures, depots, ids=ids, scope="function")
    return
@pytest.yield_fixture(scope="module")
def depot_machine_ip():
    """Deploy a depot VM on the provider named in cfme_data and yield its IP.

    The VM is created from the configured template and deleted again when
    the module's tests are done.
    """
    depot_machine_name = "test_long_log_depot_{}".format(fauxfactory.gen_alphanumeric())
    data = conf.cfme_data.get("log_db_operations", {})
    depot_provider_key = data["log_db_depot_template"]["provider"]
    depot_template_name = data["log_db_depot_template"]["template_name"]
    prov = get_mgmt(depot_provider_key)
    deploy_template(depot_provider_key,
                    depot_machine_name,
                    template_name=depot_template_name)
    yield prov.get_ip_address(depot_machine_name)
    # Teardown: remove the depot VM from the provider.
    prov.delete_vm(depot_machine_name)
@pytest.fixture(scope="module")
def configured_external_appliance(temp_appliance_preconfig, app_creds_modscope,
                                  temp_appliance_unconfig):
    """Join the unconfigured appliance to the preconfigured one's database
    and return it once its services and web UI are up."""
    hostname = temp_appliance_preconfig.address
    temp_appliance_unconfig.appliance_console_cli.configure_appliance_external_join(hostname,
        app_creds_modscope['username'], app_creds_modscope['password'], 'vmdb_production',
        hostname, app_creds_modscope['sshlogin'], app_creds_modscope['sshpass'])
    temp_appliance_unconfig.start_evm_service()
    temp_appliance_unconfig.wait_for_evm_service()
    temp_appliance_unconfig.wait_for_web_ui()
    return temp_appliance_unconfig
@pytest.yield_fixture(scope="function")
def configured_depot(log_depot, depot_machine_ip):
    """ Configure selected depot provider

    This fixture used the trick that the fixtures are cached for given function.
    So if placed behind the depot_* stuff on the test function, it can actually
    take the values from them.

    It also provides a finalizer to disable the depot after test run.
    """
    log_depot.machine_ip = depot_machine_ip
    uri = log_depot.machine_ip + log_depot.access_dir
    # Note: rebinding log_depot to the ServerLogDepot object; the original
    # LogDepotType is no longer referenced past this point.
    log_depot = configure.ServerLogDepot(log_depot.protocol,
                                         depot_name=fauxfactory.gen_alphanumeric(),
                                         uri=uri,
                                         username=log_depot.credentials["username"],
                                         password=log_depot.credentials["password"]
                                         )
    log_depot.create()
    yield log_depot
    # Teardown: disable the configured depot again.
    log_depot.clear()
def check_ftp(ftp, server_name, server_zone_id):
    """Assert that log archives for server "<name>_<zone>" exist on the FTP
    depot and that consecutive archives do not overlap in time.
    """
    server_string = server_name + "_" + str(server_zone_id)
    with ftp:
        # Files must have been created after start with server string in it (for ex. EVM_1)
        zip_files = ftp.filesystem.search(re.compile(r"^.*{}.*?[.]zip$".format(server_string)),
                                          directories=False)
        assert zip_files, "No logs found!"
        # Check the times of the files by names
        datetimes = []
        for file in zip_files:
            # files looks like "Current_region_0_default_1_EVM_1_20170127_043343_20170127_051010.zip"
            # 20170127_043343 - date and time; underscore-split fields 7/8 are
            # the start stamp, fields 9/10 the end stamp.
            date = file.name.split("_")
            date_from = date[7] + date[8]
            # removing ".zip" from last item
            date_to = date[9] + date[10][:-4]
            try:
                date_from = datetime.strptime(date_from, "%Y%m%d%H%M%S")
                date_to = datetime.strptime(date_to, "%Y%m%d%H%M%S")
            except ValueError:
                assert False, "Wrong file matching of {}".format(file.name)
            datetimes.append((date_from, date_to, file.name))
        # Check for the gaps: each archive must start no earlier than the
        # previous one ended.
        if len(datetimes) > 1:
            for i in range(len(datetimes) - 1):
                dt = datetimes[i + 1][0] - datetimes[i][1]
                assert dt.total_seconds() >= 0.0, \
                    "Negative gap between log files ({}, {})".format(
                        datetimes[i][2], datetimes[i + 1][2])
@pytest.mark.tier(3)
@pytest.mark.nondestructive
@pytest.mark.meta(blockers=[BZ(1341502, unblock=lambda log_depot: log_depot.protocol != "anon_ftp",
                               forced_streams=["5.6", "5.7", "5.8", "upstream"])]
                  )
def test_collect_log_depot(log_depot, appliance, configured_depot, request):
    """ Boilerplate test to verify functionality of this concept

    Will be extended and improved.
    """
    # Wipe the FTP contents in the end
    @request.addfinalizer
    def _clear_ftp():
        with log_depot.ftp as ftp:
            ftp.cwd(ftp.upload_dir)
            ftp.recursively_delete()

    # Prepare empty workspace
    with log_depot.ftp as ftp:
        # move to upload folder
        ftp.cwd(ftp.upload_dir)
        # delete all files
        ftp.recursively_delete()

    # Start the collection
    configured_depot.collect_all()
    # Check it on FTP
    check_ftp(log_depot.ftp, appliance.server_name(), appliance.server_zone_id())
@pytest.mark.meta(blockers=[BZ(1436367, forced_streams=["5.8"])])
@pytest.mark.tier(3)
def test_collect_unconfigured(appliance):
""" Te
|
blorenz/btce-api
|
samples/cancel-orders.py
|
Python
|
mit
| 1,343
| 0.006701
|
#!/usr/bin/python
# NOTE(review): this sample is Python 2 (print statements, unicode builtin).
import sys
import btceapi

# This sample shows use of a KeyHandler. For each API key in the file
# passed in as the first argument, all pending orders for the specified
# pair and type will be canceled.

if len(sys.argv) < 4:
    print "Usage: cancel_orders.py <key file> <pair> <order type>"
    print "    key file - Path to a file containing key/secret/nonce data"
    print "    pair - A currency pair, such as btc_usd"
    print "    order type - Type of orders to process, either 'buy' or 'sell'"
    sys.exit(1)

# Positional arguments: key file path, currency pair, buy/sell.
key_file = sys.argv[1]
pair = sys.argv[2]
order_type = unicode(sys.argv[3])

handler = btceapi.KeyHandler(key_file)
for key in handler.keys:
    print "Canceling orders for key %s" % key
    t = btceapi.TradeAPI(key, handler)
    try:
        # Get a list of orders for the given pair, and cancel the ones
        # with the correct order type.
        orders = t.orderList(pair = pair)
        for o in orders:
            if o.type == order_type:
                print "  Canceling %s %s order for %f @ %f" % (pair, order_type,
                                                               o.amount, o.rate)
                t.cancelOrder(o.order_id)
        if not orders:
            print "  There are no %s %s orders" % (pair, order_type)
    except Exception as e:
        # Best-effort: report the failure for this key and move on.
        print "  An error occurred: %s" % e
|
syci/ingadhoc-odoo-addons
|
hr_timesheet_project/__openerp__.py
|
Python
|
agpl-3.0
| 591
| 0
|
# -*- coding: utf-8 -*-
# OpenERP/Odoo addon manifest for the timesheet module.
{
    'name': 'Time Tracking',
    'version': '1.0',
    'category': 'Human Resources',
    # Position in the apps listing.
    'sequence': 23,
    'description': """
This module implements a timesheet system.
==========================================
""",
    'author': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    'images': ['images/hr_timesheet_lines.jpeg'],
    # hr_timesheet provides the timesheet model; project links lines to projects.
    'depends': ['hr_timesheet', 'project'],
    'data': [
    ],
    'demo': [],
    'test': [
    ],
    'installable': True,
    # Not installed automatically when its dependencies are present.
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
andrewsy97/Treehacks
|
websocket/websocket/_app.py
|
Python
|
mit
| 10,379
| 0.002794
|
"""
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
"""
WebSocketApp provides higher level APIs.
"""
import threading
import time
import traceback
import sys
import select
import six
from ._core import WebSocket, getdefaulttimeout
from ._exceptions import *
from ._logging import *
from ._abnf import ABNF
__all__ = ["WebSocketApp"]
class WebSocketApp(object):
|
"""
Higher level of APIs are provided.
The interface is like JavaScript WebSocket object.
"""
def __init__(self, url, header=[],
on_open=None, on_message=None, on_error=None,
on_close=None, on_ping=None, on_pong=None,
on_cont_message=None,
keep_running=Tr
|
ue, get_mask_key=None, cookie=None,
subprotocols=None,
on_data=None):
"""
url: websocket url.
header: custom header for websocket handshake.
on_open: callable object which is called at opening websocket.
this function has one argument. The argument is this class object.
on_message: callable object which is called when received data.
on_message has 2 arguments.
The 1st argument is this class object.
The 2nd argument is utf-8 string which we get from the server.
on_error: callable object which is called when we get error.
on_error has 2 arguments.
The 1st argument is this class object.
The 2nd argument is exception object.
on_close: callable object which is called when closed the connection.
this function has one argument. The argument is this class object.
on_cont_message: callback object which is called when receive continued
frame data.
on_message has 3 arguments.
The 1st argument is this class object.
The 2nd argument is utf-8 string which we get from the server.
The 3rd argument is continue flag. if 0, the data continue
to next frame data
on_data: callback object which is called when a message received.
This is called before on_message or on_cont_message,
and then on_message or on_cont_message is called.
on_data has 4 argument.
The 1st argument is this class object.
The 2nd argument is utf-8 string which we get from the server.
The 3rd argument is data type. ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY will be came.
The 4rd argument is continue flag. if 0, the data continue
keep_running: a boolean flag indicating whether the app's main loop
should keep running, defaults to True
get_mask_key: a callable to produce new mask keys,
see the WebSocket.set_mask_key's docstring for more information
subprotocols: array of available sub protocols. default is None.
"""
self.url = url
self.header = header
self.cookie = cookie
self.on_open = on_open
self.on_message = on_message
self.on_data = on_data
self.on_error = on_error
self.on_close = on_close
self.on_ping = on_ping
self.on_pong = on_pong
self.on_cont_message = on_cont_message
self.keep_running = keep_running
self.get_mask_key = get_mask_key
self.sock = None
self.last_ping_tm = 0
self.subprotocols = subprotocols
def send(self, data, opcode=ABNF.OPCODE_TEXT):
"""
send message.
data: message to send. If you set opcode to OPCODE_TEXT,
data must be utf-8 string or unicode.
opcode: operation code of data. default is OPCODE_TEXT.
"""
if not self.sock or self.sock.send(data, opcode) == 0:
raise WebSocketConnectionClosedException("Connection is already closed.")
def close(self):
"""
close websocket connection.
"""
self.keep_running = False
if self.sock:
self.sock.close()
def _send_ping(self, interval, event):
while not event.wait(interval):
self.last_ping_tm = time.time()
if self.sock:
self.sock.ping()
def run_forever(self, sockopt=None, sslopt=None,
ping_interval=0, ping_timeout=None,
http_proxy_host=None, http_proxy_port=None,
http_no_proxy=None, http_proxy_auth=None,
skip_utf8_validation=False,
host=None, origin=None):
"""
run event loop for WebSocket framework.
This loop is infinite loop and is alive during websocket is available.
sockopt: values for socket.setsockopt.
sockopt must be tuple
and each element is argument of sock.setsockopt.
sslopt: ssl socket optional dict.
ping_interval: automatically send "ping" command
every specified period(second)
if set to 0, not send automatically.
ping_timeout: timeout(second) if the pong message is not received.
http_proxy_host: http proxy host name.
http_proxy_port: http proxy port. If not set, set to 80.
http_no_proxy: host names, which doesn't use proxy.
skip_utf8_validation: skip utf8 validation.
host: update host header.
origin: update origin header.
"""
if not ping_timeout or ping_timeout <= 0:
ping_timeout = None
if sockopt is None:
sockopt = []
if sslopt is None:
sslopt = {}
if self.sock:
raise WebSocketException("socket is already opened")
thread = None
close_frame = None
try:
self.sock = WebSocket(self.get_mask_key,
sockopt=sockopt, sslopt=sslopt,
fire_cont_frame=self.on_cont_message and True or False,
skip_utf8_validation=skip_utf8_validation)
self.sock.settimeout(getdefaulttimeout())
self.sock.connect(self.url, header=self.header, cookie=self.cookie,
http_proxy_host=http_proxy_host,
http_proxy_port=http_proxy_port,
http_no_proxy=http_no_proxy, http_proxy_auth=http_proxy_auth,
subprotocols=self.subprotocols,
host=host, origin=origin)
self._callback(self.on_open)
if ping_interval:
event = threading.Event()
thread = threading.Thread(target=self._send_ping, args=(ping_interval, event))
thread.setDaemon(True)
thread.start()
while self.sock.connected:
r, w, e = select.select((self.sock.sock, ), (), (), ping_timeout)
if not self.keep_running:
break
if ping_timeout and self.last_ping_tm and time.time() - self.last_ping_tm > ping_timeout:
self.last_ping_tm = 0
raise WebSocketTimeoutException("ping timed out")
if r:
op_code, frame = self.sock.recv_data_frame(True)
if op_code == ABNF.OPCODE_CLOSE:
close_frame = frame
break
elif op_code == ABNF.OPCODE_PING:
self._callback(self.on_ping, frame.data)
elif op_code ==
|
arruda/amao
|
AMAO/apps/Corretor/models/retorno.py
|
Python
|
mit
| 2,633
| 0.012952
|
# -*- coding: utf-8 -*-
from django.db import models
from Corretor.base import CorretorException
from Corretor.base import ExecutorException
from Corretor.base import CompiladorException
from Corretor.base import ComparadorException
from Corretor.base import LockException
from model_utils import Choices
class RetornoCorrecao(models.Model):
"""Um modelo que possui informacoes sobre o retorno da correcao de uma questao(ou questao de avaliacao).
"""
TIPOS = Choices(
(0,'loading',u'Loading'),
(1,'compilacao',u'Compilação'),
(2,'execucao',u'Execução'),
(3,'comparacao',u'Comparação'),
(4,'lock',u'Lock'),
(5,'correto',u'Correto'),
)
tipo = models.SmallIntegerField(u"Tipo",choices=TIPOS, default=TIPOS.loading)
msg = models.TextField(u"Mensagem",blank=True,null=True)
task_id = models.CharField(max_length=350,blank=True,null=True)
class Meta:
verbose_name = u'Retorno Correção'
app_label = 'Corretor'
    def __unicode__(self):
        # Human-readable form "<tipo label>: <msg>".
        # NOTE(review): TIPOS[self.tipo] indexes the model_utils Choices object
        # and the trailing [1] then indexes whatever that returns -- confirm
        # against the installed model_utils version that this yields the
        # display label and not a single character of it.
        return "%s: %s" %(self.TIPOS[self.tipo][1],self.msg)
def altera_dados(self,sucesso=True,erroException=None):
"""
Altera os dados do retorno atual para pegar os dados de erro ou para por a mensagem
que foi com sucesso.
"""
|
tipo = RetornoCorrecao.TIPOS.correto
correcao_msg = "Correto!"
# print ">>altera_dados"
# print ">>isinstance(erroException,CorretorException)",isinstance(erroException,CorretorException)
i
|
f sucesso == True:
# print ">>retorno.successful()"
tipo = RetornoCorrecao.TIPOS.correto
correcao_msg = "Correto!"
elif isinstance(erroException,CorretorException):
# print "erro: %s" % erroException.message
if isinstance(erroException,ExecutorException):
correcao_msg = erroException.message
tipo = RetornoCorrecao.TIPOS.execucao
if isinstance(erroException,CompiladorException):
correcao_msg = erroException.message
tipo = RetornoCorrecao.TIPOS.compilacao
if isinstance(erroException,ComparadorException):
correcao_msg = erroException.message
tipo = RetornoCorrecao.TIPOS.comparacao
if isinstance(erroException,LockException):
correcao_msg = erroException.message
tipo = RetornoCorrecao.TIPOS.lock
self.tipo = tipo
self.msg = correcao_msg
|
vlukes/sfepy
|
examples/linear_elasticity/linear_elastic_damping.py
|
Python
|
bsd-3-clause
| 1,983
| 0.013111
|
r"""
Time-dependent linear elasticity with a simple damping.
Find :math:`\ul{u}` such that:
.. math::
\int_{\Omega} c\ \ul{v} \cdot \pdiff{\ul{u}}{t}
+ \int_{\Omega} D_{ijkl}\ e_{ij}(\ul{v}) e_{kl}(\ul{u})
= 0
\;, \quad \forall \ul{v} \;,
where
.. math::
D_{ijkl} = \mu (\delta_{ik} \delta_{jl}+\delta_{il} \delta_{jk}) +
\lambda \ \delta_{ij} \delta_{kl}
\;.
"""
from __future__ import print_function
from __future__ import absolute_import
from copy import deepcopy
import numpy as nm
from examples.linear_elasticity.linear_elastic import \
filename_mesh, materials, regions, fields, ebcs, \
integrals, solvers
def print_times(problem, state):
    """Final post-process hook: print every time step the solver reached."""
    reached = nm.array(problem.ts.times)
    print(reached)
options = {
'ts' : 'ts',
'save_times' : 'all',
'post_process_hook_final' : print_times,
'output_format' : 'h5',
}
variables = {
'u' : ('unknown field', 'displacement', 0, 1),
'v' : ('test field', 'displacement', 'u'),
}
# Put density to 'solid'.
materials = deepcopy(materials)
materials['solid'][0].update({'c' : 1000.0})
# Moving the PerturbedSurface region.
ebcs = deepcopy(ebcs)
ebcs['PerturbedSurface'][1].update({'u.0' : 'ebc_sin'})
def ebc_sin(ts, coors):
    """
    Time-dependent Dirichlet value: a sine of amplitude 0.01 over the
    normalized time ts.nt, replicated once per coordinate row in coors.
    """
    amplitude = 0.01
    val = amplitude * nm.sin(2.0 * nm.pi * ts.nt)
    return nm.tile(val, (coors.shape[0],))
equations = {
'balance_of_forces in time' :
"""dw_dot.i.Omega( solid.c, v, du/dt )
+ dw_lin_elastic.i.Omega( solid.D, v, u ) = 0""",
}
def adapt_time_step(ts, status, adt, problem, verbose=False):
    """
    Adaptive-solver hook: once past t = 0.5, force the time step to 0.1.
    Always accepts the current step (returns True).
    """
    past_midpoint = ts.time > 0.5
    if past_midpoint:
        ts.set_time_step(0.1)
    return True
solvers = deepcopy(solvers) # Do not spoil linear_elastic.py namespace in tests.
solvers.update({
'ts' : ('ts.adaptive', {
't0' : 0.0,
't1' : 1.0,
'dt' : None,
'n_step
|
' : 101,
'adapt_fun' : adapt_time_step,
'verbose' : 1,
}),
})
ls = solver
|
s['ls']
ls[1].update({'use_presolve' : True})
functions = {
'ebc_sin' : (ebc_sin,),
}
|
pcrews/rannsaka
|
mysql/mysql_demo.py
|
Python
|
apache-2.0
| 3,820
| 0.012565
|
import commands
import time
import MySQLdb
from locust import Locust, events, task, TaskSet
def show_tables(self):
print "Running show_tables..."
print self.client.query("SHOW TABLES IN mysql", name="SHOW TABLES")
def mysql_user(self):
print "Running show users..."
print self.client.query("SELECT * FROM mysql.user", name="SHOW USERS")
def city_select(self):
    """Locust task: run a full-table SELECT on City and print the result size."""
    print "City SELECT..."
    query = "SELECT * FROM City"
    name = "City SELECT"
    # NOTE(review): MariadbClient.query returns the raw CLI output string, so
    # len(...) counts characters, not rows; on failure it returns None, which
    # would make len() raise TypeError here.
    print "%s: %s" %(self.id, len(self.client.query(query, name)) )
def country_select(self):
print "Country SELECT..."
query = "SELECT * FROM Country"
name = "Country SELECT"
print "%s: %s" %(self.id, len(self.client.query(query, name)) )
def two_table_join(self):
print "2 table JOIN..."
query = "SELECT * FROM City, Country WHERE City.CountryCode=Country.Code"
name = "two_table_join"
print "%s: %s" %(self.id, len(self.client.query(query, name)) )
def three_table_join(self):
print "3 table JOIN..."
query = "SELECT * FROM City, Country, CountryLanguage WHERE City.CountryCode=Country.Code AND CountryLanguage.CountryCode=Country.Code"
name = "three_table_join"
print "%s: %s" %(self.id, len(self.client.query(query, name)) )
class MariadbClient():
    """
    Minimal locust client that runs queries through the ``mysql`` command-line
    tool and fires locust's request_success / request_failure events so every
    query is tracked in locust's statistics.

    NOTE(review): the connection parameters (user ``root``, database ``world``)
    are hard-coded in query() below.
    """
    def __init__(self):
        # No real setup is performed; the try/except mirrors query()'s shape.
        try:
            print 'Hello!'
        except Exception as e:
            print Exception, e
    def query(self, query, name):
        """Shell out to ``mysql`` for *query*; return its output, or None on error."""
        start_time = time.time()
        try:
            # Build and run the CLI invocation; getstatusoutput captures both
            # the exit status and the combined stdout/stderr text.
            cmd = 'mysql -uroot world -e "%s"' %query
            status, output = commands.getstatusoutput(cmd)
            print "%s\ncmd: %s\nstatus: %s\n\n%s" %('#'*80, cmd, status, '#'*80)
        except Exception as e:
            # Failure path: report the elapsed time in milliseconds to locust.
            total_time = float((time.time() - start_time) * 1000)
            print Exception, e
            events.request_failure.fire(request_type="mariadb", name=name, response_time=total_time, exception=e)
            return None
        else:
            # Success path: report timing; response_length is hard-coded to 0.
            total_time = float((time.time() - start_time) * 1000)
            events.request_success.fire(request_type="mariadb", name=name, response_time=total_time, response_length=0)
            # In this example, I've hardcoded response_length=0. If we would want the response length to be
            # reported correctly in the statistics, we would probably need to hook in at a lower level
            return output
class Task_set(TaskSet):
    def on_start(self):
        """Called once per hatched Locust before any task is scheduled."""
        # Derive a short per-locust identifier from the instance's repr:
        # the hex address after 'object at', without the trailing '>'.
        self.id = str(self.locust).split('object at')[1].strip().replace('>', '')
    # Weighted task map: the heavier joins run proportionally more often.
    tasks = {
        three_table_join: 10,
        two_table_join: 5,
        city_select: 3,
        country_select: 1,
    }
class MariadbLocust(Locust):
"""
This is the abstract Locust class which should be subclassed. It provides an XML-RPC client
that can be used to make XML-RPC requests that will be tracked in Locust's statistics.
"""
def __init__(self, *args, **kwargs):
super(MariadbLocust, self).__init__(*args, **kwargs)
self.client = MariadbClient()
task_set = Task_set
class ApiUser(MariadbLocust):
def __init__(self):
super(ApiUser, self).__init__()
self.host = "http://127.0.0.1:3306/"
self.min_wait = 0
self.max_wait = 10
task_set = Task_set
|
uwosh/CCDET-CBRF
|
ccdet.py
|
Python
|
gpl-2.0
| 8,257
| 0.004844
|
import csv
import logging
import transaction
DIRNAME = "/opt/Plone-4.3/zeocluster/Extensions/"
FILENAME = "ccdet.dat"
MAXROWS = 100000000
TRANSSIZE = 50
TRANSSIZE_FOR_READING = 5000
FOLDERID = 'cbrf-folder'
logger = logging.getLogger('ccdet_mem')
html_escape_table = {
"&": "&",
}
def html_escape(text):
    """Return *text* with each character escaped via html_escape_table."""
    escaped_chars = (html_escape_table.get(ch, ch) for ch in text)
    return "".join(escaped_chars)
def ccdet_mem(self, readonly):
allrecs = {}
readonly = (readonly == 1 or readonly == True)
def ccdet_read(self):
transcount = 0
totalcount = 0
with open('%s/%s' % (DIRNAME, FILENAME), 'rb') as csvfile:
linereader = csv.DictReader(csvfile, dialect=csv.excel_tab)
current_person_id = ''
current_person_object = None
last_person_object = None
last_unique_key = ''
for row in linereader:
(
unique_key,
last_name,
first_name,
middle_initial,
last_4,
birthdate,
phone,
email,
employment_status,
class_name,
start_date,
city,
state,
trainer_name,
trainer_approval_number
) = (
row['P_Unique_Key'],
row['Last_Name'],
row['First_Name'],
row['Middle_Initial'],
row['Last_4'],
row['P_Birthdate'],
row['P_Phone'],
row['Email'],
row['Employment_Status'],
row['Class_Name'],
row['Start_Date'],
row['C_City'],
row['C_State'],
row['P_Trainer'],
row['P_Trainer_Approval_#']
)
# strip out time from start_date
start_date = start_date.split(' ')[0]
if employment_status == 'S' and start_date != '':
current_person_id_base = '%s-%s-%s-%s' % (last_name, first_name, middle_initial, unique_key)
current_person_id_base = current_person_id_base.translate(None, "':\"/&")
current_person_id = current_person_id_base
if unique_key != last_unique_key:
# we are dealing with a new person so close out the previous person record
if last_unique_key != '':
# do not do this if we are
|
just starting
#last_person_object.setText(last_person_object.getText() + '</table>')
|
pass
# check if need to create a new person object or can edit an existing person object
if not allrecs.has_key(current_person_id):
allrecs[current_person_id] = {}
#logger.info('created new person id %s' % current_person_id)
current_person_object = allrecs[current_person_id]
current_person_object['title'] = '%s, %s %s %s' % (last_name, first_name, middle_initial, unique_key)
current_person_object['certs'] = []
else:
#logger.info('skipping existing person ID %s' % current_person_id)
current_person_object = allrecs[current_person_id]
else:
# we are adding a cert to an existing person object
current_person_object = allrecs[current_person_id]
transcount += 1
totalcount += 1
current_person_object['certs'].append({'class_name':class_name, 'start_date':start_date, 'city':city, 'state':state, 'trainer_name':trainer_name, 'trainer_approval_number':trainer_approval_number})
last_person_object = current_person_object
last_unique_key = unique_key
transcount += 1
if transcount == TRANSSIZE_FOR_READING:
transaction.commit()
transcount = 0
logger.info("read extract line %s" % totalcount)
if totalcount == MAXROWS:
#logger.error('reached max number of rows to process; bailing now')
message = 'reached max number of rows to process (%s); bailing now' % MAXROWS
logger.warn(message)
return message
def ccdet_write(self, readonly):
transcount = 0
totalcount = 0
updatecount = 0
newcount = 0
identicalcount = 0
if self.checkIdAvailable(FOLDERID):
if readonly:
logger.info('would create folder %s' % id)
else:
self.invokeFactory(type_name='Folder', id=FOLDERID)
folder = getattr(self, FOLDERID, None)
for current_person_id in allrecs.keys():
current_person = allrecs[current_person_id]
if not folder:
# must be in readonly mode
logger.info('would create person %s' % current_person_id)
newcount += 1
else:
if folder.checkIdAvailable(current_person_id):
if readonly:
logger.info('would create person %s' % current_person_id)
else:
folder.invokeFactory(type_name='CBRFPersonSimple', id=current_person_id)
#logger.info('created new person id %s' % current_person_id)
current_person_object = getattr(folder, current_person_id, None)
if not readonly:
current_person_object.setTitle(current_person['title'])
#current_person_object.reindexObject(idxs=["Title"]) # will reindex below
new_text = '<table width="100%"> <tr> <th> Class Name </th> <th> Date </th> <th> City </th> <th> Trainer </th> <th> Trainer # </th> </tr>\n'
for cert in current_person['certs']:
new_text += ' <tr> <td>%s</td> <td>%s</td> <td>%s %s</td> <td>%s</td> <td>%s</td> </tr>\n' % (cert['class_name'], cert['start_date'], cert['city'], cert['state'], cert['trainer_name'], cert['trainer_approval_number'])
new_text += '</table>'
if current_person_object:
current_text = current_person_object.getText()
else:
# we are in readonly mode and this would be a new person
current_text = ''
if current_text != html_escape(new_text):
if len(current_text) < 10: # arbitrary number
logger.info('setting %s' % current_person_id)
newcount += 1
else:
logger.info('updating %s' % current_person_id)
#logger.info('OLD: %s\nNEW: %s' % (current_text, html_escape(new_text)))
updatecount += 1
if not readonly:
current_person_object.setText(new_text)
current_person_object.reindexObject()
else:
#logger.info('no change to %s' % current_person_id)
identicalcount += 1
transcount += 1
totalcount += 1
if transcount == TRANSSIZE:
transaction.commit()
transcount = 0
logger.info("committed transaction %s" % totalcount)
logger.info('Created %s records. Updated %s records. %s records were unchanged.' % (newcount, updatecount, identicalcount))
ccdet_read(self)
if readonly:
logger.info('r
|
MarcosCommunity/odoo
|
comunity_modules/stock_no_negative/model/product.py
|
Python
|
agpl-3.0
| 1,270
| 0
|
# -*- coding: utf-8 -*-
#
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2015 Vauxoo - http://www.vauxoo.com/
# All Rights Reserved.
# info Vauxoo (info@vauxoo.com)
#
#    Coded by: Luis Torres (luis_t@vauxoo.com)
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp import fields, models
class ProductTemplate(models.Model):
    # Extend the core product template with the opt-in flag read by this
    # module's no-negative stock checks.
    _inherit = 'product.template'
    check_no_negative = fields.Boolean(
        'Check no negative',
        help='If this field is True can not move this'
        ' product in negative quantity available in'
        ' the internal location source')
|
AartGoossens/athletic_pandas
|
tests/algorithms/test_heartrate_models.py
|
Python
|
mit
| 701
| 0
|
import pandas as pd
import pytest
from athletic_pandas.algorithms import heartrate_models
def test_heartrate_model():
heartrate = pd.Series(range(50))
power = pd.Series(range(0, 100, 2))
model, predictions = heartrate_models.heartrate_model(heartrate, power)
assert mo
|
del.params['hr_rest'].value == 0.00039182374117378518
assert model.params['hr_max'].value == 195.75616175
|
654685
assert model.params['dhr'].value == 0.49914432620946803
assert model.params['tau_rise'].value == 0.98614419733274383
assert model.params['tau_fall'].value == 22.975975612579408
assert model.params['hr_drift'].value == 6.7232899323328612 * 10**-5
assert len(predictions) == 50
|
jumpstarter-io/ceph-deploy
|
ceph_deploy/hosts/rhel/pkg.py
|
Python
|
mit
| 260
| 0
|
from ceph_deploy.util import pkg_managers
def install(distro, packages):
    """Install *packages* on the remote host over distro.conn via yum."""
    return pkg_managers.yum(distro.conn, packages)
def remove(distro, packages):
    """Remove *packages* from the remote host over distro.conn via yum."""
    return pkg_managers.yum_remove(distro.conn, packages)
|
arosenberg01/asdata
|
settings.py
|
Python
|
mit
| 279
| 0.003584
|
import os

# Database connection settings come entirely from the environment; a missing
# variable raises KeyError at import time, failing fast on misconfiguration.
DATABASE = {
    'drivername': os.environ['NBA_DB_DRIVER'],
    'host': os.environ['NBA_DB_HOST'],
    'port': os.environ['NBA_DB_PORT'],
    'username': os.environ['NBA_DB_USER'],
    'password': os.environ['NBA_DB_PW'],
    'database': os.environ['NBA_DB_NAME'],
}
|
securestate/king-phisher
|
king_phisher/client/widget/managers.py
|
Python
|
bsd-3-clause
| 17,478
| 0.023058
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/client/widget/managers.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import collections
import datetime
import functools
from king_phisher import utilities
from king_phisher.client import gui_utilities
from gi.repository import Gdk
from gi.repository import Gtk
class ButtonGroupManager(object):
    """
    Manage a set of buttons. The buttons should all be of the same type (such
    as "checkbutton" or "radiobutton") and share a common group-name prefix.
    The intent is to make buttons of similar functionality easier to manage by
    grouping them together.
    """
    def __init__(self, glade_gobject, widget_type, group_name):
        """
        :param glade_gobject: The gobject which has the buttons set.
        :type glade_gobject: :py:class:`.GladeGObject`
        :param str widget_type: The widget-type prefix (e.g. 'radiobutton').
        :param str group_name: The name of the group of buttons.
        """
        utilities.assert_arg_type(glade_gobject, gui_utilities.GladeGObject)
        self.group_name = group_name
        prefix = widget_type + '_' + group_name + '_'
        self.buttons = utilities.FreezableDict()
        # Collect every gobject whose name carries the group prefix, keyed by
        # the remainder of its name.
        for gobj_name in glade_gobject.dependencies.children:
            if gobj_name.startswith(prefix):
                self.buttons[gobj_name[len(prefix):]] = glade_gobject.gobjects[gobj_name]
        if not len(self.buttons):
            raise ValueError('found no ' + widget_type + ' of group: ' + self.group_name)
        # Freeze so the membership of the group can not change afterwards.
        self.buttons.freeze()
    def __repr__(self):
        return "<{0} group_name={1!r} active={2!r} >".format(self.__class__.__name__, self.group_name, self.__str__())
class RadioButtonGroupManager(ButtonGroupManager):
    """
    Manage a group of :py:class:`Gtk.RadioButton` objects so the active one
    can easily be set and identified. The buttons are retrieved from a
    :py:class:`.GladeGObject` instance and must be correctly named in the
    :py:attr:`.dependencies` attribute as
    'radiobutton_group_name_button_name'.
    """
    def __init__(self, glade_gobject, group_name):
        """
        :param glade_gobject: The gobject which has the radio buttons set.
        :type glade_gobject: :py:class:`.GladeGObject`
        :param str group_name: The name of the group of buttons.
        """
        super(RadioButtonGroupManager, self).__init__(glade_gobject, 'radiobutton', group_name)
    def __str__(self):
        return self.get_active() or ''
    def get_active(self):
        """
        Return the name of the active button if one in the group is active,
        otherwise return None.

        :return: The name of the active button.
        :rtype: str
        """
        return next((name for name, button in self.buttons.items() if button.get_active()), None)
    def set_active(self, button):
        """
        Set a button in the group as active.

        :param str button: The name of the button to set as active.
        """
        target = self.buttons[button]
        target.set_active(True)
        target.toggled()
class ToggleButtonGroupManager(ButtonGroupManager):
    """
    Manage a mapping of button names to booleans indicating whether each
    button is currently active.
    """
    def __str__(self):
        active_names = [name for name, is_active in self.get_active().items() if is_active]
        return ', '.join(active_names)
    def get_active(self):
        """
        Get the button names and whether or not they are active.

        :return: A mapping of button names to whether or not they are active.
        :rtype: dict
        """
        states = {}
        for name, button in self.buttons.items():
            states[name] = button.get_active()
        return states
    def set_active(self, buttons):
        """
        Set the specified buttons to active or not.

        :param dict buttons: A mapping of button names to boolean values.
        """
        for name, is_active in buttons.items():
            button = self.buttons.get(name)
            if button is None:
                raise ValueError('invalid button name: ' + name)
            button.set_active(is_active)
class MenuManager(object):
    """
    A class that wraps :py:class:`Gtk.Menu` objects and facilitates managing
    their respective items. Items are tracked by label in :py:attr:`.items`,
    an ordered mapping, so they can be retrieved with subscript access.
    """
    __slots__ = ('menu', 'items')
    def __init__(self, menu=None):
        """
        :param menu: An optional menu to start with. If a menu is specified it
            is used as is, otherwise a new instance is used and is set to be
            visible using :py:meth:`~Gtk.Widget.show`.
        :type menu: :py:class:`Gtk.Menu`
        """
        if menu is None:
            menu = Gtk.Menu()
            menu.show()
        self.menu = menu
        # label -> Gtk.MenuItem, in insertion order
        self.items = collections.OrderedDict()
    def __getitem__(self, label):
        return self.items[label]
    def __setitem__(self, label, menu_item):
        # Record the item under its label so __getitem__ can find it again.
        # (Previously the item was appended without being tracked, which made
        # mgr[label] raise KeyError for items added through subscript
        # assignment.)
        self.items[label] = menu_item
        return self.append_item(menu_item, set_show=False)
    def append(self, label, activate=None, activate_args=()):
        """
        Create and append a new :py:class:`Gtk.MenuItem` with the specified
        label to the menu.

        :param str label: The label for the new menu item.
        :param activate: An optional callback function to connect to the new
            menu item's ``activate`` signal.
        :return: Returns the newly created and added menu item.
        :rtype: :py:class:`Gtk.MenuItem`
        """
        if label in self.items:
            raise RuntimeError('label already exists in menu items')
        menu_item = Gtk.MenuItem.new_with_label(label)
        self.items[label] = menu_item
        self.append_item(menu_item)
        if activate:
            menu_item.connect('activate', activate, *activate_args)
        return menu_item
    def append_item(self, menu_item, set_show=True):
        """
        Append the specified menu item to the menu.

        :param menu_item: The item to append to the menu.
        :type menu_item: :py:class:`Gtk.MenuItem`
        :param bool set_show: Whether to set the item to being visible or leave
            it as is.
        """
        if set_show:
            menu_item.show()
        self.menu.append(menu_item)
        return menu_item
    def append_submenu(self, label):
        """
        Create and append a submenu item, then return a new menu manager
        instance for it.

        :param str label: The label for the new menu item.
        :return: Returns the menu manager for the newly created submenu.
        :rtype: :py:class:`MenuManager`
        """
        submenu = self.__class__()
        submenu_item = Gtk.MenuItem.new_with_label(label)
        submenu_item.set_submenu(submenu.menu)
        self.append_item(submenu_item)
        return submenu
class TreeViewManager(object):
"""
A class that wraps :py:class:`Gtk.TreeView` objects that use `Gtk.ListStore`
models with additional functions for conveniently displaying text data.
If *cb_delete* is specified, the callback will be called with the treeview
instance, and the selection as the parameters.
If *cb_refresh* is specified, the callback will be called without any
parameters.
"""
def __init__(self, treeview, selection_mode=None, cb_delete=None, cb_refresh=None):
"""
:param treeview: The treeview to wrap and manage.
:type treeview: :py:class:`Gtk.TreeView`
:param selection_mode: The selection mode to set for the treeview.
:type selection_mode: :py:class:`Gtk.SelectionMode`
:param cb_delete: An optional callback that can be
|
gpfinley/ensembles
|
scripts/remove_extraneous_extensions.py
|
Python
|
apache-2.0
| 664
| 0.00753
|
"""
Renames files in a directory (command line argument) to not have intermediate extensions.
Works for cTAKES xmi or xml files if the original file extension has been retained.
"""
import re
import subprocess
import sys
import os
try:
d = sys.argv[1]
except:
print('usage:\npython ' + sys.argv[0] + ' <path-to-directory>')
sys.exit(1)
files = subprocess.check_out
|
put(["ls", d]).split("\n")
files = [f for f in files if len(f) and 'TypeSystem' not in f]
|
for f in files:
oldname = f
newname = re.sub('(\....)+(\.xm[il])', '\\2', oldname, flags=re.IGNORECASE)
subprocess.call(['mv', os.path.join(d, oldname), os.path.join(d, newname)])
|
pwnbus/scoring_engine
|
scoring_engine/checks/rdp.py
|
Python
|
mit
| 410
| 0
|
from scoring_en
|
gine.engine.basic_check import BasicCheck, CHECKS_BIN_PATH
class RDPCheck(BasicCheck):
required_properties = []
CMD = CHECKS_BIN_PATH + '/rdp_check {0} {1} {2} {3}'
def command_format(self, properties):
account = self.get_random_account(
|
)
return (
account.username,
account.password,
self.host,
self.port,
)
|
paetzke/consolor
|
tests/test_consolor.py
|
Python
|
bsd-2-clause
| 3,238
| 0
|
# -*- coding: utf-8 -*-
"""
consolor
Copyright (c) 2013-2014, Friedrich Paetzke (f.paetzke@gmail.com)
All rights reserved.
"""
from __future__ import print_function
from consolor import BgColor, Color, get_line
try:
from unittest.mock import call, patch
except ImportError:
from mock import call, patch
def mockable_print(*args, **kwargs):
print(*args, **kwargs)
def test_print_bold():
result = get_line('123 bold', bold=True)
expected = '\x1b[1m123 bold\x1b[0m'
assert result == expected
def test_print_underline():
result = get_line('123 underline', underline=True)
expected = '\x1b[4m123 underline\x1b[0m'
assert result == expected
def test_get_bgcolor():
result = get_line('123 green bg', bgcolor=BgColor.Green)
expected = '\x1b[42;1m123 green bg\x1b[0m'
assert result == expected
def test_get_color():
result = get_line('123 light green', color=Color.LightGreen)
expected = '\x1b[1;32m123 light green\x1b[0m'
assert result == expected
def test_update_line():
for i in reversed(range(101)):
line = get_line('123%d' % i, update_line=True)
expected = '\x1b[2K\r%s%d\x1b[0m' % ('123', i)
assert line == expected
@patch('tests.test_consolor.mockable_print')
def test_print_color(mocked_print):
mockable_print(Color.Red, 'Red')
mockable_print('Red two')
mockable_print(Color.Reset, end='')
mockable_print('Not Red')
mocked_print.assert_has_calls([call('\x1b[0;31m', 'Red'),
call('Red two'),
call('\x1b[0m', end=''),
call('Not Red')])
@patch('tests.test_consolor.mockable_print')
def test_print_concat_color(mocked_print):
mockable_print(Color.Red, 'Red')
mockable_print('Red two')
mockable_print(Color.Blue, 'Blue')
mockable_print(Color.Reset, end='')
mockable_print('Not Blue')
mocked_print.assert_has_calls([call('\x1b[0;31m', 'Red'),
call('Red two'),
call('\x1b[0;34m', 'Blue'),
call('\x1b[0m', end=''),
call('Not Blue')])
@patch('tests.test_consolor.mockable_print')
def test_print_bgcolor(mocked_print):
mockable_print(BgColor.Red, 'Red')
mockable_print('Red two', BgColor.Reset)
mockable_print('None')
mocked_print.assert_has_calls([call('\x1b[41;1m', 'Red'),
call('
|
Red two', '\x1b[0m'),
call('None')])
@patch('tests.test_consolor.mockable_print')
def test_print_concat_bgcolor(mocked_print):
    """Switching to a second BgColor emits the new code without a reset."""
    mockable_print(BgColor.Red, 'Red')
    mockable_print('Red two')
    mockable_print(BgColor.Cyan, 'None')
    mockable_print(BgColor.Reset)
    mocked_print.assert_has_calls([call('\x1b[41;1m', 'Red'),
                                   call('Red two'),
                                   call('\x1b[46;1m', 'None'),
                                   call('\x1b[0m')])
def test_color_and_bgcolor():
    """color and bgcolor combine: foreground code first, then background."""
    line = get_line('1', bgcolor=BgColor.Green, color=Color.Red)
    assert line == '\x1b[0;31m\x1b[42;1m1\x1b[0m'
|
takeshineshiro/nova
|
nova/tests/unit/virt/hyperv/test_vmutilsv2.py
|
Python
|
apache-2.0
| 11,758
| 0
|
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.tests.unit.virt.hyperv import test_vmutils
from nova.virt.hyperv import constants
from nova.virt.hyperv import vmutilsv2
class VMUtilsV2TestCase(test_vmutils.VMUtilsTestCase):
    """Unit tests for the Hyper-V VMUtilsV2 class."""
    # Overrides of base-class constants with the V2 WMI method/class names
    # consumed by the inherited tests (base class presumably uses the V1
    # names — see test_vmutils; confirm there).
    _DEFINE_SYSTEM = 'DefineSystem'
    _DESTROY_SYSTEM = 'DestroySystem'
    _DESTROY_SNAPSHOT = 'DestroySnapshot'
    _ADD_RESOURCE = 'AddResourceSettings'
    _REMOVE_RESOURCE = 'RemoveResourceSettings'
    _SETTING_TYPE = 'VirtualSystemType'
    _VM_GEN = constants.VM_GEN_2
    _VIRTUAL_SYSTEM_TYPE_REALIZED = 'Microsoft:Hyper-V:System:Realized'
    def setUp(self):
        # Run the shared VMUtils test setup, then swap in the V2 utils
        # class with a mocked WMI connection so no real WMI calls happen.
        super(VMUtilsV2TestCase, self).setUp()
        self._vmutils = vmutilsv2.VMUtilsV2()
        self._vmutils._conn = mock.MagicMock()
    def test_create_vm(self):
        # Reuse the base-class assertions, then additionally verify the
        # V2/Gen2-specific settings applied to the new VSSD object.
        super(VMUtilsV2TestCase, self).test_create_vm()
        mock_vssd = self._vmutils._conn.Msvm_VirtualSystemSettingData.new()
        self.assertEqual(self._vmutils._VIRTUAL_SYSTEM_SUBTYPE_GEN2,
                         mock_vssd.VirtualSystemSubType)
        self.assertFalse(mock_vssd.SecureBootEnabled)
def test_modify_virt_resource(self):
mock_svc = self._vmutils._conn.Msvm_VirtualSystemManagementService()[0]
mock_svc.ModifyResourceSettings.return_value = (self._FAKE_JOB_PATH,
mock.MagicMock(),
self._FAKE_RET_VAL)
mock_res_setting_data = mock.MagicMock()
mock_res_setting_data.GetText_.return_value = self._FAKE_RES_DATA
self._vmutils._modify_virt_resource(mock_res_setting_data,
self._FAKE_VM_PATH)
mock_svc.ModifyResourceSettings.assert_called_with(
ResourceSettings=[self._FAKE_RES_DATA])
    @mock.patch.object(vmutilsv2, 'wmi', create=True)
    @mock.patch.object(vmutilsv2.VMUtilsV2, 'check_ret_val')
    def test_take_vm_snapshot(self, mock_check_ret_val, mock_wmi):
        # take_vm_snapshot should request a *full* snapshot of the VM and
        # then wait on the returned job via check_ret_val.
        self._lookup_vm()
        mock_svc = self._get_snapshot_service()
        mock_svc.CreateSnapshot.return_value = (self._FAKE_JOB_PATH,
                                                mock.MagicMock(),
                                                self._FAKE_RET_VAL)
        self._vmutils.take_vm_snapshot(self._FAKE_VM_NAME)
        mock_svc.CreateSnapshot.assert_called_with(
            AffectedSystem=self._FAKE_VM_PATH,
            SnapshotType=self._vmutils._SNAPSHOT_FULL)
        mock_check_ret_val.assert_called_once_with(self._FAKE_RET_VAL,
                                                   self._FAKE_JOB_PATH)
    @mock.patch.object(vmutilsv2.VMUtilsV2, '_add_virt_resource')
    @mock.patch.object(vmutilsv2.VMUtilsV2, '_get_new_setting_data')
    @mock.patch.object(vmutilsv2.VMUtilsV2, '_get_nic_data_by_name')
    def test_set_nic_connection(self, mock_get_nic_data, mock_get_new_sd,
                                mock_add_virt_res):
        # The freshly created port setting data must be attached to the VM.
        self._lookup_vm()
        fake_eth_port = mock_get_new_sd.return_value
        self._vmutils.set_nic_connection(self._FAKE_VM_NAME, None, None)
        mock_add_virt_res.assert_called_with(fake_eth_port, self._FAKE_VM_PATH)
@mock.patch('nova.virt.hyperv.vmutils.VMUtils._get_vm_disks')
def test_enable_vm_metrics_collection(self, mock_get_vm_disks):
self._lookup_vm()
mock_svc = self._vmutils._conn.Msvm_MetricService()[0]
metric_def = mock.MagicMock()
mock_disk = mock.MagicMock()
mock_disk.path_.return_value = self._FAKE_RES_PATH
mock_get_vm_disks.return_value = ([mock_disk], [mock_disk])
fake_metric_def_paths = ['fake_0', 'fake_0', None]
fake_metric_resource_paths = [self._FAKE_VM_PATH,
self._FAKE_VM_PATH,
self._FAKE_RES_PATH]
metric_def.path_.side_effect = fake_metric_def_paths
self._vmutil
|
s._conn.CIM_BaseMetricDefinition.return_value = [
metric_def]
self._vmutils.enable_vm_metrics_collection(self._FAKE_VM_NAME)
calls = [mock.call(Name=def_name)
for def_name
|
in [self._vmutils._METRIC_AGGR_CPU_AVG,
self._vmutils._METRIC_AGGR_MEMORY_AVG]]
self._vmutils._conn.CIM_BaseMetricDefinition.assert_has_calls(calls)
calls = []
for i in range(len(fake_metric_def_paths)):
calls.append(mock.call(
Subject=fake_metric_resource_paths[i],
Definition=fake_metric_def_paths[i],
MetricCollectionEnabled=self._vmutils._METRIC_ENABLED))
mock_svc.ControlMetrics.assert_has_calls(calls, any_order=True)
    def _get_snapshot_service(self):
        # The V2 namespace exposes snapshots via a dedicated service class.
        return self._vmutils._conn.Msvm_VirtualSystemSnapshotService()[0]
    def _assert_add_resources(self, mock_svc):
        # V2 signature: AddResourceSettings(vm_path, [resource xml]).
        getattr(mock_svc, self._ADD_RESOURCE).assert_called_with(
            self._FAKE_VM_PATH, [self._FAKE_RES_DATA])
    def _assert_remove_resources(self, mock_svc):
        # V2 signature: RemoveResourceSettings([resource paths]) — no VM path.
        getattr(mock_svc, self._REMOVE_RESOURCE).assert_called_with(
            [self._FAKE_RES_PATH])
def test_list_instance_notes(self):
vs = mock.MagicMock()
attrs = {'ElementName': 'fake_name',
'Notes': ['4f54fb69-d3a2-45b7-bb9b-b6e6b3d893b3']}
vs.configure_mock(**attrs)
vs2 = mock.MagicMock(ElementName='fake_name2', Notes=None)
self._vmutils._conn.Msvm_VirtualSystemSettingData.return_value = [vs,
vs2]
response = self._vmutils.list_instance_notes()
self.assertEqual([(attrs['ElementName'], attrs['Notes'])], response)
self._vmutils._conn.Msvm_VirtualSystemSettingData.assert_called_with(
['ElementName', 'Notes'],
VirtualSystemType=self._vmutils._VIRTUAL_SYSTEM_TYPE_REALIZED)
    def _get_fake_instance_notes(self):
        # Base-class hook: the V2 code stores the instance UUID as a note.
        return [self._FAKE_VM_UUID]
@mock.patch('nova.virt.hyperv.vmutilsv2.VMUtilsV2.check_ret_val')
@mock.patch('nova.virt.hyperv.vmutilsv2.VMUtilsV2._get_wmi_obj')
def _test_create_vm_obj(self, mock_get_wmi_obj, mock_check_ret_val,
vm_path, dynamic_memory_ratio=1.0):
mock_vs_man_svc = mock.MagicMock()
mock_vs_data = mock.MagicMock()
mock_job = mock.MagicMock()
fake_job_path = 'fake job path'
fake_ret_val = 'fake return value'
fake_vm_name = 'fake_vm_name'
_conn = self._vmutils._conn.Msvm_VirtualSystemSettingData
mock_check_ret_val.return_value = mock_job
_conn.new.return_value = mock_vs_data
mock_vs_man_svc.DefineSystem.return_value = (fake_job_path,
vm_path,
fake_ret_val)
mock_job.associators.return_value = ['fake vm path']
response = self._vmutils._create_vm_obj(
vs_man_svc=mock_vs_man_svc,
vm_name=fake_vm_name,
vm_gen='fake vm gen',
notes='fake notes',
dynamic_memory_ratio=dynamic_memory_ratio,
instance_path=mock.sentinel.instance_path)
if not vm_path:
mock_job.associators.assert_called_once_with(
self._vmutils._AFFECTED_JOB_ELEMENT_CLASS)
_conn.new.assert_called_once_with()
self.assertEqual(mock_vs_data.ElementName, fake_vm_name)
mock_vs_man_svc.DefineSystem.assert_called_once_with(
|
Bobbyshow/Avoid
|
screen/menu.py
|
Python
|
unlicense
| 923
| 0.010834
|
#-*- coding: utf-8 -*-
import pygame.key
from pygame.font import Font
from lib.base_
|
screen import BaseScreen, ChangeScreenException
from pygame.locals import K_SPACE as SPACE
class MenuScreen(BaseScreen):
    """Title screen: shows a prompt and waits for SPACE to start the game.

    (The ``init_entities_before`` signature was corrupted by a stray
    separator line; reconstructed from the surrounding text.)
    """

    def init_entities_before(self, surface):
        # Render the prompt once and blit it before the main loop starts.
        self.font = Font(None, 30)
        self.textImg = self.font.render(
            'Press SPACE to BEGIN !',
            1,
            (255,255,255)
        )
        surface.blit(self.textImg, (200,200))

    def execute(self, surface):
        # Polling-based input: a frame where SPACE is held triggers the
        # screen change (the screen manager handles this exception).
        if pygame.key.get_pressed()[SPACE] == 1:
            raise ChangeScreenException(1, 'Launch the game!')

    def erase_all_map(self):
        # The menu is static, so nothing has to be erased each frame.
        pass

    def draw(self, surface):
        # Nothing animated to draw; the prompt was blitted in init.
        pass

    def game_over(self, text, number=None):
        # Show a message on top of the erased base screen.
        # NOTE(review): relies on self.surface being set by BaseScreen —
        # confirm, since the other methods receive the surface as an arg.
        BaseScreen.erase_all_map(self)
        font = Font(None, 30)
        textImg = font.render(text, 1, (255,255,255))
        self.surface.blit(textImg, (200,100))
|
pavlenko-volodymyr/codingmood
|
codemood/social/migrations/0004_auto__chg_field_post_link.py
|
Python
|
mit
| 4,577
| 0.007865
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: change 'Post.link' to a URLField.

    (Two lines of the frozen ORM snapshot were corrupted by stray
    separator lines; reconstructed from the surrounding text.)
    """

    def forwards(self, orm):
        """Widen 'Post.link': CharField(255) -> URLField(200)."""
        # Changing field 'Post.link'
        db.alter_column(u'social_post', 'link', self.gf('django.db.models.fields.URLField')(max_length=200))

    def backwards(self, orm):
        """Revert 'Post.link' to the previous CharField(255)."""
        # Changing field 'Post.link'
        db.alter_column(u'social_post', 'link', self.gf('django.db.models.fields.CharField')(max_length=255))

    # South's auto-generated frozen ORM snapshot; do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'social.post': {
            'Meta': {'object_name': 'Post'},
            'content': ('django.db.models.fields.TextField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
            'mood': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'mood_negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'mood_neutral': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'mood_positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        }
    }

    complete_apps = ['social']
|
Galexrt/zulip
|
zerver/tests/test_muting.py
|
Python
|
apache-2.0
| 4,650
| 0.00086
|
import ujson
from django.http import HttpResponse
from mock import p
|
atch
from typing import Any, Dict
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.models import (
get_realm,
get_stream,
get_stream_recipient,
get_user,
Recipient,
UserProfile,
)
from zerver.lib
|
.topic_mutes import (
add_topic_mute,
get_topic_mutes,
topic_is_muted,
)
class MutedTopicsTests(ZulipTestCase):
    """Tests for muting/unmuting stream topics."""

    def test_user_ids_muting_topic(self):
        # type: () -> None
        """Muting a topic registers the user id; matching ignores case."""
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        realm = hamlet.realm
        stream = get_stream(u'Verona', realm)
        recipient = get_stream_recipient(stream.id)
        topic_name = 'teST topic'
        stream_topic_target = StreamTopicTarget(
            stream_id=stream.id,
            topic_name=topic_name,
        )
        user_ids = stream_topic_target.user_ids_muting_topic()
        self.assertEqual(user_ids, set())

        def mute_user(user):
            # type: (UserProfile) -> None
            # Deliberately different casing from topic_name above: the
            # lookup is expected to be case-insensitive.
            add_topic_mute(
                user_profile=user,
                stream_id=stream.id,
                recipient_id=recipient.id,
                topic_name='test TOPIC',
            )

        mute_user(hamlet)
        user_ids = stream_topic_target.user_ids_muting_topic()
        self.assertEqual(user_ids, {hamlet.id})

        mute_user(cordelia)
        user_ids = stream_topic_target.user_ids_muting_topic()
        self.assertEqual(user_ids, {hamlet.id, cordelia.id})

    def test_add_muted_topic(self):
        # type: () -> None
        """op=add records the mute; topic_is_muted matches case-insensitively."""
        email = self.example_email('hamlet')
        self.login(email)

        url = '/api/v1/users/me/subscriptions/muted_topics'
        data = {'stream': 'Verona', 'topic': 'Verona3', 'op': 'add'}
        result = self.client_patch(url, data, **self.api_auth(email))
        self.assert_json_success(result)

        user = self.example_user('hamlet')
        self.assertIn([u'Verona', u'Verona3'], get_topic_mutes(user))
        stream = get_stream(u'Verona', user.realm)
        self.assertTrue(topic_is_muted(user, stream.id, 'Verona3'))
        self.assertTrue(topic_is_muted(user, stream.id, 'verona3'))

    def test_remove_muted_topic(self):
        # type: () -> None
        """op=remove deletes an existing mute (topic matched case-insensitively)."""
        self.user_profile = self.example_user('hamlet')
        email = self.user_profile.email
        self.login(email)

        realm = self.user_profile.realm
        stream = get_stream(u'Verona', realm)
        recipient = get_stream_recipient(stream.id)
        add_topic_mute(
            user_profile=self.user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=u'Verona3',
        )

        url = '/api/v1/users/me/subscriptions/muted_topics'
        data = {'stream': 'Verona', 'topic': 'vERONA3', 'op': 'remove'}
        result = self.client_patch(url, data, **self.api_auth(email))
        self.assert_json_success(result)

        user = self.example_user('hamlet')
        # Bug fix: the previous assertion passed [[...]] (doubly nested),
        # which can never be an element of get_topic_mutes()'s list of
        # [stream, topic] pairs, so it always passed vacuously.
        self.assertNotIn([u'Verona', u'Verona3'], get_topic_mutes(user))

    def test_muted_topic_add_invalid(self):
        # type: () -> None
        """Adding a mute for an already-muted topic returns an error."""
        self.user_profile = self.example_user('hamlet')
        email = self.user_profile.email
        self.login(email)

        realm = self.user_profile.realm
        stream = get_stream(u'Verona', realm)
        recipient = get_stream_recipient(stream.id)
        add_topic_mute(
            user_profile=self.user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=u'Verona3',
        )

        url = '/api/v1/users/me/subscriptions/muted_topics'
        data = {'stream': 'Verona', 'topic': 'Verona3', 'op': 'add'}
        result = self.client_patch(url, data, **self.api_auth(email))
        self.assert_json_error(result, "Topic already muted")

    def test_muted_topic_remove_invalid(self):
        # type: () -> None
        """Removing a mute for an unknown stream or topic returns an error."""
        self.user_profile = self.example_user('hamlet')
        email = self.user_profile.email
        self.login(email)

        url = '/api/v1/users/me/subscriptions/muted_topics'
        data = {'stream': 'BOGUS', 'topic': 'Verona3', 'op': 'remove'}
        result = self.client_patch(url, data, **self.api_auth(email))
        self.assert_json_error(result, "Topic is not there in the muted_topics list")

        data = {'stream': 'Verona', 'topic': 'BOGUS', 'op': 'remove'}
        result = self.client_patch(url, data, **self.api_auth(email))
        self.assert_json_error(result, "Topic is not there in the muted_topics list")
|
mrklein/vtk-plot
|
plot-vtk.py
|
Python
|
unlicense
| 2,344
| 0
|
#!/usr/bin/env python
def load_velocity(filename):
    """Read a VTK polydata file and return (x, y, tri, ux, uy).

    x, y are point coordinates, tri the triangle connectivity, and
    ux, uy the velocity components interpolated onto the points.
    Returns None when the file does not exist.

    (Two lines were corrupted by stray separator lines; reconstructed.)
    """
    import os
    if not os.path.exists(filename):
        return None
    from numpy import zeros
    from vtk import vtkPolyDataReader, vtkCellDataToPointData
    reader = vtkPolyDataReader()
    reader.SetFileName(filename)
    reader.ReadAllVectorsOn()
    reader.Update()
    data = reader.GetOutput()
    # Extracting triangulation information
    triangles = data.GetPolys().GetData()
    points = data.GetPoints()
    # Mapping data: cell -> point
    mapper = vtkCellDataToPointData()
    mapper.AddInputData(data)
    mapper.Update()
    mapped_data = mapper.GetOutput()
    # Extracting interpolated point data
    udata = mapped_data.GetPointData().GetArray(0)
    # Each polygon record is 4 tuples: (count, i0, i1, i2) — see the
    # 4*i + 1..3 indexing below. Integer division keeps ntri an int on
    # both Python 2 and 3.
    ntri = triangles.GetNumberOfTuples() // 4
    npts = points.GetNumberOfPoints()
    nvls = udata.GetNumberOfTuples()
    tri = zeros((ntri, 3))
    x = zeros(npts)
    y = zeros(npts)
    ux = zeros(nvls)
    uy = zeros(nvls)
    for i in xrange(0, ntri):
        tri[i, 0] = triangles.GetTuple(4*i + 1)[0]
        tri[i, 1] = triangles.GetTuple(4*i + 2)[0]
        tri[i, 2] = triangles.GetTuple(4*i + 3)[0]
    for i in xrange(npts):
        pt = points.GetPoint(i)
        x[i] = pt[0]
        y[i] = pt[1]
    for i in xrange(0, nvls):
        U = udata.GetTuple(i)
        ux[i] = U[0]
        uy[i] = U[1]
    return (x, y, tri, ux, uy)
def plot(filename):
    """Contour-plot the x-velocity from *filename* to <name>.png and .pdf.

    Returns -1 when the input file does not exist.
    """
    import os
    from matplotlib.pyplot import clf, tricontour, tricontourf, \
        gca, savefig, rc, minorticks_on
    if not os.path.exists(filename):
        return -1
    rc('text', usetex=True)
    clf()
    x, y, tri, ux, uy = load_velocity(filename)
    tricontourf(x, y, tri, ux, 16)
    tricontour(x, y, tri, ux, 16, linestyles='-',
               colors='black', linewidths=0.5)
    minorticks_on()
    axes = gca()
    axes.set_aspect('equal')
    axes.tick_params(direction='out', which='both')
    axes.set_xticklabels([])
    axes.set_yticklabels([])
    # Output files are named after the input, in the working directory.
    name = os.path.basename(os.path.splitext(filename)[0])
    savefig('{0}.png'.format(name), dpi=300, bbox_inches='tight')
    savefig('{0}.pdf'.format(name), bbox_inches='tight')
savefig('{0}.pdf'.format(name), bbox_inches='tight')
if __name__ == '__main__':
    import sys
    # NOTE: Python 2 script (print statement, xrange above).
    if len(sys.argv) < 2:
        print 'usage: {0} [FILENAME]'.format(sys.argv[0])
        sys.exit(-1)
    sys.exit(plot(sys.argv[1]))
|
dimagi/commcare-hq
|
corehq/form_processor/tests/test_sql_update_strategy.py
|
Python
|
bsd-3-clause
| 8,947
| 0.001229
|
from django.test import TestCase
from freezegun import freeze_time
from unittest.mock import patch
from testil import eq
from corehq.util.soft_assert.core import SoftAssert
from casexml.apps.case.exceptions import ReconciliationError
from casexml.apps.case.xml.parser import CaseUpdateAction, KNOWN_PROPERTIES
from corehq.form_processor.backends.sql.processor import FormProcessorSQL
from corehq.form_processor.backends.sql.update_strategy import SqlCaseUpdateStrategy
from corehq.form_processor.interfaces.processor import ProcessedForms
from corehq.form_processor.models import (
CommCareCase,
CaseTransaction,
RebuildWithReason,
)
from corehq.form_processor.utils import TestFormMetadata
from corehq.form_processor.tests.utils import sharded, FormProcessorTestUtils
from corehq.util.test_utils import get_form_ready_to_save
import uuid
from datetime import datetime
@sharded
class SqlUpdateStrategyTest(TestCase):
    """Tests for SqlCaseUpdateStrategy transaction reconciliation."""

    # Unique per test run so repeated runs don't collide on shared DBs.
    DOMAIN = 'update-strategy-test-' + uuid.uuid4().hex
    USER_ID = 'mr_wednesday_'
    @classmethod
    def setUpClass(cls):
        super(SqlUpdateStrategyTest, cls).setUpClass()
        # Start from a clean slate: these tests create their own forms/cases.
        FormProcessorTestUtils.delete_all_sql_forms()
        FormProcessorTestUtils.delete_all_sql_cases()
@classmethod
def tearDownClass(cls):
FormProcessorTestUtils.delete
|
_all_sql_forms()
FormProcessorTestUtils.delete_all_sql_cases()
super(SqlUpdateStrategyTest, cls).tearDownClass()
    @patch.object(SoftAssert, '_call')
    def test_reconcile_transactions(self, soft_assert_mock):
        """ tests a transaction with an early client date and late server date """
        # Case created on the 10th; the new form's transaction carries a
        # client date of the 8th but a server date of the 11th.
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
        with freeze_time("2018-10-08"):
            new_old_trans = self._create_case_transaction(case, new_old_xform)
        with freeze_time("2018-10-11"):
            self._save(new_old_xform, case, new_old_trans)
        # First pass: the out-of-order transaction requires reconciliation.
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertTrue(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
        case.save(with_tracked_models=True)
        # Second pass: once saved, no further reconciliation is needed.
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
    def test_reconcile_not_necessary(self):
        # Client and server dates agree (both the 11th), so the case's
        # transactions are already in order and nothing is reconciled.
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
            new_old_trans = self._create_case_transaction(case, new_old_xform)
            self._save(new_old_xform, case, new_old_trans)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
    def test_ignores_before_rebuild_transaction(self):
        # Create an out-of-order transaction (client date 10-08, server
        # date 10-11), ...
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
        with freeze_time("2018-10-08"):
            new_old_trans = self._create_case_transaction(case, new_old_xform)
        with freeze_time("2018-10-11"):
            self._save(new_old_xform, case, new_old_trans)
        self.assertFalse(case.check_transaction_order())
        # ... then rebuild the case on the 13th: transactions preceding a
        # rebuild must not trigger reconciliation.
        with freeze_time("2018-10-13"):
            new_rebuild_xform = self._create_form()
            rebuild_detail = RebuildWithReason(reason="shadow's golden coin")
            rebuild_transaction = CaseTransaction.rebuild_transaction(case, rebuild_detail)
            self._save(new_rebuild_xform, case, rebuild_transaction)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
    def test_first_transaction_not_create(self):
        # The earlier (10-08) transaction sorts before the case-create
        # transaction (10-10); reconciling such a history must fail loudly.
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-08"):
            new_old_xform = self._create_form()
            new_old_trans = self._create_case_transaction(case, new_old_xform)
            self._save(new_old_xform, case, new_old_trans)
        self.assertTrue(case.check_transaction_order())
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertRaises(ReconciliationError, update_strategy.reconcile_transactions)
    @patch.object(SoftAssert, '_call')
    def test_reconcile_transactions_within_fudge_factor(self, soft_assert_mock):
        """ tests a transaction with an early client date and late server date """
        # Same out-of-order shape as above, but client and server dates
        # are only 12 hours apart.
        with freeze_time("2018-10-10"):
            case = self._create_case()
        with freeze_time("2018-10-11 06:00"):
            new_old_xform = self._create_form()
        with freeze_time("2018-10-10 18:00"):
            new_old_trans = self._create_case_transaction(case, new_old_xform)
        with freeze_time("2018-10-11 06:00"):
            self._save(new_old_xform, case, new_old_trans)
        # A second, correctly-ordered form on the 11th.
        with freeze_time("2018-10-11"):
            new_old_xform = self._create_form()
            new_old_trans = self._create_case_transaction(case, new_old_xform)
            self._save(new_old_xform, case, new_old_trans)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertTrue(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
        case.save(with_tracked_models=True)
        case = CommCareCase.objects.get_case(case.case_id)
        update_strategy = SqlCaseUpdateStrategy(case)
        self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
        self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
def _create_form(self, user_id=None, received_on=None):
"""
Create the models directly so that these tests aren't dependent on any
other apps.
:return: XFormInstance
"""
user_id = user_id or 'mr_wednesday'
received_on = received_on or datetime.utcnow()
metadata = TestFormMetadata(
domain=self.DOMAIN,
received_on=received_on,
user_id=user_id
)
form = get_form_ready_to_save(metadata)
return form
def _create_case_transaction(self, case, form=None, submitted_on=None, action_types=None):
form = form or self._create_form()
submitted_on = submitted_on or datetime.utcnow()
return CaseTransaction.form_transaction(case, form, submitted_on, action_types)
    def _create_case(self, case_type=None, user_id=None, case_id=None):
        # Build and persist a case plus its create transaction, then
        # reload it from the DB so tests see the saved state.
        case_id = case_id or uuid.uuid4().hex
        user_id = user_id or 'mr_wednesday'
        utcnow = datetime.utcnow()
        case = CommCareCase(
            case_id=case_id,
            domain=self.DOMAIN,
            type=case_type or '',
            owner_id=user_id,
            opened_on=utcnow,
            # NOTE(review): modified_by is set to a datetime here while the
            # other tests use user ids for "by" fields — confirm intended.
            modified_on=utcnow,
            modified_by=utcnow,
            server_modified_on=utcnow
        )
        form = self._create_form(user_id, utcnow)
        # action_types=[128]: presumably the case-create action flag —
        # verify against CaseTransaction's action constants.
        trans = self._create_case_transaction(case, form, utcnow, action_types=[128])
        self._save(form, case, trans)
        return CommCareCase.objects.get_case(case_id)
    def _save(self, form, case, transaction):
        """Attach *transaction* to *case* and persist both via the SQL processor."""
        # disable publish to Kafka to avoid intermittent errors caused by
        # the nexus of kafka's consumer thread and freeze_time
        with patch.object(FormProcessorSQL, "publish_changes_to_kafka"):
            case.track_create(transaction)
            FormProcessorSQL.save_processed_models(ProcessedForms(form, []), [case])
def _check_for_reconciliation_error_soft_assert(self, soft
|
elopezga/ErrorRate
|
ivi/agilent/agilentDSA91204A.py
|
Python
|
mit
| 1,632
| 0.004289
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentDSA91204A(agilent90000):
"Agilent Infiniium DSA91204A IVI oscilloscope driver"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', 'DSO91204A')
super(agilentDSA91204A, self).__init__(*args, **kwargs)
self._analog_channel_count = 4
self._digital_channel_count = 0
self._channel_count = 4
self._bandwidth = 12e9
self._init_channels()
|
Philippe12/external_chromium_org
|
tools/telemetry/telemetry/core/backends/chrome/extension_dict_backend.py
|
Python
|
bsd-3-clause
| 2,641
| 0.008709
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import re
import weakref
from telemetry.core import extension_page
from telemetry.core.backends.chrome import inspector_backend
class ExtensionNotFoundException(Exception):
  """Raised when an extension id has no debuggable page info."""
  pass
class ExtensionDictBackend(object):
  """Lazily maps extension ids to ExtensionPage objects for one browser.

  (Two statements were corrupted by stray separator lines; reconstructed
  from the surrounding text.)
  """

  def __init__(self, browser_backend):
    self._browser_backend = browser_backend
    # Maps extension ids to ExtensionPage objects. Weak values: entries
    # disappear once nobody else holds the page object.
    self._extension_dict = weakref.WeakValueDictionary()

  def __getitem__(self, extension_id):
    extension_object = self._extension_dict.get(extension_id)
    if not extension_object:
      extension_object = self._CreateExtensionObject(extension_id)
      assert extension_object
      self._extension_dict[extension_id] = extension_object
    return extension_object

  def __contains__(self, extension_id):
    return extension_id in self.GetExtensionIds()

  @staticmethod
  def _ExtractExtensionId(url):
    # chrome-extension://<id>/... -> <id>
    m = re.match(r"(chrome-extension://)([^/]+)", url)
    assert m
    return m.group(2)

  @staticmethod
  def _GetExtensionId(extension_info):
    if 'url' not in extension_info:
      return None
    return ExtensionDictBackend._ExtractExtensionId(extension_info['url'])

  def _CreateExtensionObject(self, extension_id):
    # Raises ExtensionNotFoundException when no debuggable target exists.
    extension_info = self._FindExtensionInfo(extension_id)
    if not extension_info or 'webSocketDebuggerUrl' not in extension_info:
      raise ExtensionNotFoundException()
    return extension_page.ExtensionPage(
        extension_id,
        extension_info['url'],
        self._CreateInspectorBackendForDebuggerUrl(
            extension_info['webSocketDebuggerUrl']))

  def _CreateInspectorBackendForDebuggerUrl(self, debugger_url):
    return inspector_backend.InspectorBackend(self._browser_backend.browser,
                                              self._browser_backend,
                                              debugger_url)

  def _FindExtensionInfo(self, extension_id):
    for extension_info in self.GetExtensionInfoList():
      if self._GetExtensionId(extension_info) == extension_id:
        return extension_info
    return None

  def GetExtensionInfoList(self, timeout=None):
    data = self._browser_backend.Request('', timeout=timeout)
    return self._FilterExtensions(json.loads(data))

  def _FilterExtensions(self, all_pages):
    # Only chrome-extension:// targets are extension pages.
    return [page_info for page_info in all_pages
            if page_info['url'].startswith('chrome-extension://')]

  def GetExtensionIds(self):
    return map(self._GetExtensionId, self.GetExtensionInfoList())
|
liqd/adhocracy3.mercator
|
src/adhocracy_meinberlin/adhocracy_meinberlin/resources/kiezkassen.py
|
Python
|
agpl-3.0
| 1,552
| 0
|
"""Mercator proposal."""
from adhocracy_core.resources import add_resource_type_to_registry
from adhocracy_core.resources import process
from adhocracy_core.resources import proposal
from adhocracy_core.sheets.geo import IPoint
from adhocracy_core.sheets.geo import ILocationReference
from adhocracy_core.sheets.image import IImageReference
import adhocracy_meinberlin.sheets.kiezkassen
class IProposalVersion(proposal.IProposalVersion):
    """Marker interface for a Kiezkassen proposal version."""
# Extend the generic proposal-version metadata with the Kiezkassen
# proposal sheet and a geo point.
proposal_version_meta = proposal.proposal_version_meta._replace(
    iresource=IProposalVersion,
)._add(extended_sheets=(adhocracy_meinberlin.sheets.kiezkassen.IProposal,
                        IPoint))
class IProposal(proposal.IProposal):
    """Marker interface for the Kiezkassen proposal versions pool."""
# The proposal pool contains (and creates) Kiezkassen proposal versions.
proposal_meta = proposal.proposal_meta._replace(
    iresource=IProposal,
    element_types=(IProposalVersion,),
    item_type=IProposalVersion,
)
class IProcess(process.IProcess):
    """Marker interface for the Kiezkassen participation process."""
# Resource metadata for the Kiezkassen process: a pool of proposals with
# location/image sheets and the 'kiezkassen' workflow. (Two lines were
# corrupted by stray separator lines; reconstructed.)
process_meta = process.process_meta._replace(
    content_name='KiezkassenProcess',
    iresource=IProcess,
    element_types=(IProposal,
                   ),
    is_implicit_addable=True,
    extended_sheets=(
        ILocationReference,
        IImageReference,
    ),
    default_workflow='kiezkassen',
)
def includeme(config):
    """Register the Kiezkassen resource types with the content registry."""
    add_resource_type_to_registry(proposal_meta, config)
    add_resource_type_to_registry(proposal_version_meta, config)
    add_resource_type_to_registry(process_meta, config)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/cone/_cmid.py
|
Python
|
mit
| 443
| 0
|
import _plotly_utils.basevalidators
class CmidValidator(_plotly_utils.basevalidators.NumberValidator):
    """Validator for the 'cmid' property of 'cone' traces.

    (The __init__ signature and super() call were corrupted by stray
    separator lines; reconstructed — this is the standard auto-generated
    plotly validator boilerplate.)
    """

    def __init__(self, plotly_name="cmid", parent_name="cone", **kwargs):
        super(CmidValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            implied_edits=kwargs.pop("implied_edits", {}),
            **kwargs
        )
cs411sp15vmnjhtdw/MeetU
|
CS411Project/urls.py
|
Python
|
mit
| 837
| 0
|
"""CS411Project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from meetu import urls as meetu_urls
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^/*', include(meetu_urls)),
]
|
mattr555/AtYourService
|
main/migrations/0011_auto__add_index_organization_name__add_index_userevent_date_end__add_i.py
|
Python
|
mit
| 8,705
| 0.007007
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding index on 'Organization', fields ['name']
db.create_index('main_organization', ['name'])
# Adding index on 'UserEvent', fields ['date_end']
db.create_index('main_userevent', ['date_end'])
# Adding index on 'Event', fields ['name']
db.create_index('main_event', ['name'])
# Adding index on 'Event', fields ['date_start']
db.create_index('main_event', ['date_start'])
# Adding index on 'Event', fields ['date_end']
db.create_index('main_event', ['date_end'])
def backwards(self, orm):
# Removing index on 'Event', fields ['date_end']
db.delete_index('main_event', ['date_end'])
# Removing index on 'Event', fields ['date_start']
db.delete_index('main_event', ['date_start'])
# Removing index on 'Event', fields ['name']
db.delete_index('main_event', ['name'])
# Removing index on 'UserEvent', fields ['date_end']
db.delete_index('main_userevent', ['date_end'])
# Removing index on 'Organization', fields ['name']
db.delete_index('main_organization', ['name'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'blank': 'True', 'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Group']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['auth.Permission']", 'symmetrical': 'False'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '30', 'unique': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'", 'object_name': 'ContentType'},
'app_label': ('django.db.models.fields.CharField
|
', [], {'ma
|
x_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.event': {
'Meta': {'object_name': 'Event'},
'confirmed_participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'related_name': "'confirmed_events'"}),
'date_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'date_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'geo_lat': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'geo_lon': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'db_index': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Organization']", 'related_name': "'events'"}),
'organizer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'related_name': "'events_organized'"}),
'participants': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'related_name': "'events'"})
},
'main.organization': {
'Meta': {'object_name': 'Organization'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'related_name': "'orgs_admin'"}),
'description': ('django.db.models.fields.TextField', [], {}),
'geo_lat': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'geo_lon': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'related_name': "'organizations'"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '300', 'db_index': 'True'})
},
'main.userevent': {
'Meta': {'object_name': 'UserEvent'},
'date_end': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'null': 'True', 'db_index': 'True'}),
'date_start': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'geo_lat': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'geo_lon': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'hours_worked': ('django.db.models.fields.FloatField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'related_name': "'user_events'"})
},
'main.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'geo_lat': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'nu
|
nwjs/chromium.src
|
third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared/3pp/fetch.py
|
Python
|
bsd-3-clause
| 1,402
| 0.000713
|
#!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://repo.maven.apache.org/maven2/org/apache/maven/wagon/wagon-http-shared/1.0-beta-6/wagon-http-shared-1.0-beta-6.jar'
_FILE_NAME = 'wagon-http-shared-1.0-beta-6.jar'
_FILE_VERSION = '1.0-beta-6'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
|
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
prin
|
t(json.dumps(partial_manifest))
def main():
    """Dispatch the 'latest' / 'get_url' subcommands (3pp fetch protocol)."""
    parser = argparse.ArgumentParser()
    subcommands = parser.add_subparsers()

    latest_cmd = subcommands.add_parser("latest")
    latest_cmd.set_defaults(func=lambda _opts: do_latest())

    get_url_cmd = subcommands.add_parser("get_url")
    get_url_cmd.set_defaults(
        func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))

    options = parser.parse_args()
    options.func(options)
if __name__ == '__main__':
main()
|
maartenbreddels/vaex
|
tests/ml/sklearn_test.py
|
Python
|
mit
| 11,265
| 0.002929
|
import pytest
import vaex
pytest.importorskip("sklearn")
from vaex.ml.sklearn import Predictor, IncrementalPredictor
import numpy as np
# Regressions
from sklearn.linear_model import LinearRegression, Ridge, Lasso, SGDClassifier, SGDRegressor
from sklearn.svm import SVR
from sklearn.ensemble import AdaBoostRegressor, GradientBoostingRegressor, RandomForestRegressor
# Classifiers
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier, GradientBoostingClassifier, RandomForestClassifier
# Scikit-learn regressors validated against the vaex Predictor wrapper.
models_regression = [LinearRegression(),
                     Ridge(random_state=42, max_iter=100),
                     Lasso(random_state=42, max_iter=100),
                     SVR(gamma='scale'),
                     AdaBoostRegressor(random_state=42, n_estimators=10),
                     GradientBoostingRegressor(random_state=42, max_depth=3, n_estimators=10),
                     RandomForestRegressor(n_estimators=10, random_state=42, max_depth=3)]

# Scikit-learn classifiers validated in the classification tests below.
models_classification = [LogisticRegression(solver='lbfgs', max_iter=100, random_state=42),
                         SVC(gamma='scale', max_iter=100, probability=True),
                         AdaBoostClassifier(random_state=42, n_estimators=10),
                         GradientBoostingClassifier(random_state=42, max_depth=3, n_estimators=10),
                         RandomForestClassifier(n_estimators=10, random_state=42, max_depth=3)]
def test_sklearn_estimator():
    """Predictor round-trip: transform() agrees with predict(), and the
    fitted state transfers to the full dataset via state_get/state_set."""
    ds = vaex.ml.datasets.load_iris()
    features = ['sepal_length', 'sepal_width', 'petal_length']
    train, test = ds.ml.train_test_split(verbose=False)
    model = Predictor(model=LinearRegression(), features=features, target='petal_width', prediction_name='pred')
    model.fit(train)
    prediction = model.predict(test)
    test = model.transform(test)
    # transform() adds a 'pred' column that must match predict() output.
    np.testing.assert_array_almost_equal(test.pred.values, prediction, decimal=5)
    # Transfer the state of train to ds
    train = model.transform(train)
    state = train.state_get()
    ds.state_set(state)
    assert ds.pred.values.shape == (150,)
def test_sklearn_estimator_virtual_columns():
    """Predictor works when all features and the target are virtual columns."""
    ds = vaex.ml.datasets.load_iris()
    # Multiplying by 1 creates virtual (expression-backed) columns.
    ds['x'] = ds.sepal_length * 1
    ds['y'] = ds.sepal_width * 1
    ds['w'] = ds.petal_length * 1
    ds['z'] = ds.petal_width * 1
    # NOTE(review): train/test are unused below; the model is fit on the
    # full dataset — confirm this is intended.
    train, test = ds.ml.train_test_split(test_size=0.2, verbose=False)
    features = ['x', 'y', 'z']
    model = Predictor(model=LinearRegression(), features=features, target='w', prediction_name='pred')
    model.fit(ds)
    ds = model.transform(ds)
    assert ds.pred.values.shape == (150,)
def test_sklearn_estimator_serialize(tmpdir):
    """A fitted Predictor survives Pipeline save/load, and its state
    round-trips through state_get/state_set before serialization."""
    ds = vaex.ml.datasets.load_iris()
    features = ['sepal_length', 'sepal_width', 'petal_length']
    model = Predictor(model=LinearRegression(), features=features, target='petal_width', prediction_name='pred')
    model.fit(ds)
    pipeline = vaex.ml.Pipeline([model])
    pipeline.save(str(tmpdir.join('test.json')))
    pipeline.load(str(tmpdir.join('test.json')))
    # Second pass: round-trip the model state before serialising again.
    model = Predictor(model=LinearRegression(), features=features, target='petal_width', prediction_name='pred')
    model.fit(ds)
    model.state_set(model.state_get())
    pipeline = vaex.ml.Pipeline([model])
    pipeline.save(str(tmpdir.join('test.json')))
    pipeline.load(str(tmpdir.join('test.json')))
def test_sklearn_estimator_regression_validation():
    """Every wrapped regressor must reproduce plain scikit-learn output."""
    ds = vaex.ml.datasets.load_iris()
    train, test = ds.ml.train_test_split(verbose=False)
    features = ['sepal_length', 'sepal_width', 'petal_length']
    # Dense features
    Xtrain = train[features].values
    Xtest = test[features].values
    ytrain = train.petal_width.values
    for model in models_regression:
        # vaex
        vaex_model = Predictor(model=model, features=features, target='petal_width', prediction_name='pred')
        vaex_model.fit(train)
        test = vaex_model.transform(test)
        # sklearn
        model.fit(Xtrain, ytrain)
        skl_pred = model.predict(Xtest)
        np.testing.assert_array_almost_equal(test.pred.values, skl_pred, decimal=5)
def test_sklearn_estimator_pipeline():
    """state_transfer + Predictor compose into a Pipeline whose predict()
    matches the 'pred' column produced by transform()."""
    ds = vaex.ml.datasets.load_iris()
    train, test = ds.ml.train_test_split(verbose=False)
    # Add virtual columns
    train['sepal_virtual'] = np.sqrt(train.sepal_length**2 + train.sepal_width**2)
    train['petal_scaled'] = train.petal_length * 0.2
    # Do a pca
    features = ['sepal_virtual', 'petal_scaled']
    pca = train.ml.pca(n_components=2, features=features, transform=False)
    train = pca.transform(train)
    # Do state transfer
    st = train.ml.state_transfer()
    # now apply the model
    features = ['sepal_virtual', 'petal_scaled']
    model = Predictor(model=LinearRegression(), features=features, target='petal_width', prediction_name='pred')
    model.fit(train)
    # Create a pipeline
    pipeline = vaex.ml.Pipeline([st, model])
    # Use the pipeline
    pred = pipeline.predict(test)
    df_trans = pipeline.transform(test)
    # WARNING: on windows/appveyor this gives slightly different results
    # do we fully understand why? I also have the same results on my osx laptop
    # sklearn 0.21.1 (scikit-learn-0.21.2 is installed on windows) so it might be a
    # version related thing
    np.testing.assert_array_almost_equal(pred, df_trans.pred.values)
@pytest.mark.parametrize("prediction_type", ['predict', 'predict_proba'])
def test_sklearn_estimator_classification_validation(prediction_type):
    """Every wrapped classifier must match scikit-learn, for both hard
    predictions and class probabilities."""
    df = vaex.ml.datasets.load_titanic()
    df['survived'] = df.survived.astype('int')
    train, test = df.ml.train_test_split(verbose=False)
    features = ['pclass', 'parch', 'sibsp']
    # Dense features
    Xtrain = train[features].values
    Xtest = test[features].values
    ytrain = train.survived.values
    for model in models_classification:
        # vaex
        vaex_model = Predictor(model=model, features=features, target='survived', prediction_name='pred', prediction_type=prediction_type)
        vaex_model.fit(train)
        test = vaex_model.transform(test)
        # scikit-learn
        model.fit(Xtrain, ytrain)
        if prediction_type == 'predict':
            skl_pred = model.predict(Xtest)
        else:
            skl_pred = model.predict_proba(Xtest)
        assert np.all(skl_pred == test.pred.values)
def test_sklearn_incremental_predictor_regression():
    """IncrementalPredictor trains in batches, its state transfers to a
    second dataframe, and batch predictions agree with in-memory predict().
    """
    df = vaex.example()
    df_train, df_test = df.ml.train_test_split(test_size=0.1, verbose=False)
    features = df_train.column_names[:6]
    target = 'FeH'
    incremental = IncrementalPredictor(model=SGDRegressor(),
                                       features=features,
                                       target=target,
                                       batch_size=10*1000,
                                       num_epochs=5,
                                       shuffle=True,
                                       prediction_name='pred')
    incremental.fit(df=df_train)
    df_train = incremental.transform(df_train)
    # State transfer
    state = df_train.state_get()
    df_test.state_set(state)
    assert df_train.column_count() == df_test.column_count()
    assert df_test.pred.values.shape == (33000,)
    # decimal=1 only: SGD training makes the comparison loose by design.
    pred_in_memory = incremental.predict(df_test)
    np.testing.assert_array_almost_equal(pred_in_memory, df_test.pred.values, decimal=1)
@pytest.mark.parametrize("prediction_type", ['predict', 'predict_proba'])
def test_sklearn_incremental_predictor_classification(prediction_type):
df = vaex.ml.datasets.load_iris_1e5()
df_train, d
|
f_test = df.ml.
|
train_test_split(test_size=0.1, verbose=False)
features = df_train.column_names[:4]
target = 'class_'
incremental = IncrementalPredictor(model=SGDClassifier(loss='log', learning_rate='constant', eta0=0.01),
features=features,
target=target,
batch_size=10_000,
num_epochs=3,
shuffle=False,
prediction_name='pred',
|
Oliver-Lab/snakemakelib-oliver
|
snakemakelib_oliver/odo/geo.py
|
Python
|
mit
| 333
| 0
|
import re
from blaze import resource, DataFrame
impo
|
rt pandas as pd
from snakemakelib.odo.pandas import annotate_by_uri
@resource.register('.+fastq.summary')
@annotate_by_uri
def resource_fastqc_summary(uri, **kwargs):
    """Load a fastqc summary CSV as a blaze DataFrame indexed by fileName.

    NOTE(review): the bare ``open(uri)`` handle is unused — presumably an
    early existence/readability check before pandas parses the file.
    """
    with open(uri):
        data = pd.read_csv(uri, sep=",", index_col=["fileName"])
    return DataFrame(data)
|
metno/satistjenesten
|
setup.py
|
Python
|
mit
| 1,107
| 0.009033
|
from setuptools import setup
from setuptools import find_packages
import os
# Third-party packages required at install time.
requirements = [
    'numpy',
    'netCDF4',
    'pyresample',
    'pyyaml',
    'pillow',
    'rasterio',
]

# Long-description placeholder (no README file is read here).
readme_contents = ""

setup(
    name='satistjenesten',
    version=0.5,
    author='Mikhail Itkin',
    description='Istjenesten satellite processing suite',
    packages=['satistjenesten'],
    # NOTE(review): data_files entries are conventionally
    # (target_dir, [files]) tuples; a bare path is passed here — confirm.
    data_files=[os.path.join(os.path.dirname(__file__), 'test_data', 'DroidSans.ttf')],
    long_description=readme_contents,
    install_requires=requirements,
    test_suite='tests',
    scripts=['scripts/amsr2_mosaic.py', 'scripts/mitiff2geotiff.py', 'scripts/mitiff_mosaic.py'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python',
        'Operating System :: OS Independent',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering'
    ],
    include_package_data=True,
)
|
Grassboy/plugin.video.plurkTrend
|
youtube_dl/extractor/gamekings.py
|
Python
|
mit
| 1,331
| 0.003005
|
import re
from .common import InfoExtractor
class GamekingsIE(InfoExtractor):
    """Extractor for gamekings.tv video pages."""

    _VALID_URL = r'http://www\.gamekings\.tv/videos/(?P<name>[0-9a-z\-]+)'
    _TEST = {
        u"url": u"http://www.gamekings.tv/videos/phoenix-wright-ace-attorney-dual-destinies-review/",
        u'file': u'20130811.mp4',
        # MD5 is flaky, seems to change regularly
        #u'md5': u'2f32b1f7b80fdc5cb616efb4f387f8a3',
        u'info_dict': {
            u"title": u"Phoenix Wright: Ace Attorney \u2013 Dual Destinies Review",
            u"description": u"Melle en Steven hebben voor de review een week in de rechtbank doorbracht met Phoenix Wright: Ace Attorney - Dual Destinies.",
        }
    }

    def _real_extract(self, url):
        match = re.match(self._VALID_URL, url)
        page_name = match.group('name')
        webpage = self._download_webpage(url, page_name)
        video_url = self._og_search_video_url(webpage)
        # The first number embedded in the og:video URL is the video id.
        video_id = re.search(r'[0-9]+', video_url).group(0)
        # Todo: add medium format
        video_url = video_url.replace(video_id, 'large/' + video_id)
        return {
            'id': video_id,
            'ext': 'mp4',
            'url': video_url,
            'title': self._og_search_title(webpage),
            'description': self._og_search_description(webpage),
        }
|
DePierre/owtf
|
install/install.py
|
Python
|
bsd-3-clause
| 10,142
| 0.003352
|
#!/usr/bin/env python
import os
import sys
import time
import platform
import argparse
from datetime import datetime
from space_checker_utils import wget_wrapper
import ConfigParser
def create_directory(directory):
    """Create *directory* (including parents) if it does not already exist.

    :param directory: (~str) Path of directory to be made.
    :return: True if the directory was created, or already exists and is
        empty; False if it already exists and is non-empty.
    """
    try:
        os.makedirs(directory)
        return True
    except OSError:
        # makedirs raises OSError when the path already exists; treat a
        # pre-existing *empty* directory as success.
        if not os.listdir(directory):
            return True
        # Fix: previously fell through and implicitly returned None,
        # contradicting the documented True/False contract (callers only
        # test truthiness, so False is backward-compatible).
        return False
def run_command(command):
    """Execute the provided shell command.

    :param command: (~str) Linux shell command.
    :return: False when a ``wget`` command fails the disk-space pre-check;
        otherwise the ``os.system`` exit status (0 means success).
    """
    Colorizer.normal("[*] Running following command")
    Colorizer.info("%s" % command)
    # If command is `wget`, then before execution, `wget_wrapper` checks whether there is enough disk space available
    if not wget_wrapper(command):
        return False
    return os.system(command)
def owtf_last_commit():
    """Return the local git repo's last commit hash.

    Returns a placeholder string when ``root_dir`` is not a git checkout.
    """
    if not os.path.exists(os.path.join(root_dir, '.git')):
        return "*Not a git repository.*"
    # `git log` prints only the bare hash thanks to the pretty format.
    return os.popen('git log -n 1 --pretty=format:"%H"').read()
def check_sudo():
    """Abort the installer unless the current user has sudo access.

    ``sudo -v`` exits 0 when the user may run sudo; any other status makes
    the installer warn and exit.
    """
    sudo = os.system("sudo -v")
    if not sudo:
        return
    else:
        Colorizer.warning("[!] Your user does not have sudo privileges. Some OWTF components require sudo permissions to install")
        sys.exit()
def install_in_directory(directory, command):
    """Execute a certain command while staying inside one directory.

    :param directory: (~str) Path of directory in which installation command has to be executed.
    :param command: (~str) Linux shell command (most likely `wget` here)
    :return: the ``run_command`` result when the directory was created (or
        exists empty); True when the directory already exists non-empty,
        in which case installation is skipped.
    """
    if create_directory(directory):
        Colorizer.info("[*] Switching to %s" % directory)
        # NOTE(review): chdir changes the process-wide cwd for subsequent calls.
        os.chdir(directory)
        return run_command(command)
    else:
        Colorizer.warning("[!] Directory %s already exists, so skipping installation for this" % directory)
        return True
def install_using_pip(requirements_file):
    """Install the python libraries listed in a pip requirements file.

    :param requirements_file: (~str) Path to the requirements file.
    :return: the ``run_command`` result (os.system status; 0 on success).
    """
    # The whole file is handed to pip2 with -r in one invocation (rather
    # than iterating libraries one by one).
    pip_command = "sudo -E pip2 install --upgrade -r %s" % requirements_file
    return run_command(pip_command)
def install_restricted_from_cfg(config_file):
    """Install restricted tools and dependencies which are distro independent.

    :param config_file: (~str) Path to configuration file having information about restricted content.
    """
    # RootDir/Pid defaults allow %(RootDir)s / %(Pid)s interpolation in the cfg.
    cp = ConfigParser.ConfigParser({"RootDir": root_dir, "Pid": pid})
    cp.read(config_file)
    # Each cfg section names one tool: a target directory and its install command.
    for section in cp.sections():
        Colorizer.info("[*] Installing %s" % section)
        install_in_directory(os.path.expanduser(cp.get(section, "directory")), cp.get(section, "command"))
def is_compatible():
    """Return True when ``apt-get`` is on PATH (Debian-style distro)."""
    # `which` exits non-zero when the binary is missing; os.system encodes
    # the exit code in the high byte of its return value.
    status = os.system("which apt-get >> /dev/null 2>&1")
    return (status >> 8) != 1
def finish(error_code):
    """Print the closing installer message.

    :param error_code: 1 signals a failed installation; anything else is
        treated as success.
    """
    if error_code == 1:
        Colorizer.danger("\n[!] The installation was not successful.")
        Colorizer.normal("[*] Visit https://github.com/owtf/owtf for help ")
    else:
        Colorizer.success("[*] Finished!")
        Colorizer.info("[*] Start OWTF by running './owtf.py' in parent directory")
def install(cmd_arguments):
    """Perform installation of OWTF Framework. Wraps around all helper methods made in this module.

    Flow: detect (or ask for) the distro, install distro-independent
    restricted tools, run the distro-specific install command, then upgrade
    pip/setuptools/cffi and apply a Kali-1 setuptools workaround if needed.

    :param cmd_arguments: (~list) argv-style arguments for the module-level
        ``parser`` (supports ``--no-user-input`` and ``--core-only``).
    """
    args = parser.parse_args(cmd_arguments)
    # User asked to select distro (in case it can't be automatically detected) and distro related stuff is installed
    cp = ConfigParser.ConfigParser({"RootDir": root_dir, "Pid": pid})
    cp.read(distros_cfg)
    # Try get the distro automatically
    distro, version, arch = platform.linux_distribution()
    distro_num = 0
    if "kali" in distro.lower():
        distro_num = 1
    elif "samurai" in distro.lower():
        distro_num = 2
    elif is_compatible():
        distro_num = 3
    # Loop until proper input is received
    while True:
        if distro_num != 0:
            Colorizer.info("[*] %s has been automatically detected... " % distro)
            Colorizer.normal("[*] Continuing in auto-mode")
            break
        if args.no_user_input:
            distro_num = 0
            break
        print("")
        for i, item in enumerate(cp.sections()):
            Colorizer.warning("(%d) %s" % (i + 1, item))
        Colorizer.warning("(0) My distro is not listed :( %s" % distro)
        distro_num = raw_input("Select a number based on your distribution : ")
        try:
            # Checking if valid input is received
            distro_num = int(distro_num)
            break
        except ValueError:
            print('')
            Colorizer.warning("[!] Invalid Number specified")
            continue
    # First all distro independent stuff is installed
    install_restricted_from_cfg(restricted_cfg)
    if distro_num != 0:
        run_command(cp.get(cp.sections()[int(distro_num)-1], "install"))
    else:
        Colorizer.normal("[*] Skipping distro related installation :(")
    # Return if option to install only owtf dependencies is given, as there are optional tools further
    if args.core_only:
        return
    Colorizer.normal("[*] Upgrading pip to the latest version ...")
    # Upgrade pip before install required libraries
    run_command("sudo pip2 install --upgrade pip")
    Colorizer.normal("Upgrading setuptools to the latest version ...")
    # Upgrade setuptools
    run_command("sudo pip2 install --upgrade setuptools")
    Colorizer.normal("Upgrading cffi to the latest version ...")
    # Mitigate cffi errors by upgrading it first
    run_command("sudo pip2 install --upgrade cffi")
    # Fix: distro_num is an int here (auto-detection or int() above), so the
    # old comparison `distro_num == '1'` could never be true and the whole
    # Kali workaround branch was unreachable.
    if distro_num == 1:
        # check kali major release number 0.x, 1.x, 2.x
        kali_version = os.popen("cat /etc/issue", "r").read().split(" ")[2][0]
        if kali_version == '1':
            if args.no_user_input:
                fixsetuptools = 'n'
            else:
                fixsetuptools = raw_input("Delete /usr/lib/python2.7/dist-packages/setuptools.egg-info? (y/n)\n(recommended, solves some issues in Kali 1.xx)")
            if fixsetuptools == 'y':
                Colorizer.normal("[*] Backing up the original symlink...")
                ts = time.time()
                human_timestamp = datetime.fromtimestamp(ts).strftime('%Y-%m-%d-%H:%M:%S')
                symlink_orig_path = "/usr/lib/python2.7/dist-packages/setuptools.egg-info"
                run_command("mv %s %s-BACKUP-%s" % (symlink_orig_path, symlink_orig_path, human_timestamp))
                Colorizer.info("[*] The original symlink exists at %s-BACKUP-%s" % (symlink_orig_path, human_timestamp))
                install_using_pip(owtf_pip)
            else:
                Colorizer.warning("[!] Moving on with the installation but you were warned: there may be some errors!")
                install_using_pip(owtf_pip)
    run_command("sudo sh %s init" % (os.path.join(scripts_path, "db_setup.sh")))
    run_command("sudo sh %s" % (os.path.join(scripts_path, "db_run.sh")))
class Colorizer:
"""Helper class for colorized strings.
Different statements will have different colors:
- `normal`, denoting ongoing procedure (WHITE)
- `info`, any file path, commit hash or any other info (BLUE)
- `warning`, any potential hindrance in installation (YELLOW)
- `danger`, abrupt failure, desired file/dir not found etc. (RED)
"""
BOLD = '\033[1m'
RED = BOLD
|
ncliam/serverpos
|
openerp/addons/report_webkit/webkit_report.py
|
Python
|
agpl-3.0
| 16,744
| 0.005256
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
# Contributor(s) : Florent Xicluna (Wingo SA)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import subprocess
import os
import sys
from openerp import report
import tempfile
import time
import logging
from functools import partial
from report_helper import WebKitHelper
import openerp
from openerp.modules.module import get_module_resource
from openerp.report.report_sxw import *
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.tools.translate import _
from openerp.osv.osv import except_osv
from urllib import urlencode, quote as quote
_logger = logging.getLogger(__name__)
try:
# We use a jinja2 sandboxed environment to render mako templates.
# Note that the rendering does not cover all the mako syntax, in particular
# arbitrary Python statements are not accepted, and not all expressions are
# allowed: only "public" attributes (not starting with '_') of objects may
# be accessed.
# This is done on purpose: it prevents incidental or malicious execution of
# Python code that may break the security of the server.
from jinja2.sandbox import SandboxedEnvironment
mako_template_env = SandboxedEnvironment(
block_start_string="<%",
block_end_string="%>",
variable_start_string="${",
variable_end_string="}",
comment_start_string="<%doc>",
comment_end_string="</%doc>",
line_statement_prefix="%",
line_comment_prefix="##",
trim_blocks=True, # do not output newline after blocks
autoescape=True, # XML/HTML automatic escaping
)
mako_template_env.globals.update({
'str': str,
'quote': quote,
'urlencode': urlencode,
})
except ImportError:
_logger.warning("jinja2 not available, templating features will not work!")
def mako_template(text):
    """Build a Mako-style template backed by the sandboxed jinja2 env.

    This template uses UTF-8 encoding.

    :param text: template source using mako-like delimiters
        (``${...}``, ``<% ... %>``) as configured on mako_template_env.
    """
    return mako_template_env.from_string(text)
_extender_functions = {}
def webkit_report_extender(report_name):
"""
A decorator to define functions to extend the context used in a template rendering.
report_name must be the xml id of the desired report (it is mandatory to indicate the
module in that xml id).
The given function will be called at the creation of the report. The following arguments
will be passed to it (in this order):
- pool The model pool.
- cr The cursor.
- uid The user id.
- localcontext The context given to the template engine to render the templates for the
current report. This is the context that should be modified.
- context The OpenERP context.
"""
def fct1(fct):
lst = _extender_functions.get(report_name)
if not lst:
lst = []
_extender_functions[report_name] = lst
lst.append(fct)
return fct
return fct1
class WebKitParser(report_sxw):
"""Custom class that use webkit to render HTML reports
Code partially taken from report openoffice. Thanks guys :)
"""
    def __init__(self, name, table, rml=False, parser=rml_parse,
                 header=True, store=False, register=True):
        """Initialise the webkit parser; mirrors report_sxw's signature."""
        # localcontext is handed to the template engine; extender functions
        # registered via webkit_report_extender may add keys to it.
        self.localcontext = {}
        report_sxw.__init__(self, name, table, rml, parser,
                            header, store, register=register)
    def get_lib(self, cursor, uid):
        """Return the lib wkhtml path.

        Resolution order: the ``webkit_path`` ir.config_parameter, then a
        PATH search (plus cwd for frozen builds and the server root's
        parent directory).

        :raises except_osv: when the wkhtmltopdf binary cannot be located.
        """
        proxy = self.pool['ir.config_parameter']
        webkit_path = proxy.get_param(cursor, SUPERUSER_ID, 'webkit_path')
        if not webkit_path:
            try:
                defpath = os.environ.get('PATH', os.defpath).split(os.pathsep)
                if hasattr(sys, 'frozen'):
                    defpath.append(os.getcwd())
                if tools.config['root_path']:
                    defpath.append(os.path.dirname(tools.config['root_path']))
                # tools.which raises IOError when the binary is not found.
                webkit_path = tools.which('wkhtmltopdf', path=os.pathsep.join(defpath))
            except IOError:
                webkit_path = None
        if webkit_path:
            return webkit_path
        raise except_osv(
            _('Wkhtmltopdf library path is not set'),
            _('Please install executable on your system' \
              ' (sudo apt-get install wkhtmltopdf) or download it from here:' \
              ' http://code.google.com/p/wkhtmltopdf/downloads/list and set the' \
              ' path in the ir.config_parameter with the webkit_path key.' \
              'Minimal version is 0.9.9')
        )
def generate_pdf(self, comm_path, report_xml, header, footer, html_list, webkit_header=False):
"""Call webkit in order to generate pdf"""
if not webkit_header:
webkit_header = report_xml.webkit_header
fd, out_filename = tempfile.mkstemp(suffix=".pdf",
prefix="webkit.tmp.")
file_to_del = [out_filename]
if comm_path:
command = [comm_path]
else:
command = [
|
'wkhtmltopdf']
command.append('--quiet')
# default to UTF-8 encoding. Use <meta charset="latin-1"> to override.
command.extend(['--encoding', 'utf-8'])
if header :
with tempfile.NamedTemporaryFile(suffix=".head.html",
delete=False) as head_file:
head_file.write(self._sanitize_h
|
tml(header.encode('utf-8')))
file_to_del.append(head_file.name)
command.extend(['--header-html', head_file.name])
if footer :
with tempfile.NamedTemporaryFile(suffix=".foot.html",
delete=False) as foot_file:
foot_file.write(self._sanitize_html(footer.encode('utf-8')))
file_to_del.append(foot_file.name)
command.extend(['--footer-html', foot_file.name])
if webkit_header.margin_top :
command.extend(['--margin-top', str(webkit_header.margin_top).replace(',', '.')])
if webkit_header.margin_bottom :
command.extend(['--margin-bottom', str(webkit_header.margin_bottom).replace(',', '.')])
if webkit_header.margin_left :
command.extend(['--margin-left', str(webkit_header.margin_left).replace(',', '.')])
if webkit_header.margin_right :
command.extend(['--margin-right', str(webkit_header.margin_right).replace(',', '.')])
if webkit_header.orientation :
command.extend(['--orientation', str(webkit_header.orientation).replace(',', '.')])
if webkit_header.format :
command.extend(['--page-size', str(webkit_header.format).replace(',', '.')])
count = 0
for html in html_list :
with tempfile.Named
|
zedlander/flake8-commas
|
test/data/keyword_before_parenth_form/py2_bad.py
|
Python
|
mit
| 273
| 0
|
# Requires trailing commas in
|
Py2 but syntax error in Py3k
def True(
foo
):
True(
foo
)
def Fa
|
lse(
foo
):
False(
foo
)
def None(
foo
):
None(
foo
)
def nonlocal (
foo
):
nonlocal(
foo
)
|
exter/pycover
|
tools/timed_wrapper.py
|
Python
|
mit
| 386
| 0.015544
|
# __author__ = 'Exter'
from functools import wraps
import time
def timed(f):
    """Decorator: print how many milliseconds *f* took, then return its result.

    Python 2 style (uses the ``print`` statement); timing is wall-clock via
    ``time.time()`` rounded to whole milliseconds.
    """
    @wraps(f)  # keep f's __name__/__doc__ on the wrapper
    def wrapper(*args, **kwds):
        # Current wall-clock time in integer milliseconds.
        current_milli_time = lambda: int(round(time.time() * 1000))
        start = current_milli_time()
        result = f(*args, **kwds)
        elapsed = current_milli_time() - start
        print "%s took %d ms to finish" % (f.__name__, elapsed)
        return result
    return wrapper
|
jml/flocker
|
flocker/common/_net.py
|
Python
|
apache-2.0
| 1,289
| 0
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Network utilities.
"""
from ipaddr import IPAddress
import netifaces
def ipaddress_from_string(ip_address_string):
    """
    Parse an IPv4 or IPv6 address string and return an
    IPAddress instance.

    Any "embedded scope id" suffix on an IPv6 address (e.g. the "%eth0"
    in fe80::f816:3eff:fe11:ca54%eth0) is discarded before parsing.

    :param str ip_address_string: The IP address string to be parsed.
    :returns: An ``ipaddr.IPAddress`` instance.
    """
    # Split on the last '%' and keep only the address portion.
    host_portion = ip_address_string.rsplit('%', 1)[0]
    return IPAddress(host_portion)
def get_all_ips():
    """
    Find all IPs for this machine.

    :return: ``set`` of IP addresses (``bytes``).
    """
    collected = set()
    for interface_name in netifaces.interfaces():
        address_map = netifaces.ifaddresses(interface_name)
        # Only IPv4 and IPv6 families are of interest; others are skipped.
        for family in (netifaces.AF_INET, netifaces.AF_INET6):
            # ``get`` returns None for absent families; treat as empty.
            for entry in address_map.get(family) or []:
                collected.add(ipaddress_from_string(entry['addr']))
    return collected
|
danabauer/app-on-openstack
|
code/worker/deploy.py
|
Python
|
mit
| 331
| 0
|
#!/usr/bin/env python
# Create the message queue used by the watermark worker.
import os
from watermark.config import config as conf
from watermark import connect
# Select the configuration class from the environment, defaulting to 'default'.
config_name = os.getenv('WM_CONFIG_ENV') or 'default'
config = conf[config_name]()
# NOTE(review): assumes get_connection() returns an object exposing a
# ``message.create_queue`` API -- not visible from this file; verify.
conn = connect.get_connection(config)
conn.message.create_queue(name=config.NAME)
print("{name} queue created".format(name=config.NAME))
|
NixaSoftware/CVis
|
venv/lib/python2.7/site-packages/pandas/io/sas/sasreader.py
|
Python
|
apache-2.0
| 2,558
| 0.000391
|
"""
Read SAS sas7bdat or xport files.
"""
from pandas import compat
from pandas.io.common import _stringify_path
def read_sas(filepath_or_buffer, format=None, index=None, encoding=None,
             chunksize=None, iterator=False):
    """
    Read SAS files stored as either XPORT or SAS7BDAT format files.

    Parameters
    ----------
    filepath_or_buffer : string or file-like object
        Path to the SAS file.
    format : string {'xport', 'sas7bdat'} or None
        If None, file format is inferred. If 'xport' or 'sas7bdat',
        uses the corresponding format.
    index : identifier of index column, defaults to None
        Identifier of column that should be used as index of the DataFrame.
    encoding : string, default is None
        Encoding for text data.  If None, text data are stored as raw bytes.
    chunksize : int
        Read file `chunksize` lines at a time, returns iterator.
    iterator : bool, defaults to False
        If True, returns an iterator for reading the file incrementally.

    Returns
    -------
    DataFrame if iterator=False and chunksize=None, else SAS7BDATReader
    or XportReader

    Raises
    ------
    ValueError
        If the format is neither given nor inferable from the file name.
    """
    if format is None:
        buffer_error_msg = ("If this is a buffer object rather "
                            "than a string name, you must specify "
                            "a format string")
        filepath_or_buffer = _stringify_path(filepath_or_buffer)
        if not isinstance(filepath_or_buffer, compat.string_types):
            raise ValueError(buffer_error_msg)
        # Infer the format from the file extension.  BUG FIX: the original
        # wrapped this in ``try/except: pass``, which swallowed its own
        # deliberate "unable to infer" ValueError and then crashed below on
        # ``format.lower()`` with AttributeError on None.  Nothing here can
        # raise except that deliberate ValueError, so it now propagates.
        fname = filepath_or_buffer.lower()
        if fname.endswith(".xpt"):
            format = "xport"
        elif fname.endswith(".sas7bdat"):
            format = "sas7bdat"
        else:
            raise ValueError("unable to infer format of SAS file")
    if format.lower() == 'xport':
        from pandas.io.sas.sas_xport import XportReader
        reader = XportReader(filepath_or_buffer, index=index,
                             encoding=encoding,
                             chunksize=chunksize)
    elif format.lower() == 'sas7bdat':
        from pandas.io.sas.sas7bdat import SAS7BDATReader
        reader = SAS7BDATReader(filepath_or_buffer, index=index,
                                encoding=encoding,
                                chunksize=chunksize)
    else:
        raise ValueError('unknown SAS format')
    # With iterator/chunksize the caller drives the reads; otherwise read
    # everything and release the underlying file handle.
    if iterator or chunksize:
        return reader
    data = reader.read()
    reader.close()
    return data
|
QuanticPotato/vcoq
|
plugin/coq.py
|
Python
|
gpl-2.0
| 3,667
| 0.035451
|
import xml.etree.ElementTree as XMLFactory
import subprocess
import os
import signal
import utils
from buffers import Text, Color
class CoqManager:
    """Drive a ``coqtop -ideslave`` subprocess: launch it, send it XML
    'interp'/'rewind' calls, and route responses to the window manager's
    ``__Console__`` window."""

    def __init__(self, WM):
        """:param WM: the windows manager used to display coqtop output."""
        # The coqtop process
        self.coqtop = None
        # The string return by 'coqtop --version'
        self.coqtopVersion = ''
        # The windows manager instance
        self.windowsManager = WM

    def launchCoqtopProcess(self):
        """(Re)start the coqtop subprocess and print its version banner
        to the __Console__ window."""
        if self.coqtop :
            # Tear down any previous instance before relaunching.
            try:
                self.coqtop.terminate()
                self.coqtop.communicate() # Clear the pipe
            except OSError:
                pass
        self.coqtopVersion = subprocess.check_output(['coqtop', '--version'])
        self.coqtop = subprocess.Popen(
            ['coqtop', '-ideslave'], # We need -ide-slave to be able to send XML queries
            stdin = subprocess.PIPE,
            stdout = subprocess.PIPE,
            stderr = subprocess.STDOUT,
            # Child ignores SIGINT so Ctrl-C in the editor does not kill coqtop.
            preexec_fn = lambda:signal.signal(signal.SIGINT, signal.SIG_IGN))
        versionText = Text(self.coqtopVersion)
        versionText.setPosition(0, 2)
        self.windowsManager.output.updateWindowContent('__Console__', versionText)

    def sendQueryCommand(self):
        """Read the query typed in the __Input__ window, send it to coqtop
        as a raw 'interp' call, and display the response in __Console__."""
        query = self.windowsManager.input.getLine("__Input__", 0)
        xml = XMLFactory.Element('call')
        xml.set('val', 'interp')
        xml.set('id', '0')
        xml.set('raw', 'true')
        xml.text = query
        response = self.sendXML(xml)
        if response != None:
            if response.get('val') == 'good':
                response_info = response.find('string')
                if not response_info is None:
                    rep = Text(response_info.text)
                    self.windowsManager.output.updateWindowContent("__Console__", rep, True)
            elif response.get('val') == 'fail':
                # Failure: show coqtop's error text in red.
                err = Text(str(response.text))
                err.setColor(Color.red)
                self.windowsManager.output.updateWindowContent("__Console__", err, True)
        else:
            utils.error("No responses (query) ..")

    def sendChunk(self, chunk):
        """Send a chunk of Coq source to coqtop via an 'interp' call.

        :param chunk: UTF-8 encoded bytes of Coq code.
        :returns: True when coqtop accepted the chunk, False otherwise.
        """
        xml = XMLFactory.Element('call')
        xml.set('val', 'interp')
        xml.set('id', '0')
        xml.text = chunk.decode('utf-8')
        response = self.sendXML(xml)
        if response != None:
            if response.get('val') == 'good':
                #response_info = response.find('string')
                #if not response_info is None:
                #    rep = Text(response_info.text)
                #    self.windowsManager.output.updateWindowContent("Console", rep, True)
                return True
            elif response.get('val') == 'fail':
                err = Text(str(response.text))
                err.setColor(Color.red)
                self.windowsManager.output.updateWindowContent("__Console__", err, True)
        else:
            utils.error("No responses (sendChunk) ..")
        return False

    def rewind(self, steps):
        """Undo ``steps`` commands in coqtop via a 'rewind' call.

        :returns: the total number of steps rewound (coqtop may report
        extra steps in its <int> reply), or 0 on failure.
        """
        xml = XMLFactory.Element('call')
        xml.set('val', 'rewind')
        xml.set('steps', str(steps))
        response = self.sendXML(xml)
        if response != None:
            if response.get('val') == 'good':
                additional_steps = response.find('int')
                return steps + int(additional_steps.text)
            print(XMLFactory.tostring(response))
            utils.error("Something went wrong !")
        else:
            utils.error("No responses (rewind) ..")
        return 0

    def sendXML(self, xml):
        """ First, check wether the coq process is still running.
        Then it send the XML command, and finally it waits for the response """
        if self.coqtop == None:
            utils.error('ERROR: The coqtop process is not running or died !')
            print('Trying to relaunch it ...')
            self.launchCoqtopProcess()
        try:
            self.coqtop.stdin.write(XMLFactory.tostring(xml, 'utf-8'))
        except IOError as e:
            # Pipe is broken: drop the handle so the next call relaunches.
            utils.error('Cannot communicate with the coq process : ' + str(e))
            self.coqtop = None
            return None
        response = ''
        file = self.coqtop.stdout.fileno()
        # Accumulate stdout until the buffer parses as a complete XML document.
        while True:
            try:
                response += os.read(file, 0x4000)
                try:
                    t = XMLFactory.fromstring(response)
                    return t
                except XMLFactory.ParseError:
                    # Partial document so far; keep reading.
                    continue
            except OSError:
                return None
|
kosior/eventful
|
eventful/events/tests/factories.py
|
Python
|
mit
| 2,221
| 0.00045
|
import factory
from django.contrib.auth.models import User
from django.utils import timezone
from events.models import Event, EventInvite
from userprofiles.models import FriendRequest
class UserFactory(factory.django.DjangoModelFactory):
    """Factory for ``django.contrib.auth.models.User`` with optional
    post-creation hooks (see ``Params``)."""

    class Meta:
        model = User

    class Params:
        # Optional hooks read back from kwargs in create() below.
        join_event = None        # Event the new user should join
        get_invited_to = None    # Event the new user should be invited to
        friend_user = None       # existing User to befriend the new user with

    @classmethod
    def create(cls, **kwargs):
        """Create the user, then apply at most one of join_event /
        get_invited_to (elif), plus optionally friend_user."""
        obj = super().create(**kwargs)
        join_event = kwargs.get('join_event')
        get_invited_to = kwargs.get('get_invited_to')
        friend_user = kwargs.get('friend_user')
        if isinstance(join_event, Event):
            # check_perm=False: skip permission checks during test setup.
            EventInvite.objects.join(join_event, obj, check_perm=False)
        elif isinstance(get_invited_to, Event):
            from_user = get_invited_to.created_by
            EventInvite.objects.invite(get_invited_to, from_user, obj.pk)
        if friend_user:
            # Send a request toward the new user and accept it, so the pair
            # ends up as established friends.
            FriendRequest.objects.send_friend_request(friend_user, obj.pk)
            FriendRequest.objects.accept(obj, friend_user.pk)
        return obj

    username = factory.Sequence(lambda n: f'user{n}')
    password = 'pass'
class EventFactory(factory.django.DjangoModelFactory):
    """Factory for ``Event`` whose start_date is 24h in the future or past
    depending on the ``past_or_future`` param."""

    class Meta:
        model = Event

    class Params:
        past_or_future = 'future'   # 'future' or 'past'; controls start_date
        join = False                # if True, the creator joins their own event

    @classmethod
    def create(cls, **kwargs):
        obj = super().create(**kwargs)
        if kwargs.get('join'):
            EventInvite.objects.join(obj, obj.created_by)
        return obj

    @staticmethod
    def _get_date_time(obj):
        # 24 hours from now, forward or backward.  Any other
        # past_or_future value falls through and returns None.
        if obj.past_or_future == 'future':
            return timezone.now() + timezone.timedelta(hours=24)
        elif obj.past_or_future == 'past':
            return timezone.now() - timezone.timedelta(hours=24)

    created_by = factory.SubFactory(UserFactory)
    title = 'Event title'
    start_date = factory.LazyAttribute(lambda o: EventFactory._get_date_time(o))
class FriendshipFactory(factory.django.DjangoModelFactory):
    """Create a ``FriendRequest`` and immediately accept it, yielding an
    established friendship between ``from_user`` and ``to_user``.

    CONSISTENCY FIX: uses ``factory.django.DjangoModelFactory`` like the
    sibling factories in this module -- the bare ``factory.DjangoModelFactory``
    alias is deprecated and was removed in factory_boy 3.0.
    """
    class Meta:
        model = FriendRequest

    @classmethod
    def create(cls, **kwargs):
        # Build the request, then accept it on behalf of the recipient.
        obj = super().create(**kwargs)
        FriendRequest.objects.accept(obj.to_user, obj.from_user.pk)
        return obj
|
go2net/PythonBlocks
|
components/propertyeditor/QPropertyModel.py
|
Python
|
mit
| 7,747
| 0.013037
|
from PyQt4 import QtCore, QtGui
from components.propertyeditor.Property import Property
from components.RestrictFileDialog import RestrictFileDialog
from PyQt4.QtCore import *
from PyQt4.QtGui import *
import sys, os
class QPropertyModel(QtCore.QAbstractItemModel):
    """Qt item model exposing a tree of ``Property`` nodes as a two-column
    (Property, Value) property editor."""

    def __init__(self, parent):
        super(QPropertyModel, self).__init__(parent)
        # Invisible root of the property tree.
        self.rootItem = Property("Root", "Root", 0, None);

    def index (self, row, column, parent):
        """Return the model index for row/column under ``parent``."""
        parentItem = self.rootItem;
        if (parent.isValid()):
            parentItem = parent.internalPointer()
        if (row >= parentItem.childCount() or row < 0):
            return QtCore.QModelIndex();
        return self.createIndex(row, column, parentItem.child(row))

    def getIndexForNode(self, node):
        """Return a column-1 (value-column) index pointing at ``node``."""
        return self.createIndex(node.row(), 1, node)

    def getPropItem(self, name, parent=None):
        """Return the direct child of ``parent`` (root by default) whose
        ``name`` matches, or None when absent."""
        if(parent == None):
            parent = self.rootItem
        for item in parent.childItems:
            if(item.name == name):
                return item
        return None

    def headerData (self, section, orientation, role) :
        # Fixed horizontal headers for the two columns.
        if (orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole) :
            if (section == 0) :
                return "Property"
            elif (section == 1) :
                return "Value"
        return None # QtCore.QVariant();

    def flags (self, index ):
        """Column 0 is selectable only; column 1 is editable unless the
        item is the root or marked readOnly."""
        if (not index.isValid()):
            return QtCore.Qt.ItemIsEnabled;
        item = index.internalPointer();
        if (index.column() == 0):
            return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
        # only allow change of value attribute
        if (item.isRoot()):
            return QtCore.Qt.ItemIsEnabled;
        elif (item.readOnly):
            return QtCore.Qt.ItemIsDragEnabled
        else:
            return QtCore.Qt.ItemIsDragEnabled | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsEditable;

    def parent(self, index):
        """Return the parent index of ``index`` (invalid index for
        top-level items)."""
        if not index.isValid():
            return QtCore.QModelIndex()
        childItem = index.internalPointer()
        parentItem = childItem.parentItem
        if parentItem == None or parentItem == self.rootItem:
            return QtCore.QModelIndex()
        # NOTE(review): Qt convention is createIndex(parentItem.row(), 0, ...);
        # passing childCount() as the row looks wrong -- confirm before changing.
        return self.createIndex(parentItem.childCount(), 0, parentItem)

    def rowCount ( self, parent ):
        parentItem = self.rootItem;
        if (parent.isValid()):
            parentItem = parent.internalPointer()
        return len(parentItem.childItems)

    def columnCount (self, parent):
        # Fixed layout: property name / property value.
        return 2

    def data (self, index, role):
        """Return display/edit/decoration data for ``index``.  Image-editor
        items store a dict value (with 'icon' and 'url' keys); all other
        items store a plain value."""
        if (not index.isValid()):
            return None
        item = index.internalPointer()
        if(item.editor_type == Property.IMAGE_EDITOR):
            if (index.column() == 0) and (
                role == QtCore.Qt.ToolTipRole or
                role == QtCore.Qt.DecorationRole or
                role == QtCore.Qt.DisplayRole or
                role == QtCore.Qt.EditRole):
                # Labels display with underscores shown as spaces.
                return item.label.replace('_', ' ');
            if (index.column() == 1):
                if(role == QtCore.Qt.DecorationRole):
                    if(item.value['icon'] != None and not item.value['icon'].isNull()):
                        return item.value['icon'].scaled(18, 18)
                    else:
                        return None
                if(role == QtCore.Qt.DisplayRole):
                    return item.value['url']
                if(role == QtCore.Qt.EditRole):
                    return item.value
        else:
            if(role == QtCore.Qt.ToolTipRole or
               role == QtCore.Qt.DecorationRole or
               role == QtCore.Qt.DisplayRole or
               role == QtCore.Qt.EditRole):
                if (index.column() == 0):
                    return item.label.replace('_', ' ');
                if (index.column() == 1):
                    return item.value
        if(role == QtCore.Qt.BackgroundRole):
            # Root rows get the button-face background color.
            if (item.isRoot()):
                return QtGui.QApplication.palette("QTreeView").brush(QtGui.QPalette.Normal, QtGui.QPalette.Button).color();
        return None

    def getItem(self, index):
        """Return the ``Property`` behind ``index``, or the root item."""
        if index.isValid():
            item = index.internalPointer()
            if item:
                return item
        return self.rootItem

    def insertRows(self, position, rows, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)
        self.beginInsertRows(parent, position, position + rows - 1)
        for row in range(rows):
            # success reflects the last insertion only.
            success = parentItem.insertChild(position+row) != None
        self.endInsertRows()
        return success

    def removeRows(self, position, rows, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)
        self.beginRemoveRows(parent, position, position + rows - 1)
        success = parentItem.removeChildren(position, rows)
        self.endRemoveRows()
        return success

    # edit methods
    def setData(self, index, value, role = QtCore.Qt.EditRole):
        """Store ``value`` on the underlying Property and notify views."""
        if (index.isValid() and role == Qt.EditRole):
            item = index.internalPointer()
            item.setValue(value)
            self.dataChanged.emit(index, index)
            return True;
        return False

    def import_module_from_file(self, full_path_to_module):
        """
        Import a module given the full path/filename of the .py file

        Python 3.4
        """
        module = None
        # Get module name and path from full path
        module_dir, module_file = os.path.split(full_path_to_module)
        module_name, module_ext = os.path.splitext(module_file)
        if(sys.version_info >= (3,4)):
            import importlib
            # Get module "spec" from filename
            spec = importlib.util.spec_from_file_location(module_name,full_path_to_module)
            module = spec.loader.load_module()
        else:
            import imp
            module = imp.load_source(module_name,full_path_to_module)
        return module

    def getModuleFuncList(self, module_name):
        """Return the names of all top-level functions in the module at
        ``module_name`` (relative to CWD); empty list on any failure."""
        import inspect
        func_list = []
        if(module_name != ''):
            try:
                module_name = os.getcwd() + '\\' + module_name
                module = self.import_module_from_file(module_name)
                all_functions = inspect.getmembers(module, inspect.isfunction)
                for function in all_functions:
                    func_list.append(function[0])
            except:
                # Best effort: unloadable module yields an empty list.
                pass
        return func_list

    def getModuleName(self, editor):
        """Ask the user for a Python file, store its relative path in the
        'module_name' property and repopulate the 'function_name' choices."""
        module_name = QFileDialog.getOpenFileName(None, 'Open File', '.', "All file(*.*);;Python (*.py)")
        module_name = os.path.relpath(module_name, os.getcwd())
        if (module_name == ''): return
        prop_root = self.getPropItem('properties')
        module_name_prop= self.getPropItem('module_name', prop_root)
        module_name_prop.setValue(module_name)
        module_name_index = self.getIndexForNode(module_name_prop)
        self.dataChanged.emit(module_name_index, module_name_index)
        function_name_prop= self.getPropItem('function_name', prop_root)
        function_name_prop.editor_type = Property.COMBO_BOX_EDITOR
        function_name_prop.editor_data = self.getModuleFuncList(module_name)
        function_name_index = self.getIndexForNode(function_name_prop)
        self.dataChanged.emit(function_name_index, function_name_index)
|
OpenAssets/openassets
|
openassets/protocol.py
|
Python
|
mit
| 19,047
| 0.002888
|
# -*- coding: utf-8; -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2014 Flavien Charlon
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associat
|
ed documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge,
|
publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Provides the infrastructure for calculating the asset ID and asset quantity of Bitcoin outputs,
according to the Open Assets Protocol.
"""
import asyncio
import bitcoin.core
import bitcoin.core.script
import enum
import hashlib
import io
class ColoringEngine(object):
"""The backtracking engine used to find the asset ID and asset quantity of any output."""
def __init__(self, transaction_provider, cache, event_loop):
"""
Constructs an instance of the ColorEngine class.
:param bytes -> Future[CTransaction] transaction_provider: A function returning a transaction given its hash.
:param OutputCache cache: The cache object to use.
:param BaseEventLoop | None event_loop: The event loop used to schedule asynchronous tasks.
"""
self._transaction_provider = transaction_provider
self._cache = cache
self._loop = event_loop
@asyncio.coroutine
def get_output(self, transaction_hash, output_index):
"""
Gets an output and information about its asset ID and asset quantity.
:param bytes transaction_hash: The hash of the transaction containing the output.
:param int output_index: The index of the output.
:return: An object containing the output as well as its asset ID and asset quantity.
:rtype: Future[TransactionOutput]
"""
cached_output = yield from self._cache.get(transaction_hash, output_index)
if cached_output is not None:
return cached_output
transaction = yield from self._transaction_provider(transaction_hash)
if transaction is None:
raise ValueError('Transaction {0} could not be retrieved'.format(bitcoin.core.b2lx(transaction_hash)))
colored_outputs = yield from self.color_transaction(transaction)
for index, output in enumerate(colored_outputs):
yield from self._cache.put(transaction_hash, index, output)
return colored_outputs[output_index]
@asyncio.coroutine
def color_transaction(self, transaction):
"""
Computes the asset ID and asset quantity of every output in the transaction.
:param CTransaction transaction: The transaction to color.
:return: A list containing all the colored outputs of the transaction.
:rtype: Future[list[TransactionOutput]]
"""
# If the transaction is a coinbase transaction, the marker output is always invalid
if not transaction.is_coinbase():
for i, output in enumerate(transaction.vout):
# Parse the OP_RETURN script
marker_output_payload = MarkerOutput.parse_script(output.scriptPubKey)
if marker_output_payload is not None:
# Deserialize the payload as a marker output
marker_output = MarkerOutput.deserialize_payload(marker_output_payload)
if marker_output is not None:
# Fetch the colored outputs for previous transactions
inputs = []
for input in transaction.vin:
inputs.append((yield from asyncio.async(
self.get_output(input.prevout.hash, input.prevout.n), loop=self._loop)))
asset_ids = self._compute_asset_ids(
inputs,
i,
transaction.vout,
marker_output.asset_quantities)
if asset_ids is not None:
return asset_ids
# If no valid marker output was found in the transaction, all outputs are considered uncolored
return [
TransactionOutput(output.nValue, output.scriptPubKey, None, 0, OutputType.uncolored)
for output in transaction.vout]
@classmethod
def _compute_asset_ids(cls, inputs, marker_output_index, outputs, asset_quantities):
"""
Computes the asset IDs of every output in a transaction.
:param list[TransactionOutput] inputs: The outputs referenced by the inputs of the transaction.
:param int marker_output_index: The position of the marker output in the transaction.
:param list[CTxOut] outputs: The outputs of the transaction.
:param list[int] asset_quantities: The list of asset quantities of the outputs.
:return: A list of outputs with asset ID and asset quantity information.
:rtype: list[TransactionOutput]
"""
# If there are more items in the asset quantities list than outputs in the transaction (excluding the
# marker output), the marker output is deemed invalid
if len(asset_quantities) > len(outputs) - 1:
return None
# If there is no input in the transaction, the marker output is always invalid
if len(inputs) == 0:
return None
result = []
# Add the issuance outputs
issuance_asset_id = cls.hash_script(bytes(inputs[0].script))
for i in range(0, marker_output_index):
value, script = outputs[i].nValue, outputs[i].scriptPubKey
if i < len(asset_quantities) and asset_quantities[i] > 0:
output = TransactionOutput(value, script, issuance_asset_id, asset_quantities[i], OutputType.issuance)
else:
output = TransactionOutput(value, script, None, 0, OutputType.issuance)
result.append(output)
# Add the marker output
issuance_output = outputs[marker_output_index]
result.append(TransactionOutput(
issuance_output.nValue, issuance_output.scriptPubKey, None, 0, OutputType.marker_output))
# Add the transfer outputs
input_iterator = iter(inputs)
input_units_left = 0
for i in range(marker_output_index + 1, len(outputs)):
if i <= len(asset_quantities):
output_asset_quantity = asset_quantities[i - 1]
else:
output_asset_quantity = 0
output_units_left = output_asset_quantity
asset_id = None
while output_units_left > 0:
# Move to the next input if the current one is depleted
if input_units_left == 0:
current_input = next(input_iterator, None)
if current_input is None:
# There are less asset units available in the input than in the outputs:
# the marker output is considered invalid
return None
else:
input_units_left = current_input.asset_quantity
# If the current input is colored, assign its asset ID to the current output
if current_input.asset_id is not None:
progress = min(input_units_left, output_units_left)
output_units_left -= progress
|
LEEClab/LS_CORRIDORS
|
old_versions/before_v1_0_0/log_def.py
|
Python
|
gpl-2.0
| 2,333
| 0.045864
|
# Write a run log with start/end timestamps, elapsed time, and placeholder
# input/output sections for the LS Corridors tool.
#
# BUG FIXES vs. the original:
#   * header_log referenced undefined names `year`, `month`, `day` (NameError);
#     the *_start values are used instead.
#   * Python-2-only backtick repr replaced with str() (works on 2 and 3).
#   * The log file is now managed with a `with` block so it is always closed.
import os
from datetime import datetime

# Output directory for the log file (hard-coded path kept from the original).
os.chdir(r"E:\__data_2015\___john\Desenvolvimentos\aplications\Aplicacoes_grass\LSCorridors\___dados_cortados_teste_desenvolvimento")

# --- start-of-run timestamp ---
now = datetime.now()
day_start = now.day
month_start = now.month
year_start = now.year
hour_start = now.hour
minuts_start = now.minute
second_start = now.second

# --- end-of-run timestamp (captured again; the processing would run between) ---
now = datetime.now()
day_end = now.day
month_end = now.month
year_end = now.year
hour_end = now.hour
minuts_end = now.minute
second_end = now.second

header_log = ("_____Log__" + str(year_start) + "-" + str(month_start) +
              "-Day_" + str(day_start) + "_Time_" + str(hour_start) + "_" +
              str(minuts_start) + "_" + str(second_start))

with open(header_log + ".txt", "w") as txt_log:
    txt_log.write("Start time : Year " + str(year_start) + "-Month " + str(month_start) +
                  "-Day " + str(day_start) + " ---- time: " + str(hour_start) + ":" +
                  str(minuts_start) + ":" + str(second_start) + "\n")
    txt_log.write("End time : Year " + str(year_end) + "-Month " + str(month_end) +
                  "-Day " + str(day_end) + " ---- time: " + str(hour_end) + ":" +
                  str(minuts_end) + ":" + str(second_end) + "\n")
    # Elapsed time component-by-component (per-field abs(), as in the original).
    diference_time = (str(month_end - month_start) + " month - " +
                      str(abs(day_end - day_start)) + " Day - " + " Time: " +
                      str(abs(hour_end - hour_start)) + ":" +
                      str(abs(minuts_end - minuts_start)) + ":" +
                      str(abs(second_end - second_start)))
    txt_log.write("Processing time : " + diference_time + "\n\n")
    txt_log.write("Inputs : \n")
    txt_log.write("    Cost Map : xxxxx \n")
    txt_log.write("    Source Target Map : xxxxx \n")
    txt_log.write("    Methods : xxxxx \n")
    txt_log.write("    Variability : xxxxx \n")
    txt_log.write("    Number interactions M1 : xxxxx \n")
    txt_log.write("    Number interactions M1 : xxxxx \n")
    txt_log.write("    Number interactions M1 : xxxxx \n")
    txt_log.write("    Number interactions M1 : xxxxx \n\n")
    txt_log.write("Output location : \n")
    txt_log.write(r"    E:\__data_2015\___john\Desenvolvimentos\aplications\Aplicacoes_grass\LSCorridors\___dados_cortados_teste_desenvolvimento")

    # Timestamp for the trailing error line.
    now = datetime.now()
    day_now = now.day
    month_now = now.month
    year_now = now.year
    hour_now = now.hour
    minuts_now = now.minute
    second_now = now.second
    txt_log.write("\n\n")
    txt_log.write("[Error ->-> :] xxx " + str(year_now) + "-" + str(month_now) + "-" +
                  str(day_now) + " --- time : " + str(hour_now) + ":" + str(second_now))
|
philotas/opencaster
|
tutorials/psi-generation/firstsdt.py
|
Python
|
gpl-2.0
| 2,269
| 0.032613
|
#! /usr/bin/env python
#
# Copyright (C) 2008 Lorenzo Pal
|
lara, l.pallara@avalpa.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) an
|
y later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
from dvbobjects.PSI.SDT import *
from dvbobjects.DVB.Descriptors import *
from dvbobjects.MPEG.Descriptors import *

#
# Service Description Table (ETSI EN 300 468 5.2.3)
#
sdt = service_description_section(
    transport_stream_id = 1, # demo value, an official value should be demanded to dvb org
    original_network_id = 1, # demo value, an official value should be demanded to dvb org
    service_loop = [
        service_loop_item(
            service_ID = 1, # demo value
            EIT_schedule_flag = 0, # 0 no current even information is broadcasted, 1 broadcasted
            EIT_present_following_flag = 0, # 0 no next event information is broadcasted, 1 is broadcasted
            running_status = 4, # 4 service is running, 1 not running, 2 starts in a few seconds, 3 pausing
            free_CA_mode = 0, # 0 means service is not scrambled, 1 means at least a stream is scrambled
            service_descriptor_loop = [
                service_descriptor(
                    service_type = 1, # digital television service
                    service_provider_name = "Avalpa",
                    service_name = "Avalpa 1",
                ),
            ],
        ),
    ],
    version_number = 1, # you need to change the table number every time you edit, so the decoder will compare its version with the new one and update the table
    section_number = 0,
    last_section_number = 0,
    )

#
# PSI marshalling and encapsulation
#
# BUG FIX: the original ended with ``out.close`` -- an attribute access that
# never *called* close(), so the handle stayed open.  A context manager
# guarantees the section file is flushed and closed.
with open("./firstsdt.sec", "wb") as out:
    out.write(sdt.pack())
|
tgbugs/pyontutils
|
ilxutils/ilxutils/database_client.py
|
Python
|
mit
| 2,525
| 0.00198
|
from collections import defaultdict
import pandas as pd
import pickle
from sqlalchemy import create_engine, inspect, Table, Column
from sqlalchemy.engine.url import make_url
from sys import exit
class DatabaseClient:
    """ Takes care of the database pass opening to find the url and can query
        the respected database.

    Input:
        dbpass_path     path to the text file with the list of database urls
        dbname          database name so we know which database to query from the list
    """

    def __init__(self, dbpass_path, dbname):
        self.dbpass_path = dbpass_path
        self.dbname = dbname
        self.db_url = self.get_db_url()
        self.engine = create_engine(self.db_url)

    def get_db_url(self):
        """Return the URL in the dbpass file whose database matches
        ``self.dbname``; exit with the known names otherwise.

        Note: the ``with`` block closes the file -- the original's extra
        ``infile.close()`` calls were redundant and have been dropped.
        """
        db_names = []
        with open(self.dbpass_path, 'r') as infile:
            for raw_url in infile.read().splitlines():
                url_obj = make_url(raw_url)
                if url_obj.database == self.dbname:
                    return raw_url
                db_names.append(url_obj.database)
        exit('database name does not exist in dbpass given:' + ', '.join(db_names))

    def get_df_with_query(self, query):
        """ WARNING :: Will crash if too large. If so, you should just create the df file
        first via create_df_file(query=).

        load example:
            with open(input, 'rb') as infile:
                objs = []
                while True:
                    try:
                        obj = pickle.load(infile)
                    except EOFError:
                        break
                ...
        """
        return pd.read_sql(query, self.engine)

    def create_df_file_with_query(self, query, output):
        """ Dumps in df in chunks to avoid crashes.

        BUG FIX: the original reassigned ``query = query.format(...)`` inside
        the loop, consuming the {chunk_size}/{offset} placeholders on the
        first pass -- every later iteration silently re-read OFFSET 0 and the
        loop never advanced on tables with >= chunk_size rows.  The template
        is now kept separate and formatted fresh each iteration.  The unused
        ``data`` defaultdict was removed.
        """
        chunk_size = 100000
        offset = 0
        query_template = query.replace(';', '')
        query_template += """ LIMIT {chunk_size} OFFSET {offset};"""
        with open(output, 'wb') as outfile:
            while True:
                print(offset)
                df = pd.read_sql(
                    query_template.format(chunk_size=chunk_size, offset=offset),
                    self.engine)
                pickle.dump(df, outfile)
                offset += chunk_size
                # A short (or empty) chunk means the table is exhausted.
                if len(df) < chunk_size:
                    break
|
timeyyy/orchestra.nvim
|
rplugin/python3/orchestra/util.py
|
Python
|
unlicense
| 7,671
| 0.001825
|
import os
import wave
import platform
import threading
from functools import wraps
import time
import pyaudio
# NOTE(review): the trailing comma makes CUSTOMCMDS the 1-tuple ((),) rather
# than an empty tuple -- looks unintentional; confirm how consumers iterate
# it before changing.
CUSTOMCMDS = (),
# Autocommand event names (see Vim/Neovim ":help autocmd-events") that the
# plugin can hook.
AUTOCMDS = (
    'BufNewFile', 'BufReadPre', 'BufRead', 'BufReadPost',
    'BufReadCmd', 'FileReadPre', 'FileReadPost', 'FileReadCmd',
    'FilterReadPre', 'FilterReadPost', 'StdinReadPre',
    'StdinReadPost', 'BufWrite', 'BufWritePre', 'BufWritePost',
    'BufWriteCmd', 'FileWritePre', 'FileWritePost',
    'FileWriteCmd', 'FileAppendPre', 'FileAppendPost',
    'FileAppendCmd', 'FilterWritePre', 'FilterWritePost',
    'BufAdd', 'BufCreate', 'BufDelete', 'BufWipeout',
    'BufFilePre', 'BufFilePost', 'BufEnter', 'BufLeave',
    'BufWinEnter', 'BufWinLeave', 'BufUnload', 'BufHidden',
    'BufNew', 'SwapExists', 'TermOpen', 'TermClose',
    'FileType', 'Syntax', 'EncodingChanged', 'TermChanged',
    'OptionSet', 'VimEnter', 'GUIEnter', 'GUIFailed',
    'TermResponse', 'QuitPre', 'VimLeavePre', 'VimLeave',
    'FileChangedShell', 'FileChangedShellPost', 'FileChangedRO',
    'ShellCmdPost', 'ShellFilterPost', 'CmdUndefined',
    'FuncUndefined', 'SpellFileMissing', 'SourcePre', 'SourceCmd',
    'VimResized', 'FocusGained', 'FocusLost', 'CursorHold',
    'CursorHoldI', 'CursorMoved', 'CursorMovedI', 'WinEnter',
    'WinLeave', 'TabEnter', 'TabLeave', 'TabNew',
    'TabNewEntered', 'TabClosed', 'CmdwinEnter', 'CmdwinLeave',
    'InsertEnter', 'InsertChange', 'InsertLeave', 'InsertCharPre',
    'TextYankPost', 'TextChanged', 'TextChangedI', 'ColorScheme',
    'RemoteReply', 'QuickFixCmdPre', 'QuickFixCmdPost',
    'SessionLoadPost', 'MenuPopup', 'CompleteDone', 'User',)
class VimMix():
    """Mixin for classes carrying a ``self.vim`` handle: adds echom()."""

    def echom(self, thing):
        # Quote the payload and hand the :echom command to the editor.
        message = 'echom "{0}"'.format(thing)
        self.vim.command(message)
def play_sound(file):
    """Play a WAV file synchronously through PyAudio.

    Returns False when ``file`` does not exist, True after playback finished.
    """
    if not os.path.isfile(file):
        return False
    # open the file for reading.
    wav = wave.open(file, 'rb')
    # create an audio object
    audio = pyaudio.PyAudio()
    # open an output stream matching the wave file's parameters.
    out_stream = audio.open(
        format=audio.get_format_from_width(wav.getsampwidth()),
        channels=wav.getnchannels(),
        rate=wav.getframerate(),
        output=True)
    # read every frame and write it out in one blocking call.
    frames = wav.readframes(wav.getnframes())
    out_stream.write(frames)
    # cleanup stuff.
    out_stream.close()
    audio.terminate()
    return True
def etb(func, *args, **kwargs):
    '''
    error to bool: call func(*args, **kwargs), returning False if it raises.
    '''
    try:
        result = func(*args, **kwargs)
    except Exception:
        return False
    return result
def get_audio_parts(file):
    '''
    see orchestra.__init__.ensemble

    Collect ``file`` plus every existing numbered sibling
    (name.ext, name_1.ext, name_2.ext, ...).
    '''
    def next_part(name):
        # Bump (or start) the trailing _N counter in the stem.
        stem, ext = os.path.splitext(name)
        pieces = stem.split('_')
        current = etb(int, pieces[-1])
        if not current:
            pieces.append('1')
        else:
            pieces[-1] = str(current + 1)
        return '_'.join(pieces) + ext

    found = []
    if os.path.exists(file):
        found.append(file)
        candidate = next_part(file)
        while os.path.exists(candidate):
            found.append(candidate)
            candidate = next_part(candidate)
    return found
class InMemoryWriter(list, object):
    """
    simplify editing files

    On creation you can read all contents either from:
        an open file,
        a list
        a path/name to a file

    While iterating you can set copy=True to edit data
    as you iterate over it

    you can accesses the current position using self.i, useful if
    you are using filter or something like that while iterating
    """
    def __init__(self, file=None, copy=False):
        list.__init__(self)
        self.copy = copy
        # Alias: self.data *is* this list instance.
        self.data = self
        if isinstance(file, str):
            # A string is treated as a path.  original_filename is only set
            # on this branch -- see the note in save().
            try:
                with open(file, 'r') as f:
                    self.writelines(f)
                self.original_filename = file
            except FileNotFoundError as err:
                raise err
        elif file:
            # Any other truthy value is assumed iterable (open file, list, ...).
            self.writelines(file)

    def write(self, stuff):
        # File-like API: buffer one item.
        self.append(stuff)

    def writelines(self, passed_data):
        # File-like API: buffer every item of an iterable.
        for item in passed_data:
            self.data.append(item)

    def __call__(self, copy=None):
        # Calling the instance optionally enables copy-while-iterating and
        # returns self, so it can be used directly in a for-loop.
        if copy:
            self.copy = True
        return self

    def __iter__(self):
        self.i = 0
        if self.copy:
            # Snapshot so the live list may be mutated during iteration.
            self.data_copy = self.data[:]
        return self

    def __next__(self):
        if self.i + 1 > len(self.data):
            # Exhausted: discard the snapshot if one was taken.
            try:
                del self.data_copy
            except AttributeError:
                pass
            raise StopIteration
        if not self.copy:
            requested = self.data[self.i]
        else:
            requested = self.data_copy[self.i]
        self.i += 1
        return requested

    def close(self):
        # File-like API: nothing to release.
        pass

    def readlines(self):
        return self.data

    def save(self, path=False):
        """Write the buffered lines to ``path``, defaulting to the file this
        instance was constructed from."""
        if not path:
            # NOTE(review): raises AttributeError when the instance was not
            # constructed from a path string -- confirm whether intended.
            path = self.original_filename
        with open(path, 'w') as file:
            for row in self.data:
                file.write(row)

    def add(self, thing):
        """Append ``thing`` as a newline-terminated line and persist to disk."""
        self.write(thing+'\n')
        self.save()
def setup_logger(log_file):
    '''
    couldn't get logging module to work fml...
    just call file.add()

    Truncates (or creates) *log_file*, wraps it in an InMemoryWriter
    and seeds it with basic platform information.
    '''
    if os.path.exists(log_file):
        os.remove(log_file)
    open(log_file, 'w').close()
    log = InMemoryWriter(log_file)
    log.add('System is: %s' % platform.platform())
    log.add('Python archetecture is: %s' % platform.architecture()[0])
    log.add('Machine archetecture is: %s' % platform.machine())
    return log
def rate_limited(max_per_second, mode='wait', delay_first_call=False):
    """
    Decorator that make functions not be called faster than
    ``max_per_second`` calls per second.

    set mode to 'kill' to just ignore requests that are faster than the
    rate.
    set mode to 'refresh_timer' to reset the timer on successive calls
    set delay_first_call to True to delay the first call as well
    """
    # Serialises the bookkeeping around the shared timestamp below.
    lock = threading.Lock()
    min_interval = 1.0 / float(max_per_second)
    def decorate(func):
        # Single-element list so the nested closure can rebind the value.
        last_time_called = [0.0]
        @wraps(func)
        def rate_limited_function(*args, **kwargs):
            def run_func():
                # NOTE(review): the lock is released *before* func runs and
                # before the timestamp is written, so the update itself is
                # unprotected -- confirm this is intentional.
                lock.release()
                ret = func(*args, **kwargs)
                last_time_called[0] = time.perf_counter()
                return ret
            lock.acquire()
            elapsed = time.perf_counter() - last_time_called[0]
            left_to_wait = min_interval - elapsed
            if delay_first_call:
                if left_to_wait > 0:
                    if mode == 'wait':
                        time.sleep(left_to_wait)
                        return run_func()
                    elif mode == 'kill':
                        lock.release()
                        return
                else:
                    return run_func()
            else:
                # No first-call delay: run immediately on the very first
                # call or whenever the minimum interval already elapsed.
                if not last_time_called[0] or elapsed > min_interval:
                    return run_func()
                elif mode == 'refresh_timer':
                    print('Ref timer')
                    lock.release()
                    # NOTE(review): ``+=`` pushes the stored timestamp far
                    # into the future (roughly doubling it); ``=`` looks
                    # like the intended operator -- verify.
                    last_time_called[0] += time.perf_counter()
                    return
                elif left_to_wait > 0:
                    if mode == 'wait':
                        time.sleep(left_to_wait)
                        return run_func()
                    elif mode == 'kill':
                        lock.release()
                        return
        return rate_limited_function
    return decorate
if __name__ == '__main__':
    # Manual smoke test: play the bundled sample sound when run directly.
    play_sound(os.path.abspath('woosh.wav'))
    # print(get_audio_parts(['keyboard_slow.wav']))
|
pypa/warehouse
|
warehouse/search/queries.py
|
Python
|
apache-2.0
| 3,381
| 0.000592
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from elasticsearch_dsl import Q
# Release metadata fields that full-text queries are matched against.
SEARCH_FIELDS = [
    "author",
    "author_email",
    "description",
    "download_url",
    "home_page",
    "keywords",
    "license",
    "maintainer",
    "maintainer_email",
    "normalized_name",
    "platform",
    "summary",
]
# Relevance boost applied per field; fields not listed carry weight 1.
SEARCH_BOOSTS = {
    "name": 10,
    "normalized_name": 10,
    "description": 5,
    "keywords": 5,
    "summary": 5,
}
# Display order for classifier-based search filters.
SEARCH_FILTER_ORDER = (
    "Framework",
    "Topic",
    "Development Status",
    "License",
    "Programming Language",
    "Operating System",
    "Environment",
    "Intended Audience",
    "Natural Language",
)
def get_es_query(es, terms, order, classifiers):
    """
    Returns an Elasticsearch query from data from the request.
    """
    if terms:
        query = es.query(gather_es_queries(terms))
    else:
        query = es.query()

    query = query.suggest("name_suggestion", terms, term={"field": "name"})

    # Every requested classifier must prefix-match the document.
    for classifier in classifiers:
        query = query.query("prefix", classifiers=classifier)

    return query_for_order(query, order)
def gather_es_queries(q):
    """Combine phrase (quoted) and best-fields (unquoted) sub-queries
    for the raw query string *q* into a single bool query."""
    quoted, unquoted = filter_query(q)
    must = []
    for phrase in quoted:
        must.append(form_query("phrase", phrase))
    for words in unquoted:
        must.append(form_query("best_fields", words))
    bool_query = Q("bool", must=must)
    # Allow to optionally match on prefix
    # if ``q`` is longer than one character.
    if len(q) > 1:
        bool_query = bool_query | Q("prefix", normalized_name=q)
    return bool_query
def filter_query(s):
    """
    Filters given query with the below regex
    and returns lists of quoted and unquoted strings
    """
    pattern = re.compile(r'(?:"([^"]*)")|([^"]*)')
    quoted, unquoted = [], []
    for in_quotes, bare in pattern.findall(s):
        if in_quotes:
            quoted.append(in_quotes.strip())
        if bare:
            unquoted.append(bare.strip())
    return quoted, unquoted
def form_query(query_type, query):
    """
    Returns a multi match query
    """
    boosted = []
    for field in SEARCH_FIELDS:
        if field in SEARCH_BOOSTS:
            boosted.append(field + "^" + str(SEARCH_BOOSTS[field]))
        else:
            boosted.append(field)
    return Q("multi_match", fields=boosted, query=query, type=query_type)
def query_for_order(query, order):
    """
    Applies transformations on the ES query based on the search order.

    Order is assumed to be a string with the name of a field with an
    optional hyphen to indicate descending sort order.
    """
    if order == "":  # relevance should not sort
        return query

    # Strip an optional leading "-" to obtain the bare field name.
    field = order[order.find("-") + 1 :]
    direction = "desc" if order.startswith("-") else "asc"
    sort_info = {
        field: {
            "order": direction,
            "unmapped_type": "long",
        }
    }
    return query.sort(sort_info)
|
patmun/pynetdicom
|
netdicom/DIMSEprovider.py
|
Python
|
mit
| 4,422
| 0.000678
|
#
# Copyright (c) 2012 Patrice Munger
# This file is part of pynetdicom, released under a modified MIT license.
# See the file license.txt included with this distribution, also
# available at http://pynetdicom.googlecode.com
#
import DIMSEmessages
import DIMSEparameters
from DIMSEmessages import DIMSEMessage
from DULparameters import P_DATA_ServiceParameters
import time
try: from queue import Empty
except ImportError: from Queue import Empty
import logging
logger = logging.getLogger(__name__)
class DIMSEServiceProvider(object):
    """Converts DIMSE primitives to DIMSE messages, fragments them into
    P-DATA primitives for the DUL provider, and re-assembles incoming
    P-DATA back into DIMSE primitives."""

    def __init__(self, DUL):
        # DUL service provider used for the actual PDU exchange.
        self.DUL = DUL
        # Partially assembled incoming DIMSE message, kept across calls.
        self.message = None

    def Send(self, primitive, id, maxpdulength):
        # take a DIMSE primitive, convert it to one or more DUL primitives
        # and send it.  A primitive carrying a MessageID is an outgoing
        # request (RQ); otherwise it is a response (RSP).
        dimse_msg = None
        if primitive.__class__ == DIMSEparameters.C_ECHO_ServiceParameters:
            if primitive.MessageID is not None:
                dimse_msg = DIMSEmessages.C_ECHO_RQ_Message()
            else:
                dimse_msg = DIMSEmessages.C_ECHO_RSP_Message()
        elif primitive.__class__ == DIMSEparameters.C_STORE_ServiceParameters:
            if primitive.MessageID is not None:
                dimse_msg = DIMSEmessages.C_STORE_RQ_Message()
            else:
                dimse_msg = DIMSEmessages.C_STORE_RSP_Message()
        elif primitive.__class__ == DIMSEparameters.C_FIND_ServiceParameters:
            if primitive.MessageID is not None:
                dimse_msg = DIMSEmessages.C_FIND_RQ_Message()
            else:
                # (this line was corrupted in the source; reconstructed)
                dimse_msg = DIMSEmessages.C_FIND_RSP_Message()
        elif primitive.__class__ == DIMSEparameters.C_GET_ServiceParameters:
            if primitive.MessageID is not None:
                dimse_msg = DIMSEmessages.C_GET_RQ_Message()
            else:
                dimse_msg = DIMSEmessages.C_GET_RSP_Message()
        elif primitive.__class__ == DIMSEparameters.C_MOVE_ServiceParameters:
            if primitive.MessageID is not None:
                dimse_msg = DIMSEmessages.C_MOVE_RQ_Message()
            else:
                dimse_msg = DIMSEmessages.C_MOVE_RSP_Message()
        logger.debug('DIMSE message of class %s' % dimse_msg.__class__)
        dimse_msg.FromParams(primitive)
        logger.debug('DIMSE message: %s', str(dimse_msg))
        # Fragment into P-DATA primitives no larger than maxpdulength.
        pdatas = dimse_msg.Encode(id, maxpdulength)
        logger.debug('encoded %d fragments' % len(pdatas))
        for ii, pp in enumerate(pdatas):
            logger.debug('sending pdata %d of %d' % (ii + 1, len(pdatas)))
            self.DUL.Send(pp)
        logger.debug('DIMSE message sent')

    def Receive(self, Wait=False, Timeout=120):
        """Return ``(primitive, id)`` for the next complete DIMSE message,
        or ``(None, None)`` when none is available / a non-P-DATA event
        arrives first."""
        logger.debug("In DIMSEprovider.Receive")
        if self.message is None:
            self.message = DIMSEMessage()
        if Wait:
            # loop until complete DIMSE message is received
            logger.debug('Entering loop for receiving DIMSE message')
            # If connection fails, the peek loop can iterate forever, as the
            # DUL Receive never happens. Approximate a timeout to abort.
            itrs = 0
            delay = 0.001
            while 1:
                time.sleep(delay)
                nxt = self.DUL.Peek()
                if nxt is None:
                    itrs += 1
                    if Timeout and itrs > Timeout / float(delay):
                        # just like the DUL.Receive would on timeout
                        raise Empty('Timeout waiting for DIMSE message')
                    else:
                        continue
                if nxt.__class__ is not P_DATA_ServiceParameters:
                    return None, None
                if self.message.Decode(self.DUL.Receive(Wait, Timeout)):
                    # Message complete: hand it out and reset state.
                    tmp = self.message
                    self.message = None
                    logger.debug('Decoded DIMSE message: %s', str(tmp))
                    return tmp.ToParams(), tmp.ID
        else:
            cls = self.DUL.Peek().__class__
            if cls not in (type(None), P_DATA_ServiceParameters):
                logger.debug('Waiting for P-DATA but received %s', cls)
                return None, None
            if self.message.Decode(self.DUL.Receive(Wait, Timeout)):
                tmp = self.message
                self.message = None
                logger.debug('Received DIMSE message: %s', tmp)
                return tmp.ToParams(), tmp.ID
            else:
                return None, None
|
TinEye/tineyeservices_python
|
tineyeservices/mobileengine_request.py
|
Python
|
mit
| 1,115
| 0
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 TinEye. All rights reserved worldwide.
from .matchengine_request import MatchEngineRequest
class MobileEngineRequest(MatchEngineRequest):
    """
    Class to send requests to a MobileEngine API.

    Adding an image using data:

        >>> from tineyeservices import MobileEngineRequest, Image
        >>> api = MobileEngineRequest(api_url='http://localhost/rest/')
        >>> image = Image(filepath='/path/to/image.jpg')
        >>> api.add_image(images=[image])
        {u'error': [], u'method': u'add', u'result': [], u'status': u'ok'}

    Searching for an image using an image URL:

        >>> api.search_url(url='https://tineye.com/images/meloncat.jpg')
        {'error': [],
         'method': 'search',
         'result': [{'filepath': 'match1.png',
                     'score': '97.2',
                     'overlay': 'overlay/query.png/match1.png[...]'}],
        'status': 'ok'}
    """

    def __repr__(self):
        # Note: this includes the password in debug output.
        return "MobileEngineRequest(api_url=%r, username=%r, password=%r)" %\
            (self.api_url, self.username, self.password)
|
FNNDSC/roi_tag
|
roi_gcibs.py
|
Python
|
mit
| 37,159
| 0.010603
|
#!/usr/bin/env python
'''
'roi_gcibs.py' compares two groups informed by an a priori bootstrap analysis.
'''
import os
import sys
import argparse
import tempfile, shutil
import json
import pprint
import copy
from collections import defaultdict
from _common import systemMisc as misc
from _common import crun
import error
import message
import stage
import fnndsc as base
class FNNDSC_roigcibs(base.FNNDSC):
    '''
    This class is a specialization of the FNNDSC base and geared to dyslexia
    curvature analysis.
    '''

    #
    # Class member variables -- if declared here are shared
    # across all instances of this class
    #

    # Error lookup table: maps a symbolic error name to the action being
    # attempted, a human-readable description, and the process exit code.
    _dictErr = {
        'subjectSpecFail': {
            'action': 'examining command line arguments, ',
            'error': 'it seems that no subjects were specified.',
            'exitCode': 10},
        'noFreeSurferEnv': {
            'action': 'examining environment, ',
            'error': 'it seems that the FreeSurfer environment has not been sourced.',
            'exitCode': 11},
        'noStagePostConditions': {
            'action': 'querying a stage for its exitCode, ',
            'error': 'it seems that the stage has not been specified.',
            'exitCode': 12},
        'subjectDirnotExist': {
            'action': 'examining the <subjectDirectories>, ',
            'error': 'the directory does not exist.',
            'exitCode': 13},
        'Load': {
            'action': 'attempting to pickle load object, ',
            'error': 'a PickleError occured.',
            'exitCode': 14},
        'outDirNotCreate': {
            'action': 'attempting to create the <outDir>, ',
            'error': 'a system error was encountered. Do you have create permission?',
            'exitCode': 15},
        'workingDirNotExist': {
            'action': 'attempting to access the <workingDir>, ',
            'error': 'a system error was encountered. Does the directory exist?',
            'exitCode': 16},
    }
def l_pval(self):
    # List of p-value thresholds to process.
    return self._l_pval

def l_roi(self):
    # List of ROIs to process.
    return self._l_ROI

def l_hemisphere(self):
    # List of hemispheres to process.
    return self._l_hemi

def l_surface(self):
    # List of surfaces to process.
    return self._l_surface

def l_statFunc(self):
    # List of statistical functions to process.
    return self._l_statFunc

def l_group(self):
    # List of groups to process.
    return self._l_group

def l_curvFunc(self):
    # List of curvature functions to process.
    return self._l_curvFunc

def pval(self):
    # Currently tracked p-value threshold.
    return self._str_pval
def topDir(self, *args):
    # Getter/setter: with an argument sets the top-level directory,
    # otherwise returns it (the setter form returns None).
    if len(args):
        self._topDir = args[0]
    else:
        return self._topDir

def dirSpec(self):
    """
    Return the dirSpec based on internal pipeline._str_* variables
    """
    return '%s/%s/%s/%s/%s/%s/%s' % (
        self.outDir(),
        self._str_annotation,
        self._str_group,
        self._str_pval,
        self._str_statFunc,
        self._str_surface,
        self._str_hemi
    )
def dirSpecPartial(self):
    """
    Return the dirSpec based on internal pipeline._str_* variables w/o
    the leading directories.
    """
    # (reconstructed from corrupted source lines)
    return '%s/%s/%s/%s' % (self._str_pval,
                            self._str_statFunc,
                            self._str_surface,
                            self._str_hemi)
def namespec(self, *args):
    '''
    Return the namespec based on internal pipeline._str_* variables.

    An optional positional argument overrides the default "-" separator.
    '''
    str_sep = args[0] if len(args) else "-"
    fields = (self._str_annotation,
              self._str_group,
              self._str_pval,
              self._str_statFunc,
              self._str_surface,
              self._str_hemi)
    return str_sep.join('%s' % field for field in fields)
def schedulerStdOutDir(self, *args):
    # Getter/setter for the scheduler stdout directory.
    if len(args):
        self._str_schedulerStdOutDir = args[0]
    else:
        return self._str_schedulerStdOutDir

def schedulerStdErrDir(self, *args):
    # Getter/setter for the scheduler stderr directory.
    if len(args):
        self._str_schedulerStdErrDir = args[0]
    else:
        return self._str_schedulerStdErrDir

def roi(self):
    # Currently tracked ROI.
    return self._str_roi

def surface(self):
    # Currently tracked surface.
    return self._str_surface

def hemi(self):
    # Currently tracked hemisphere.
    return self._str_hemi

def statFunc(self):
    # Currently tracked statistical function.
    return self._str_statFunc

def curvFunc(self):
    # Currently tracked curvature function.
    return self._str_curvFunc

def outDir(self, *args):
    # Getter/setter for the output directory.
    if len(args):
        self._outDir = args[0]
    else:
        return self._outDir

def workingDir(self, *args):
    # Getter/setter for the working directory.
    if len(args):
        self._workingDir = args[0]
    else:
        return self._workingDir

def clobber(self, *args):
    # Getter/setter for the clobber (overwrite-existing-output) flag.
    if len(args):
        self._b_clobber = args[0]
    else:
        return self._b_clobber

def group(self):
    # Currently tracked group.
    return self._str_group
def __init__(self, **kwargs):
"""
Basic constructor. Checks on named input args, checks that files
exist and creates directories.
"""
base.FNNDSC.__init__(self, **kwargs)
self._lw = 120
self._rw = 20
self._l_ROI = []
self._l_pval = []
self._l_group = []
self._l_surface = []
self._l_statFunc = []
self._l_curvFunc = []
self._l_hemi = []
self._l_annot = []
self._outDir = ''
self._workingDir = ''
self._stageslist = '12'
self._f_lowerBoundHard = 0.0
self._f_lowerBoundSoft = 0.0
self._f_upperBoundSoft = 0.0
# Internal tracking vars
self._str_pval = ''
self._str_group = ''
self._str_roi = ''
self._str_hemi = ''
self._str_surface = ''
self._str_statFunc = ''
self._str_curvFunc = ''
self._str_annotation = ''
self._topDir = ''
self._d_bootstrapOccurrence = Tree()
self._d_bootstrapThreshold = Tree()
self._d_bootstrapFiltered = Tree()
# Scheduler std out/err dirs
self._str_schedulerStdOutDir = '~/scratch'
self._str_schedulerStdErrDir = '~/scratch'
self._b_clobber = False
for key, value in kwargs.iteritems():
if key == 'outDir': self._outDir = value
if key == 'workingDir': self._workingDir = value
if key == 'stages': self._stageslist = value
if key == 'curvFunc': self._l_curvFunc = value.split(':')
if key == 'pval': self._l_pval = value.split(',')
if key == 'group': self._l_group = value.split(',')
if key == 'surface': self._l_surface = value.split(',')
if key == 'statFunc': self._l_statFunc = value.split(',')
if key == 'hemi': self._l_hemi = value.split(',')
if key
|
dims/heat
|
heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py
|
Python
|
apache-2.0
| 2,285
| 0
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from oslo_serialization import jsonutils
import sqlalchemy
from sqlalchemy.orm import sessionmaker
def upgrade(migrate_engine):
    """Clamp invalid or pre-Icehouse template version dates.

    For each raw_template row, any known version key whose value is not
    a valid ``YYYY-MM-DD`` date, or whose date predates the Icehouse
    release, is rewritten to the canonical date for that format.
    """
    Session = sessionmaker(bind=migrate_engine)
    session = Session()
    meta = sqlalchemy.MetaData(bind=migrate_engine)
    templ_table = sqlalchemy.Table('raw_template', meta, autoload=True)
    raw_templates = templ_table.select().execute()
    # NOTE (sdake) 2014-04-24 is the date of the Icehouse release. It is
    # possible that folks could continue to make errors in their templates
    # right up until the release of Icehouse. For stacks with version dates
    # in the future, they remain unlistable. This is to prevent future
    # breakage when new versions come out
    patch_date = time.strptime('2014-04-24', '%Y-%m-%d')
    version_map = [('heat_template_version', '2013-05-23'),
                   ('AWSTemplateFormatVersion', '2010-09-09'),
                   ('HeatTemplateFormatVersion', '2012-12-12')]
    for raw_template in raw_templates:
        # Parse once per row; the original re-parsed per version key,
        # which could discard one fix when several keys were present.
        template = jsonutils.loads(raw_template.template)
        modified = False
        for key, date in version_map:
            if key not in template:
                continue
            try:
                dt = time.strptime(template[key], '%Y-%m-%d')
            except (TypeError, ValueError):
                dt = None
            if dt is None or dt < patch_date:
                template[key] = date
                modified = True
        if modified:
            (templ_table.update().
                where(templ_table.c.id == raw_template.id).
                values(template=jsonutils.dumps(template)).
                execute())
    session.commit()
    session.close()
|
kubeflow/kubeflow
|
py/kubeflow/kubeflow/ci/notebook_servers/notebook_server_jupyter_scipy_tests.py
|
Python
|
apache-2.0
| 1,826
| 0.001095
|
""""Argo Workflow for testing notebook-server-jupyter-scipy OCI image"""
from kubeflow.kubef
|
low.ci import workflow_utils
from kubeflow.testing import argo_build_util
class Builder(workflow_utils.ArgoTestBuilder):
    """Argo workflow builder that test-builds the
    notebook-server-jupyter-scipy OCI image with Kaniko."""

    def __init__(self, name=None, namespace=None, bucket=None,
                 test_target_name=None, **kwargs):
        super().__init__(name=name, namespace=namespace, bucket=bucket,
                         test_target_name=test_target_name, **kwargs)

    def build(self):
        """Build the Argo workflow graph"""
        # (a stray corrupted line was removed here)
        workflow = self.build_init_workflow(exit_dag=False)
        task_template = self.build_task_template()
        # Test building notebook-server-jupyter-scipy image using Kaniko
        dockerfile = ("%s/components/example-notebook-servers"
                      "/jupyter-scipy/Dockerfile") % self.src_dir
        context = "dir://%s/components/example-notebook-servers/jupyter-scipy/" % self.src_dir
        destination = "notebook-server-jupyter-scipy-test"
        kaniko_task = self.create_kaniko_task(task_template, dockerfile,
                                              context, destination,
                                              no_push=True)
        argo_build_util.add_task_to_dag(workflow,
                                        workflow_utils.E2E_DAG_NAME,
                                        kaniko_task, [self.mkdir_task_name])
        # Set the labels on all templates
        workflow = argo_build_util.set_task_template_labels(workflow)
        return workflow
def create_workflow(name=None, namespace=None, bucket=None, **kwargs):
    """
    Args:
        name: Name to give to the workflow. This can also be used to name
              things associated with the workflow.
    """
    return Builder(name=name, namespace=namespace, bucket=bucket,
                   **kwargs).build()
|
ssamot/vgdl_competition
|
src/server/db_utils.py
|
Python
|
gpl-3.0
| 3,033
| 0.012199
|
#!/usr/bin/python
# Python DB APIs:
#for more: http://www.mikusa.com/python-mysql-docs/index.html
#and more: http://zetcode.com/db/mysqlpython/
#and else: http://mysql-python.sourceforge.net/MySQLdb.html
import MySQLdb as mdb
import itertools
from pprint import pprint
import ConfigParser
def db_connect(properties_file):
    """Open a MySQL connection using the [Database] section of the given
    properties file (host, user, passwd, db)."""
    config = ConfigParser.ConfigParser()
    config.read(properties_file)
    section = "Database"
    return mdb.connect(host=config.get(section, "host"),
                       user=config.get(section, "user"),
                       passwd=config.get(section, "passwd"),
                       db=config.get(section, "db"))
# Gets all game info from a given game ID. Data returned in a dictionary.
def get_game_info(db, game_id):
    """Return all columns of the game with *game_id* as a dict (or None)."""
    # This cursor allows access as in a dictionary.
    cur = db.cursor(mdb.cursors.DictCursor)
    # PEP 249: parameters must be a sequence; (game_id) alone is just a
    # parenthesized value, so use a one-element tuple.
    cur.execute("SELECT * from games where game_id = %s", (game_id,))
    game_data = cur.fetchone()
    return game_data
# Gets all info from all levels given a game ID. Data returned in array.
def get_levels_from_game(db, game_id):
#This cursor allows access as AN ARRAY:
cur = db.cursor()
cur.execute("SELECT * from levels where game_id = %s", (game_id))
levels = cur.fetchall()
return levels
# Gets ids from all levels given a game ID. Data returned in array
def get_level_ids_from_game(db, game_id):
#This cursor allows access as AN ARRAY:
cur = db.cursor()
cur.execute("SELECT level_id from levels where game_id = %s", (game_id))
levels = cur.fetchall()
level_ids = list(itertools.chain.from_iterable(levels))
return level_ids
# Gets all info from a level given a level ID. Data returned in a dictionary
def get_level_info(db, level_id):
#This cursor allows access as in a dictionary:
cur = db.cursor(mdb.cursors.DictCursor)
cur.execute("SELECT * from levels where level_id = %s", (level_id))
level = cur.fetchone()
return level
if __name__=="__main__":
#Connect to database.
try:
print ""
print " ------- Game info from game_id ------- "
game_data = get_game_info(db,1)
pprint(game_data)
print ""
print " ------- Level IDs from game_id ------- "
level_ids_from_game = get_level_ids_from_game(db,1)
pprint(level_ids_from_game)
print ""
print " ------- Levels from game_id ------- "
levels = get_levels_from_game(db,1)
print(levels)
print ""
print " ------- Level info from level_id ------- "
level = get_level_info(db,3)
pprint(level)
print ""
except Exception, e:
print "Error accessing database, " + str(e)
# -*- coding: utf-8 -*-
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.