| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
#!/usr/bin/env python
import os
import sys
import logging
import unittest
sys.path.insert(0, os.path.abspath(".."))
sys.path.insert(0, os.path.abspath("../problem/.libs")) # because of _pyabrt
os.environ["PATH"] = "{0}:{1}".format(os.path.abspath(".."), os.environ["PATH"])
from nose import tools
from base import ProblematicTestCase
import problem
class GetTestCase(ProblematicTestCase):
def test_get(self):
prob = self.create_problem()
prob.add_current_process_data()
ident = prob.save()
prob2 = problem.get(ident, False, self.proxy)
prob3 = problem.get(ident, True, self.proxy)
tools.eq_(prob.reason, prob2.reason)
tools.eq_(prob.reason, prob3.reason)
prob.delete()
def test_get_nonexistent(self):
tools.ok_(problem.get('random', False, self.proxy) is None)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
unittest.main()
| mhabrnal/abrt | src/python-problem/tests/test_get.py | Python | gpl-2.0 | 944 |
from globals import *
import libtcodpy as tcod
import alife
import life
def create_menu(menu=[], position=[0,0], title='Untitled', format_str='$k: $v', padding=MENU_PADDING,
on_select=None, on_change=None, on_close=None, on_move=None, dim=True, alignment='', action=None,
close_on_select=False):
_menu = {'settings': {'position': list(position),'title': title,'padding': padding,'dim': dim,'format': format_str},
'on_select': on_select,
'on_change': on_change,
'on_move': on_move,
'on_close': on_close,
'close_on_select': close_on_select,
'alignment': alignment,
'index': 0,
'values':{},
'action':action}
#TODO: Does this need to be copied?
_menu['menu'] = menu[:]
_size = [len(title),len(_menu['menu'])+2+(_menu['settings']['padding'][1]*2)]
_uid = 0
for entry in _menu['menu']:
entry['uid'] = _uid
_uid+=1
for value in range(len(entry['values'])):
_line = format_entry(_menu['settings']['format'], entry, value=value)
if len(_line) > _size[0]:
_size[0] = len(_line)
_menu['settings']['size'] = (_size[0]+(_menu['settings']['padding'][0]*2),_size[1])
_menu['settings']['console'] = tcod.console_new(_menu['settings']['size'][0],_menu['settings']['size'][1])
MENUS.append(_menu)
return MENUS.index(_menu)
def create_item(item_type,key,values,icon=' ',enabled=True, color=(tcod.gray, tcod.white), **kwargs):
if not isinstance(values,list):
values = [values]
_item = {'type': item_type,
'key': key,
'enabled': enabled,
'icon': icon,
'color': color,
'values': values,
'value': 0}
if _item['type'] in ['title','spacer']:
_item['enabled'] = False
_item.update(kwargs)
return _item
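# Note: remove_item_from_menus below deletes, from every menu, each item whose
# entries all match the key/value pairs in `matching`; it iterates over a copy
# of the item list so removal during iteration is safe.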
def remove_item_from_menus(matching):
for menu in MENUS:
for item in menu['menu'][:]:
_match = True
for key in matching:
#print item.keys()
if not key in item or not matching[key] == item[key]:
_match = False
break
if _match:
menu['menu'].remove(item)
def format_entry(format_str, entry, value=-1):
if value == -1:
value = entry['value']
return format_str.replace('$k', str(entry['key']))\
.replace('$v', str(entry['values'][value]))\
.replace('$i', str(entry['icon']))
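# Illustrative example (hypothetical entry dict):
#   format_entry('$k: $v', {'key': 'Volume', 'values': [25, 50], 'value': 0, 'icon': '*'})
# substitutes the placeholders and returns 'Volume: 25'.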
def redraw_menu(menu):
tcod.console_clear(menu['settings']['console'])
def draw_menus():
for menu in MENUS:
_y_offset = menu['settings']['padding'][1]
tcod.console_set_default_foreground(menu['settings']['console'], tcod.white)
tcod.console_print(menu['settings']['console'],
menu['settings']['padding'][0],
_y_offset,
menu['settings']['title'])
_y_offset += 2
for item in menu['menu']:
if item['type'] == 'title':
tcod.console_set_default_foreground(menu['settings']['console'], tcod.white)
_line = format_entry('- $k',item)
elif item['type'] == 'spacer':
tcod.console_set_default_foreground(menu['settings']['console'], tcod.white)
_line = item['key']*(menu['settings']['size'][0]-menu['settings']['padding'][0])
elif item['type'] == 'input':
#TODO: Input check?
if MENUS.index(menu) == ACTIVE_MENU['menu'] and menu['menu'].index(item) == menu['index'] and item['enabled']:
#TODO: Colors
tcod.console_set_default_foreground(menu['settings']['console'], item['color'][1])
elif not item['enabled']:
tcod.console_set_default_foreground(menu['settings']['console'], tcod.dark_sepia)
elif menu['settings']['dim']:
tcod.console_set_default_foreground(menu['settings']['console'], item['color'][0])
_line = format_entry(menu['settings']['format'],item)
else:
if MENUS.index(menu) == ACTIVE_MENU['menu'] and menu['menu'].index(item) == menu['index'] and item['enabled']:
#TODO: Colors
tcod.console_set_default_foreground(menu['settings']['console'], item['color'][1])
elif not item['enabled']:
tcod.console_set_default_foreground(menu['settings']['console'], tcod.dark_sepia)
elif menu['settings']['dim']:
tcod.console_set_default_foreground(menu['settings']['console'], item['color'][0])
#TODO: Per-item formats here
_line = format_entry(menu['settings']['format'],item)
tcod.console_print(menu['settings']['console'],
menu['settings']['padding'][0],
_y_offset,
_line)
_y_offset += 1
def align_menus():
for menu in MENUS:
if not MENUS.index(menu):
continue
if not menu['alignment'] and menu['settings']['position'][1] > 1:
continue
if not 'position_mod' in menu['settings']:
menu['settings']['position_mod'] = menu['settings']['position'][:]
if menu['alignment'] == 'botleft':
menu['settings']['position'][0] = 1
menu['settings']['position'][1] = WINDOW_SIZE[1]-menu['settings']['size'][1]-1
if menu['alignment']:
menu['settings']['position'][0] += menu['settings']['position_mod'][0]
menu['settings']['position'][1] += menu['settings']['position_mod'][1]
continue
_prev_menu = MENUS[MENUS.index(menu)-1]
_y_mod = _prev_menu['settings']['position'][1]+_prev_menu['settings']['size'][1]
menu['settings']['position'][1] = _y_mod+1
def delete_menu(id, abort=False):
_menu = get_menu(id)
if _menu['on_close'] and abort:
_entry = get_selected_item(id, _menu['index'])
_menu['on_close'](_entry)
if ACTIVE_MENU['menu'] == id:
ACTIVE_MENU['menu'] -= 1
MENUS.pop(id)
def delete_active_menu(abort=True):
if MENUS:
delete_menu(ACTIVE_MENU['menu'], abort=abort)
return True
return False
def get_menu(id):
return MENUS[id]
def get_menu_by_name(name):
for _menu in MENUS:
if _menu['settings']['title'] == name:
return MENUS.index(_menu)
return -1
def activate_menu(id):
ACTIVE_MENU['menu'] = id
MENUS[id]['index'] = find_item_after(MENUS[id])
if MENUS[id]['on_move']:
_entry = get_selected_item(id, MENUS[id]['index'])
return MENUS[id]['on_move'](_entry)
def activate_menu_by_name(name):
ACTIVE_MENU['menu'] = get_menu_by_name(name)
def find_item_before(menu,index=0):
_items = menu['menu'][:index][:]
_items.reverse()
for item in _items:
if item['enabled']:
return menu['menu'].index(item)
return find_item_before(menu,index=len(menu['menu']))
def find_item_after(menu,index=-1):
for item in menu['menu'][index+1:]:
if item['enabled']:
return menu['menu'].index(item)
return find_item_after(menu)
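# Note: find_item_before and find_item_after wrap around -- if no enabled item
# exists before/after the given index, the recursive call with the default index
# restarts the search from the other end of the menu.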
def get_menu_index_by_key(menu, key):
menu = get_menu(menu)
_i = 0
for entry in menu['menu']:
if entry['key'] == key:
return _i
_i += 1
return -1
def get_menu_index_by_flag(menu, flag, value):
menu = get_menu(menu)
_i = 0
for entry in menu['menu']:
if entry[flag] == value:
return _i
_i += 1
return -1
def go_to_menu_index(menu, index):
get_menu(menu)['index'] = index
if get_menu(menu)['on_move']:
_entry = get_selected_item(menu, index)
return get_menu(menu)['on_move'](_entry)
def move_up(menu, index):
menu['index'] = find_item_before(menu, index=index)
if menu['on_move']:
_entry = get_selected_item(MENUS.index(menu), menu['index'])
return menu['on_move'](_entry)
def move_down(menu, index):
menu['index'] = find_item_after(menu, index=index)
if menu['on_move']:
_entry = get_selected_item(MENUS.index(menu), menu['index'])
return menu['on_move'](_entry)
def previous_item(menu,index):
if menu['menu'][index]['value']:
menu['menu'][index]['value']-=1
redraw_menu(menu)
def next_item(menu,index):
if menu['menu'][index]['value']<len(menu['menu'][index]['values'])-1:
menu['menu'][index]['value']+=1
redraw_menu(menu)
def get_selected_item(menu,index):
menu = get_menu(menu)
_entry = menu['menu'][index]
return _entry
def item_selected(menu_id, index):
_entry = get_selected_item(menu_id, index)
_menu = get_menu(menu_id)
if _menu['close_on_select']:
delete_menu(menu_id)
if _menu['on_select']:
return _menu['on_select'](_entry)
return False
def item_changed(menu,index):
_entry = get_selected_item(menu,index)
menu = get_menu(menu)
if menu['on_change']:
return menu['on_change'](_entry)
else:
return False
def is_getting_input(menu_id):
_item = get_selected_item(menu_id, MENUS[menu_id]['index'])
if _item['type'] == 'input':
return _item
return False
def is_any_menu_getting_input():
for menu_id in [MENUS.index(m) for m in MENUS]:
_item = is_getting_input(menu_id)
if _item:
return _item
return False
def _create_target_list(target_list):
_menu_items = []
_near_targets = []
_group_targets = []
if LIFE[SETTINGS['controlling']]['group']:
_group = alife.groups.get_group(LIFE[SETTINGS['controlling']], LIFE[SETTINGS['controlling']]['group'])
else:
_group = None
for target_id in target_list:
if LIFE[target_id]['dead'] or target_id == SETTINGS['controlling']:
continue
if target_id in LIFE[SETTINGS['controlling']]['seen']:
_near_targets.append(target_id)
if _group and target_id in _group['members']:
_group_targets.append(target_id)
if _near_targets:
_menu_items.append(create_item('title', 'Near', None))
for target_id in _near_targets:
if not _menu_items:
SETTINGS['following'] = target_id
_color = life.draw_life_icon(LIFE[target_id])[1]
_menu_items.append(create_item('single',
' '.join(LIFE[target_id]['name']),
None,
target=target_id,
color=(_color, tcod.color_lerp(_color, tcod.white, 0.5))))
if _group_targets:
_menu_items.append(create_item('title', 'Group', None))
for target_id in _group_targets:
if not _menu_items:
SETTINGS['following'] = target_id
_color = life.draw_life_icon(LIFE[target_id])[1]
_menu_items.append(create_item('single',
' '.join(LIFE[target_id]['name']),
None,
target=target_id,
color=(_color, tcod.color_lerp(_color, tcod.white, 0.5))))
if not target_list:
return []
_menu_items.append(create_item('title', 'All', None))
for target_id in target_list:
if target_id == SETTINGS['controlling']:
continue
if not _menu_items:
SETTINGS['following'] = target_id
_color = life.draw_life_icon(LIFE[target_id])[1]
_menu_items.append(create_item('single',
' '.join(LIFE[target_id]['name']),
None,
target=target_id,
color=(_color, tcod.color_lerp(_color, tcod.white, 0.5))))
return _menu_items
def create_target_list():
return _create_target_list(LIFE[SETTINGS['controlling']]['seen'])
| flags/Reactor-3 | menus.py | Python | mit | 10,705 |
from . import base
from . import mixins
from datetime import date
class TransformedRecord(
mixins.GenericCompensationMixin,
mixins.GenericDepartmentMixin, mixins.GenericIdentifierMixin,
mixins.GenericJobTitleMixin, mixins.GenericPersonMixin,
mixins.MembershipMixin, mixins.OrganizationMixin, mixins.PostMixin,
mixins.RaceMixin, mixins.LinkMixin, base.BaseTransformedRecord):
MAP = {
'last_name': 'Last name',
'first_name': 'First name',
'middle_name': 'Middle name',
'department': 'Department',
'job_title': 'Job Title',
'hire_date': 'Hire Date',
# We received several sheets from Houston: one had an important Status column
# but was missing thousands of salaries, and another had all the right salaries
# but was missing the Status column. We merged those two files into one,
# which is why the compensation column needs to be 'Annual Salary_x':
# the suffix was added when the join happened.
# '_x' = the first sheet we joined on
# '_y' = the second sheet we joined on
# (a sketch of how such a join produces these suffixes follows below)
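# A minimal sketch (assuming the join was done with pandas; the file and key
# names below are hypothetical, not the actual sheets) of how overlapping
# columns pick up the '_x'/'_y' suffixes:
#   import pandas as pd
#   status_sheet = pd.read_csv('sheet_with_status.csv')     # hypothetical
#   salary_sheet = pd.read_csv('sheet_with_salaries.csv')   # hypothetical
#   merged = status_sheet.merge(salary_sheet, on='Employee ID')
#   # pandas' default suffixes ('_x', '_y') rename overlapping columns:
#   # the left frame's 'Annual Salary' becomes 'Annual Salary_x' and the
#   # right frame's becomes 'Annual Salary_y'.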
'compensation': 'Annual Salary_x',
'gender': 'Gender',
'race': 'Racial Category',
'employment_type': 'Employee Grp',
'employment_subtype': 'Employee Subgroup',
}
NAME_FIELDS = ('first_name', 'middle_name', 'last_name', )
ORGANIZATION_NAME = 'Houston'
ORGANIZATION_CLASSIFICATION = 'City'
description = 'Annual salary'
DATE_PROVIDED = date(2019, 7, 17)
URL = "http://raw.texastribune.org.s3.amazonaws.com/houston/salaries/2019-11/TPIA_request_tt_edit.csv"
gender_map = {'Female': 'F', 'Male': 'M'}
@property
def is_valid(self):
# We have two people with names of 'NA'
# So let's account for them
if self.first_name == '' and self.last_name == 'YAO':
self.first_name = 'NA'
if self.last_name == '' and self.first_name == 'JOHN':
self.last_name = 'NA'
return self.first_name != ''
@property
def person(self):
name = self.get_name()
r = {
'family_name': name.last,
'given_name': name.first,
'additional_name': name.middle,
'name': unicode(name),
'gender': self.gender_map[self.gender.strip()]
}
return r
@property
def compensation(self):
status = self.get_mapped_value('employment_type')
compensation = self.get_mapped_value('compensation')
if status == 'Full Time':
return self.get_mapped_value('compensation')
else:
return 0
@property
def compensation_type(self):
status = self.get_mapped_value('employment_type')
compensation = self.get_mapped_value('compensation')
if status == 'Full Time':
return 'FT'
else:
return 'PT'
@property
def department(self):
dept = self.get_mapped_value('department').replace("'S","'s")
return dept
@property
def description(self):
status = self.get_mapped_value('employment_type')
sub_status = self.get_mapped_value('employment_subtype')
if status == 'Full Time':
return 'Annual salary'
elif status == 'HFD Deferred Term':
return 'Deferred term: Paid hourly rate, which is not shown'
elif status == 'Temporary':
return 'Temporary: Paid hourly rate, which is not shown'
elif 'Part Time' in status:
return 'Part-time: Paid hourly rate, which is not shown'
@property
def race(self):
given_race = self.get_mapped_value('race')
if given_race == '':
given_race = 'Unknown/Not Specified'
return {'name': given_race}
transform = base.transform_factory(TransformedRecord)
| texastribune/tx_salaries | tx_salaries/utils/transformers/houston.py | Python | apache-2.0 | 3,915 |
from setuptools import setup
setup(
name='pyqode-uic',
version='0.1.1',
py_modules=['pyqode_uic'],
url='https://github.com/pyQode/pyqode-pyuic',
license='MIT',
author='Colin Duquesnoy',
author_email='colin.duquesnoy',
description='pyQode Qt ui compiler',
entry_points={
'console_scripts': [
'pyqode-uic = pyqode_uic:main_uic',
'pyqode-rcc = pyqode_uic:main_rcc',
],
},
classifiers=[
'Environment :: X11 Applications :: Qt',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
]
)
| pyQode/pyqode-uic | setup.py | Python | mit | 958 |
import traceback
import sys
try:
from django.apps import AppConfig
except ImportError:
AppConfig = object
class WooeyConfig(AppConfig):
name = 'wooey'
verbose_name = 'Wooey'
def ready(self):
from .backend import utils
try:
utils.load_scripts()
except:
sys.stderr.write('Unable to load scripts:\n{}\n'.format(traceback.format_exc()))
from . import signals
| wooey/django-djangui | wooey/apps.py | Python | bsd-3-clause | 434 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v10.enums.types import conversion_action_category
from google.ads.googleads.v10.enums.types import (
conversion_value_rule_set_status,
)
from google.ads.googleads.v10.enums.types import value_rule_set_attachment_type
from google.ads.googleads.v10.enums.types import value_rule_set_dimension
__protobuf__ = proto.module(
package="google.ads.googleads.v10.resources",
marshal="google.ads.googleads.v10",
manifest={"ConversionValueRuleSet",},
)
class ConversionValueRuleSet(proto.Message):
r"""A conversion value rule set
Attributes:
resource_name (str):
Immutable. The resource name of the conversion value rule
set. Conversion value rule set resource names have the form:
``customers/{customer_id}/conversionValueRuleSets/{conversion_value_rule_set_id}``
id (int):
Output only. The ID of the conversion value
rule set.
conversion_value_rules (Sequence[str]):
Resource names of rules within the rule set.
dimensions (Sequence[google.ads.googleads.v10.enums.types.ValueRuleSetDimensionEnum.ValueRuleSetDimension]):
Defines dimensions for Value Rule conditions.
The condition types of value rules within this
value rule set must be of these dimensions. The
first entry in this list is the primary
dimension of the included value rules. When
using value rule primary dimension segmentation,
conversion values will be segmented into the
values adjusted by value rules and the original
values, if some value rules apply.
owner_customer (str):
Output only. The resource name of the conversion value rule
set's owner customer. When the value rule set is inherited
from a manager customer, owner_customer will be the resource
name of the manager whereas the customer in the
resource_name will be of the requesting serving customer.
\*\* Read-only \*\*
attachment_type (google.ads.googleads.v10.enums.types.ValueRuleSetAttachmentTypeEnum.ValueRuleSetAttachmentType):
Immutable. Defines the scope where the
conversion value rule set is attached.
campaign (str):
The resource name of the campaign when the
conversion value rule set is attached to a
campaign.
status (google.ads.googleads.v10.enums.types.ConversionValueRuleSetStatusEnum.ConversionValueRuleSetStatus):
Output only. The status of the conversion value rule set.
\*\* Read-only \*\*
conversion_action_categories (Sequence[google.ads.googleads.v10.enums.types.ConversionActionCategoryEnum.ConversionActionCategory]):
Immutable. The conversion action categories
of the conversion value rule set.
"""
resource_name = proto.Field(proto.STRING, number=1,)
id = proto.Field(proto.INT64, number=2,)
conversion_value_rules = proto.RepeatedField(proto.STRING, number=3,)
dimensions = proto.RepeatedField(
proto.ENUM,
number=4,
enum=value_rule_set_dimension.ValueRuleSetDimensionEnum.ValueRuleSetDimension,
)
owner_customer = proto.Field(proto.STRING, number=5,)
attachment_type = proto.Field(
proto.ENUM,
number=6,
enum=value_rule_set_attachment_type.ValueRuleSetAttachmentTypeEnum.ValueRuleSetAttachmentType,
)
campaign = proto.Field(proto.STRING, number=7,)
status = proto.Field(
proto.ENUM,
number=8,
enum=conversion_value_rule_set_status.ConversionValueRuleSetStatusEnum.ConversionValueRuleSetStatus,
)
conversion_action_categories = proto.RepeatedField(
proto.ENUM,
number=9,
enum=conversion_action_category.ConversionActionCategoryEnum.ConversionActionCategory,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| googleads/google-ads-python | google/ads/googleads/v10/resources/types/conversion_value_rule_set.py | Python | apache-2.0 | 4,631 |
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
import excel
from models import *
from forms import *
from decorators import superuser_only
EDIT_SELECTION_MAX_DATA_LENGTH = 50
def get_context(request):
return RequestContext(request)
### Redirection functions ###
def redirect_to_index():
return HttpResponseRedirect('/querybuilder/')
def redirect_to_edit_selection(selection):
return HttpResponseRedirect('/querybuilder/edit_selection/%s/' % selection.id)
### Query Builder non-modifying views ###
@superuser_only
def index(request):
selections = DataSelection.objects.all()
return render_to_response('querybuilder/data_selection_list.html', {'selections': selections}, context_instance = get_context(request))
@login_required
def export_selection(request, selection_id): # Allow all users to export the data selection
selection = get_object_or_404(DataSelection, id = selection_id)
try:
data = selection.get_data()
data_error = False
except Exception, e:
data_error = " Invalid data: " + e.message
return HttpResponse(data_error)
try:
xls = excel.queryset_to_xls(data, selection)
except Exception, e:
raise e
# raise Exception("Unable to turn QuerySet into Excel file: xlwt isn't installed")
return xls
### Query Builder Selection/Column modifying views ###
@superuser_only
def add_column(request, selection_id):
selection = get_object_or_404(DataSelection, id = selection_id)
column_types = get_column_types()
column_type = None
if request.method == 'POST':
if 'column_type' in request.POST:
column_type = request.POST['column_type']
if column_type and column_type in column_types.keys():
column = column_types[column_type]()
form = get_column_form(column)
else:
form = None
column = None
column_type = None
if request.method == 'POST':
if 'name' in request.POST:
form = get_column_form(column, request.POST)
if form.is_valid():
form.save()
return redirect_to_edit_selection(selection)
return render_to_response('querybuilder/add_column.html', {'types': column_types, 'column': column, 'form': form, 'column_type': column_type}, context_instance = get_context(request))
@superuser_only
def edit_column(request, selection_id, column_id):
col = get_object_or_404(DataColumn, id = column_id)
selection = get_object_or_404(DataSelection, id = selection_id)
if col.get_child(): # If we have a Column type, use that as instance instead
col = col.get_child()
if request.method == 'POST':
form = get_column_form(col, request.POST)
if form.is_valid():
ret = form.save()
return redirect_to_edit_selection(selection)
else:
form = get_column_form(col)
return render_to_response('querybuilder/edit_column.html', {'form': form, 'column': col, 'selection': selection}, context_instance = get_context(request))
@superuser_only
def edit_selection(request, selection_id):
selection = get_object_or_404(DataSelection, id = selection_id)
if request.method == 'POST':
form = DataSelectionForm(request.POST, instance = selection)
if form.is_valid():
ret = form.save()
else:
form = DataSelectionForm(instance = selection)
try:
explain = selection.get_data(explain = True)
except Exception, e:
explain = " Invalid SQL query: " + str(e)
try:
as_python = selection.get_data(as_python = True)
except Exception, e:
as_python = " Invalid Python: " + str(e)
try:
max_data_exceeded = False
data = selection.get_data()
data_error = False
except Exception, e:
# raise e
data = []
data_error = u" Invalid data: " + str(e)
return render_to_response('querybuilder/edit_selection.html', {'form': form, 'selection': selection, 'explain': explain, 'data': data, 'max_data_exceeded': max_data_exceeded, 'data_error': data_error, 'as_python': as_python, 'request': request}, context_instance = get_context(request))
| alextreme/Django-Bingo | querybuilder/views.py | Python | bsd-3-clause | 4,415 |
# fail early, fail often
import cairo # noqa:F401
| tdsmith/elisascripts | elisa/__init__.py | Python | bsd-3-clause | 51 |
#import OpenGL
#OpenGL.FULL_LOGGING = True
import functools
import logging
from PySide.QtOpenGL import QGLFormat
logging.basicConfig()
LOG = logging.getLogger(__name__)
from time import time
from PySide import QtCore, QtGui
from UI import QAppCtx, QGLView, QTimeline, createAction
from UI.QGLViewer import Camera
from UI import GLGrid, GLPoints2D, GLPoints3D, GLSkel, GLCameras
from UI import DRAWOPTS, DRAWOPT_ALL, DRAWOPT_DEFAULT, DRAWOPT_AXES
import IO
class CameraSwitchMenu(QtGui.QMenu):
''' menu that allows selecting the current camera from a list
the menu has a pointer to a :class:`UI.QGLViewer.QGLView` and reads its cameras attribute directly.
it will then set the current camera in the view by directly setting its camera attribute
the camera list is built dynamically when the menu is about to be shown
'''
def __init__(self, name, parent=None):
if not parent:
# windows pyside 1.1.2 will crash if this is not constructed with a parent
raise RuntimeError("CameraSwitchMenu must be passed a parent widget")
super(CameraSwitchMenu, self).__init__(name, parent=parent)
#: pointer to the :class:`UI.QGLView` that i'll build the camera list from
self.glview = None
# add the actions before showing. (this can't be in the showEvent or the menu is not
# resized properly and may potentially go off the bottom of the screen)
self.aboutToShow.connect(self._addActions)
@QtCore.Slot()
def _addActions(self):
''' create an action for each camera whenever the menu is about to be shown '''
for camera in self.glview.cameras:
ca = QtGui.QAction(camera.name, self)
ca.setCheckable(True)
ca.setChecked(self.glview.camera == camera)
ca.triggered.connect(functools.partial(self.setCamera, camera))
self.addAction(ca)
def hideEvent(self, event):
''' remove all the cameras on hide (the list is freshly created on show) '''
super(CameraSwitchMenu, self).hideEvent(event)
for action in self.actions():
self.removeAction(action)
@QtCore.Slot()
def setCamera(self, camera):
''' connected to the camera actions to switch the current camera in the view '''
self.glview.camera = camera
self.glview.updateGL()
class GPanel(QtGui.QFrame):
''' this is the base class for all GRIP panels. '''
def __init__(self, parent=None):
super(GPanel, self).__init__(parent=parent)
self.setFrameStyle(QtGui.QFrame.Sunken)
# 1px frame around the panel that has focus
self.setStyleSheet("""QFrame:focus {border: 1px solid #FFFFFF;}""")
# menus
self.menuBar = QtGui.QMenuBar(self)
layout = QtGui.QVBoxLayout()
layout.setContentsMargins(1, 0, 1, 1)
layout.setSpacing(0)
layout.addWidget(self.menuBar)
self.setLayout(layout)
class QGLPanel(GPanel):
''' contains a QGLView and menus'''
def __init__(self, grid=True, parent=None):
super(QGLPanel, self).__init__(parent=parent)
self.setFrameStyle(QtGui.QFrame.Sunken)
# mask of drawable stuff (axes/bones/labels etc). don't show axes by default
self.drawOpts = DRAWOPT_DEFAULT
#: The QGLView widget, access this directly, there's no .view() getter
self.view = QGLView(parent=self)
# add grid
if grid:
self.view.primitives.append(GLGrid())
self.view.drawOpts = self.drawOpts
cameraMenu = QtGui.QMenu("&Camera")
self.showMenu = QtGui.QMenu("Show")
# the camera switch menu needs a pointer to the view so that it can get the list of cameras
# and switch the current one
camSwitchMenu = CameraSwitchMenu("Switch", parent=self)
camSwitchMenu.glview = self.view
cameraMenu.addMenu(camSwitchMenu)
#cameraMenu.addAction(createAction('Next Camera', self, [functools.partial(self.view.cycleCamera, 1)]))
#cameraMenu.addAction(createAction('Previous Camera', self, [functools.partial(self.view.cycleCamera, -1)]))
cameraMenu.addAction(createAction('Reset', self, [self.resetCamera3D, self.resetCamera2D]))
cameraMenu.addAction(createAction('Reset 3D', self, [self.resetCamera3D]))
cameraMenu.addAction(createAction('Reset 2D', self, [self.resetCamera2D]))
# show menu
self.showMenu.addAction(createAction('Frame Selected', self, [self.view.frame]))
self.showMenu.addAction(createAction('Show All', self, [functools.partial(self.setAllDrawOptions, True)]))
self.showMenu.addAction(createAction('Show None', self, [functools.partial(self.setAllDrawOptions, False)]))
for opt in sorted(DRAWOPTS):
a = createAction(opt, self, [functools.partial(self.toggleDrawOption, DRAWOPTS[opt])], checkable=True, checked=bool(DRAWOPTS[opt] & self.drawOpts))
self.showMenu.addAction(a)
self.menuBar.addMenu(cameraMenu)
self.menuBar.addMenu(self.showMenu)
self.layout().addWidget(self.view)
self.layout().setStretch(1, 1)
def frame(self):
self.view.frame()
self.view.updateGL()
def resetCamera3D(self):
self.view.camera.reset3D()
self.view.updateGL()
def resetCamera2D(self):
self.view.camera.reset2D()
self.view.updateGL()
def setAllDrawOptions(self, state):
'''set all draw options on or off
:param bool state: True = on'''
for a in self.showMenu.actions(): a.setChecked(state)
self.view.drawOpts = [0, DRAWOPT_ALL][state]
self.view.updateGL()
def toggleDrawOption(self, opt):
'''toggle the provided draw option
:param int opt: one or a combination of :`data:UI.DRAWOPTS` values'''
self.view.drawOpts = self.view.drawOpts ^ opt
self.view.updateGL()
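# e.g. passing DRAWOPT_AXES flips only the axes bit: visible axes become hidden,
# hidden axes become visible, and all other draw options are left unchanged.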
class QGViewer(QtGui.QMainWindow):
def __init__(self, parent=None):
super(QGViewer, self).__init__(parent=parent)
self.setWindowTitle('Imaginarium Viewer')
self.setMinimumWidth(640)
self.setMinimumHeight(480)
self.menus = {}
#: list of all the GL* things to draw. the views all refer to this
self.primitives = []
self.primitives2D = []
self.setTabPosition(QtCore.Qt.TopDockWidgetArea, QtGui.QTabWidget.North)
self.setTabPosition(QtCore.Qt.RightDockWidgetArea, QtGui.QTabWidget.East)
self.setTabPosition(QtCore.Qt.LeftDockWidgetArea, QtGui.QTabWidget.West)
self.setTabPosition(QtCore.Qt.BottomDockWidgetArea, QtGui.QTabWidget.North)
menuBar = QtGui.QMenuBar(self)
self.setMenuBar(menuBar)
self.getOrCreateMenu('&File')
self.createWidgets()
self.createMenus()
self.createLayout()
self.createConnections()
self.new()
def createMenus(self):
self.getOrCreateMenu('&File').addAction(createAction('&New', self, [self.new], tip='Create a new scene'))
# hack. maybe make a getOrCreateAction thing like the menus so i can get them by name
# (this is so i can insert the import action before it, so it's not very robust or good)
self._exitAction = createAction('Exit', self, [self.close], tip='Quit the application')
self.getOrCreateMenu('&File').addAction(self._exitAction)
self.getOrCreateMenu('&Edit')
self.getOrCreateMenu('&View').addAction(createAction('Show status bar', self, [lambda:self.statusBar().setVisible(not self.statusBar().isVisible())],checkable=True, checked=True, tip='Toggle the help/status bar on or off'))
def createWidgets(self):
# widgets
self._mainWidget = QtGui.QWidget(self)
self.setCentralWidget(self._mainWidget)
self.statusBar().showMessage("Starting up...", 1000) # this shows the status bar too
self.timeline = QTimeline(self)
self.addDock('Timeline', self.timeline,
QtCore.Qt.BottomDockWidgetArea | QtCore.Qt.TopDockWidgetArea,
QtCore.Qt.BottomDockWidgetArea,
QtGui.QDockWidget.DockWidgetMovable | QtGui.QDockWidget.DockWidgetVerticalTitleBar,
useTitle=False)
self._panel = QGLPanel(grid=False)
self.view().primitives = self.primitives
self.view().primitives2D = self.primitives2D
def createConnections(self):
# when the frame rate changes, reset the lastTimes cache in the view so it can update
# rate hud display more accurately sooner
self.timeline.rateChanged.connect(lambda: self.view().__setattr__('lastTimes', [time()]))
def createLayout(self):
# layout
self.viewLayout = QtGui.QVBoxLayout()
self.viewLayout.setContentsMargins(0, 0, 0, 0)
self.viewLayout.setSpacing(0)
self.viewLayout.addWidget(self._panel)
self._mainWidget.setLayout(self.viewLayout)
def getOrCreateMenu(self, menuName, before=None):
try:
return self.menus[menuName]
except KeyError:
if before:
m = QtGui.QMenu(menuName)
self.menuBar().insertMenu(self.menus[before].menuAction(), m)
self.menus[menuName] = m
else:
self.menus[menuName] = self.menuBar().addMenu(menuName)
return self.menus[menuName]
def addDock(self, title, obj, allowed, start, features, useTitle=True):
dock = QtGui.QDockWidget(self)
if useTitle: dock.setWindowTitle(title)
dock.setObjectName(title) # required so that save settings doesn't barf
dock.setWidget(obj)
dock.setAllowedAreas(allowed)
dock.setFeatures(features)
dock.layout().setContentsMargins(0, 0, 0, 0)
dock.layout().setSpacing(0)
self.addDockWidget(start, dock)
self.getOrCreateMenu('&View').addAction(createAction('Show %s' % title, self, [lambda:dock.setVisible(not dock.isVisible())], checkable=True, checked=True, tip='Toggle whether the %s is displayed' % title))
def view(self):
return self._panel.view
def updateGL(self):
self.view().updateGL()
def refreshImageData(self):
self.view().refreshImageData()
def new(self):
del self.primitives[:]
self.view().points = None
self.primitives.append(GLGrid())
self.view().camera.reset2D()
self.view().camera.reset3D()
self.timeline.setRange(1, 100)
self.updateGL()
def addPoints2D(self, points):
'''
add 2d points to the viewer.
:type points: :class:`numpy.array` (Nx2)
:param points: array of vertices
'''
glPoints = GLPoints2D(points)
self.primitives2D.append(glPoints)
return self.primitives2D[-1]
def addPoints3D(self, points):
'''
add 3d points to the viewer.
:type points: :class:`numpy.array` (Nx3)
:param points: array of vertices
'''
glPoints = GLPoints3D(points)
self.primitives.append(glPoints)
return self.primitives[-1]
def addCameras(self, mats, camera_ids, movies=None):
if movies == None: movies = [None]*len(mats)
for mat, cid, md in zip(mats, camera_ids, movies):
camera = Camera(cid)
camera.setP(mat[2], distortion=mat[3])
camera.setResetData()
if md != None: camera.setImageData(md['vbuffer'],md['vheight'],md['vwidth'],3)
self.view().addCamera(camera)
cams = GLCameras(camera_ids, mats)
self.primitives.append(cams)
if __name__ == '__main__':
with QAppCtx():
dialog = QGViewer()
dialog.show()
| davidsoncolin/IMS | Example/GViewer.py | Python | mit | 10,433 |
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
import sys
import matplotlib.pyplot as plt
sys.path.append("../")
from controller.Controller_Result_Experiment import ControllerScreenResultExperiment
from Window import Main, Gtk, GdkPixbuf
controller = ControllerScreenResultExperiment()
class WindowResultExperiment:
def __init__(self, path, set_results, controller_screen_new_experiment):
self.set_results = set_results
self.window = Main()
self.window.set_handler(controller.get_handler())
self.window.set_file_ui(path)
self.window.connect_handles_ui()
controller.set_screen_result_experiment(self)
controller.set_controller_screen_new_experiment(controller_screen_new_experiment)
self.window.set_name_object("result_window")
self.set_tables(self.window.get_object_from_window("tabela_resultados"),self.window.get_object_from_window("liststore_result_experiment"))
self.set_tree_views(self.window.get_object_from_window("tree_view"),self.window.get_object_from_window("tree_view2"))
self.create_columns()
self.index = 0
self.axis_x = []
self.axis_y = []
#set_results.print_set_results()
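# Each measurement string is assumed to hold '|'-separated entries, each of
# which is a ';'-separated time/voltage pair; every pair is pushed into the
# results table and into the plotting vectors built below.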
while (self.index < len(self.set_results.get_measurements())):
self.times_and_volts = self.set_results.get_specific_measurement(self.index).split('|')
for i in range(0,len(self.times_and_volts)-1):
self.times_and_volts[i] = self.times_and_volts[i].split(';')
self.insert_data_table(self.times_and_volts[i][0], self.times_and_volts[i][1])
'''
add the measurement data to the plotting vectors
'''
self.axis_x.append(self.times_and_volts[i][0])
self.axis_y.append(self.times_and_volts[i][1])
self.index += 1
'''
Create a graph from the collected data, save it, and show it on the result screen
'''
plt.plot(self.axis_x,self.axis_y)
plt.xlabel('Tempo')
plt.ylabel('Tensao')
plt.title('Processo de Carga do Capacitor')
plt.grid(True)
#plt.tight_layout()
plt.savefig("curva_capacitor.jpeg", dpi = 800)
graphic = self.window.get_object_from_window("graphic")
#make picture
self.pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_scale(filename="curva_capacitor.jpeg", width=700, height=500, preserve_aspect_ratio=True)
graphic.set_from_pixbuf(self.pixbuf)
#graphic.set_from_file("curva_capacitor.jpeg")
controller.fill_experiments_data()
controller.fill_table_results()
def show_window(self):
self.window.start_window()
Gtk.main()
'''
The methods from this point in the code onward handle the table structure
shown on the results screen.
'''
def set_tables(self, table_1, table_2):
self.table_1 = table_1
self.table_2 = table_2
def set_tree_views(self, tree_view_1, tree_view_2):
self.tree_view_1 = tree_view_1
self.tree_view_2 = tree_view_2
'''
Method that creates the columns of the results table and defines their data type
'''
def create_columns(self):
cell_tree_view_1 = Gtk.CellRendererText()
self.tree_view_1.get_column(0).pack_start(cell_tree_view_1, False)
self.tree_view_1.get_column(0).add_attribute(cell_tree_view_1, "text", 0)
self.tree_view_1.get_column(1).pack_start(cell_tree_view_1, False)
self.tree_view_1.get_column(1).add_attribute(cell_tree_view_1, "text", 1)
cell_tree_view_2 = Gtk.CellRendererText()
self.tree_view_2.get_column(0).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(0).add_attribute(cell_tree_view_2, "text", 0)
self.tree_view_2.get_column(1).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(1).add_attribute(cell_tree_view_2, "text", 1)
self.tree_view_2.get_column(2).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(2).add_attribute(cell_tree_view_2, "text", 2)
self.tree_view_2.get_column(3).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(3).add_attribute(cell_tree_view_2, "text", 3)
self.tree_view_2.get_column(4).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(4).add_attribute(cell_tree_view_2, "text", 4)
self.tree_view_2.get_column(5).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(5).add_attribute(cell_tree_view_2, "text", 5)
self.tree_view_2.get_column(6).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(6).add_attribute(cell_tree_view_2, "text", 6)
self.tree_view_2.get_column(7).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(7).add_attribute(cell_tree_view_2, "text", 7)
self.tree_view_2.get_column(8).pack_start(cell_tree_view_2, False)
self.tree_view_2.get_column(8).add_attribute(cell_tree_view_2, "text", 8)
def insert_data_table(self, volt, seconds):
iter_tree = self.table_1.prepend([volt,seconds])
def set_value_volt(self, volt):
self.volt = volt
def set_value_second(self, seconds):
self.second = seconds
def get_control(self):
return self.controller
def get_table_results(self):
return self.table_2
def get_set_results(self):
return self.set_results
'''
Method that returns the window object
'''
def get_window(self):
return self.window
#windowResult = WindowResultExperiment("../view/xml_windows/result_experiment.glade")
#windowResult.show_window()
| Experiments-Data-Base-Managment/data-base-managment | view/Screen_Result_Experiment.py | Python | gpl-3.0 | 5,238 |
from jinja2 import Environment, PackageLoader
env = Environment(
loader=PackageLoader('.', 'templates'),
)
def render(name, **kwargs):
return env.get_template(f'{name}.tmplt').render(**kwargs).split('\n')
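# Illustrative usage (the template name and keyword below are hypothetical):
#   render('node', kind='leaf') loads templates/node.tmplt, renders it with
#   kind='leaf' in the context, and returns the result as a list of lines.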
| ttaanngg/petal | uniform_model/functions/utils.py | Python | bsd-3-clause | 216 |
# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_consumer -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Main program for the child process run by
L{twisted.test.test_stdio.StandardInputOutputTestCase.test_consumer} to test
that process transports implement IConsumer properly.
"""
import sys
from twisted.python import log, reflect
from twisted.internet import stdio, protocol
from twisted.protocols import basic
def failed(err):
log.startLogging(sys.stderr)
log.err(err)
class ConsumerChild(protocol.Protocol):
def __init__(self, junkPath):
self.junkPath = junkPath
def connectionMade(self):
d = basic.FileSender().beginFileTransfer(file(self.junkPath), self.transport)
d.addErrback(failed)
d.addCallback(lambda ign: self.transport.loseConnection())
def connectionLost(self, reason):
reactor.stop()
if __name__ == '__main__':
reflect.namedAny(sys.argv[1]).install()
from twisted.internet import reactor
stdio.StandardIO(ConsumerChild(sys.argv[2]))
reactor.run()
| waseem18/oh-mainline | vendor/packages/twisted/twisted/test/stdio_test_consumer.py | Python | agpl-3.0 | 1,113 |
"""The tests for the Mikrotik device tracker platform."""
from datetime import timedelta
from homeassistant.components import mikrotik
import homeassistant.components.device_tracker as device_tracker
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from . import DEVICE_2_WIRELESS, DHCP_DATA, MOCK_DATA, MOCK_OPTIONS, WIRELESS_DATA
from .test_hub import setup_mikrotik_entry
from tests.common import MockConfigEntry, patch
DEFAULT_DETECTION_TIME = timedelta(seconds=300)
def mock_command(self, cmd, params=None):
"""Mock the Mikrotik command method."""
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.IS_WIRELESS]:
return True
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.DHCP]:
return DHCP_DATA
if cmd == mikrotik.const.MIKROTIK_SERVICES[mikrotik.const.WIRELESS]:
return WIRELESS_DATA
return {}
async def test_platform_manually_configured(hass):
"""Test that nothing happens when configuring mikrotik through device tracker platform."""
assert (
await async_setup_component(
hass,
device_tracker.DOMAIN,
{device_tracker.DOMAIN: {"platform": "mikrotik"}},
)
is False
)
assert mikrotik.DOMAIN not in hass.data
async def test_device_trackers(hass, legacy_patchable_time):
"""Test device_trackers created by mikrotik."""
# test devices are added from wireless list only
hub = await setup_mikrotik_entry(hass)
device_1 = hass.states.get("device_tracker.device_1")
assert device_1 is not None
assert device_1.state == "home"
assert device_1.attributes["ip"] == "0.0.0.1"
assert "ip_address" not in device_1.attributes
assert device_1.attributes["mac"] == "00:00:00:00:00:01"
assert device_1.attributes["host_name"] == "Device_1"
assert "mac_address" not in device_1.attributes
device_2 = hass.states.get("device_tracker.device_2")
assert device_2 is None
with patch.object(mikrotik.hub.MikrotikData, "command", new=mock_command):
# test device_2 is added after connecting to wireless network
WIRELESS_DATA.append(DEVICE_2_WIRELESS)
await hub.async_update()
await hass.async_block_till_done()
device_2 = hass.states.get("device_tracker.device_2")
assert device_2 is not None
assert device_2.state == "home"
assert device_2.attributes["ip"] == "0.0.0.2"
assert "ip_address" not in device_2.attributes
assert device_2.attributes["mac"] == "00:00:00:00:00:02"
assert "mac_address" not in device_2.attributes
assert device_2.attributes["host_name"] == "Device_2"
# test state remains home if last_seen < consider_home_interval
del WIRELESS_DATA[1] # device 2 is removed from wireless list
hub.api.devices["00:00:00:00:00:02"]._last_seen = dt_util.utcnow() - timedelta(
minutes=4
)
await hub.async_update()
await hass.async_block_till_done()
device_2 = hass.states.get("device_tracker.device_2")
assert device_2.state != "not_home"
# test state changes to away if last_seen > consider_home_interval
hub.api.devices["00:00:00:00:00:02"]._last_seen = dt_util.utcnow() - timedelta(
minutes=5
)
await hub.async_update()
await hass.async_block_till_done()
device_2 = hass.states.get("device_tracker.device_2")
assert device_2.state == "not_home"
async def test_restoring_devices(hass):
"""Test restoring existing device_tracker entities if not detected on startup."""
config_entry = MockConfigEntry(
domain=mikrotik.DOMAIN, data=MOCK_DATA, options=MOCK_OPTIONS
)
config_entry.add_to_hass(hass)
registry = await entity_registry.async_get_registry(hass)
registry.async_get_or_create(
device_tracker.DOMAIN,
mikrotik.DOMAIN,
"00:00:00:00:00:01",
suggested_object_id="device_1",
config_entry=config_entry,
)
registry.async_get_or_create(
device_tracker.DOMAIN,
mikrotik.DOMAIN,
"00:00:00:00:00:02",
suggested_object_id="device_2",
config_entry=config_entry,
)
await setup_mikrotik_entry(hass)
# test device_2 which is not in wireless list is restored
device_1 = hass.states.get("device_tracker.device_1")
assert device_1 is not None
assert device_1.state == "home"
device_2 = hass.states.get("device_tracker.device_2")
assert device_2 is not None
assert device_2.state == "not_home"
| partofthething/home-assistant | tests/components/mikrotik/test_device_tracker.py | Python | apache-2.0 | 4,677 |
# -*- coding: utf-8 -*-
"""
signals.py - module containing Signal class
**Classes**
* Signal - signal class for data objects
Created on Tue Jun 23 2015
@author: hyuh
"""
from __future__ import print_function
from .globals import FdpError
import numpy as np
import types
import inspect
import sys
if sys.version_info > (3,):
long = int
class Signal(np.ndarray):
"""
sig=fdp.Signal(signal_ndarray, units='m/s', axes=['radius','time'],
axes_values=[ax1_1Darray, ax2_1Darray],
axes_units=['s','cm'])
e.g.:
mds.Signal(np.arange((20*10)).reshape((10,20)), units='keV',
axes=['radius','time'], axes_values=[100+np.arange(10)*5,
np.arange(20)*0.1], axes_units=['s','cm'])
or an empty signal:
s=mds.Signal()
default axes order=[time, space]
sig=fdp.Signal(units='m/s', axes=['radius','time'],
axes_values=[radiusSignal, timeSignal])
"""
def __new__(cls, input_array=[], **kwargs):
obj = np.asanyarray(input_array).view(cls).copy()
for key in iter(kwargs):
setattr(obj, key, kwargs[key])
return obj
def __init__(self, **kwargs):
self.mdsshape = self._get_mdsshape()
pass
def __array_finalize__(self, obj):
"""
see https://docs.scipy.org/doc/numpy/user/basics.subclassing.html
self is the new object; obj is the original object
type(self) is always Signal subclass
type(obj) is None for explicit constructor like a = Signal(...)
is ndarray for "view casting"
is type(self) for slicing or copy
"""
objdict = getattr(obj, '__dict__', None)
if obj is None or objdict is None:
return
# logic for view casting and slicing/copy
objaxes = getattr(obj, 'axes', None)
objslic = getattr(obj, '_slic', None)
for key in iter(objdict):
if objaxes and key in objaxes:
# skip copy of axis attributes
pass
elif key in ['axes', 'point_axes']:
# shallow copy obj.axes and obj.point_axes
setattr(self, key, objdict[key][:])
else:
setattr(self, key, objdict[key])
if objdict.get('_fname') == 'transpose':
if objaxes is not None:
if '_fargs' in objdict:
self.axes = [obj.axes[i] for i in objdict['_fargs'][0]]
else:
self.axes = obj.axes[::-1]
# _deltmpattr = True
# if objdict.get('_debug'):
# _deltmpattr = False
if objaxes:
for axis in objaxes:
if objslic is not None:
# slice axis according to _slic
obj_axis = getattr(obj, axis)
if isinstance(objslic, (slice, list, np.ndarray)):
# logic for 1D arrays
setattr(self, axis, obj_axis[objslic])
elif isinstance(objslic, tuple):
# logic for multi-dim arrays
slic_axis = tuple([objslic[objaxes.index(axisaxis)] for
axisaxis in (obj_axis.axes + [axis])])
if isinstance(slic_axis[0], (int, float, np.generic)):
# "point_axes" is a dict with axis keys and dict values
if axis in self.point_axes:
raise FdpError('Point axis already present')
self.point_axes.append({'axis': axis,
'value': obj_axis[slic_axis],
'units': obj_axis.units})
self.axes.remove(axis)
elif isinstance(slic_axis[0], slice):
setattr(self, axis, obj_axis[slic_axis])
else:
raise FdpError('slic_axis is unexpected type')
# for axisaxis in obj_axis.axes:
# if isinstance(objslic[objaxes.index(axisaxis)], (int, long, float, np.generic)):
# obj_axis.axes.remove(axisaxis)
else:
raise FdpError('obj._slic is unexpected type')
else:
# obj._slic is undefined; copy each axis as is
setattr(self, axis, getattr(obj, axis, None))
# clean-up temp attributes
# def delattrtry(ob, at):
# try:
# delattr(ob, at)
# except:
# pass
# if _deltmpattr:
for attrname in ['_slic', '_fname', '_fargs', '_fkwargs']:
for o in [self, obj]:
if hasattr(o, attrname):
delattr(o, attrname)
# delattrtry(self, '_slic')
# delattrtry(self, '_fname')
# delattrtry(self, '_fargs')
# delattrtry(self, '_fkwargs')
# delattrtry(obj, '_slic')
# delattrtry(obj, '_fname')
# delattrtry(obj, '_fargs')
# delattrtry(obj, '_fkwargs')
def __array_wrap__(self, out_arr, context=None):
return np.ndarray.__array_wrap__(self, out_arr, context)
def __array_prepare__(self, out_arr, context=None):
return np.ndarray.__array_prepare__(self, out_arr, context)
def __getitem__(self, index):
'''
self must be Signal class for this to be called, so therefore
must have the _slic attribute. The _slic attribute preserves indexing for attributes
'''
# This passes index to array_finalize after a new signal obj is created
# to assign axes
def parseindex(index, dims):
# format index to account for single elements and pad with appropriate slices.
#int2slc=lambda i: slice(-1,-2,-1) if int(i) == -1 else slice(int(i),int(i)+1)
if isinstance(index, (list, slice, np.ndarray)):
# index is list, slice, or ndarray
if dims < 2:
return index
else:
newindex = [index]
elif isinstance(index, (int, float, np.generic)):
newindex = [int(index)]
elif isinstance(index, tuple):
newindex = [int(i) if isinstance(i, (int, float, np.generic))
else i for i in index]
# check for ellipses in newindex
ellipsisbool = [Ellipsis is i for i in newindex]
if sum(ellipsisbool) > 0:
# an ellipsis exists
ellipsisindex = ellipsisbool.index(True)
slcpadding = ([slice(None)] * (dims - len(newindex) + 1))
newindex = newindex[:ellipsisindex] \
+ slcpadding \
+ newindex[ellipsisindex + 1:]
else:
# no ellipsis
newindex = newindex + ([slice(None)] * (dims - len(newindex)))
return tuple(newindex)
slcindex = parseindex(index, self.ndim)
self._slic = slcindex
if self._empty is True:
self._get_mdsdata()
return super(Signal, self).__getitem__(slcindex)
def _get_mdsdata(self):
if self._empty is True:
data = self._root._get_mdsdata(self)
self.resize(data.shape, refcheck=False)
self._empty = False
self[:] = data
def _get_mdsshape(self):
return self._root._get_mdsshape(self)
def __getattr__(self, attribute):
if attribute == '_parent' or self._parent is None:
raise AttributeError("'{}' object has no attribute '{}'".format(
type(self), attribute))
attr = getattr(self._parent, attribute)
if inspect.ismethod(attr):
return types.MethodType(attr.__func__, self)
else:
return attr
def __repr__(self):
# self._get_mdsdata()
return super(Signal, self).__repr__()
def __str__(self):
# self._get_mdsdata()
return super(Signal, self).__str__()
def __getslice__(self, start, stop):
"""
This solves a subtle bug, where __getitem__ is not called, and all
the dimensional checking not done, when a slice of only the first
dimension is taken, e.g. a[1:3]. From the Python docs:
Deprecated since version 2.0: Support slice objects as parameters
to the __getitem__() method. (However, built-in types in CPython
currently still implement __getslice__(). Therefore, you have to
override it in derived classes when implementing slicing.)
"""
return self.__getitem__(slice(start, stop))
def __call__(self, **kwargs):
slc = [slice(None)] * len(self.axes)
for axis_name, axis_values in kwargs.items():
if axis_name not in self.axes:
print(' {} is not a valid axis.'.format(axis_name))
raise TypeError
iaxis = self.axes.index(axis_name)
axis = getattr(self, axis_name)
try:
axis_indices = [np.abs(value - axis[:]).argmin()
for value in axis_values]
slc[iaxis] = slice(axis_indices[0], axis_indices[1])
except TypeError:
axis_indices = np.abs(axis_values - axis[:]).argmin()
slc[iaxis] = axis_indices
return self[tuple(slc)]
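# Illustrative usage (hypothetical 2D signal with axes ['time', 'radius']):
#   sig(time=[0.2, 0.4]) slices between the samples nearest 0.2 and 0.4,
#   while sig(time=0.2) reduces that axis to the single nearest sample.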
def __bool__(self):
return bool(self.mdsshape)
def sigwrapper(f):
def inner(*args, **kwargs):
args[0]._fname = f.__name__
if len(args) > 1:
args[0]._fargs = args[1:]
args[0]._fkwargs = kwargs
if kwargs:
return f(*args, **kwargs)
else:
return f(*args)
return inner
@sigwrapper
def min(self, *args, **kwargs):
return super(Signal, self).min(*args, **kwargs)
@sigwrapper
def transpose(self, *args):
return super(Signal, self).transpose(*args)
| drsmith48/fdp | fdp/lib/signal.py | Python | mit | 10,302 |
#!/usr/bin/env python
# Copyright (c) 2003 Phil Gregory
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
"""coin.py - flips a coin on demand"""
handler_list=["coin"]
from moobot_module import MooBotModule
class coin(MooBotModule):
def __init__(self):
self.regex="^coin"
def handler(self, **args):
"""If given a list of alternatives separated by 'or', picks
from among them. Otherwise picks either heads or tails from a
virtual coin."""
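# Illustrative behaviour (hypothetical bot name): "mybot: coin tea or coffee?"
# usually answers 'tea' or 'coffee', falls back to a plain 'Heads!'/'Tails!'
# flip about one time in ten, and one time in a hundred returns the joke
# third option ('Both!' or 'Edge!?'); a bare "mybot: coin" is always a
# 'Heads!'/'Tails!' flip (with the rare 'Edge!?').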
import random, re
from irclib import Event
# Strip "botname: coin" off the front.
str = " ".join(args["text"].split()[2:])
# Attempt some rudimentary first-to-second person changes.
# raw strings so that \b is a regex word boundary rather than a backspace character
str = re.sub(r'\b([Ii]|[Mm][Ee])\b', 'you', str)
str = re.sub(r'\b[Mm][Yy]\b', 'your', str)
# Prepare the options for decision.
str = re.sub('\?', '', str)
options = re.split(',?\s+or\s+', str)
if len(options) <= 1 or random.randint(1, 10) == 1:
options = ["Heads!", "Tails!"]
third = "Edge!?"
elif len(options) == 2:
third = "Both!"
else:
third = "All of them!"
choice = random.choice(options)
if (random.randint(1, 100) == 1):
choice = third
return Event("privmsg", "", self.return_to_sender(args),
[choice])
| yuxans/badgirl | src/coin.py | Python | gpl-2.0 | 1,861 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class aaauser_vpntrafficpolicy_binding(base_resource) :
""" Binding class showing the vpntrafficpolicy that can be bound to aaauser.
"""
def __init__(self) :
self._policy = ""
self._priority = 0
self._acttype = 0
self._username = ""
self.___count = 0
@property
def priority(self) :
ur"""The priority of the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
ur"""The priority of the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policy(self) :
ur"""The policy Name.
"""
try :
return self._policy
except Exception as e:
raise e
@policy.setter
def policy(self, policy) :
ur"""The policy Name.
"""
try :
self._policy = policy
except Exception as e:
raise e
@property
def username(self) :
ur"""User account to which to bind the policy.<br/>Minimum length = 1.
"""
try :
return self._username
except Exception as e:
raise e
@username.setter
def username(self, username) :
ur"""User account to which to bind the policy.<br/>Minimum length = 1
"""
try :
self._username = username
except Exception as e:
raise e
@property
def acttype(self) :
try :
return self._acttype
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(aaauser_vpntrafficpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.aaauser_vpntrafficpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.username is not None :
return str(self.username)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = aaauser_vpntrafficpolicy_binding()
updateresource.username = resource.username
updateresource.policy = resource.policy
updateresource.priority = resource.priority
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [aaauser_vpntrafficpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].username = resource[i].username
updateresources[i].policy = resource[i].policy
updateresources[i].priority = resource[i].priority
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = aaauser_vpntrafficpolicy_binding()
deleteresource.username = resource.username
deleteresource.policy = resource.policy
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [aaauser_vpntrafficpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].username = resource[i].username
deleteresources[i].policy = resource[i].policy
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, username) :
ur""" Use this API to fetch aaauser_vpntrafficpolicy_binding resources.
"""
try :
obj = aaauser_vpntrafficpolicy_binding()
obj.username = username
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, username, filter_) :
ur""" Use this API to fetch filtered set of aaauser_vpntrafficpolicy_binding resources.
		Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = aaauser_vpntrafficpolicy_binding()
obj.username = username
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, username) :
ur""" Use this API to count aaauser_vpntrafficpolicy_binding resources configued on NetScaler.
"""
try :
obj = aaauser_vpntrafficpolicy_binding()
obj.username = username
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, username, filter_) :
ur""" Use this API to count the filtered set of aaauser_vpntrafficpolicy_binding resources.
		Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = aaauser_vpntrafficpolicy_binding()
obj.username = username
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class aaauser_vpntrafficpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.aaauser_vpntrafficpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.aaauser_vpntrafficpolicy_binding = [aaauser_vpntrafficpolicy_binding() for _ in range(length)]
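
# --- Illustrative usage sketch (not part of the generated SDK module) ---
# A minimal, hedged example of driving this binding class through the
# nitro_service client. The host, credentials, user and policy names below
# are hypothetical placeholders; a reachable NetScaler is assumed.
if __name__ == "__main__":
    from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
    client = nitro_service("ns.example.com", "http")
    client.login("nsroot", "nsroot")
    # Bind a VPN traffic policy to an AAA user, then list and count the bindings.
    binding = aaauser_vpntrafficpolicy_binding()
    binding.username = "alice"
    binding.policy = "vpn_traffic_pol_1"
    binding.priority = 100
    aaauser_vpntrafficpolicy_binding.add(client, binding)
    for b in aaauser_vpntrafficpolicy_binding.get(client, "alice") or []:
        print("%s -> priority %s" % (b.policy, b.priority))
    print(aaauser_vpntrafficpolicy_binding.count(client, "alice"))
    client.logout()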
|
benfinke/ns_python
|
nssrc/com/citrix/netscaler/nitro/resource/config/aaa/aaauser_vpntrafficpolicy_binding.py
|
Python
|
apache-2.0
| 6,888
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-29 21:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mturk', '0005_remove_mturkassignment_worker_id'),
]
operations = [
migrations.AlterField(
model_name='mturkhit',
name='status',
field=models.IntegerField(choices=[(1, 'Created'), (2, 'Completed'), (3, 'Done on Daemo'), (4, 'Expired')], default=1),
),
]
|
aginzberg/crowdsource-platform
|
mturk/migrations/0006_auto_20160129_2154.py
|
Python
|
mit
| 543
|
#Domain managing
import sys
import os
import threading
#from distutils.core import setup
#import py2exe
#setup(console=['domain_management.py'])
domains = []
path = os.getcwd()
#print(path)
if(os.path.exists(path+"\conf") and os.path.isdir(path+"\conf")):
path = path + "\conf"
path = path + "\domain_names.conf"
with open(path,"r") as f:
sitename = f.readline()
for sitename in f:
#print(sitename)
if sitename[0]!='#':
lngth = len(sitename)
sitename = sitename[1:lngth-2]
domains.append(sitename)
f.close()
def eCheck(handler, path):
    """Send the response headers for the requested file via the HTTP handler
    and return the mode to open it with ("rb" for images, "r" for text)."""
    if path.endswith('/'):
        path = path[:len(path) - 1]
    _, extension = os.path.splitext(path)
    extension = extension.lower()
    content_type = {
        '.png': 'image/png',
        '.jpg': 'image/jpeg',
        '.jpeg': 'image/jpeg',
        '.gif': 'image/gif'
    }
    if extension in content_type:
        handler.send_response(200)
        handler.send_header('Content-type', content_type[extension])
        handler.end_headers()
        return "rb"
    else:
        handler.send_response(200)
        handler.send_header("Content-type", 'text/plain')
        handler.end_headers()
        return "r"
def err():
htcode = '<html>'
htcode += '<head>'
htcode += '<meta charset="utf-8">'
htcode += '<title>Error</title>'
htcode += '</head>'
htcode += '<body style="background-color:black"><br><br>'
htcode += '<i><p style="font-family:serif;font-size:34px;text-align:center;color:white;">404 , Not Found</p></i>'
htcode += '<br></body></html>'
return htcode
#wfile.write(bytes(htcode,"utf-8"))
def loadSite(name):
    """Return the HTML for the requested page under Domains/, or the error page."""
    path = os.path.join(os.getcwd(), "Domains")
    if os.path.exists(path) and os.path.isdir(path):
        path = os.path.join(path, name)
    if os.path.exists(path) and os.path.isfile(path):
        with open(path) as ofile:
            htcode = ofile.read()
    else:
        # Fall back to the 404 page so the caller always gets HTML back.
        htcode = err()
    return htcode
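
# --- Illustrative wiring sketch (not part of the original module) ---
# A minimal, hypothetical handler showing how eCheck(), loadSite() and err()
# above could be driven from http.server (Python 3 assumed); the class name,
# port and URL layout are assumptions, not the author's original design.
import http.server

class DomainHandler(http.server.BaseHTTPRequestHandler):
    def do_GET(self):
        if self.path in ("", "/"):
            # Serve the per-domain index page via loadSite(); the Host header
            # is assumed to match a directory name under Domains/ (the parsed
            # "domains" list above could be used to gate which hosts are served).
            host = self.headers.get("Host", "").split(":")[0]
            self.send_response(200)
            self.send_header("Content-type", "text/html")
            self.end_headers()
            self.wfile.write(bytes(loadSite(os.path.join(host, "index.html")), "utf-8"))
        else:
            local = "." + self.path
            if os.path.isfile(local):
                # eCheck() sends the status line and Content-type header and
                # returns the mode to open the file with ("rb" or "r").
                mode = eCheck(self, local)
                with open(local, mode) as ofile:
                    data = ofile.read()
                self.wfile.write(data if mode == "rb" else bytes(data, "utf-8"))
            else:
                self.send_response(404)
                self.send_header("Content-type", "text/html")
                self.end_headers()
                self.wfile.write(bytes(err(), "utf-8"))

if __name__ == "__main__":
    http.server.HTTPServer(("", 8080), DomainHandler).serve_forever()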
|
astinaam/Server
|
Server_Final/bin/domain_management.py
|
Python
|
mit
| 2,699
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for object_detection.utils.dataset_util."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow.compat.v1 as tf
from object_detection.utils import dataset_util
class DatasetUtilTest(tf.test.TestCase):
def test_read_examples_list(self):
example_list_data = """example1 1\nexample2 2"""
example_list_path = os.path.join(self.get_temp_dir(), 'examples.txt')
with tf.gfile.Open(example_list_path, 'wb') as f:
f.write(example_list_data)
examples = dataset_util.read_examples_list(example_list_path)
self.assertListEqual(['example1', 'example2'], examples)
if __name__ == '__main__':
tf.test.main()
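
# --- Behavior sketch (illustrative, not the real implementation) ---
# The test above only pins down that read_examples_list() keeps the first
# whitespace-separated token of each line. A minimal equivalent under that
# assumption could look like this:
def _read_examples_list_sketch(path):
  with tf.gfile.GFile(path) as fid:
    lines = fid.readlines()
  return [line.strip().split(' ')[0] for line in lines]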
|
tombstone/models
|
research/object_detection/utils/dataset_util_test.py
|
Python
|
apache-2.0
| 1,416
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright 2015, Durachenko Aleksey V. <durachenko.aleksey@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from podfmdog_downloader import *
def init(db):
db.create_tables()
sys.exit(0)
def get_prop_download_directory(db):
print("property[download_directory] = %s" %
(db.get_property("download_directory",)))
sys.exit(0)
def set_prop_download_directory(db, download_directory):
db.set_property("download_directory", download_directory)
sys.exit(0)
def channel_list(db):
dl_dir = db.get_property("download_directory",)
for channel in db.get_channels():
print("* %s (%s) -> %s" % (channel['link'],
channel['subdir'],
os.path.join(dl_dir, channel['subdir'])))
sys.exit(0)
def channel_add(db, link, subdir):
if not db.add_channel(link, subdir):
print("the channel is already exists")
sys.exit(0)
def channel_edit(db, link, subdir):
if not db.edit_channel(link, subdir):
print("the channel is not exists")
sys.exit(0)
def channel_remove(db, link):
db.remove_channel(link)
sys.exit(0)
def print_usage():
print("""=== podfmdog conOAtrol v.0.1.0 ===
Usage:
podfmdog_ctl.py <command> <arg1> ... <argN>
Command details:
init -- init the database
set download_directory <path> -- set the download directory
get download_directory -- show the download directory
channel add <rss_url> <subdir> -- add the podcast
channel edit <rss_url> <subdir> -- change the podcast directory
channel remove <rss_url> -- remove the podcast
channel list -- show the podcast list
""")
sys.exit(-1)
if __name__ == "__main__":
if len(sys.argv) > 1:
podfmDb = PodfmPodcastDb()
# init
if sys.argv[1] == "init":
init(podfmDb)
# get <property>
elif sys.argv[1] == "get" and len(sys.argv) > 2:
# get download_directory
if sys.argv[2] == "download_directory":
get_prop_download_directory(podfmDb)
# set <property>
elif sys.argv[1] == "set" and len(sys.argv) > 2:
# set download_directory
if sys.argv[2] == "download_directory" and len(sys.argv) == 4:
set_prop_download_directory(podfmDb, sys.argv[3])
        # channel <command>
elif sys.argv[1] == "channel" and len(sys.argv) > 2:
            # channel add <rss_url> <subdir>
if sys.argv[2] == "add" and len(sys.argv) == 5:
channel_add(podfmDb, sys.argv[3], sys.argv[4])
            # channel edit <rss_url> <subdir>
if sys.argv[2] == "edit" and len(sys.argv) == 5:
channel_edit(podfmDb, sys.argv[3], sys.argv[4])
            # channel remove <rss_url>
elif sys.argv[2] == "remove" and len(sys.argv) == 4:
channel_remove(podfmDb, sys.argv[3])
            # channel list
elif sys.argv[2] == "list":
channel_list(podfmDb)
# invalid
print_usage()
|
AlekseyDurachenko/podfmdog
|
src/podfmdog_ctl.py
|
Python
|
gpl-3.0
| 3,769
|
import os
import re
class ConfigType:
"""Enum-like class that represents the Configuration Type"""
LOCAL = 0
REMOTE = 1
@staticmethod
def name(type):
if ConfigType.LOCAL == type:
return "local"
elif ConfigType.REMOTE == type:
return "remote"
else:
return "unknown"
class ConfigMode:
"""Enum-like class that represents the Configuration Mode"""
SIMPLE = 0
EXPERT = 1
@staticmethod
def name(mode):
if ConfigMode.SIMPLE == mode:
return "simple"
elif ConfigMode.EXPERT == mode:
return "expert"
else:
return "unknown"
class ConfigSchedule:
"""Holds the Configuration Schedule"""
def __init__(self):
self.minute = ""
self.hour = ""
self.dom = ""
self.month = ""
self.dow = ""
def __str__(self):
return self.serialize()
def serialize(self):
schedule = str(self.minute) + " "
schedule += str(self.hour) + " "
schedule += str(self.dom) + " "
schedule += str(self.month) + " "
schedule += str(self.dow)
return schedule
def deserialize(self, schedule):
pattern = "^([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*) ([^ ]*)$"
m = re.match(pattern, schedule)
        if m is not None:
self.minute = m.groups()[0]
self.hour = m.groups()[1]
self.dom = m.groups()[2]
self.month = m.groups()[3]
self.dow = m.groups()[4]
class ConfigCommand:
"""Represents a rsync command"""
def __init__(self):
self.options = "-a"
self.source = ""
self.destination = ""
self.user = ""
self.host = ""
def __str__(self):
return self.serialize()
def serialize(self):
command = "rsync "
command += str(self.options) + " "
command += str(self.source) + " "
# Remote rsync
if not self.user == "" and not self.host == "":
command += self.user + "@" + self.host + ":"
command += str(self.destination)
return command
def deserialize(self, command):
pattern = "rsync[ ]+(.*)[ ]+([^ ]+)[ ]+([^ ]+)$"
m = re.match(pattern, command)
        if m is not None:
self.options = m.groups()[0]
self.source = m.groups()[1]
self.destination = m.groups()[2]
# Remote rsync
pattern = "(.+)@(.+):(.+)"
m = re.match(pattern, self.destination)
            if m is not None:
self.user = m.groups()[0]
self.host = m.groups()[1]
self.destination = m.groups()[2]
class Config:
"""Represents an YABM Configuration"""
def __init__(self):
# Command, schedule and a reference to the cron job
self.command = ConfigCommand()
self.schedule = ConfigSchedule()
self.job = None
# Metadata fields
self.name = "New Configuration"
self.type = ConfigType.LOCAL
self.mode = ConfigMode.SIMPLE
self.enabled = True
def __str__(self):
ret = "name......: " + str(self.name) + "\n"
ret += "type......: " + str(self.type) + "\n"
ret += "mode......: " + str(self.mode) + "\n"
ret += "enabled...: " + str(self.enabled) + "\n"
ret += "command...: " + self.command.serialize() + "\n"
ret += "schedule..: " + self.schedule.serialize() + "\n"
ret += "job.......: " + str(self.job)
return ret
def serialize(self):
config = "tool=YABM"
config += ",name=" + str(self.name)
config += ",type=" + str(self.type)
config += ",mode=" + str(self.mode)
return config
def deserialize(self, metadata, command, schedule):
self.name = Config.get_value("name", metadata)
self.type = int(Config.get_value("type", metadata))
self.mode = int(Config.get_value("mode", metadata))
self.command.deserialize(command)
self.schedule.deserialize(schedule)
def is_valid(self):
self.serialize()
        if self.job is None:
return False
return self.job.is_valid()
def execute(self):
if self.is_valid():
os.system(self.job.command)
return True
return False
@staticmethod
def is_config(metadata):
# Jobs containing tool=YABM are managed by YABM
value = Config.get_value("tool", metadata)
        if value is None:
return False
return value == "YABM"
@staticmethod
def get_value(key, metadata):
pattern = "(^|.*,[ ]*)" + key + "=([^,]*)($|.*)"
match = re.match(pattern, metadata)
if match:
return match.groups()[1]
return None
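
# --- Usage sketch (illustrative, not part of the module) ---
# A hedged example of round-tripping the classes above; every path, host and
# schedule value below is a hypothetical placeholder.
if __name__ == "__main__":
    cfg = Config()
    cfg.name = "Nightly home backup"
    cfg.type = ConfigType.REMOTE
    cfg.command.source = "/home/user/"
    cfg.command.destination = "/backups/home/"
    cfg.command.user = "backup"
    cfg.command.host = "nas.local"
    cfg.schedule.deserialize("0 2 * * *")  # every day at 02:00
    print(cfg.serialize())           # tool=YABM,name=Nightly home backup,type=1,mode=0
    print(cfg.command.serialize())   # rsync -a /home/user/ backup@nas.local:/backups/home/
    # Parsing an existing rsync command and a metadata string back:
    cmd = ConfigCommand()
    cmd.deserialize("rsync -az /data/ backup@nas.local:/backups/data/")
    print("%s@%s:%s" % (cmd.user, cmd.host, cmd.destination))
    print(Config.get_value("name", "tool=YABM,name=Nightly home backup,type=1,mode=0"))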
|
marvinfy/YABM
|
config.py
|
Python
|
gpl-3.0
| 4,321
|
"""
scikit-learn copy of scipy/sparse/linalg/eigen/lobpcg/lobpcg.py v1.8.0
to be deleted after scipy 1.3.0 becomes a dependency in scikit-learn
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Locally Optimal Block Preconditioned Conjugate Gradient Method (LOBPCG).
References
----------
.. [1] A. V. Knyazev (2001),
Toward the Optimal Preconditioned Eigensolver: Locally Optimal
Block Preconditioned Conjugate Gradient Method.
SIAM Journal on Scientific Computing 23, no. 2,
pp. 517-541. :doi:`10.1137/S1064827500366124`
.. [2] A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov (2007),
Block Locally Optimal Preconditioned Eigenvalue Xolvers (BLOPEX)
in hypre and PETSc. :arxiv:`0705.2626`
.. [3] A. V. Knyazev's C and MATLAB implementations:
https://github.com/lobpcg/blopex
"""
import warnings
import numpy as np
from scipy.linalg import (inv, eigh, cho_factor, cho_solve,
cholesky, LinAlgError)
from scipy.sparse.linalg import aslinearoperator
from numpy import block as bmat
__all__ = ["lobpcg"]
def _report_nonhermitian(M, name):
"""
Report if `M` is not a Hermitian matrix given its type.
"""
from scipy.linalg import norm
md = M - M.T.conj()
nmd = norm(md, 1)
tol = 10 * np.finfo(M.dtype).eps
tol = max(tol, tol * norm(M, 1))
if nmd > tol:
warnings.warn(
f"Matrix {name} of the type {M.dtype} is not Hermitian: "
f"condition: {nmd} < {tol} fails.",
UserWarning, stacklevel=4
)
def _as2d(ar):
"""
If the input array is 2D return it, if it is 1D, append a dimension,
making it a column vector.
"""
if ar.ndim == 2:
return ar
    else:  # Assume a 1-D array.
aux = np.array(ar, copy=False)
aux.shape = (ar.shape[0], 1)
return aux
def _makeOperator(operatorInput, expectedShape):
"""Takes a dense numpy array or a sparse matrix or
a function and makes an operator performing matrix * blockvector
products."""
if operatorInput is None:
return None
else:
operator = aslinearoperator(operatorInput)
if operator.shape != expectedShape:
raise ValueError("operator has invalid shape")
return operator
def _applyConstraints(blockVectorV, factYBY, blockVectorBY, blockVectorY):
"""Changes blockVectorV in place."""
YBV = np.dot(blockVectorBY.T.conj(), blockVectorV)
tmp = cho_solve(factYBY, YBV)
blockVectorV -= np.dot(blockVectorY, tmp)
def _b_orthonormalize(B, blockVectorV, blockVectorBV=None, retInvR=False):
"""B-orthonormalize the given block vector using Cholesky."""
normalization = blockVectorV.max(axis=0) + np.finfo(blockVectorV.dtype).eps
blockVectorV = blockVectorV / normalization
if blockVectorBV is None:
if B is not None:
blockVectorBV = B(blockVectorV)
else:
blockVectorBV = blockVectorV # Shared data!!!
else:
blockVectorBV = blockVectorBV / normalization
VBV = blockVectorV.T.conj() @ blockVectorBV
try:
# VBV is a Cholesky factor from now on...
VBV = cholesky(VBV, overwrite_a=True)
VBV = inv(VBV, overwrite_a=True)
blockVectorV = blockVectorV @ VBV
# blockVectorV = (cho_solve((VBV.T, True), blockVectorV.T)).T
if B is not None:
blockVectorBV = blockVectorBV @ VBV
# blockVectorBV = (cho_solve((VBV.T, True), blockVectorBV.T)).T
else:
blockVectorBV = None
except LinAlgError:
# raise ValueError('Cholesky has failed')
blockVectorV = None
blockVectorBV = None
VBV = None
if retInvR:
return blockVectorV, blockVectorBV, VBV, normalization
else:
return blockVectorV, blockVectorBV
def _get_indx(_lambda, num, largest):
"""Get `num` indices into `_lambda` depending on `largest` option."""
ii = np.argsort(_lambda)
if largest:
ii = ii[:-num - 1:-1]
else:
ii = ii[:num]
return ii
def lobpcg(
A,
X,
B=None,
M=None,
Y=None,
tol=None,
maxiter=None,
largest=True,
verbosityLevel=0,
retLambdaHistory=False,
retResidualNormsHistory=False,
):
"""Locally Optimal Block Preconditioned Conjugate Gradient Method (LOBPCG)
LOBPCG is a preconditioned eigensolver for large symmetric positive
definite (SPD) generalized eigenproblems.
Parameters
----------
A : {sparse matrix, dense matrix, LinearOperator}
The symmetric linear operator of the problem, usually a
sparse matrix. Often called the "stiffness matrix".
X : ndarray, float32 or float64
Initial approximation to the ``k`` eigenvectors (non-sparse). If `A`
has ``shape=(n,n)`` then `X` should have shape ``shape=(n,k)``.
B : {dense matrix, sparse matrix, LinearOperator}, optional
The right hand side operator in a generalized eigenproblem.
By default, ``B = Identity``. Often called the "mass matrix".
M : {dense matrix, sparse matrix, LinearOperator}, optional
Preconditioner to `A`; by default ``M = Identity``.
`M` should approximate the inverse of `A`.
Y : ndarray, float32 or float64, optional
n-by-sizeY matrix of constraints (non-sparse), sizeY < n
The iterations will be performed in the B-orthogonal complement
of the column-space of Y. Y must be full rank.
tol : scalar, optional
Solver tolerance (stopping criterion).
The default is ``tol=n*sqrt(eps)``.
maxiter : int, optional
Maximum number of iterations. The default is ``maxiter = 20``.
largest : bool, optional
When True, solve for the largest eigenvalues, otherwise the smallest.
verbosityLevel : int, optional
Controls solver output. The default is ``verbosityLevel=0``.
retLambdaHistory : bool, optional
Whether to return eigenvalue history. Default is False.
retResidualNormsHistory : bool, optional
Whether to return history of residual norms. Default is False.
Returns
-------
w : ndarray
Array of ``k`` eigenvalues
v : ndarray
An array of ``k`` eigenvectors. `v` has the same shape as `X`.
lambdas : list of ndarray, optional
The eigenvalue history, if `retLambdaHistory` is True.
rnorms : list of ndarray, optional
The history of residual norms, if `retResidualNormsHistory` is True.
Notes
-----
If both ``retLambdaHistory`` and ``retResidualNormsHistory`` are True,
the return tuple has the following format
``(lambda, V, lambda history, residual norms history)``.
In the following ``n`` denotes the matrix size and ``m`` the number
of required eigenvalues (smallest or largest).
The LOBPCG code internally solves eigenproblems of the size ``3m`` on every
iteration by calling the "standard" dense eigensolver, so if ``m`` is not
small enough compared to ``n``, it does not make sense to call the LOBPCG
code, but rather one should use the "standard" eigensolver, e.g. numpy or
scipy function in this case.
If one calls the LOBPCG algorithm for ``5m > n``, it will most likely break
internally, so the code tries to call the standard function instead.
It is not that ``n`` should be large for the LOBPCG to work, but rather the
    ratio ``n / m`` should be large. If you call LOBPCG with ``m=1``
    and ``n=10``, it works even though ``n`` is small. The method is intended
for extremely large ``n / m``.
The convergence speed depends basically on two factors:
    1. How well the sought eigenvalues are separated from the rest of the
       eigenvalues. One can try to vary ``m`` to make this better.
2. How well conditioned the problem is. This can be changed by using proper
preconditioning. For example, a rod vibration test problem (under tests
directory) is ill-conditioned for large ``n``, so convergence will be
slow, unless efficient preconditioning is used. For this specific
problem, a good simple preconditioner function would be a linear solve
for `A`, which is easy to code since A is tridiagonal.
References
----------
.. [1] A. V. Knyazev (2001),
Toward the Optimal Preconditioned Eigensolver: Locally Optimal
Block Preconditioned Conjugate Gradient Method.
SIAM Journal on Scientific Computing 23, no. 2,
pp. 517-541. :doi:`10.1137/S1064827500366124`
.. [2] A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov
(2007), Block Locally Optimal Preconditioned Eigenvalue Xolvers
(BLOPEX) in hypre and PETSc. :arxiv:`0705.2626`
.. [3] A. V. Knyazev's C and MATLAB implementations:
https://github.com/lobpcg/blopex
Examples
--------
Solve ``A x = lambda x`` with constraints and preconditioning.
>>> import numpy as np
>>> from scipy.sparse import spdiags, issparse
>>> from scipy.sparse.linalg import lobpcg, LinearOperator
>>> n = 100
>>> vals = np.arange(1, n + 1)
>>> A = spdiags(vals, 0, n, n)
>>> A.toarray()
array([[ 1., 0., 0., ..., 0., 0., 0.],
[ 0., 2., 0., ..., 0., 0., 0.],
[ 0., 0., 3., ..., 0., 0., 0.],
...,
[ 0., 0., 0., ..., 98., 0., 0.],
[ 0., 0., 0., ..., 0., 99., 0.],
[ 0., 0., 0., ..., 0., 0., 100.]])
Constraints:
>>> Y = np.eye(n, 3)
Initial guess for eigenvectors, should have linearly independent
columns. Column dimension = number of requested eigenvalues.
>>> rng = np.random.default_rng()
>>> X = rng.random((n, 3))
    The preconditioner is the inverse of A in this example:
>>> invA = spdiags([1./vals], 0, n, n)
    The preconditioner must be defined by a function:
>>> def precond( x ):
... return invA @ x
The argument x of the preconditioner function is a matrix inside `lobpcg`,
thus the use of matrix-matrix product ``@``.
The preconditioner function is passed to lobpcg as a `LinearOperator`:
>>> M = LinearOperator(matvec=precond, matmat=precond,
... shape=(n, n), dtype=np.float64)
Let us now solve the eigenvalue problem for the matrix A:
>>> eigenvalues, _ = lobpcg(A, X, Y=Y, M=M, largest=False)
>>> eigenvalues
array([4., 5., 6.])
Note that the vectors passed in Y are the eigenvectors of the 3 smallest
eigenvalues. The results returned are orthogonal to those.
"""
blockVectorX = X
blockVectorY = Y
residualTolerance = tol
if maxiter is None:
maxiter = 20
if blockVectorY is not None:
sizeY = blockVectorY.shape[1]
else:
sizeY = 0
# Block size.
if len(blockVectorX.shape) != 2:
raise ValueError("expected rank-2 array for argument X")
n, sizeX = blockVectorX.shape
if verbosityLevel:
aux = "Solving "
if B is None:
aux += "standard"
else:
aux += "generalized"
aux += " eigenvalue problem with"
if M is None:
aux += "out"
aux += " preconditioning\n\n"
aux += "matrix size %d\n" % n
aux += "block size %d\n\n" % sizeX
if blockVectorY is None:
aux += "No constraints\n\n"
else:
if sizeY > 1:
aux += "%d constraints\n\n" % sizeY
else:
aux += "%d constraint\n\n" % sizeY
print(aux)
A = _makeOperator(A, (n, n))
B = _makeOperator(B, (n, n))
M = _makeOperator(M, (n, n))
if (n - sizeY) < (5 * sizeX):
warnings.warn(
f"The problem size {n} minus the constraints size {sizeY} "
f"is too small relative to the block size {sizeX}. "
f"Using a dense eigensolver instead of LOBPCG.",
UserWarning, stacklevel=2
)
sizeX = min(sizeX, n)
if blockVectorY is not None:
raise NotImplementedError(
"The dense eigensolver does not support constraints."
)
# Define the closed range of indices of eigenvalues to return.
if largest:
eigvals = (n - sizeX, n - 1)
else:
eigvals = (0, sizeX - 1)
A_dense = A(np.eye(n, dtype=A.dtype))
B_dense = None if B is None else B(np.eye(n, dtype=B.dtype))
vals, vecs = eigh(A_dense,
B_dense,
eigvals=eigvals,
check_finite=False)
if largest:
# Reverse order to be compatible with eigs() in 'LM' mode.
vals = vals[::-1]
vecs = vecs[:, ::-1]
return vals, vecs
if (residualTolerance is None) or (residualTolerance <= 0.0):
residualTolerance = np.sqrt(1e-15) * n
# Apply constraints to X.
if blockVectorY is not None:
if B is not None:
blockVectorBY = B(blockVectorY)
else:
blockVectorBY = blockVectorY
# gramYBY is a dense array.
gramYBY = np.dot(blockVectorY.T.conj(), blockVectorBY)
try:
# gramYBY is a Cholesky factor from now on...
gramYBY = cho_factor(gramYBY)
except LinAlgError as e:
raise ValueError("Linearly dependent constraints") from e
_applyConstraints(blockVectorX, gramYBY, blockVectorBY, blockVectorY)
##
# B-orthonormalize X.
blockVectorX, blockVectorBX = _b_orthonormalize(B, blockVectorX)
if blockVectorX is None:
raise ValueError("Linearly dependent initial approximations")
##
# Compute the initial Ritz vectors: solve the eigenproblem.
blockVectorAX = A(blockVectorX)
gramXAX = np.dot(blockVectorX.T.conj(), blockVectorAX)
_lambda, eigBlockVector = eigh(gramXAX, check_finite=False)
ii = _get_indx(_lambda, sizeX, largest)
_lambda = _lambda[ii]
eigBlockVector = np.asarray(eigBlockVector[:, ii])
blockVectorX = np.dot(blockVectorX, eigBlockVector)
blockVectorAX = np.dot(blockVectorAX, eigBlockVector)
if B is not None:
blockVectorBX = np.dot(blockVectorBX, eigBlockVector)
##
# Active index set.
activeMask = np.ones((sizeX,), dtype=bool)
lambdaHistory = [_lambda]
residualNormsHistory = []
previousBlockSize = sizeX
ident = np.eye(sizeX, dtype=A.dtype)
ident0 = np.eye(sizeX, dtype=A.dtype)
##
# Main iteration loop.
blockVectorP = None # set during iteration
blockVectorAP = None
blockVectorBP = None
iterationNumber = -1
restart = True
explicitGramFlag = False
while iterationNumber < maxiter:
iterationNumber += 1
if verbosityLevel > 0:
print("-"*50)
print(f"iteration {iterationNumber}")
if B is not None:
aux = blockVectorBX * _lambda[np.newaxis, :]
else:
aux = blockVectorX * _lambda[np.newaxis, :]
blockVectorR = blockVectorAX - aux
aux = np.sum(blockVectorR.conj() * blockVectorR, 0)
residualNorms = np.sqrt(aux)
residualNormsHistory.append(residualNorms)
ii = np.where(residualNorms > residualTolerance, True, False)
activeMask = activeMask & ii
if verbosityLevel > 2:
print(activeMask)
currentBlockSize = activeMask.sum()
if currentBlockSize != previousBlockSize:
previousBlockSize = currentBlockSize
ident = np.eye(currentBlockSize, dtype=A.dtype)
if currentBlockSize == 0:
break
if verbosityLevel > 0:
print(f"current block size: {currentBlockSize}")
print(f"eigenvalue(s):\n{_lambda}")
print(f"residual norm(s):\n{residualNorms}")
if verbosityLevel > 10:
print(eigBlockVector)
activeBlockVectorR = _as2d(blockVectorR[:, activeMask])
if iterationNumber > 0:
activeBlockVectorP = _as2d(blockVectorP[:, activeMask])
activeBlockVectorAP = _as2d(blockVectorAP[:, activeMask])
if B is not None:
activeBlockVectorBP = _as2d(blockVectorBP[:, activeMask])
if M is not None:
# Apply preconditioner T to the active residuals.
activeBlockVectorR = M(activeBlockVectorR)
##
# Apply constraints to the preconditioned residuals.
if blockVectorY is not None:
_applyConstraints(activeBlockVectorR,
gramYBY,
blockVectorBY,
blockVectorY)
##
# B-orthogonalize the preconditioned residuals to X.
if B is not None:
activeBlockVectorR = activeBlockVectorR - (
blockVectorX @
(blockVectorBX.T.conj() @ activeBlockVectorR)
)
else:
activeBlockVectorR = activeBlockVectorR - (
blockVectorX @
(blockVectorX.T.conj() @ activeBlockVectorR)
)
##
# B-orthonormalize the preconditioned residuals.
aux = _b_orthonormalize(B, activeBlockVectorR)
activeBlockVectorR, activeBlockVectorBR = aux
if activeBlockVectorR is None:
warnings.warn(
f"Failed at iteration {iterationNumber} with accuracies "
f"{residualNorms}\n not reaching the requested "
f"tolerance {residualTolerance}.",
UserWarning, stacklevel=2
)
break
activeBlockVectorAR = A(activeBlockVectorR)
if iterationNumber > 0:
if B is not None:
aux = _b_orthonormalize(
B, activeBlockVectorP, activeBlockVectorBP, retInvR=True
)
activeBlockVectorP, activeBlockVectorBP, invR, normal = aux
else:
aux = _b_orthonormalize(B, activeBlockVectorP, retInvR=True)
activeBlockVectorP, _, invR, normal = aux
# Function _b_orthonormalize returns None if Cholesky fails
if activeBlockVectorP is not None:
activeBlockVectorAP = activeBlockVectorAP / normal
activeBlockVectorAP = np.dot(activeBlockVectorAP, invR)
restart = False
else:
restart = True
##
# Perform the Rayleigh Ritz Procedure:
# Compute symmetric Gram matrices:
if activeBlockVectorAR.dtype == "float32":
myeps = 1
elif activeBlockVectorR.dtype == "float32":
myeps = 1e-4
else:
myeps = 1e-8
if residualNorms.max() > myeps and not explicitGramFlag:
explicitGramFlag = False
else:
# Once explicitGramFlag, forever explicitGramFlag.
explicitGramFlag = True
        # Shared memory assignments to simplify the code
if B is None:
blockVectorBX = blockVectorX
activeBlockVectorBR = activeBlockVectorR
if not restart:
activeBlockVectorBP = activeBlockVectorP
# Common submatrices:
gramXAR = np.dot(blockVectorX.T.conj(), activeBlockVectorAR)
gramRAR = np.dot(activeBlockVectorR.T.conj(), activeBlockVectorAR)
if explicitGramFlag:
gramRAR = (gramRAR + gramRAR.T.conj()) / 2
gramXAX = np.dot(blockVectorX.T.conj(), blockVectorAX)
gramXAX = (gramXAX + gramXAX.T.conj()) / 2
gramXBX = np.dot(blockVectorX.T.conj(), blockVectorBX)
gramRBR = np.dot(activeBlockVectorR.T.conj(), activeBlockVectorBR)
gramXBR = np.dot(blockVectorX.T.conj(), activeBlockVectorBR)
else:
gramXAX = np.diag(_lambda)
gramXBX = ident0
gramRBR = ident
gramXBR = np.zeros((sizeX, currentBlockSize), dtype=A.dtype)
def _handle_gramA_gramB_verbosity(gramA, gramB):
if verbosityLevel > 0:
_report_nonhermitian(gramA, "gramA")
_report_nonhermitian(gramB, "gramB")
if verbosityLevel > 10:
# Note: not documented, but leave it in here for now
np.savetxt("gramA.txt", gramA)
np.savetxt("gramB.txt", gramB)
if not restart:
gramXAP = np.dot(blockVectorX.T.conj(), activeBlockVectorAP)
gramRAP = np.dot(activeBlockVectorR.T.conj(), activeBlockVectorAP)
gramPAP = np.dot(activeBlockVectorP.T.conj(), activeBlockVectorAP)
gramXBP = np.dot(blockVectorX.T.conj(), activeBlockVectorBP)
gramRBP = np.dot(activeBlockVectorR.T.conj(), activeBlockVectorBP)
if explicitGramFlag:
gramPAP = (gramPAP + gramPAP.T.conj()) / 2
gramPBP = np.dot(activeBlockVectorP.T.conj(),
activeBlockVectorBP)
else:
gramPBP = ident
gramA = bmat(
[
[gramXAX, gramXAR, gramXAP],
[gramXAR.T.conj(), gramRAR, gramRAP],
[gramXAP.T.conj(), gramRAP.T.conj(), gramPAP],
]
)
gramB = bmat(
[
[gramXBX, gramXBR, gramXBP],
[gramXBR.T.conj(), gramRBR, gramRBP],
[gramXBP.T.conj(), gramRBP.T.conj(), gramPBP],
]
)
_handle_gramA_gramB_verbosity(gramA, gramB)
try:
_lambda, eigBlockVector = eigh(gramA,
gramB,
check_finite=False)
except LinAlgError:
# try again after dropping the direction vectors P from RR
restart = True
if restart:
gramA = bmat([[gramXAX, gramXAR], [gramXAR.T.conj(), gramRAR]])
gramB = bmat([[gramXBX, gramXBR], [gramXBR.T.conj(), gramRBR]])
_handle_gramA_gramB_verbosity(gramA, gramB)
try:
_lambda, eigBlockVector = eigh(gramA,
gramB,
check_finite=False)
except LinAlgError as e:
raise ValueError("eigh has failed in lobpcg iterations") from e
ii = _get_indx(_lambda, sizeX, largest)
if verbosityLevel > 10:
print(ii)
print(f"lambda:\n{_lambda}")
_lambda = _lambda[ii]
eigBlockVector = eigBlockVector[:, ii]
lambdaHistory.append(_lambda)
if verbosityLevel > 10:
print(f"lambda:\n{_lambda}")
# # Normalize eigenvectors!
# aux = np.sum( eigBlockVector.conj() * eigBlockVector, 0 )
# eigVecNorms = np.sqrt( aux )
# eigBlockVector = eigBlockVector / eigVecNorms[np.newaxis, :]
# eigBlockVector, aux = _b_orthonormalize( B, eigBlockVector )
if verbosityLevel > 10:
print(eigBlockVector)
# Compute Ritz vectors.
if B is not None:
if not restart:
eigBlockVectorX = eigBlockVector[:sizeX]
eigBlockVectorR = eigBlockVector[sizeX:
sizeX + currentBlockSize]
eigBlockVectorP = eigBlockVector[sizeX + currentBlockSize:]
pp = np.dot(activeBlockVectorR, eigBlockVectorR)
pp += np.dot(activeBlockVectorP, eigBlockVectorP)
app = np.dot(activeBlockVectorAR, eigBlockVectorR)
app += np.dot(activeBlockVectorAP, eigBlockVectorP)
bpp = np.dot(activeBlockVectorBR, eigBlockVectorR)
bpp += np.dot(activeBlockVectorBP, eigBlockVectorP)
else:
eigBlockVectorX = eigBlockVector[:sizeX]
eigBlockVectorR = eigBlockVector[sizeX:]
pp = np.dot(activeBlockVectorR, eigBlockVectorR)
app = np.dot(activeBlockVectorAR, eigBlockVectorR)
bpp = np.dot(activeBlockVectorBR, eigBlockVectorR)
if verbosityLevel > 10:
print(pp)
print(app)
print(bpp)
blockVectorX = np.dot(blockVectorX, eigBlockVectorX) + pp
blockVectorAX = np.dot(blockVectorAX, eigBlockVectorX) + app
blockVectorBX = np.dot(blockVectorBX, eigBlockVectorX) + bpp
blockVectorP, blockVectorAP, blockVectorBP = pp, app, bpp
else:
if not restart:
eigBlockVectorX = eigBlockVector[:sizeX]
eigBlockVectorR = eigBlockVector[sizeX:
sizeX + currentBlockSize]
eigBlockVectorP = eigBlockVector[sizeX + currentBlockSize:]
pp = np.dot(activeBlockVectorR, eigBlockVectorR)
pp += np.dot(activeBlockVectorP, eigBlockVectorP)
app = np.dot(activeBlockVectorAR, eigBlockVectorR)
app += np.dot(activeBlockVectorAP, eigBlockVectorP)
else:
eigBlockVectorX = eigBlockVector[:sizeX]
eigBlockVectorR = eigBlockVector[sizeX:]
pp = np.dot(activeBlockVectorR, eigBlockVectorR)
app = np.dot(activeBlockVectorAR, eigBlockVectorR)
if verbosityLevel > 10:
print(pp)
print(app)
blockVectorX = np.dot(blockVectorX, eigBlockVectorX) + pp
blockVectorAX = np.dot(blockVectorAX, eigBlockVectorX) + app
blockVectorP, blockVectorAP = pp, app
if B is not None:
aux = blockVectorBX * _lambda[np.newaxis, :]
else:
aux = blockVectorX * _lambda[np.newaxis, :]
blockVectorR = blockVectorAX - aux
aux = np.sum(blockVectorR.conj() * blockVectorR, 0)
residualNorms = np.sqrt(aux)
if np.max(residualNorms) > residualTolerance:
warnings.warn(
f"Exited at iteration {iterationNumber} with accuracies \n"
f"{residualNorms}\n"
f"not reaching the requested tolerance {residualTolerance}.",
UserWarning, stacklevel=2
)
# Future work: Need to add Postprocessing here:
# Making sure eigenvectors "exactly" satisfy the blockVectorY constrains?
# Making sure eigenvecotrs are "exactly" othonormalized by final "exact" RR
# Keeping the best iterates in case of divergence
if verbosityLevel > 0:
print(f"Final eigenvalue(s):\n{_lambda}")
print(f"Final residual norm(s):\n{residualNorms}")
if retLambdaHistory:
if retResidualNormsHistory:
return _lambda, blockVectorX, lambdaHistory, residualNormsHistory
else:
return _lambda, blockVectorX, lambdaHistory
else:
if retResidualNormsHistory:
return _lambda, blockVectorX, residualNormsHistory
else:
return _lambda, blockVectorX
|
manhhomienbienthuy/scikit-learn
|
sklearn/externals/_lobpcg.py
|
Python
|
bsd-3-clause
| 27,320
|
"""Test the TcEx API Module."""
# standard library
import time
from datetime import datetime, timedelta
from random import randint
# third-party
import pytest
from pytest import FixtureRequest
# first-party
from tcex.api.tc.v3.tql.tql_operator import TqlOperator
from tests.api.tc.v3.v3_helpers import TestV3, V3Helper
class TestArtifacts(TestV3):
"""Test TcEx API Interface."""
v3 = None
def setup_method(self):
"""Configure setup before all tests."""
print('') # ensure any following print statements will be on new line
self.v3_helper = V3Helper('artifacts')
self.v3 = self.v3_helper.v3
self.tcex = self.v3_helper.tcex
def test_artifact_api_options(self):
"""Test filter keywords."""
super().obj_api_options()
def test_artifact_filter_keywords(self):
"""Test filter keywords."""
super().obj_filter_keywords()
@pytest.mark.xfail(reason='Verify TC Version running against.')
def test_artifact_object_properties(self):
"""Test properties."""
super().obj_properties()
@pytest.mark.xfail(reason='Verify TC Version running against.')
def test_artifact_object_properties_extra(self):
"""Test properties."""
super().obj_properties_extra()
@pytest.mark.xfail(reason='Verify TC Version running against.')
def test_indicator_associations(self):
"""Test Artifact -> Indicator Associations."""
self.v3_helper.tql_clear(['MyCase-09'], self.v3.cases(), 'name')
indicator = self.v3.indicator(
**{
'ip': '43.24.65.34',
'type': 'Address',
}
)
indicator.create()
indicator_2 = self.v3.indicator(
**{
'ip': '43.24.65.35',
'type': 'Address',
}
)
indicator_2.create()
indicator_3 = {'ip': '43.24.65.36', 'type': 'Address'}
# [Pre-Requisite] - create case
case = self.v3_helper.create_case(name='MyCase-09')
artifact = self.v3.artifact(
**{
'case_id': case.model.id,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
)
self.v3_helper._associations(artifact, indicator, indicator_2, indicator_3)
@pytest.mark.xfail(reason='Verify TC Version running against.')
def test_group_associations(self):
"""Test Artifact -> Group Association"""
# [Pre-Requisite] - clean up past runs.
self.v3_helper.tql_clear(['MyCase-08'], self.v3.cases(), 'name')
self.v3_helper.tql_clear(
['MyAdversary-12', 'StagedGroup-10', 'StagedGroup-11'], self.v3.groups()
)
# [Pre-Requisite] - create case
case = self.v3_helper.create_case(name='MyCase-08')
artifact = self.v3.artifact(
**{
'case_id': case.model.id,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
)
group_2 = self.v3_helper.create_group(name='StagedGroup-10', xid='staged_group_10-xid')
group_3 = self.v3_helper.create_group(name='StagedGroup-11', xid='staged_group_11-xid')
association_data = {'name': 'MyAdversary-12', 'type': 'Adversary'}
self.v3_helper._associations(artifact, group_2, group_3, association_data)
@pytest.mark.xfail(reason='Remove XFail once core fixes PLAT-4689.')
def test_case_associations(self):
"""Test Artifact -> Case Association"""
# [Pre-Requisite] - clean up past runs.
self.v3_helper.tql_clear(
['MyCase-04', 'MyCase-05', 'MyCase-06', 'MyCase-07'], self.v3.cases(), 'name'
)
# [Pre-Requisite] - create case
case = self.v3_helper.create_case(name='MyCase-04')
case_2 = self.v3_helper.create_case(name='MyCase-05')
case_3 = self.v3_helper.create_case(name='MyCase-06')
# [Create Testing] define object data
artifact = self.v3.artifact(
**{
'case_id': case.model.id,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
)
association_data = {'name': 'MyCase-07', 'severity': 'Low', 'status': 'Open'}
self.v3_helper._associations(artifact, case_2, case_3, association_data)
def test_artifact_create_and_retrieve_nested_types(self, request: FixtureRequest):
"""Test Object Creation
A single test case to hit all sub-type creation (e.g., Notes).
"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
# [Create Testing] define object data
artifact_data = {
'case_id': case.model.id,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
# [Create Testing] define nested note data
note_data = {'text': f'sample note for {request.node.name} test case.'}
        # [Create Testing] create the artifact object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] add the note data to the object
artifact.stage_note(note_data)
# [Create Testing] create the object to the TC API
artifact.create()
# [Retrieve Testing] create the object with id filter,
# using object id from the object created above
artifact = self.v3.artifact(id=artifact.model.id)
# [Retrieve Testing] get the object from the API
artifact.get(params={'fields': 'notes'})
# [Retrieve Testing] test "notes" method
for note in artifact.notes:
# only a single note was added so text should match
assert note.model.text == note_data.get('text')
# [Retrieve Testing] run assertions on the nested note data, which
# is only available due to params being added to the get() method.
assert artifact.model.notes.data[0].text == note_data.get('text')
# [Retrieve Testing] run assertions on returned data
assert artifact.model.intel_type == artifact_data.get('intel_type')
assert artifact.model.summary == artifact_data.get('summary')
assert artifact.model.type == artifact_data.get('type')
assert artifact.model.field_name is None
def test_artifact_all_filters_on_case(self, request: FixtureRequest):
"""Test TQL Filters for artifact on a Case"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
# [Create Testing] define object data
artifact_data = {
'case_id': case.model.id,
'intel_type': 'indicator-File',
'source': 'tcex testing',
'summary': '3BD214E8ACC29E123FE59CC14668407B0EEB1F2AA52E812E98874B7583EC7BDF',
'type': 'File Hash',
}
# [Create Testing] define nested note data
note_data = {'text': f'sample note for {request.node.name} test case.'}
# [Create Testing] create the object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] add the note data to the object
artifact.stage_note(note_data)
# [Create Testing] create the object to the TC API
artifact.create()
# [Retrieve Testing] create the object with id filter,
# using object id from the object created above
artifact = self.v3.artifact(id=artifact.model.id)
# [Retrieve Testing] get the object from the API
artifact.get(params={'fields': 'notes'})
note_id = artifact.model.notes.data[0].id
# [Retrieve Testing] retrieve object using tql filters
artifacts = self.v3.artifacts(params={'fields': 'analytics'})
# [Filter Testing] analytics_score - This works, but
# the delay in the score updating takes to long
# artifacts.filter.analytics_score(TqlOperator.GT, 50)
# [Filter Testing] case_id
artifacts.filter.case_id(TqlOperator.EQ, case.model.id)
# [Filter Testing] date_added
artifacts.filter.date_added(
TqlOperator.GT, (datetime.utcnow() - timedelta(days=1)).isoformat()
)
# [Filter Testing] has_case - using id filter as it's easily available
artifacts.filter.has_case.id(TqlOperator.EQ, case.model.id)
# TODO: [PLAT-2830] not available via the API currently
# [Filter Testing] has_group
# artifacts.filter.has_group.id(TqlOperator.EQ, ???)
# TODO: [PLAT-2830] not available via the API currently
# [Filter Testing] has_indicator
# artifacts.filter.has_indicator.id(TqlOperator.EQ, ???)
# [Filter Testing] has_note - using <object>_id as it's easily available
artifacts.filter.has_note.artifact_id(TqlOperator.EQ, artifact.model.id)
# [Filter Testing] id
artifacts.filter.id(TqlOperator.EQ, artifact.model.id)
# TODO: [PLAT-2830] not available via the API currently
# [Filter Testing] indicator_active
# artifacts.filter.indicator_active(TqlOperator.EQ, True)
# [Filter Testing] note_id - the note_id has to be retrieved first
artifacts.filter.note_id(TqlOperator.EQ, note_id)
# [Filter Testing] source
artifacts.filter.source(TqlOperator.EQ, artifact_data.get('source'))
# [Filter Testing] summary
artifacts.filter.summary(TqlOperator.EQ, artifact_data.get('summary'))
# [Filter Testing] type
artifacts.filter.type(TqlOperator.EQ, artifact_data.get('type'))
# [Filter Testing] type_name
artifacts.filter.type_name(TqlOperator.EQ, artifact_data.get('type'))
# [Retrieve Testing] get the object from the API
for artifact in artifacts:
assert artifact.model.summary == artifact_data.get('summary')
break
else:
assert False, f'No artifact found for tql -> {artifacts.tql.as_str}'
def test_artifact_all_filter_on_task(self, request: FixtureRequest):
"""Test TQL Filters for artifact on a Task"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
# [Create Testing] define task data
task_data = {
'case_id': case.model.id,
'name': f'name-{request.node.name}',
}
# [Create Testing] create the task object
task = self.v3.task(**task_data)
# [Create Testing] create the task with the TC API
task.create()
# [Create Testing] define object data
artifact_data = {
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'task_id': task.model.id,
'type': 'ASN',
}
        # [Create Testing] create the artifact object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] create the object with the TC API
artifact.create()
# [Retrieve Testing] retrieve object using tql filters
artifacts = self.v3.artifacts()
# [Filter Testing] has_task -> using id since it's available
artifacts.filter.has_task.id(TqlOperator.EQ, task.model.id)
# [Filter Testing] task_id
artifacts.filter.task_id(TqlOperator.EQ, task.model.id)
# [Retrieve Testing] get the object from the API
# print(f'TQL Filter -> ({artifacts.tql.as_str})')
for artifact in artifacts:
assert artifact.model.summary == artifact_data.get('summary')
break
else:
assert False, f'No artifact found for tql -> {artifacts.tql.as_str}'
def test_artifact_create_by_case_xid(self, request: FixtureRequest):
"""Test Artifact Creation"""
# [Pre-Requisite] - create case and provide a unique xid
case_xid = f'{request.node.name}-{time.time()}'
case = self.v3_helper.create_case(xid=case_xid)
# [Create Testing] define object data
artifact_data = {
'case_xid': case.model.xid,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
# [Create Testing] create the object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] create the object to the TC API
artifact.create()
# [Retrieve Testing] create the object with id filter,
# using object id from the object created above
artifact = self.v3.artifact(id=artifact.model.id)
# [Retrieve Testing] get the object from the API
artifact.get()
# [Retrieve Testing] run assertions on returned data
assert artifact.model.intel_type == artifact_data.get('intel_type')
assert artifact.model.summary == artifact_data.get('summary')
assert artifact.model.type == artifact_data.get('type')
def test_artifact_delete_by_id(self):
"""Test Artifact Deletion"""
# [Pre-Requisite] - create case and provide a unique xid
case = self.v3_helper.create_case()
# [Create Testing] define object data
artifact_data = {
'case_id': case.model.id,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
# [Create Testing] create the object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] create the object to the TC API
artifact.create()
# [Retrieve Testing] create the object with id filter,
# using object id from the object created above
artifact = self.v3.artifact(id=artifact.model.id)
# [Delete Testing] remove the object
artifact.delete()
# [Delete Testing] validate the object is removed
with pytest.raises(RuntimeError) as exc_info:
artifact.get()
# [Delete Testing] assert error message contains the correct code
# error -> "(952, 'Error during GET. API status code: 404, ..."
assert '952' in str(exc_info.value)
def test_artifact_get_many(self):
"""Test Artifact Get Many"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
artifact_count = 10
artifact_ids = []
for _ in range(0, artifact_count):
# [Create Testing] define object data
artifact_data = {
'case_id': case.model.id,
'intel_type': 'indicator-ASN',
'summary': f'asn{randint(100, 999)}',
'type': 'ASN',
}
# [Create Testing] create the object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] create the object to the TC API
artifact.create()
artifact_ids.append(artifact.model.id)
# [Retrieve Testing] iterate over all object looking for needle
artifacts = self.v3.artifacts(params={'resultLimit': 5})
artifacts.filter.case_id(TqlOperator.EQ, case.model.id)
        for a in artifacts:
            assert a.model.id in artifact_ids
            artifact_ids.remove(a.model.id)
assert len(artifacts) == artifact_count
assert not artifact_ids, 'Not all artifacts were returned.'
def test_artifact_task_get_single_by_id_properties(self, request: FixtureRequest):
"""Test Artifact get single attached to task by id"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
# [Create Testing] define task data
task_data = {
'case_id': case.model.id,
'description': f'a description from {request.node.name}',
'name': f'name-{request.node.name}',
'workflow_phase': 0,
'workflow_step': 1,
'xid': f'{request.node.name}-{time.time()}',
}
# [Create Testing] create the task object
task = self.v3.task(**task_data)
# [Create Testing] create the task with the TC API
task.create()
# [Create Testing] define the object file data
file_data = (
'RmFpbGVkIHRvIGZpbmQgbGliIGRpcmVjdG9yeSAoWydsaWJfbGF0ZXN0JywgJ2xpYl8yLjcuMTUnXSkuCg=='
)
# [Create testing] define object data
artifact_data = {
'task_id': task.model.id,
'task_xid': task.model.xid,
'source': 'artifact source',
'file_data': f'{file_data}',
'summary': 'pytest test file artifact',
'type': 'Certificate File',
'note_text': 'artifact note text',
}
# [Create Testing] add the note data to the object
artifact = self.v3.artifact()
# [Create Testing] testing setters on model
artifact.model.task_id = artifact_data.get('task_id')
artifact.model.task_xid = artifact_data.get('task_xid')
artifact.model.file_data = artifact_data.get('file_data')
artifact.model.source = artifact_data.get('source')
artifact.model.summary = artifact_data.get('summary')
artifact.model.type = artifact_data.get('type')
# [Create Testing] add the note data to the object
note_data = {'text': artifact_data.get('note_text')}
artifact.stage_note(note_data)
# [Create Testing] create the object to the TC API
artifact.create()
# [Retrieve Testing] create the object with id filter,
# using object id from the object created above
artifact = self.v3.artifact(id=artifact.model.id)
# [Retrieve Testing] get the object from the API
artifact.get(params={'fields': ['_all_']})
# [Retrieve Testing] run assertions on returned data
assert artifact.model.case_id == case.model.id
assert artifact.model.case_xid == case.model.xid
assert artifact.model.file_data == file_data
assert artifact.model.source == artifact_data.get('source')
assert artifact.model.summary == artifact_data.get('summary')
assert artifact.model.task.name == task.model.name
assert artifact.model.task_id == task.model.id
assert artifact.model.task_xid == task.model.xid
assert artifact.model.intel_type is None
assert artifact.model.type == artifact_data.get('type')
        for note in artifact.model.notes.data:
            if note.text == artifact_data.get('note_text'):
                break
        else:
            assert False, 'Note not found'
# [Retrieve Testing] assert read-only data
assert artifact.model.analytics_priority_level is None
assert artifact.model.analytics_score is None
assert artifact.model.analytics_type is None
assert artifact.model.artifact_type.name == artifact_data.get('type')
assert artifact.model.parent_case.id == case.model.id
# [Retrieve Testing] test as_entity
assert artifact.as_entity.get('value') == artifact_data.get('summary')
def test_artifact_case_get_single_by_id_properties(self):
"""Test Artifact get single attached to case by id"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
# [Create Testing] define the object file data
file_data = (
'RmFpbGVkIHRvIGZpbmQgbGliIGRpcmVjdG9yeSAoWydsaWJfbGF0ZXN0JywgJ2xpYl8yLjcuMTUnXSkuCg=='
)
# [Create Testing] define object data
artifact_data = {
'case_id': case.model.id,
'case_xid': case.model.xid,
'source': 'artifact source',
'file_data': f'{file_data}',
'summary': 'pytest test file artifact',
'type': 'Certificate File',
'note_text': 'artifact note text',
}
# [Create Testing] create the object
artifact = self.v3.artifact()
# [Create Testing] using model setters
artifact.model.case_id = artifact_data.get('case_id')
artifact.model.case_xid = artifact_data.get('case_xid')
artifact.model.file_data = artifact_data.get('file_data')
artifact.model.source = artifact_data.get('source')
artifact.model.summary = artifact_data.get('summary')
artifact.model.type = artifact_data.get('type')
# [Create Testing] add the note data to the object
notes = {'data': [{'text': artifact_data.get('note_text')}]}
artifact.model.notes = notes
# [Create Testing] create the object to the TC API
artifact.create()
# [Retrieve Testing] define the object with id filter,
# using object id from the object created above
artifact = self.v3.artifact(id=artifact.model.id)
# [Retrieve Testing] get the object from the API
artifact.get(params={'fields': ['_all_']})
# [Retrieve Testing] run assertions on returned data
assert artifact.model.case_id == artifact_data.get('case_id')
assert artifact.model.case_xid == artifact_data.get('case_xid')
assert artifact.model.file_data == file_data
assert artifact.model.source == artifact_data.get('source')
assert artifact.model.summary == artifact_data.get('summary')
assert artifact.model.task_id is None
assert artifact.model.task_xid is None
assert artifact.model.intel_type is None
assert artifact.model.type == artifact_data.get('type')
        for note in artifact.model.notes.data:
            if note.text == artifact_data.get('note_text'):
                break
        else:
            assert False, 'Note not found'
# [Retrieve Testing] run assertions on returned data
assert artifact.model.analytics_priority_level is None
assert artifact.model.analytics_score is None
assert artifact.model.analytics_type is None
assert artifact.model.artifact_type.name == artifact_data.get('type')
assert artifact.model.parent_case.id == case.model.id
# [Retrieve Testing] run assertions on returned data
assert artifact.as_entity.get('value') == artifact_data.get('summary')
def test_artifact_update_properties(self):
"""Test updating artifacts properties"""
# [Pre-Requisite] - create case
case = self.v3_helper.create_case()
# [Create Testing] define the object file data
file_data = (
'FmFpbGVkIHRvIGZpbmQgbGliIGRpcmVjdG9yeSAoWydsaWJfbGF0ZXN0JywgJ2xpYl8yLjcuMTUnXSkuCg=='
)
# [Create Testing] define object data
artifact_data = {
'case_id': case.model.id,
'file_data': f'{file_data}',
'summary': f'asn{randint(100, 999)}',
'type': 'Certificate File',
}
# [Create Testing] create the object
artifact = self.v3.artifact(**artifact_data)
# [Create Testing] create the object to the TC API
artifact.create()
# [Create Testing] define the object file data
file_data = (
'GmFpbGVkIHRvIGZpbmQgbGliIGRpcmVjdG9yeSAoWydsaWJfbGF0ZXN0JywgJ2xpYl8yLjcuMTUnXSkuCg=='
)
# [Create Testing] define object data
artifact_data = {
'source': 'artifact source',
'file_data': f'{file_data}',
'summary': f'asn{randint(100, 999)}',
}
# [Update Testing] update object properties
artifact.model.source = artifact_data.get('source')
artifact.model.summary = artifact_data.get('summary')
artifact.model.file_data = artifact_data.get('file_data')
# [Update Testing] update the object to the TC API
artifact.update()
# [Retrieve Testing] get the object from the API
artifact.get(params={'fields': ['_all_']})
# [Retrieve Testing] run assertions on returned data
assert artifact.model.source == artifact_data.get('source')
assert artifact.model.summary == artifact_data.get('summary')
assert artifact.model.file_data == artifact_data.get('file_data')
def test_artifact_get_by_tql_filter_fail_tql(self):
"""Test Artifact Get by TQL"""
# retrieve object using TQL
artifacts = self.v3.artifacts()
artifacts.filter.tql = 'Invalid TQL'
# [Fail Testing] validate the object is removed
with pytest.raises(RuntimeError) as exc_info:
for _ in artifacts:
pass
# [Fail Testing] assert error message contains the correct code
# error -> "(950, 'Error during pagination. API status code: 400, ..."
assert '950' in str(exc_info.value)
assert artifacts.request.status_code == 400
|
ThreatConnect-Inc/tcex
|
tests/api/tc/v3/artifacts/test_artifact_interface.py
|
Python
|
apache-2.0
| 24,882
|
#!/usr/bin/env python
"""
A basic example of how to get a password from the default
keyring of your environment.
"""
import keyring
password = keyring.get_password(u"dummyapp", u"mark.veltzer@gmail.com")
print("your password is [{}]".format(password))
|
veltzer/demos-python
|
src/examples/short/keyring/get.py
|
Python
|
gpl-3.0
| 253
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import re
import sys
import traceback
import sickbeard
import urllib
import datetime
from lib.dateutil import parser
from common import USER_AGENT, Quality
class SickBeardURLopener(urllib.FancyURLopener):
version = USER_AGENT
class AuthURLOpener(SickBeardURLopener):
"""
URLOpener class that supports http auth without needing interactive password entry.
If the provided username/password don't work it simply fails.
user: username to use for HTTP auth
pw: password to use for HTTP auth
"""
def __init__(self, user, pw):
self.username = user
self.password = pw
# remember if we've tried the username/password before
self.numTries = 0
# call the base class
urllib.FancyURLopener.__init__(self)
def prompt_user_passwd(self, host, realm):
"""
Override this function and instead of prompting just give the
username/password that were provided when the class was instantiated.
"""
# if this is the first try then provide a username/password
if self.numTries == 0:
self.numTries = 1
return (self.username, self.password)
# if we've tried before then return blank which cancels the request
else:
return ('', '')
# this is pretty much just a hack for convenience
def openit(self, url):
self.numTries = 0
return SickBeardURLopener.open(self, url)
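# Hedged usage sketch (the URL and credentials below are illustrative placeholders):
#
#     opener = AuthURLOpener('someuser', 'somepass')
#     handle = opener.openit('http://example.com/protected/feed.xml')
#     data = handle.read()
#
# openit() resets the retry counter, so the stored credentials are offered once;
# if the server rejects them, prompt_user_passwd() returns blanks and the request
# is cancelled rather than prompting interactively.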
class SearchResult:
"""
Represents a search result from an indexer.
"""
def __init__(self, episodes):
self.provider = -1
# release show object
self.show = None
# URL to the NZB/torrent file
self.url = ""
# used by some providers to store extra info associated with the result
self.extraInfo = []
# list of TVEpisode objects that this result is associated with
self.episodes = episodes
# quality of the release
self.quality = Quality.UNKNOWN
# release name
self.name = ""
# size of the release (-1 = n/a)
self.size = -1
# release group
self.release_group = ""
# version
self.version = -1
# hash
self.hash = ""
# content
self.content = ""
def __str__(self):
if self.provider is None:
return "Invalid provider, unable to print self"
myString = self.provider.name + " @ " + self.url + "\n"
myString += "Extra Info:\n"
for extra in self.extraInfo:
myString += " " + extra + "\n"
myString += "Episode: " + str(self.episodes) + "\n"
myString += "Quality: " + Quality.qualityStrings[self.quality] + "\n"
myString += "Name: " + self.name + "\n"
myString += "Size: " + str(self.size) + "\n"
myString += "Release Group: " + str(self.release_group) + "\n"
return myString
def fileName(self):
return self.episodes[0].prettyName() + "." + self.resultType
class NZBSearchResult(SearchResult):
"""
    Regular NZB result with a URL to the NZB
"""
resultType = "nzb"
class NZBDataSearchResult(SearchResult):
"""
NZB result where the actual NZB XML data is stored in the extraInfo
"""
resultType = "nzbdata"
class TorrentSearchResult(SearchResult):
"""
    Torrent result with a URL to the torrent
"""
resultType = "torrent"
class AllShowsListUI:
"""
This class is for indexer api. Instead of prompting with a UI to pick the
desired result out of a list of shows it tries to be smart about it
based on what shows are in SB.
"""
def __init__(self, config, log=None):
self.config = config
self.log = log
def selectSeries(self, allSeries):
searchResults = []
seriesnames = []
# get all available shows
if allSeries:
if 'searchterm' in self.config:
searchterm = self.config['searchterm']
# try to pick a show that's in my show list
for curShow in allSeries:
if curShow in searchResults:
continue
if 'seriesname' in curShow:
seriesnames.append(curShow['seriesname'])
if 'aliasnames' in curShow:
seriesnames.extend(curShow['aliasnames'].split('|'))
for name in seriesnames:
if searchterm.lower() in name.lower():
if 'firstaired' not in curShow:
curShow['firstaired'] = str(datetime.date.fromordinal(1))
curShow['firstaired'] = re.sub("([-]0{2}){1,}", "", curShow['firstaired'])
fixDate = parser.parse(curShow['firstaired'], fuzzy=True).date()
curShow['firstaired'] = fixDate.strftime("%Y-%m-%d")
if curShow not in searchResults:
searchResults += [curShow]
return searchResults
class ShowListUI:
"""
This class is for tvdb-api. Instead of prompting with a UI to pick the
desired result out of a list of shows it tries to be smart about it
based on what shows are in SB.
"""
def __init__(self, config, log=None):
self.config = config
self.log = log
def selectSeries(self, allSeries):
try:
# try to pick a show that's in my show list
for curShow in allSeries:
if filter(lambda x: int(x.indexerid) == int(curShow['id']), sickbeard.showList):
return curShow
except:
pass
# if nothing matches then return first result
return allSeries[0]
class Proper:
def __init__(self, name, url, date, show):
self.name = name
self.url = url
self.date = date
self.provider = None
self.quality = Quality.UNKNOWN
self.release_group = None
self.version = -1
self.show = show
self.indexer = None
self.indexerid = -1
self.season = -1
self.episode = -1
self.scene_season = -1
self.scene_episode = -1
def __str__(self):
return str(self.date) + " " + self.name + " " + str(self.season) + "x" + str(self.episode) + " of " + str(
self.indexerid) + " from " + str(sickbeard.indexerApi(self.indexer).name)
class ErrorViewer():
"""
Keeps a static list of UIErrors to be displayed on the UI and allows
the list to be cleared.
"""
errors = []
def __init__(self):
ErrorViewer.errors = []
@staticmethod
def add(error):
ErrorViewer.errors.append(error)
@staticmethod
def clear():
ErrorViewer.errors = []
@staticmethod
def get():
return ErrorViewer.errors
class UIError():
"""
Represents an error to be displayed in the web UI.
"""
def __init__(self, message):
self.title = sys.exc_info()[-2]
self.message = message
self.time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
bcorbet/SickRage
|
sickbeard/classes.py
|
Python
|
gpl-3.0
| 7,984
|
import os
from ftplib import FTP
import sys
import getpass
from tkinter import *
def clearScreen():
os.system('cls')
def connect(host, user, passwd):
try:
ftp = FTP(host)
ftp.login(user=user, passwd=passwd)
print("Successfully connected to " + host + " as " + user + "...")
return ftp
except:
print("Unable to connect...")
sys.exit()
def upload(ftp, file):
ext = os.path.splitext(file)[1]
if ext in (".txt", ".htm", ".html"):
ftp.storlines("STOR " + file, open(file))
else:
ftp.storbinary("STOR " + file, open(file, 'rb'), 1024)
def showCommands():
print("\nCommands:\n-------------")
print("cd <dir> : change current working directory")
print("ls : output the current working directory listing")
print("rm <file> : delete file")
print("rmd <dir> : delete directory")
print("mkdir <dir> : create new directory")
print("dl <file> : download file")
print("ul <file> : upload file")
print("rn <old> <new> : rename old file to new file")
print("clear : clears screen")
print("exit : disconnect\n")
def main():
clearScreen()
print("Welcome to the Python FTP Client")
host = input("Host: ")
user = input("Username: ")
passwd = getpass.getpass("Password: ")
ftp = connect(host, user, passwd)
print("Use 'help' to show commands\n")
command = input(">> ")
while command != "exit":
if command == "help":
showCommands()
command = input(">> ")
continue
try:
if command[:2] == 'cd':
# change dir
d = command.split(' ')[1]
ftp.cwd(d)
elif command[:2] == 'rm' and len(command.split(' ')[0]) == 2:
# remove file
f = command.split(' ')[1]
ftp.delete(f)
elif command[:2] == 'dl':
# download file
f = command.split(' ')[1]
ftp.retrbinary("RETR " + f, open("download_" + f, 'wb').write)
elif command[:2] == 'ls':
# show listing
ftp.dir()
elif command[:2] == 'rn':
# rename file
old = command.split(' ')[1]
new = command.split(' ')[2]
ftp.rename(old, new)
elif command[:2] == 'ul':
# upload new file
f = command.split(' ')[1]
upload(ftp, f)
elif command[:3] == 'rmd':
# remove directory
d = command.split(' ')[1]
ftp.rmd(d)
elif command[:5] == 'mkdir':
# make new directory
d = command.split(' ')[1]
ftp.mkd(d)
elif command == "clear":
# clear screen
clearScreen()
else:
print("Invalid command. Try again...\n")
except:
print("Invalid command. Try again...\n")
command = input(">> ")
try:
ftp.quit()
except:
pass
sys.exit()
if __name__ == "__main__":
main()
|
zach-king/Python-Miscellaneous
|
Networking/ftpClient.py
|
Python
|
gpl-2.0
| 3,235
|
from distutils.core import setup
import os
# Stolen from django-registration
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir:
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('tt_disposal_wells'):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'):
del dirnames[i]
if '__init__.py' in filenames:
pkg = dirpath.replace(os.path.sep, '.')
if os.path.altsep:
pkg = pkg.replace(os.path.altsep, '.')
packages.append(pkg)
elif filenames:
prefix = dirpath[len('tt_disposal_wells/'):]
for f in filenames:
data_files.append(os.path.join(prefix, f))
setup(
name='tt_disposal_wells',
version='0.6.2',
description='Texas Tribune: tt_disposal_wells',
author='Tribune Tech',
author_email='tech@texastribune.org',
url='http://github.com/texastribune/tt_disposal_wells/',
packages=packages,
package_data={'tt_disposal_wells': data_files},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
],
)
|
texastribune/tt_disposal_wells
|
setup.py
|
Python
|
apache-2.0
| 1,566
|
# (c) Crown Owned Copyright, 2016. Dstl.
import csv
from django import forms
from django.core.urlresolvers import reverse
from django.db.models import Count
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect
from django.utils import timezone
from django.utils.text import slugify
from django.views.generic import (
CreateView,
DetailView,
ListView,
UpdateView,
View,
)
from taggit.models import Tag
from .models import Link, LinkUsage, LinkEdit
from .forms import LinkUpdateForm
from apps.access import LoginRequiredMixin
from haystack.inputs import AutoQuery
from haystack.query import SearchQuerySet
from apps.search.models import SearchQuery, SearchTerm
import markdown
class LinkDetail(LoginRequiredMixin, DetailView):
model = Link
def get_context_data(self, **kwargs):
context = super(LinkDetail, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
is_fav = self.request.user.favourites.filter(
id=self.object.id
).exists()
context['favourite'] = is_fav
context['not_lighthouse_link'] = self.object.id not in [1, 2]
if self.object.description is not None:
html = markdown.markdown(self.object.description)
context['html_description'] = html
else:
context['html_description'] = ''
return context
class LinkRedirect(LoginRequiredMixin, DetailView):
model = Link
def get(self, request, *args, **kwargs):
self.object = self.get_object()
if self.object.is_external and request.GET.get('redirect') is None:
return redirect(
reverse('link-interstitial', kwargs={'pk': self.object.pk})
)
# if request.user is not None:
self.object.register_usage(request.user)
return redirect(self.object.destination)
class LinkInterstitial(LoginRequiredMixin, DetailView):
model = Link
template_name_suffix = '_interstitial'
def clean_categories(provided_categories):
cleaned_categories = []
for cat in [cat.strip(' ,') for cat in provided_categories]:
# Check first if there's anything left, and if there is,
# determine if it's one of the checklisted items or from the
# text box (indicated by comma-separation)
if cat:
            if type(cat.split(',')) is list:
                cleaned_categories.extend(
                    [new_cat.strip(' ,') for new_cat in cat.split(',')]
                )
            else:
                cleaned_categories.append(cat)
return cleaned_categories
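# Hedged illustration of clean_categories() (values are made up): checkbox
# selections and a comma-separated free-text entry flatten into a single list
# of trimmed category names, e.g.
#
#     clean_categories(['GIS ', 'Mapping, Imagery , '])  ->  ['GIS', 'Mapping', 'Imagery']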
class CategoriesFormMixin(object):
def get_context_data(self, **kwargs):
context = super(CategoriesFormMixin, self).get_context_data(**kwargs)
context['existing_categories'] = Tag.objects.all()
return context
def clean(self):
cleaned_data = super(CategoriesFormMixin, self).clean()
if not self.request.user.is_authenticated():
raise forms.ValidationError(
"You must be logged in to create a link"
)
return cleaned_data
def form_valid(self, form):
form.instance.owner = self.request.user
# first save gets us the object in the database (when creating a new
# link), as you can't apply tags without a primary key
link = form.save()
provided_categories = form.data.getlist('categories')
cleaned_categories = clean_categories(provided_categories)
form.instance.categories.set(*cleaned_categories)
link.save()
self.object = link
return HttpResponseRedirect(self.get_success_url())
class LinkCreate(LoginRequiredMixin, CategoriesFormMixin, CreateView):
model = Link
fields = [
'name', 'description', 'destination', 'is_external', 'categories'
]
def get_context_data(self, **kwargs):
context = super(LinkCreate, self).get_context_data(**kwargs)
context['not_lighthouse_link'] = True
return context
class LinkUpdate(LoginRequiredMixin, CategoriesFormMixin, UpdateView):
model = Link
form_class = LinkUpdateForm
def get_context_data(self, **kwargs):
context = super(LinkUpdate, self).get_context_data(**kwargs)
context['not_lighthouse_link'] = self.object.pk not in [1, 2]
return context
def form_valid(self, form):
original_link = self.get_object()
form_valid = super(LinkUpdate, self).form_valid(form)
if form_valid:
link = self.get_object()
link.owner = original_link.owner
link.save()
LinkEdit.objects.create(
link=self.object,
user=self.request.user
)
return form_valid
class LinkList(LoginRequiredMixin, ListView):
model = Link
paginate_by = 5
template_name = 'links/link_list.html'
def has_query(self):
return 'q' in self.request.GET and len(self.request.GET['q']) > 0
def has_categories(self):
return 'categories' in self.request.GET
def has_favourites(self):
has_favs = 'favourites' in self.request.GET
return has_favs and self.request.GET.get('favourites') == 'true'
def external_only(self):
types = self.request.GET.getlist('types', [])
if type(types) == str:
types = [types]
return 'internal' not in types and 'external' in types
def internal_only(self):
types = self.request.GET.getlist('types', [])
if type(types) == str:
types = [types]
return 'internal' in types and 'external' not in types
def get_queryset(self):
queryset = super(LinkList, self).get_queryset().order_by('-added')
not_on_page = 'page' not in self.request.GET
if self.has_query():
queryset = SearchQuerySet().filter(
content=AutoQuery(self.request.GET['q']),
).filter_or(
categories=AutoQuery(self.request.GET['q'])
).filter_or(
network_location=AutoQuery(self.request.GET['q'])
)
if self.has_favourites():
fav_pks = [l.pk for l in self.request.user.favourites.all()]
if self.has_query():
queryset = queryset.models(Link).filter(
key__in=fav_pks
)
else:
queryset = queryset.filter(
id__in=fav_pks
).distinct()
if self.has_categories():
categories_to_filter = dict(self.request.GET)['categories']
if type(categories_to_filter) == str:
categories_to_filter = [categories_to_filter]
if self.has_query():
queryset = queryset.models(Link).filter(
categories__in=categories_to_filter
)
else:
# At this point, the queryset should already be ordered because
# of the original get_queryset call at the beginning of this
# function.
queryset = queryset.filter(
categories__name__in=categories_to_filter
).distinct()
if self.external_only() or self.internal_only():
if self.has_query():
queryset = queryset.models(Link).exclude(
is_external=self.internal_only()
)
else:
# At this point, the queryset should already be ordered because
# of the original get_queryset call at the beginning of this
# function.
queryset = queryset.exclude(
is_external=self.internal_only()
).distinct()
if self.has_query() and not self.has_categories() and not_on_page:
# At this point the queryset is a list of SearchResult objects, all
# of them. So, the length is accurate. By the time it reaches
# context, it won't be.
st, created = SearchTerm.objects.get_or_create(
query=self.request.GET.get('q')
)
sq = SearchQuery()
sq.term = st
sq.results_length = len(queryset)
sq.user = self.request.user
sq.save()
return queryset
def get_context_data(self, **kwargs):
context = super(LinkList, self).get_context_data(**kwargs)
querystrings = []
if 'categories' in self.request.GET:
categories_to_filter = dict(self.request.GET)['categories']
if type(categories_to_filter) == str:
categories_to_filter = [categories_to_filter]
else:
categories_to_filter = []
querystrings = ['categories=%s' % c for c in categories_to_filter]
# At this point the context contains object_list which is either a list
# of SearchResult objects or Link objects
types_to_filter = self.request.GET.getlist('types', [])
querystrings += ['types=%s' % t for t in types_to_filter]
if self.has_query():
context['query'] = self.request.GET['q']
context['object_list'] = [result.object for
result in
context['object_list']]
querystrings += ['q=%s' % self.request.GET['q']]
context['categories'] = Tag.objects.all()
context['filtered_categories'] = categories_to_filter
context['filtered_types'] = types_to_filter
context['total_links_in_db'] = Link.objects.count()
context['favourites_filtered'] = self.has_favourites()
context['has_js'] = self.request.session.get('has_js', False)
context['extra_query_strings'] = '&'.join(querystrings)
return context
class LinkStats(LoginRequiredMixin, DetailView):
model = Link
template_name_suffix = '_stats'
class LinkStatsCSV(LoginRequiredMixin, DetailView):
model = Link
def get(self, request, *args, **kwargs):
link = self.get_object()
date = timezone.now().strftime('%Y_%m_%d')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = \
'attachment; filename="lighthouse_%s_%s.csv"' % (
slugify(link.name),
date
)
writer = csv.writer(response)
writer.writerow(['Date', 'Duration', 'User', 'Tool'])
for usage in link.usage.all():
writer.writerow([
usage.start.strftime("%Y-%m-%d %H:%M:%S"),
usage.duration,
usage.user.userid,
usage.link
])
return response
class OverallLinkStats(LoginRequiredMixin, ListView):
template_name = 'links/link_overall_stats.html'
def get_queryset(self):
return sorted(
Link.objects.exclude(id=2).annotate(Count('usage')),
key=lambda o: (o.usage_past_thirty_days()),
reverse=True
)
class OverallLinkStatsCSV(LoginRequiredMixin, View):
def get(self, request, *args, **kwargs):
date = timezone.now().strftime('%Y_%m_%d')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = \
'attachment; filename="lighthouse_full_%s.csv"' % date
writer = csv.writer(response)
writer.writerow(['Date', 'Duration', 'User', 'Tool', 'External?'])
for usage in LinkUsage.objects.all():
external = ''
if usage.link.is_external:
external = 'External'
writer.writerow([
usage.start.strftime("%Y-%m-%d %H:%M:%S"),
usage.duration,
usage.user.userid,
usage.link,
external,
])
return response
|
dstl/lighthouse
|
apps/links/views.py
|
Python
|
mit
| 11,986
|
from src import cron
from src.api import Api
api = Api()
def fetch_photo():
api.fetch_photo()
@cron.route('/worker', methods=['GET'])
def scheduler_worker():
fetch_photo()
return 'fetch photo...'
|
Assassinss/daily-artwork
|
src/worker.py
|
Python
|
gpl-3.0
| 229
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This script understands various command-line arguments:
Task commands:
-featured use this script for featured articles. Default task if no task
command is specified
-good use this script for good articles.
-lists use this script for featured lists.
-former use this script for removing {{Link FA|xx}} from former
 featured articles
NOTE: you may have all of these commands in one run
Option commands:
-interactive: ask before changing each page
-nocache doesn't use the cache file to remember whether the article
 was already verified.
-nocache:xx,yy you may ignore language codes xx,yy,... from cache file
-fromlang:xx,yy xx,yy,zz,.. are the languages to be verified.
-fromlang:ar--fi Another possibility is to give a range of languages
(sorry, not implemented yet)
-fromall to verify all languages.
-tolang:xx,yy xx,yy,zz,.. are the languages to be updated
-after:zzzz process pages after and including page zzzz
(sorry, not implemented yet)
-side use -side if you want to move all {{Link FA|lang}} next to the
corresponding interwiki links. Default is placing
{{Link FA|lang}} on top of the interwiki links.
(This option is deprecated with wikidata)
-count Only counts how many featured/good articles exist
on all wikis (given with the "-fromlang" argument) or
on several language(s) (when using the "-fromall" argument).
Example: featured.py -fromlang:en,he -count
counts how many featured articles exist in the en and he
wikipedias.
(sorry, not implemented yet)
-quiet no corresponding pages are displayed.
"""
__version__ = '$Id$'
#
# (C) Maxim Razin, 2005
# (C) Leonardo Gregianin, 2005-2008
# (C) xqt, 2009-2013
# (C) Pywikipedia bot team, 2005-2012
#
# Distributed under the terms of the MIT license.
#
import os.path
import pickle
import re
import sys
from copy import copy
import pywikibot
from pywikibot import i18n
from pywikibot import config
from pywikibot.pagegenerators import PreloadingGenerator
def CAT(site, name, hide):
name = site.namespace(14) + ':' + name
cat = pywikibot.Category(site, name)
for article in cat.articles(endsort=hide):
yield article
if hide:
for article in cat.articles(startFrom=unichr(ord(hide) + 1)):
yield article
def BACK(site, name, hide):
p = pywikibot.Page(site, name, ns=10)
return [page for page in p.getReferences(follow_redirects=False,
onlyTemplateInclusion=True)]
def DATA(site, name, hide):
dp = pywikibot.ItemPage(site.data_repository(), name)
try:
title = dp.getSitelink(site)
except pywikibot.PageNotFound:
return
cat = pywikibot.Category(site, title)
for article in cat.articles(endsort=hide):
yield article
if hide:
for article in cat.articles(startFrom=unichr(ord(hide) + 1)):
yield article
# not implemented yet
def TMPL(site, name, hide):
return
# ALL wikis use 'Link FA', and sometimes other localized templates.
# We use _default AND the localized ones
template = {
'_default': ['Link FA'],
'als': ['LinkFA'],
'an': ['Destacato', 'Destacau'],
'ar': [u'وصلة مقالة مختارة'],
'ast': ['Enllaz AD'],
'az': ['Link FM'],
'br': ['Liamm PuB', 'Lien AdQ'],
'ca': [u'Enllaç AD', 'Destacat'],
'cy': ['Cyswllt erthygl ddethol', 'Dolen ED'],
'eo': ['LigoElstara'],
'en': ['Link FA', 'FA link'],
'es': ['Destacado'],
'eu': ['NA lotura'],
'fr': ['Lien AdQ'],
'fur': ['Leam VdC'],
'ga': ['Nasc AR'],
'hi': ['Link FA', 'Lien AdQ'],
'is': [u'Tengill ÚG'],
'it': ['Link AdQ'],
'no': ['Link UA'],
'oc': ['Ligam AdQ', 'Lien AdQ'],
'ro': [u'Legătură AC', u'Legătură AF'],
'sv': ['UA', 'Link UA'],
'tr': ['Link SM'],
'vi': [u'Liên kết chọn lọc'],
'vo': [u'Yüm YG'],
'yi': [u'רא'],
}
template_good = {
'_default': ['Link GA'],
'ar': [u'وصلة مقالة جيدة'],
'da': ['Link GA', 'Link AA'],
'eo': ['LigoLeginda'],
'es': ['Bueno'],
'fr': ['Lien BA'],
'is': ['Tengill GG'],
'it': ['Link VdQ'],
'nn': ['Link AA'],
'no': ['Link AA'],
'pt': ['Bom interwiki'],
## 'tr': ['Link GA', 'Link KM'],
'vi': [u'Liên kết bài chất lượng tốt'],
'wo': ['Lien BA'],
}
template_lists = {
'_default': ['Link FL'],
'no': ['Link GL'],
}
featured_name = {
'wikidata': (DATA, u'Q4387444'),
}
good_name = {
'wikidata': (DATA, 'Q7045856'),
}
lists_name = {
'wikidata': (TMPL, 'Q5857568'),
'ar': (BACK, u'قائمة مختارة'),
'da': (BACK, u'FremragendeListe'),
'de': (BACK, u'Informativ'),
'en': (BACK, u'Featured list'),
'fa': (BACK, u"فهرست برگزیده"),
'id': (BACK, u'Featured list'),
'ja': (BACK, u'Featured List'),
'ksh': (CAT, u"Joode Leß"),
'no': (BACK, u'God liste'),
'pl': (BACK, u'Medalista'),
'pt': (BACK, u'Anexo destacado'),
'ro': (BACK, u'Listă de calitate'),
'ru': (BACK, u'Избранный список или портал'),
'tr': (BACK, u'Seçkin liste'),
'uk': (BACK, u'Вибраний список'),
'vi': (BACK, u'Sao danh sách chọn lọc'),
'zh': (BACK, u'特色列表'),
}
# Third parameter is the sort key indicating articles to hide from the given
# list
former_name = {
'ca': (CAT, u"Arxiu de propostes de la retirada de la distinció"),
'en': (CAT, u"Wikipedia former featured articles", "#"),
'es': (CAT, u"Wikipedia:Artículos anteriormente destacados"),
'fa': (CAT, u"مقالههای برگزیده پیشین"),
'hu': (CAT, u"Korábbi kiemelt cikkek"),
'pl': (CAT, u"Byłe artykuły na medal"),
'pt': (CAT, u"!Ex-Artigos_destacados"),
'ru': (CAT, u"Википедия:Устаревшие избранные статьи"),
'th': (CAT, u"บทความคัดสรรในอดีต"),
'tr': (CAT, u"Vikipedi eski seçkin maddeler"),
'zh': (CAT, u"已撤销的特色条目"),
}
class FeaturedBot(pywikibot.Bot):
# Bot configuration.
# Only the keys of the dict can be passed as init options
# The values are the default values
availableOptions = {
'always': False, # ask for confirmation when putting a page?
'async': False, # asynchron putting a page?
'count': False, # featuredcount
'featured': False,
'former': False,
'fromall': False,
'fromlang': None,
'good': False,
'list': False,
'nocache': list(),
'side': False, # not template_on_top
'quiet': False,
}
def __init__(self, **kwargs):
""" Only accepts options defined in availableOptions """
super(FeaturedBot, self).__init__(**kwargs)
self.editcounter = 0
self.fromlang = None
self.cache = dict()
self.filename = None
self.site = pywikibot.Site()
def hastemplate(self, task):
for tl in self.getTemplateList(self.site.lang, task):
tp = pywikibot.Page(self.site, tl, ns=10)
if not tp.exists():
return
return True
def readcache(self, task):
if not self.getOption('nocache') is True:
self.filename = pywikibot.config.datafilepath("cache", task)
try:
f = open(self.filename, "rb")
self.cache = pickle.load(f)
f.close()
pywikibot.output(u'Cache file %s found with %d items.'
% (self.filename, len(self.cache)))
except IOError:
pywikibot.output(u'Cache file %s not found.' % self.filename)
def writecache(self):
if not self.getOption('nocache') is True:
pywikibot.output(u'Writing %d items to cache file %s.'
% (len(self.cache), self.filename))
f = open(self.filename,"wb")
pickle.dump(self.cache, f)
f.close()
self.cache = dict()
def run(self):
done = False
if self.getOption('good'):
self.run_good()
done = True
if self.getOption('list'):
self.run_list()
done = True
if self.getOption('former'):
self.run_former()
done = True
if self.getOption('featured') or not done:
self.run_featured()
pywikibot.output(u'%d pages written.' % self.editcounter)
def run_good(self):
task = 'good'
if not self.hastemplate(task):
            pywikibot.output(u'\nNOTE: %s articles are not implemented at %s.'
                             % (task, self.site))
return
if self.getOption('fromall'):
item_no = good_name['wikidata'][1]
dp = pywikibot.ItemPage(pywikibot.Site().data_repository(), item_no)
dp.get()
### Quick and dirty hack - any ideas?
self.fromlang = [key.replace('wiki', '').replace('_', '-')
for key in dp.sitelinks.keys()]
else:
return ### 2DO
self.fromlang.sort()
self.readcache(task)
for code in self.fromlang:
try:
self.treat(code, task)
except KeyboardInterrupt:
pywikibot.output('\nQuitting featured treat...')
break
self.writecache()
# not implemented yet
def run_list(self):
return
# not implemented yet
def run_former(self):
return
def run_featured(self):
task = 'featured'
if not self.hastemplate(task):
            pywikibot.output(u'\nNOTE: %s articles are not implemented at %s.'
                             % (task, self.site))
return
if self.getOption('fromall'):
item_no = featured_name['wikidata'][1]
dp = pywikibot.ItemPage(pywikibot.Site().data_repository(), item_no)
dp.get()
### Quick and dirty hack - any ideas?
self.fromlang = [key.replace('wiki', '').replace('_', '-')
for key in dp.sitelinks.keys()]
else:
return ### 2DO
self.fromlang.sort()
self.readcache(task)
for code in self.fromlang:
try:
self.treat(code, task)
except KeyboardInterrupt:
pywikibot.output('\nQuitting featured treat...')
break
self.writecache()
def treat(self, code, process):
fromsite = pywikibot.Site(code)
if fromsite != self.site:
self.featuredWithInterwiki(fromsite,
not self.getOption('side'),
process,
self.getOption('quiet'),
config.simulate)
## def load(self, page):
## """
## Loads the given page, does some changes, and saves it.
## """
## try:
## # Load the page
## text = page.get()
## except pywikibot.NoPage:
## pywikibot.output(u"Page %s does not exist; skipping."
## % page.title(asLink=True))
## except pywikibot.IsRedirectPage:
## pywikibot.output(u"Page %s is a redirect; skipping."
## % page.title(asLink=True))
## else:
## return text
## return None
##
## def save(self, text, page, comment=None, minorEdit=True,
## botflag=True):
## # only save if something was changed
## if text == page.get():
## pywikibot.output(u'No changes were needed on %s'
## % page.title(asLink=True))
## return False
##
## # Show the title of the page we're working on.
## # Highlight the title in purple.
## pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
## % page.title())
## # show what was changed
## pywikibot.showDiff(page.get(), text)
## pywikibot.output(u'Comment: %s' %comment)
##
## if self.getOption('dry'):
## return False
##
## choice = 'a'
## if not self.getOption('always'):
## choice = pywikibot.inputChoice(
## u'Do you want to accept these changes?',
## ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
## if choice == 'a':
## # Remember the choice
## self.options['always'] = True
##
## if choice != 'n':
## try:
## # Save the page
## page.put(text, comment=comment or self.comment,
## minorEdit=minorEdit, botflag=botflag)
## except pywikibot.LockedPage:
## pywikibot.output(u"Page %s is locked; skipping."
## % page.title(asLink=True))
## except pywikibot.EditConflict:
## pywikibot.output(
## u'Skipping %s because of edit conflict'
## % (page.title()))
## except pywikibot.SpamfilterError, error:
## pywikibot.output(
##u'Cannot change %s because of spam blacklist entry %s'
## % (page.title(), error.url))
## else:
## return True
## return False
def featuredArticles(self, site, task, cache):
wikidata = False
code = site.lang
articles = []
if task == 'good':
info = good_name
code = 'wikidata'
elif task == 'former':
info = former_name
elif task == 'list':
info = lists_name
else:
info = featured_name
code = 'wikidata'
try:
method = info[code][0]
except KeyError:
pywikibot.error(
                u'language %s doesn\'t have a %s category source.'
% (code, task))
return
name = info[code][1]
# hide #-sorted items on en-wiki
try:
hide = info[code][2]
except IndexError:
hide = None
for p in method(site, name, hide):
if p.namespace() == 0: # Article
articles.append(p)
# Article talk (like in English)
elif p.namespace() == 1 and site.lang != 'el':
articles.append(pywikibot.Page(p.site,
p.title(withNamespace=False)))
pywikibot.output(
'\03{lightred}** %s has %i %s articles\03{default}'
% (site, len(articles), task))
for p in articles:
if p.title() < afterpage:
continue
if u"/" in p.title() and p.namespace() != 0:
pywikibot.output(u"%s is a subpage" % p.title())
continue
if p.title() in cache:
pywikibot.output(u"(cached) %s -> %s" % (p.title(),
cache[p.title()]))
continue
yield copy(p)
def findTranslated(self, page, oursite=None, quiet=False):
if not oursite:
oursite = self.site
if page.isRedirectPage():
page = page.getRedirectTarget()
ourpage = None
for link in page.iterlanglinks():
if link.site == oursite:
ourpage = pywikibot.Page(link)
break
if not ourpage:
if not quiet:
pywikibot.output(u"%s -> no corresponding page in %s"
% (page.title(), oursite))
return
if ourpage.section():
pywikibot.output(u"%s -> our page is a section link: %s"
% (page.title(), ourpage.title()))
return
if not ourpage.exists():
pywikibot.output(u"%s -> our page doesn't exist: %s"
% (page.title(), ourpage.title()))
return
if ourpage.isRedirectPage():
ourpage = ourpage.getRedirectTarget()
pywikibot.output(u"%s -> corresponding page is %s"
% (page.title(), ourpage.title()))
if ourpage.namespace() != 0:
pywikibot.output(u"%s -> not in the main namespace, skipping"
% page.title())
return
if ourpage.isRedirectPage():
pywikibot.output(u"%s -> double redirect, skipping" % page.title())
return
if not ourpage.exists():
pywikibot.output(u"%s -> page doesn't exist, skipping"
% ourpage.title())
return
backpage = None
for link in ourpage.iterlanglinks():
if link.site == page.site:
backpage = pywikibot.Page(link)
break
if not backpage:
pywikibot.output(u"%s -> no back interwiki ref" % page.title())
return
if backpage == page:
# everything is ok
return ourpage
if backpage.isRedirectPage():
backpage = backpage.getRedirectTarget()
if backpage == page:
# everything is ok
return ourpage
pywikibot.output(u"%s -> back interwiki ref target is %s"
% (page.title(), backpage.title()))
def getTemplateList(self, lang, task):
if task == 'good':
try:
templates = template_good[lang]
templates += template_good['_default']
except KeyError:
templates = template_good['_default']
elif task == 'list':
try:
templates = template_lists[lang]
templates += template_lists['_default']
except KeyError:
templates = template_lists['_default']
else: # task in ['former', 'featured']
try:
templates = template[lang]
templates += template['_default']
except KeyError:
templates = template['_default']
return templates
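        # Hedged illustration: getTemplateList('es', 'good') would combine the
        # language-specific and default names, e.g. ['Bueno', 'Link GA'].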
def featuredWithInterwiki(self, fromsite, template_on_top, task,
quiet, dry=False):
tosite = self.site
if not fromsite.lang in self.cache:
self.cache[fromsite.lang] = {}
if not tosite.lang in self.cache[fromsite.lang]:
self.cache[fromsite.lang][tosite.lang] = {}
cc = self.cache[fromsite.lang][tosite.lang]
if self.getOption('nocache') is True or \
fromsite.code in self.getOption('nocache'):
cc = {}
templatelist = self.getTemplateList(tosite.code, task)
findtemplate = '(' + '|'.join(templatelist) + ')'
re_Link_FA = re.compile(ur"\{\{%s\|%s\}\}"
% (findtemplate.replace(u' ', u'[ _]'),
fromsite.code), re.IGNORECASE)
gen = self.featuredArticles(fromsite, task, cc)
gen = PreloadingGenerator(gen)
pairs = []
for a in gen:
if a.isRedirectPage():
a = a.getRedirectTarget()
if not a.exists():
pywikibot.output(u"source page doesn't exist: %s"
% a.title())
continue
atrans = self.findTranslated(a, tosite, quiet)
if not atrans:
continue
if task != 'former':
text = atrans.get()
m = re_Link_FA.search(text)
if m:
pywikibot.output(u"(already done)")
else:
# insert just before interwiki
if (not interactive or
pywikibot.input(
u'Connecting %s -> %s. Proceed? [Y/N]'
% (a.title(), atrans.title())) in ['Y', 'y']):
site = pywikibot.getSite()
comment = pywikibot.setAction(
i18n.twtranslate(site, 'featured-' + task,
{'page': unicode(a)}))
# Moving {{Link FA|xx}} to top of interwikis
if template_on_top:
# Getting the interwiki
iw = pywikibot.getLanguageLinks(text, site)
# Removing the interwiki
text = pywikibot.removeLanguageLinks(text, site)
text += u"\r\n{{%s|%s}}\r\n" % (templatelist[0],
fromsite.code)
# Adding the interwiki
text = pywikibot.replaceLanguageLinks(text,
iw, site)
# Placing {{Link FA|xx}} right next to
# corresponding interwiki
else:
text = (text[:m.end()] +
(u" {{%s|%s}}" % (templatelist[0],
fromsite.code)) +
text[m.end():])
if not dry:
try:
atrans.put(text, comment)
except pywikibot.LockedPage:
pywikibot.output(u'Page %s is locked!'
% atrans.title())
except pywikibot.PageNotSaved, e:
pywikibot.output(u"Page not saved")
cc[a.title()] = atrans.title()
else:
text = atrans.get()
m = re_Link_FA.search(text)
if m:
# insert just before interwiki
if (not interactive or
pywikibot.input(
u'Connecting %s -> %s. Proceed? [Y/N]'
% (a.title(), atrans.title())) in ['Y', 'y']):
site = pywikibot.getSite()
comment = pywikibot.setAction(
i18n.twtranslate(site, 'featured-former',
{'page': unicode(a)}))
text = re.sub(re_Link_FA, '', text)
if not dry:
try:
atrans.put(text, comment)
except pywikibot.LockedPage:
pywikibot.output(u'Page %s is locked!'
% atrans.title())
except pywikibot.PageNotSaved, e:
pywikibot.output(u"Page not saved")
else:
pywikibot.output(u"(already done)")
cc[a.title()] = atrans.title()
def main(*args):
global interactive, afterpage
interactive = 0
afterpage = u"!"
featuredcount = False
fromlang = []
processType = 'featured'
part = False
options = {}
for arg in pywikibot.handleArgs():
if arg == '-interactive':
interactive = 1
elif arg.startswith('-fromlang:'):
fromlang = arg[10:].split(",")
part = True
elif arg.startswith('-after:'):
afterpage = arg[7:]
elif arg.startswith('-nocache:'):
options[arg[1:8]] = arg[9:].split(",")
else:
options[arg[1:].lower()] = True
if part:
try:
# BUG: range with zh-min-nan (3 "-")
if len(fromlang) == 1 and fromlang[0].index("-") >= 0:
start, end = fromlang[0].split("--", 1)
if not start:
start = ""
if not end:
end = "zzzzzzz"
if processType == 'good':
fromlang = [lang for lang in good_name.keys()
if lang >= start and lang <= end]
elif processType == 'list':
fromlang = [lang for lang in lists_name.keys()
if lang >= start and lang <= end]
elif processType == 'former':
fromlang = [lang for lang in former_name.keys()
if lang >= start and lang <= end]
else:
fromlang = [lang for lang in featured_name.keys()
if lang >= start and lang <= end]
except:
pass
## for ll in fromlang:
## fromsite = pywikibot.getSite(ll)
## if featuredcount:
## try:
## featuredArticles(fromsite, processType).next()
## except StopIteration:
## continue
## elif not hasTemplate:
## pywikibot.output(
## u'\nNOTE: %s arcticles are not implemented at %s-wiki.'
## % (processType, pywikibot.getSite().lang))
## pywikibot.output('Quitting program...')
## break
## elif fromsite != pywikibot.getSite():
## featuredWithInterwiki(fromsite, pywikibot.getSite(),
## template_on_top, processType, quiet,
## config.simulate)
if options:
bot = FeaturedBot(**options)
bot.run()
else:
pywikibot.showHelp()
if __name__ == "__main__":
try:
main()
finally:
pywikibot.stopme()
|
legoktm/pywikipedia-rewrite
|
scripts/featured.py
|
Python
|
mit
| 26,345
|
import pytest
from os import getcwd, listdir, mkdir
from os.path import join, isfile, isdir, getsize
from apio.commands.install import cli as cmd_install
from apio.commands.uninstall import cli as cmd_uninstall
from apio.commands.init import cli as cmd_init
from apio.commands.upload import cli as cmd_upload
from apio.commands.examples import cli as cmd_examples
def validate_files_leds(apioproject_dir):
path = join(apioproject_dir, 'leds.v')
assert isfile(path) and getsize(path) > 0
def validate_dir_leds(apioproject_dir):
path = join(apioproject_dir, 'leds')
assert isdir(path) and len(listdir(path)) > 0
@pytest.mark.skipif(pytest.config.getvalue('offline'),
reason="requires internet connection")
def test_complete(clirunner, validate_cliresult, configenv):
with clirunner.isolated_filesystem():
configenv()
# apio uninstall examples
result = clirunner.invoke(
cmd_uninstall, ['examples'], input='y')
validate_cliresult(result)
assert 'Do you want to continue?' in result.output
assert 'Package \'examples\' is not installed' in result.output
# apio install examples@X
result = clirunner.invoke(cmd_install, ['examples@X'])
assert 'Error: Invalid semantic version' in result.output
# apio install examples@0.0.7
result = clirunner.invoke(cmd_install, ['examples@0.0.7'])
validate_cliresult(result)
assert 'Installing examples package' in result.output
assert 'Downloading' in result.output
assert 'Unpacking' in result.output
assert 'has been successfully installed!' in result.output
# apio install examples
result = clirunner.invoke(cmd_install, ['examples'])
validate_cliresult(result)
assert 'Installing examples package' in result.output
assert 'Downloading' in result.output
assert 'Unpacking' in result.output
assert 'has been successfully installed!' in result.output
# apio install examples
result = clirunner.invoke(cmd_install, ['examples'])
validate_cliresult(result)
assert 'Installing examples package' in result.output
assert 'Already installed. Version ' in result.output
# apio install examples -p windows
result = clirunner.invoke(cmd_install, [
'examples', '--platform', 'windows', '--force'])
validate_cliresult(result)
assert 'Installing examples package' in result.output
assert 'Downloading' in result.output
assert 'Unpacking' in result.output
assert 'has been successfully installed!' in result.output
# apio install --list
result = clirunner.invoke(cmd_install, ['--list'])
validate_cliresult(result)
# apio init --board icezum
result = clirunner.invoke(cmd_init, ['--board', 'icezum'])
validate_cliresult(result)
assert 'Creating apio.ini file ...' in result.output
assert 'has been successfully created!' in result.output
# apio upload
result = clirunner.invoke(cmd_upload)
assert result.exit_code == 1
assert 'Board: icezum' in result.output
# apio examples --list
result = clirunner.invoke(cmd_examples, ['--list'])
validate_cliresult(result)
assert 'leds' in result.output
assert 'icezum' in result.output
# apio examples --files missing_example
result = clirunner.invoke(cmd_examples, ['--files', 'missing_example'])
validate_cliresult(result)
assert 'Warning: this example does not exist' in result.output
# apio examples --files leds
result = clirunner.invoke(cmd_examples, ['--files', 'leds'])
validate_cliresult(result)
assert 'Copying leds example files ...' in result.output
assert 'have been successfully created!' in result.output
validate_files_leds(getcwd())
# apio examples --dir leds
result = clirunner.invoke(cmd_examples, ['--dir', 'leds'])
validate_cliresult(result)
assert 'Creating leds directory ...' in result.output
assert 'has been successfully created!' in result.output
validate_dir_leds(getcwd())
# apio examples --dir leds
result = clirunner.invoke(cmd_examples, ['--dir', 'leds'], input='y')
validate_cliresult(result)
assert 'Warning: leds directory already exists' in result.output
assert 'Do you want to replace it?' in result.output
assert 'Creating leds directory ...' in result.output
assert 'has been successfully created!' in result.output
validate_dir_leds(getcwd())
dir_name = 'tmp'
mkdir(dir_name)
# apio examples --files leds --project-dir=tmp
result = clirunner.invoke(
cmd_examples, ['--files', 'leds', '--project-dir=tmp'])
validate_cliresult(result)
assert 'Copying leds example files ...' in result.output
assert 'have been successfully created!' in result.output
validate_files_leds(join(getcwd(), dir_name))
# apio examples --dir leds --project-dir=tmp
result = clirunner.invoke(
cmd_examples, ['--dir', 'leds', '--project-dir=tmp'])
validate_cliresult(result)
assert 'Creating leds directory ...' in result.output
assert 'has been successfully created!' in result.output
validate_dir_leds(join(getcwd(), dir_name))
# apio uninstall examples
result = clirunner.invoke(cmd_uninstall, ['examples'], input='n')
validate_cliresult(result)
assert 'Abort!' in result.output
# apio uninstall examples
result = clirunner.invoke(cmd_uninstall, ['examples'], input='y')
validate_cliresult(result)
assert 'Uninstalling examples package' in result.output
assert 'Do you want to continue?' in result.output
assert 'has been successfully uninstalled!' in result.output
|
set-soft/apio
|
test/packages/test_complete.py
|
Python
|
gpl-2.0
| 6,054
|
import numpy as np
import cv2
img = cv2.imread('check_video.png')
dst = cv2.fastNlMeansDenoisingColored(img, None, 10, 10, 7, 21)
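# The positional arguments above correspond to h=10 (luminance filter strength),
# hColor=10 (color filter strength), templateWindowSize=7 and searchWindowSize=21,
# roughly the values commonly shown in the OpenCV non-local means tutorial.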
cv2.imshow('before', img)
cv2.imshow('after', dst)
cv2.waitKey(0)
|
FYP-DES5/deepscan-core
|
misc/Denoiser2D.py
|
Python
|
mit
| 199
|
from flask.ext.wtf import Form
from wtforms import TextField, PasswordField, TextAreaField, SelectField, BooleanField, IntegerField
from wtforms.validators import Required
class LoginForm(Form):
username = TextField('username', validators = [Required()])
password = PasswordField('password', validators = [Required()])
class CreatePage(Form):
title = TextField('title', validators = [Required()])
content = TextAreaField('content', validators= [Required()])
class BlogPost(Form):
title = TextField('title', validators = [Required()])
content = TextAreaField('content', validators= [Required()])
class UserForm(Form):
username = TextField('username', validators = [Required()])
display_name = TextField('display_name')
new_password = PasswordField('new_password')
confirm_password = PasswordField('confirm_password')
class ConfigForm(Form):
site_display_name = TextField('site_display_name')
    site_title = TextField('site_title')
    site_strap_line = TextField('site_strap_line')
index_page_id = SelectField('index_page_id', coerce=int)
mail_server = TextField('mail_server')
mail_port = IntegerField('mail_port')
mail_username = TextField('mail_username')
mail_password = PasswordField('mail_password')
mail_use_tls = BooleanField('mail_use_tls')
mail_enable = BooleanField('mail_enable')
|
TinnedTuna/speakeasyspeeches
|
wsgi/speakeasy/forms.py
|
Python
|
bsd-2-clause
| 1,385
|
from imports import *
from sklearn.datasets import load_iris
iris_dataset = load_iris()
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
X_train, X_test, y_train, y_test = train_test_split(iris_dataset["data"], iris_dataset["target"], random_state=0)
knn = KNeighborsClassifier(n_neighbors=1)
knn.fit(X_train,y_train)
print("Test set score: {:.2f}".format(knn.score(X_test,y_test)))
input()
|
TNT-Samuel/Coding-Projects
|
Machine Learning with Python/Chapter 1/P_31.py
|
Python
|
gpl-3.0
| 450
|
from django import forms
from django.contrib import admin
from bluechannel.page.models import Highlight, Type, Event, Template, Page
class HighlightAdmin(admin.ModelAdmin):
save_on_top = True
pass
admin.site.register(Highlight, HighlightAdmin)
class TypeAdmin(admin.ModelAdmin):
save_on_top = True
pass
admin.site.register(Type, TypeAdmin)
class EventAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
save_on_top = True
list_display = ('name','event_start_date')
search_fields = ('name','description')
pass
admin.site.register(Event, EventAdmin)
class TemplateAdmin(admin.ModelAdmin):
save_on_top = True
list_display = ('name', 'description')
admin.site.register(Template, TemplateAdmin)
class TemplateModelChoiceField(forms.ModelChoiceField):
"""Based on ModelChoiceField, but using a radio button widget"""
widget = forms.RadioSelect
class PageAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
save_on_top = True
list_display = ('title', 'page_title', 'page_type', 'status', 'summary', 'author', 'updated_at', 'in_nav', 'parent')
list_filter = ('status', 'in_nav', 'page_type')
search_fields = ('title', 'page_title', 'summary', 'main_content')
def formfield_for_foreignkey(self, db_field, request, **kwargs):
if db_field.name == "template":
kwargs['form_class'] = TemplateModelChoiceField
return db_field.formfield(**kwargs)
return super(PageAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
admin.site.register(Page, PageAdmin)
|
davemerwin/blue-channel
|
bluechannel/page/admin.py
|
Python
|
bsd-3-clause
| 1,616
|
# Copyright (c) 2015 Cloudbase Solutions SRL
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_log import log
from manila.common import constants
from manila import exception
from manila.i18n import _, _LI
from manila.share.drivers import generic
from manila.share.drivers.windows import windows_utils
LOG = log.getLogger(__name__)
class WindowsSMBHelper(generic.NASHelperBase):
_SHARE_ACCESS_RIGHT_MAP = {
constants.ACCESS_LEVEL_RW: "Change",
constants.ACCESS_LEVEL_RO: "Read"}
_ICACLS_ACCESS_RIGHT_MAP = {
constants.ACCESS_LEVEL_RW: 'M',
constants.ACCESS_LEVEL_RO: 'R'}
def __init__(self, remote_execute, configuration):
self._remote_exec = remote_execute
self.configuration = configuration
self._windows_utils = windows_utils.WindowsUtils(
remote_execute=remote_execute)
def init_helper(self, server):
self._remote_exec(server, "Get-SmbShare")
def create_export(self, server, share_name, recreate=False):
export_location = '\\\\%s\\%s' % (server['public_address'],
share_name)
if not self._share_exists(server, share_name):
share_path = self._windows_utils.normalize_path(
os.path.join(self.configuration.share_mount_path,
share_name))
cmd = ['New-SmbShare', '-Name', share_name, '-Path', share_path]
self._remote_exec(server, cmd)
else:
LOG.info(_LI("Skipping creating export %s as it already exists."),
share_name)
return export_location
def remove_export(self, server, share_name):
if self._share_exists(server, share_name):
cmd = ['Remove-SmbShare', '-Name', share_name, "-Force"]
self._remote_exec(server, cmd)
else:
LOG.debug("Skipping removing export %s as it does not exist.",
share_name)
def _get_volume_path_by_share_name(self, server, share_name):
share_path = self._get_share_path_by_name(server, share_name)
volume_path = self._windows_utils.get_volume_path_by_mount_path(
server, share_path)
return volume_path
def allow_access(self, server, share_name, access_type, access_level,
access_to):
"""Add access for share."""
if access_type != 'user':
reason = _('Only user access type allowed.')
raise exception.InvalidShareAccess(reason=reason)
self._grant_share_access(server, share_name, access_level, access_to)
self._grant_share_path_access(server, share_name,
access_level, access_to)
def _grant_share_access(self, server, share_name, access_level, access_to):
access_right = self._SHARE_ACCESS_RIGHT_MAP[access_level]
cmd = ["Grant-SmbShareAccess", "-Name", share_name,
"-AccessRight", access_right,
"-AccountName", access_to, "-Force"]
self._remote_exec(server, cmd)
self._refresh_acl(server, share_name)
def _grant_share_path_access(self, server, share_name,
access_level, access_to):
# Set NTFS level permissions
access_right = self._ICACLS_ACCESS_RIGHT_MAP[access_level]
ace = '"%(access_to)s:(OI)(CI)%(access_right)s"' % dict(
access_to=access_to, access_right=access_right)
vol_path = self._get_volume_path_by_share_name(server, share_name)
cmd = ["icacls", self._windows_utils.quote_string(vol_path),
"/grant", ace, "/t", "/c"]
self._remote_exec(server, cmd)
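        # Hedged illustration (path and account are made up): for an 'rw' rule
        # the command above roughly expands to
        #   icacls "C:\shares\share-1" /grant "DOMAIN\user:(OI)(CI)M" /t /c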
def _refresh_acl(self, server, share_name):
cmd = ['Set-SmbPathAcl', '-ShareName', share_name]
self._remote_exec(server, cmd)
def deny_access(self, server, share_name, access, force=False):
access_to = access['access_to']
self._revoke_share_access(server, share_name, access_to)
self._revoke_share_path_access(server, share_name, access_to)
def _revoke_share_access(self, server, share_name, access_to):
cmd = ['Revoke-SmbShareAccess', '-Name', share_name,
'-AccountName', access_to, '-Force']
self._remote_exec(server, cmd)
self._refresh_acl(server, share_name)
def _revoke_share_path_access(self, server, share_name, access_to):
vol_path = self._get_volume_path_by_share_name(server, share_name)
cmd = ["icacls", self._windows_utils.quote_string(vol_path),
"/remove", access_to, "/t", "/c"]
self._remote_exec(server, cmd)
def _get_share_name(self, export_location):
return self._windows_utils.normalize_path(
export_location).split('\\')[-1]
def get_exports_for_share(self, server, old_export_location):
share_name = self._get_share_name(old_export_location)
data = dict(ip=server['public_address'], share_name=share_name)
return ['\\\\%(ip)s\\%(share_name)s' % data]
def _get_share_path_by_name(self, server, share_name,
ignore_missing=False):
cmd = ('Get-SmbShare -Name %s | '
'Select-Object -ExpandProperty Path' % share_name)
check_exit_code = not ignore_missing
(share_path, err) = self._remote_exec(server, cmd,
check_exit_code=check_exit_code)
return share_path.strip() if share_path else None
def get_share_path_by_export_location(self, server, export_location):
share_name = self._get_share_name(export_location)
return self._get_share_path_by_name(server, share_name)
def _share_exists(self, server, share_name):
share_path = self._get_share_path_by_name(server, share_name,
ignore_missing=True)
return bool(share_path)
|
jcsp/manila
|
manila/share/drivers/windows/windows_smb_helper.py
|
Python
|
apache-2.0
| 6,517
|
#(C) Copyright Syd Logan 2016
#(C) Copyright Thousand Smiles Foundation 2016
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#
#You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from django.conf.urls import url
from queue.views import QueueView
urlpatterns = [
url(r'^$', QueueView.as_view()),
]
|
slogan621/tscharts
|
queue/urls.py
|
Python
|
apache-2.0
| 735
|
import os
import unittest
import uuid
import floe
import webtest
import json
import wsgiadapter
import logging
import socket
import floe.restapi
import floe.connector
import time
import pymysql
wsgiadapter.logger.addHandler(logging.NullHandler())
mysql_user = os.getenv('MYSQL_USER', 'root')
mysql_pass = os.getenv('MYSQL_PASSWORD', None)
mysql_auth = "%s:%s" % (mysql_user, mysql_pass) \
if mysql_pass is not None else mysql_user
table_prefix_variable = int(time.time())
os.environ['FLOE_URL_TEST_FILE'] = 'file://.test_floe'
os.environ['FLOE_URL_TEST_REST_BOGUS'] = 'http://test-floe/bogus'
os.environ['FLOE_URL_TEST_REST_FILE'] = 'http://test-floe/test_file'
os.environ['FLOE_URL_TEST_REST_BROKEN'] = 'http://test-floe/broken'
adapter = wsgiadapter.WSGIAdapter(floe.floe_server())
floe.restapi.RestClientFloe.session.mount('http://test-floe/', adapter) # noqa
def drop_table(pool, table_name):
statement = "DROP table {}".format(table_name)
try:
with pool.connection() as connection:
with connection.cursor() as cursor:
cursor.execute(statement)
except pymysql.Error as e:
raise e
def is_local_mysql_running():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex(('127.0.0.1', 3306))
return True if result == 0 else False
codeship_build = os.getenv('CODESHIP_BUILD')
mysql_test_enable = True if \
os.getenv('MYSQL_TEST_ENABLE', is_local_mysql_running()) \
else False
MYSQL_TEST = unittest.skipIf(codeship_build or not mysql_test_enable,
'mysql test disabled on local and codeship')
BLOB_MAX_CHAR_LEN = 65535
MEDIUM_BLOB_MAX_CHAR_LEN = 16777215
def xid():
return uuid.uuid4().hex
class BrokenFloe(object):
def get(self, key):
raise floe.FloeReadException('failed to read')
def get_multi(self, keys):
raise floe.FloeReadException('failed to read')
def set(self, key, bin_data):
raise floe.FloeWriteException('failed to write')
def set_multi(self, mapping):
raise floe.FloeWriteException('failed to write')
def delete(self, key):
raise floe.FloeDeleteException('failed to delete')
def delete_multi(self, keys):
raise floe.FloeDeleteException('failed to delete')
def flush(self):
pass
def ids(self):
raise floe.FloeReadException('failed to read')
floe.connector._CONNECTIONS['BROKEN'] = BrokenFloe()
class FileFloeTest(unittest.TestCase):
def init_floe(self):
return floe.connect('test_file')
def setUp(self):
self.floe = self.init_floe()
self.floe.flush()
def tearDown(self):
self.floe.flush()
def test_main(self):
store = self.floe
foo = xid()
bar = xid()
bazz = xid()
foo_test_data = os.urandom(4096)
store.set(foo, foo_test_data)
self.assertEqual(store.get(foo), foo_test_data)
foo_test_data = os.urandom(500)
bazz_test_data = os.urandom(200)
store.set_multi({foo: foo_test_data, bazz: bazz_test_data})
self.assertEqual(store.get(foo), foo_test_data)
self.assertEqual(store.get(bazz), bazz_test_data)
self.assertEqual(store.get_multi([foo, bar, bazz]),
{foo: foo_test_data, bazz: bazz_test_data})
ids = {i for i in store.ids()}
self.assertEqual(ids, {bazz, foo})
store.delete(foo)
self.assertEqual(store.get_multi([foo, bar, bazz]),
{bazz: bazz_test_data})
store.delete_multi([foo, bar, bazz])
self.assertEqual(store.get_multi([foo, bar, bazz]), {})
self.assertRaises(floe.FloeInvalidKeyException,
lambda: store.get('foo/bar'))
self.assertRaises(floe.FloeInvalidKeyException,
lambda: store.set('foo/bar', '1'))
self.assertRaises(floe.FloeInvalidKeyException,
lambda: store.delete('foo/bar'))
class MysqlFloe(FileFloeTest):
def setUp(self):
self.mysql_tables = [
'%s_%s' % (table_name, table_prefix_variable)
for table_name in ['test_floe', 'test_floe_2', 'test_floe_3']
]
for index, table in enumerate(self.mysql_tables):
environ_key = 'FLOE_URL_%s' % table.upper()
url = "mysql://%s@127.0.0.1:3306/test?table=%s" % (
mysql_auth, table)
if index > 0:
url += "&dynamic_char_len=True"
if index > 1:
url += "&bin_data_type=blob"
os.environ[environ_key] = url
super(MysqlFloe, self).setUp()
def tearDown(self):
for table in self.mysql_tables:
store = floe.connect(table)
drop_table(store.pool, table)
def init_floe(self):
return floe.connect(self.mysql_tables[0])
@MYSQL_TEST
def test_main(self):
super(MysqlFloe, self).test_main()
@MYSQL_TEST
def test_uppercase(self):
store = self.floe
foo = xid()
foo_upper = foo.upper()
foo_test_data = os.urandom(10)
foo_upper_test_data = os.urandom(12)
self.assertNotEqual(foo_test_data, foo_upper_test_data)
store.set(foo, foo_test_data)
store.set(foo_upper, foo_upper_test_data)
self.assertEqual(store.get(foo), foo_test_data)
self.assertEqual(store.get(foo_upper), foo_upper_test_data)
@MYSQL_TEST
def test_data_overflow_from_sql(self):
store = floe.connect(self.mysql_tables[1])
foo = xid()
foo_smaller = foo.upper()
foo_data = os.urandom(MEDIUM_BLOB_MAX_CHAR_LEN + 1)
self.assertRaises(
floe.FloeDataOverflowException,
lambda: store.set(foo, foo_data))
foo_smaller_data = foo_data[:-1]
store.set(foo_smaller, foo_smaller_data)
self.assertEqual(store.get(foo_smaller), foo_smaller_data)
@MYSQL_TEST
def test_data_overflow(self):
store = self.floe
foo = xid()
foo_smaller = foo.upper()
foo_data = os.urandom(BLOB_MAX_CHAR_LEN + 1)
self.assertRaises(
floe.FloeDataOverflowException,
lambda: store.set(foo, foo_data))
foo_smaller_data = foo_data[:-1]
store.set(foo_smaller, foo_smaller_data)
self.assertEqual(store.get(foo_smaller), foo_smaller_data)
@MYSQL_TEST
def test_custom_bin_data_type(self):
store = floe.connect(self.mysql_tables[2])
foo = xid()
foo_smaller = foo.upper()
foo_data = os.urandom(BLOB_MAX_CHAR_LEN + 1)
self.assertRaises(
floe.FloeDataOverflowException,
lambda: store.set(foo, foo_data))
foo_smaller_data = foo_data[:-1]
store.set(foo_smaller, foo_smaller_data)
self.assertEqual(store.get(foo_smaller), foo_smaller_data)
class RestServerAdditionalRoute(object):
def on_get(self, req, resp):
resp.content_type = 'text/plain'
resp.body = 'additional'
class RestServerTest(unittest.TestCase):
def init_floe(self):
return floe.connect('test_file')
def setUp(self):
self.floe = self.init_floe()
self.floe.flush()
self.app = webtest.TestApp(floe.floe_server(
routes={'/testroute': RestServerAdditionalRoute()}))
def tearDown(self):
self.floe.flush()
def test_crud(self):
key = xid()
res = self.app.get('/test_file/%s' % key, expect_errors=True)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.content_length, 0)
data = os.urandom(100)
res = self.app.put('/test_file/%s' % key, params=data,
headers={'content-type': 'binary/octet-stream'})
self.assertEqual(res.status_code, 200)
res = self.app.get('/test_file/%s' % key)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.body, data)
res = self.app.delete('/test_file/%s' % key)
self.assertEqual(res.status_code, 200)
def test_keys(self):
keys = {xid() for _ in range(0, 120)}
for key in keys:
res = self.app.put('/test_file/%s' % key, params=os.urandom(10))
res = self.app.get('/test_file')
result_keys = set()
for line in res.body.decode('utf-8').split('\n'):
if line:
result_keys.update(json.loads(line.strip()))
self.assertEqual(keys, result_keys)
def test_nested_dirs(self):
res = self.app.get('/test_file/foo/bar', expect_errors=True)
self.assertEqual(res.status_code, 404)
def test_index(self):
res = self.app.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.body, b'Floe Microservice')
def test_additional_route(self):
res = self.app.get('/testroute')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.body, b'additional')
class RestClientFileTest(FileFloeTest):
def init_floe(self):
return floe.connect('test_rest_file')
class RestClientMysqlTest(FileFloeTest):
def setUp(self):
table = '%s_%s' % ('rest_mysql', table_prefix_variable)
os.environ['FLOE_URL_TEST_REST_MYSQL'] = 'http://test-floe/%s' % table
environ_key = 'FLOE_URL_%s' % table.upper()
url = "mysql://%s@127.0.0.1:3306/test?table=%s" % (
mysql_auth, table)
self.table = table
os.environ[environ_key] = url
super(RestClientMysqlTest, self).setUp()
def tearDown(self):
store = self.floe
drop_table(store.pool, self.table)
def init_floe(self):
return floe.connect(self.table)
@MYSQL_TEST
def test_main(self):
super(RestClientMysqlTest, self).test_main()
class RestClientMisconfigurationTest(unittest.TestCase):
def init_floe(self):
return floe.connect('test_rest_bogus')
def setUp(self):
self.floe = self.init_floe()
def test_main(self):
store = self.floe
foo = xid()
self.assertRaises(floe.FloeConfigurationException,
lambda: store.get(foo))
self.assertRaises(floe.FloeConfigurationException,
lambda: store.set(foo, '1'))
self.assertRaises(floe.FloeConfigurationException,
lambda: store.delete(foo))
self.assertRaises(floe.FloeConfigurationException,
lambda: [k for k in store.ids()])
class RestClientBrokenTest(unittest.TestCase):
def init_floe(self):
return floe.connect('test_rest_broken')
def setUp(self):
self.floe = self.init_floe()
def test_main(self):
store = self.floe
foo = xid()
self.assertRaises(floe.FloeReadException,
lambda: store.get(foo))
self.assertRaises(floe.FloeWriteException,
lambda: store.set(foo, '1'))
self.assertRaises(floe.FloeDeleteException,
lambda: store.delete(foo))
self.assertRaises(floe.FloeReadException,
lambda: [k for k in store.ids()])
if __name__ == "__main__":
unittest.main(verbosity=2)
|
happybits/floe
|
test.py
|
Python
|
mit
| 11,685
|
KARMA_COST_NEW_TOPIC = 0
KARMA_COST_NEW_LINK = 0
SITE = 'reddit.com'
MAX_CHANGE_PER_VOTE = 10
DEFAULT_PROFILE_KARMA = 20
CREATORS_KARMA_PER_VOTE = 1
DAMP_FACTOR = 1.1
DAMPEN_POINTS_AFTER = 100
TOP_TOPICS_ON_MAINPAGE = 3
NEW_TOPICS_ON_MAINPAGE = 3
TAGS_ON_MAINPAGE = 3
DATE_FORMAT = '%Y-%m-%d'
CALCULATE_RELATED_AFTER = [10, 20, 50]
MAX_RELATED_LINKS = 10
MIN_VOTES_IN_RELATED = 5
LINKS_PER_PAGE = 15
UNALLOWED_TOPIC_NAMES = [
    'my', 'new', 'about', 'aboutus', 'help', 'up', 'down', 'user', 'admin',
    'foo', 'logout', 'register', 'site_media', 'dummy', 'subscribe',
    'unsubscribe', 'search', 'buttons', 'recommended', 'createtopics',
    'topics', 'tag', 'feeds', 'save', 'upcomment', 'downcomment']
#For Stats Page
TOP_TOPICS = 10
TOP_USERS = 10
TOP_LINKS = 10
#Recommended for users
#Defaults for cron jobs
sample_corpus_location = 'c:/corpus.db'
log_file = 'c:/log.log'
calculate_recommended_timediff = 60 * 60 #1 hour
min_links_submitted = 5
min_links_liked = 5
max_links_in_corpus = 100000
|
agiliq/django-socialnews
|
socialnews/news/defaults.py
|
Python
|
bsd-3-clause
| 1,029
|
from jinja2 import Template
from autonetkit.design.utils import filters
from autonetkit.design.utils.filters import find_node_by_label
from autonetkit.design.utils.general import group_by
from autonetkit.design.utils.graph_utils import topology_to_nx_graph, wrap_node_ids
from autonetkit.network_model.network_model import NetworkModel
from autonetkit.network_model.types import DeviceType, PortType
from autonetkit.webserver.publish import publish_model_to_webserver
network_model = NetworkModel()
t_phy = network_model.create_topology("physical")
r1 = t_phy.create_node(DeviceType.ROUTER, "r1")
r1.set("x", 0)
r1.set("y", 0)
r1.set("asn", 1)
r2 = t_phy.create_node(DeviceType.ROUTER, "r2")
r3 = t_phy.create_node(DeviceType.ROUTER, "r3")
r4 = t_phy.create_node(DeviceType.ROUTER, "r4")
r5 = t_phy.create_node(DeviceType.ROUTER, "r5")
h1 = t_phy.create_node(DeviceType.HOST, "h1")
h2 = t_phy.create_node(DeviceType.HOST, "h2")
properties = {
"r2": (250, 0, 1),
"r3": (0, 250, 1),
"r4": (250, 250, 1),
"r5": (500, 125, 2),
"h1": (125, 125, 1),
"h2": (500, 250, 2),
}
for node_id, (x, y, asn) in properties.items():
node = find_node_by_label(t_phy, node_id)
node.set("x", x)
node.set("y", y)
node.set("asn", asn)
# create ports
r1p1 = r1.create_port(PortType.PHYSICAL)
h1p1 = h1.create_port(PortType.PHYSICAL)
# and link them
t_phy.create_link(r1p1, h1p1)
# or create directly
t_phy.create_link(r1.create_port(PortType.PHYSICAL), r2.create_port(PortType.PHYSICAL))
# or in a loop
pairs = [(r1, r2), (r1, r3), (r2, r4),
(r3, r4), (r2, r5), (r4, r5), (r5, h2)]
for n1, n2 in pairs:
t_phy.create_link(n1.create_port(PortType.PHYSICAL), n2.create_port(PortType.PHYSICAL))
# create loopbacks
routers = filters.routers(t_phy)
for node in t_phy.nodes():
lo0 = node.create_port(PortType.LOGICAL)
node.set("lo0_id", lo0.id)
# assign port labels
for node in t_phy.nodes():
physical_ports = filters.physical_ports(node)
for index, port in enumerate(physical_ports):
port.set("label", f"eth{index}")
t_ip = network_model.create_topology("ip")
t_ip.add_nodes_from(t_phy.nodes())
t_ip.add_links_from(t_phy.links())
grouped = group_by(t_ip.nodes(), "asn")
for asn, nodes in grouped.items():
for index, node in enumerate(nodes):
lo0 = node.loopback_zero()
loopback_ip = f"172.16.{asn}.{index}"
lo0.set("ip", loopback_ip)
links = [l for l in t_ip.links()
if l.n1.get("asn") == l.n2.get("asn") == asn]
for index, link in enumerate(links):
prefix = f"10.{asn}.{index}"
network = prefix + ".0"
link.p1.set("ip", prefix + ".1")
link.p1.set("network", network)
link.p2.set("ip", prefix + ".2")
link.p2.set("network", network)
# inter-as links
links = [l for l in t_ip.links()
if l.n1.get("asn") != l.n2.get("asn")]
for index, link in enumerate(links):
prefix = f"10.0.{index}"
network = prefix + ".0"
link.p1.set("ip", prefix + ".1")
link.p1.set("network", network)
link.p2.set("ip", prefix + ".2")
t_ospf = network_model.create_topology("ospf")
t_ospf.add_nodes_from(routers)
igp_links = [l for l in t_phy.links()
             if l.n1.get("asn") == l.n2.get("asn")]
t_ospf.add_links_from(igp_links)
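# iBGP topology: full mesh between loopbacks of routers in the same AS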
t_ibgp = network_model.create_topology("ibgp")
t_ibgp.add_nodes_from(routers)
ibgp_pairs = [(n1, n2) for n1 in t_ibgp.nodes()
for n2 in t_ibgp.nodes()
if n1 != n2 and n1.get("asn") == n2.get("asn")]
for n1, n2 in ibgp_pairs:
p1 = n1.loopback_zero()
p2 = n2.loopback_zero()
t_ibgp.create_link(p1, p2)
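# eBGP topology: sessions over the physical links that cross AS boundaries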
t_ebgp = network_model.create_topology("ebgp")
t_ebgp.add_nodes_from(routers)
ebgp_links = [l for l in t_phy.links()
if l.n1.get("asn") != l.n2.get("asn")]
t_ebgp.add_links_from(ebgp_links)
# analysis
import networkx as nx
graph = topology_to_nx_graph(t_phy)
path = nx.shortest_path(graph, h1.id, h2.id)
path = wrap_node_ids(t_phy, path)
p1 = t_phy.create_node_path(path)
# Compile device models
compiled = {}
for node in filters.routers(t_phy):
data = {
"hostname": node.label,
"interfaces": [],
"asn": node.get("asn")
}
for port in filters.physical_ports(node):
ip_port = t_ip.get_port_by_id(port.id)
data["interfaces"].append({
"id": port.label,
"ip": ip_port.get("ip")
})
ospf_node = t_ospf.get_node_by_id(node.id)
ospf_enabled = ospf_node.degree() > 0
data["ospf"] = {"networks": [],
"enabled":ospf_enabled}
for port in filters.physical_ports(ospf_node):
if not port.connected:
continue
ip_port = t_ip.get_port_by_id(port.id)
network = ip_port.get("network")
data["ospf"]["networks"].append(network)
ebgp_node = t_ebgp.get_node_by_id(node.id)
data["ebgp"] = {"neighbors": []}
for peer in ebgp_node.peer_nodes():
ip_peer = t_ip.get_node_by_id(peer.id)
peer_ip = ip_peer.loopback_zero().get("ip")
data["ebgp"]["neighbors"].append({
"ip": peer_ip,
"asn": peer.get("asn")
})
ibgp_node = t_ibgp.get_node_by_id(node.id)
bgp_enabled = ebgp_node.degree() > 0 or ibgp_node.degree() > 0
data["bgp_enabled"] = bgp_enabled
data["ibgp"] = {"neighbors": []}
for peer in ibgp_node.peer_nodes():
ip_peer = t_ip.get_node_by_id(peer.id)
peer_ip = ip_peer.loopback_zero().get("ip")
data["ibgp"]["neighbors"].append({
"ip": peer_ip,
"asn": peer.get("asn")
})
compiled[node] = data
for node in filters.hosts(t_phy):
data = {
"hostname": node.label,
"interfaces": []
}
for port in filters.physical_ports(node):
ip_port = t_ip.get_port_by_id(port.id)
data["interfaces"].append({
"id": port.label,
"ip": ip_port.get("ip")
})
compiled[node] = data
# and render using template
rtr_template_str = """
! router
hostname {{ data.hostname }}
{% for interface in data.interfaces %}
{{interface.id}} {{ interface.ip}} up
{% endfor %}
{% if data.ospf.enabled %}
!
router ospf
{% for network in data.ospf.networks %}
network {{network}}
{% endfor %}
!
{% endif %}
{% if data.bgp_enabled %}
router bgp {{ data.asn }}
{% for peer in data.ebgp.neighbors %}
neighbor {{peer.ip}} {{peer.asn}}
{% endfor %}
{% for peer in data.ibgp.neighbors %}
neighbor {{peer.ip}} {{peer.asn}}
{% endfor %}
{% endif %}
!
"""
host_template_str = """
! host
hostname {{ data.hostname }}
{% for interface in data.interfaces %}
{{interface.id}} {{ interface.ip}} up
{% endfor %}
"""
templates = {
DeviceType.ROUTER: Template(rtr_template_str, trim_blocks=True),
DeviceType.HOST: Template(host_template_str, trim_blocks=True)
}
for node, data in compiled.items():
template = templates[node.type]
rendered = template.render(data=data)
print(rendered)
publish_model_to_webserver(network_model)
|
sk2/autonetkit
|
autonetkit/tutorial/tutorial.py
|
Python
|
bsd-3-clause
| 7,027
|
# Copyright (c) 2012-2013 Paul Tagliamonte <paultag@debian.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import os
import re
from debian.debian_support import version_compare
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from debile.master.utils import emit
from debile.master.changes import Changes, ChangesFileException
from debile.master.reprepro import Repo, RepoSourceAlreadyRegistered, RepoPackageNotFound
from debile.master.orm import (Person, Builder, Suite, Component, Arch, Group,
GroupSuite, Source, Deb, Job,
create_source, create_jobs)
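# Entry point for an incoming .changes upload: validate the file and its
# signature against the configured keyring, then hand off to the source or
# binary acceptance handler below.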
def process_changes(default_group, config, session, path):
try:
changes = Changes(path)
except Exception as e:
print('SKIP: Error loading changes file {tag} - ({exception}: {args})'.format(
tag=path,
exception=type(e),
args=e.args))
return
try:
changes.validate()
except Exception as e:
print('SKIP: Invalid changes file {tag} ({exception}: {args})'.format(
tag=path,
exception=type(e),
args=e.args))
return
try:
fingerprint = changes.validate_signature(config['keyrings']['pgp'])
except ChangesFileException as e:
return reject_changes(session, changes, "invalid-signature: " + e.message)
group = changes.get('X-Debile-Group', default_group)
try:
group = session.query(Group).filter_by(name=group).one()
except MultipleResultsFound:
return reject_changes(session, changes, "internal-error")
except NoResultFound:
return reject_changes(session, changes, "invalid-group")
#### Sourceful Uploads
if changes.is_source_only_upload():
try:
user = session.query(Person).filter_by(pgp=fingerprint).one()
except NoResultFound:
return reject_changes(session, changes, "invalid-user")
return accept_source_changes(default_group, config, session, changes, user)
#### Binary Uploads
if changes.is_binary_only_upload():
try:
builder = session.query(Builder).filter_by(pgp=fingerprint).one()
except NoResultFound:
return reject_changes(session, changes, "invalid-builder")
return accept_binary_changes(default_group, config, session, changes, builder)
return reject_changes(session, changes, "mixed-upload")
def reject_changes(session, changes, tag):
session.rollback()
print "REJECT: {source} because {tag}".format(
tag=tag, source=changes.get_package_name())
emit('reject', 'source', {
"tag": tag,
"source": changes.get_package_name(),
})
for fp in [changes.get_changes_file()] + changes.get_files():
os.unlink(fp)
# Note this in the log.
def accept_source_changes(default_group, config, session, changes, user):
group = changes.get('X-Debile-Group', default_group)
suite = changes['Distribution']
try:
group_suite = session.query(GroupSuite).join(GroupSuite.group).join(GroupSuite.suite).filter(
Group.name == group,
Suite.name == suite,
).one()
except MultipleResultsFound:
return reject_changes(session, changes, "internal-error")
except NoResultFound:
return reject_changes(session, changes, "invalid-suite-for-group")
dsc = changes.get_dsc_obj()
if dsc['Source'] != changes['Source']:
return reject_changes(session, changes, "dsc-does-not-march-changes")
if dsc['Version'] != changes['Version']:
return reject_changes(session, changes, "dsc-does-not-march-changes")
try:
source = session.query(Source).filter(
Source.name == dsc['Source'],
Source.version == dsc['Version'],
GroupSuite.group == group_suite.group,
).one()
return reject_changes(session, changes, "source-already-in-group")
except MultipleResultsFound:
return reject_changes(session, changes, "internal-error")
except NoResultFound:
pass
oldsources = session.query(Source).filter(
Source.group_suite == group_suite,
Source.name == dsc['Source'],
)
for oldsource in oldsources:
if version_compare(oldsource.version, dsc['Version']) > 0:
return reject_changes(session, changes, "newer-source-already-in-suite")
# Drop any old jobs that are still pending.
for oldsource in oldsources:
for job in oldsource.jobs:
if (not any(job.results) and not any(job.built_binaries)):
session.delete(job)
elif job.failed is None:
job.failed = True
if not any(oldsource.jobs):
session.delete(oldsource)
component = session.query(Component).filter_by(name="main").one()
if 'Build-Architecture-Indep' in dsc:
valid_affinities = dsc['Build-Architecture-Indep']
elif 'X-Build-Architecture-Indep' in dsc:
valid_affinities = dsc['X-Build-Architecture-Indep']
elif 'X-Arch-Indep-Build-Arch' in dsc:
valid_affinities = dsc['X-Arch-Indep-Build-Arch']
else:
valid_affinities = "any"
with session.no_autoflush:
source = create_source(dsc, group_suite, component, user,
config["affinity_preference"], valid_affinities)
create_jobs(source)
session.add(source)
# We have a changes in order. Let's roll.
repo = Repo(group_suite.group.repo_path)
repo.add_changes(changes)
try:
(source.directory, source.dsc_filename) = repo.find_dsc(source)
except RepoPackageNotFound:
return reject_changes(session, changes, "reprepo-package-not-found")
emit('accept', 'source', source.debilize())
# OK. It's safely in the database and repo. Let's cleanup.
for fp in [changes.get_changes_file()] + changes.get_files():
os.unlink(fp)
def accept_binary_changes(default_group, config, session, changes, builder):
# OK. We'll relate this back to a build job.
job = changes.get('X-Debile-Job', None)
if job is None:
return reject_changes(session, changes, "no-job")
job = session.query(Job).get(job)
source = job.source
if changes.get('Source') != source.name:
return reject_changes(session, changes, "binary-source-name-mismatch")
if changes.get("Version") != source.version:
return reject_changes(
session, changes, "binary-source-version-mismatch")
if changes.get('X-Debile-Group', default_group) != source.group.name:
return reject_changes(session, changes, "binary-source-group-mismatch")
if changes.get('Distribution') != source.suite.name:
return reject_changes(session, changes, "binary-source-suite-mismatch")
if builder != job.builder:
return reject_changes(session, changes, "wrong-builder")
anames = changes.get("Architecture").split(None)
arches = session.query(Arch).filter(Arch.name.in_(anames)).all()
binaries = {}
for arch in arches:
if arch.name not in [job.arch.name, "all"]:
return reject_changes(session, changes, "wrong-architecture")
binaries[arch.name] = job.new_binary(arch)
if not binaries:
return reject_changes(session, changes, "no-architecture")
session.add_all(binaries.values())
PATH = re.compile("^/pool/.*/")
ARCH = re.compile(".+_(?P<arch>[^_]+)\.u?deb$")
for entry in changes.get('Files'):
directory = source.directory
if '/' in entry['section']:
component, section = entry['section'].split('/', 1)
directory = PATH.sub("/pool/%s/" % component, directory)
arch = ARCH.match(entry['name']).groupdict().get('arch')
if arch not in binaries:
return reject_changes(session, changes, "bad-architecture-of-file")
deb = Deb(binary=binaries[arch], directory=directory, filename=entry['name'])
session.add(deb)
## OK. Let's make sure we can add this.
try:
repo = Repo(job.group.repo_path)
repo.add_changes(changes)
except RepoSourceAlreadyRegistered:
return reject_changes(session, changes, 'stupid-source-thing')
for binary in binaries.values():
emit('accept', 'binary', binary.debilize())
# OK. It's safely in the database and repo. Let's cleanup.
for fp in [changes.get_changes_file()] + changes.get_files():
os.unlink(fp)
|
Debian/debile
|
debile/master/incoming_changes.py
|
Python
|
mit
| 9,566
|
from project import app, db
from flask_testing import TestCase
from flask import url_for
from project.config import TestConfig
from project.models import User
import json
class UserTestSetup(TestCase):
def create_app(self):
app.config.from_object(TestConfig)
return app
def setUp(self):
self.test_username = 'test'
self.test_password = 'test'
self.test_email = 'test@test.com'
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def create_user(self):
user = User(
username=self.test_username,
password=self.test_password,
email=self.test_email
)
db.session.add(user)
db.session.commit()
def login_user(self):
self.create_user()
resp = self.client.post(url_for('users.login'),
data=json.dumps({'email': self.test_email, 'password': self.test_password}),
content_type='application/json')
return resp.json['token']
class TestUsers(UserTestSetup):
"""Functions to check user routes"""
def test_user_can_login(self):
"""Check if a registered user can log in"""
self.create_user()
resp = self.client.post(url_for('users.login'),
data=json.dumps({'email': self.test_email, 'password': self.test_password}),
content_type='application/json')
self.assertEquals(resp.json['result'], True)
self.assertEquals(resp.json['username'], self.test_username)
def test_unregistered_user_cannot_login(self):
"""User must be registered to log in"""
resp = self.client.post(url_for('users.login'),
data=json.dumps({'email': self.test_email, 'password': self.test_password}),
content_type='application/json')
self.assertEquals(resp.json['result'], False)
def test_can_register_user(self):
"""Users can be registered"""
resp = self.client.post(url_for('users.register'),
data=json.dumps({
'email': self.test_email,
'password': self.test_password,
'username': self.test_username}
),
content_type='application/json')
self.assert200(resp)
self.assertEquals(resp.json['result'], 'success')
def test_cannot_register_multiple_user(self):
"""Multiple registrations are not allowed"""
self.create_user()
resp = self.client.post(url_for('users.register'),
data=json.dumps({
'email': self.test_email,
'password': self.test_password,
'username': self.test_username}
),
content_type='application/json')
self.assert200(resp)
self.assertEquals(resp.json['result'], 'this user is already registered')
def test_user_can_logout(self):
"""User that is logged in can log out"""
token = self.login_user()
resp = self.client.get(url_for('users.logout'),
headers={'Authorization': 'Bearer ' + token}
)
self.assert200(resp)
self.assertEquals(resp.json['result'], 'success')
def test_get_user_preference(self):
"""User can retrieve task display preference"""
token = self.login_user()
resp = self.client.get(url_for('users.get_user_preferences'),
headers={'Authorization': 'Bearer ' + token}
)
self.assert200(resp)
self.assertEquals(resp.json['show_completed_task'], True)
def test_toggle_user_preference(self):
"""User can toggle task display preference"""
token = self.login_user()
# Set preference to true
resp = self.client.post(url_for('users.show_task_toggle'),
data=json.dumps({'option': True}),
content_type='application/json',
headers={'Authorization': 'Bearer ' + token}
)
self.assert200(resp)
resp = self.client.get(url_for('users.get_user_preferences'),
headers={'Authorization': 'Bearer ' + token})
self.assertEquals(resp.json['show_completed_task'], True)
# Set preference to false
resp = self.client.post(url_for('users.show_task_toggle'),
data=json.dumps({'option': False}),
content_type='application/json',
headers={'Authorization': 'Bearer ' + token}
)
self.assert200(resp)
resp = self.client.get(url_for('users.get_user_preferences'),
headers={'Authorization': 'Bearer ' + token})
self.assertEquals(resp.json['show_completed_task'], False)
class TestAuth(UserTestSetup):
"""Testing of authentication helper functions"""
# Need to figure out how to fake the expired token
def test_auth_routes_require_valid_token(self):
"""User can retrieve task display preference"""
token = "asdf"
resp = self.client.get(url_for('users.get_user_preferences'),
headers={'Authorization': 'Bearer ' + token}
)
self.assert401(resp)
self.assertEquals(resp.json['message'], 'Token is invalid')
def test_auth_routes_require_token(self):
"""User can retrieve task display preference"""
resp = self.client.get(url_for('users.get_user_preferences'))
self.assert401(resp)
self.assertEquals(resp.json['message'], 'Missing authorization header')
|
lingxz/todoapp
|
project/users/user_test.py
|
Python
|
mit
| 6,148
|
# Copyright 2016 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from pypowervm import exceptions as pvm_ex
from pypowervm.tasks import vopt
from pypowervm.tests import test_fixtures as pvm_fx
class TestVOpt(testtools.TestCase):
"""Tests the vopt file."""
def setUp(self):
super(TestVOpt, self).setUp()
self.apt = self.useFixture(pvm_fx.AdapterFx()).adpt
# Wipe out the static variables, so that the re-validate is called
vopt._cur_vios_uuid = None
vopt._cur_vg_uuid = None
@mock.patch('pypowervm.wrappers.storage.VG.get')
@mock.patch('pypowervm.tasks.partition.get_active_vioses')
def test_validate_vopt_vg1(self, mock_vios_get, mock_vg_get):
"""One VIOS, rootvg found; locals are set."""
# Init objects to test with
mock_vg = mock.Mock()
mock_vg.configure_mock(name='rootvg',
uuid='1e46bbfd-73b6-3c2a-aeab-a1d3f065e92f',
vmedia_repos=['repo'])
mock_vg_get.return_value = [mock_vg]
mock_vios = mock.Mock()
mock_vios.configure_mock(name='the_vios', uuid='vios_uuid',
rmc_state='active')
mock_vios_get.return_value = [mock_vios]
# Run
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
self.assertEqual('vios_uuid', vio_ret_uuid)
self.assertEqual('1e46bbfd-73b6-3c2a-aeab-a1d3f065e92f', vg_ret_uuid)
# Validate
self.assertEqual('1e46bbfd-73b6-3c2a-aeab-a1d3f065e92f',
vopt._cur_vg_uuid)
self.assertEqual('vios_uuid', vopt._cur_vios_uuid)
@mock.patch('pypowervm.tasks.partition.get_active_vioses')
@mock.patch('pypowervm.wrappers.storage.VG.get')
@mock.patch('pypowervm.wrappers.storage.VMediaRepos.bld')
def test_validate_vopt_vg2(self, mock_vmr_bld, mock_vg_get, mock_vios_get):
"""Dual VIOS, multiple VGs, repos on non-rootvg."""
vwrap1 = mock.Mock()
vwrap1.configure_mock(name='vio1', rmc_state='active', uuid='vio_id1',
is_mgmt_partition=False)
vwrap2 = mock.Mock()
vwrap2.configure_mock(name='vio2', rmc_state='active', uuid='vio_id2',
is_mgmt_partition=False)
mock_vios_get.return_value = [vwrap1, vwrap2]
vg1 = mock.Mock()
vg1.configure_mock(name='rootvg', vmedia_repos=[], uuid='vg1')
vg2 = mock.Mock()
vg2.configure_mock(name='other1vg', vmedia_repos=[], uuid='vg2')
vg3 = mock.Mock()
vg3.configure_mock(name='rootvg', vmedia_repos=[], uuid='vg3')
vg4 = mock.Mock()
vg4.configure_mock(name='other2vg', vmedia_repos=[1], uuid='vg4')
# 1: Find the media repos on non-rootvg on the second VIOS
mock_vg_get.side_effect = [[vg1, vg2], [vg3, vg4]]
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
self.assertEqual('vio_id2', vio_ret_uuid)
self.assertEqual('vg4', vg_ret_uuid)
mock_vios_get.reset_mock()
mock_vg_get.reset_mock()
# 2: At this point, the statics are set. If we validate again, and the
# VG.get returns the right one, we should bail out early.
mock_vg_get.side_effect = None
mock_vg_get.return_value = vg4
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
self.assertEqual('vio_id2', vio_ret_uuid)
self.assertEqual('vg4', vg_ret_uuid)
# Statics unchanged
self.assertEqual('vg4', vopt._cur_vg_uuid)
self.assertEqual('vio_id2', vopt._cur_vios_uuid)
# We didn't have to query the VIOS
mock_vios_get.assert_not_called()
# We only did VG.get once
self.assertEqual(1, mock_vg_get.call_count)
mock_vg_get.reset_mock()
# 3: Same again, but this time the repos is somewhere else. We should
# find it.
vg4.vmedia_repos = []
vg2.vmedia_repos = [1]
# The first VG.get is looking for the already-set repos. The second
# will be the feed from the first VIOS. There should be no third call,
# since we should find the repos on VIOS 2.
mock_vg_get.side_effect = [vg4, [vg1, vg2]]
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
self.assertEqual('vio_id1', vio_ret_uuid)
self.assertEqual('vg2', vg_ret_uuid)
# And the static values
self.assertEqual('vg2', vopt._cur_vg_uuid)
self.assertEqual('vio_id1', vopt._cur_vios_uuid)
mock_vg_get.reset_mock()
mock_vios_get.reset_mock()
# 4: No repository anywhere - need to create one. The default VG name
# (rootvg) exists in multiple places. Ensure we create in the first
# one, for efficiency.
vg2.vmedia_repos = []
mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
vg1.update.return_value = vg1
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
self.assertEqual('vio_id1', vio_ret_uuid)
self.assertEqual('vg1', vg_ret_uuid)
self.assertEqual('vg1', vopt._cur_vg_uuid)
self.assertEqual('vio_id1', vopt._cur_vios_uuid)
self.assertEqual([mock_vmr_bld.return_value], vg1.vmedia_repos)
mock_vg_get.reset_mock()
mock_vios_get.reset_mock()
vg1 = mock.MagicMock()
# 5: No repos, need to create one. But not on the mgmt partition.
vwrap1.configure_mock(name='vio1', rmc_state='active', uuid='vio_id1',
is_mgmt_partition=True)
vg3.vmedia_repos = []
mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
vg3.update.return_value = vg3
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
self.assertEqual('vio_id2', vio_ret_uuid)
self.assertEqual('vg3', vg_ret_uuid)
self.assertEqual('vg3', vopt._cur_vg_uuid)
self.assertEqual('vio_id2', vopt._cur_vios_uuid)
self.assertEqual([mock_vmr_bld.return_value], vg3.vmedia_repos)
mock_vg_get.reset_mock()
mock_vios_get.reset_mock()
vg3 = mock.MagicMock()
# 6: No repos, and a configured VG name that doesn't exist
vwrap1.configure_mock(name='vio1', rmc_state='active', uuid='vio_id1',
is_mgmt_partition=False)
vg4.vmedia_repos = []
mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
self.assertRaises(pvm_ex.NoMediaRepoVolumeGroupFound,
vopt.validate_vopt_repo_exists, self.apt,
vopt_media_volume_group='mythicalvg')
# 7: No repos - need to create. Make sure conf setting is honored.
vg1.vmedia_repos = []
mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
vg4.update.return_value = vg4
vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(
self.apt, vopt_media_volume_group='other2vg')
self.assertEqual('vio_id2', vio_ret_uuid)
self.assertEqual('vg4', vg_ret_uuid)
self.assertEqual('vg4', vopt._cur_vg_uuid)
self.assertEqual('vio_id2', vopt._cur_vios_uuid)
self.assertEqual([mock_vmr_bld.return_value], vg4.vmedia_repos)
vg1.update.assert_not_called()
|
powervm/pypowervm
|
pypowervm/tests/tasks/test_vopt.py
|
Python
|
apache-2.0
| 7,973
|
# -*- coding: utf-8 -*-
"""
Using text
"""
import numpy as np
import matplotlib.pyplot as plt
# Random number seed
np.random.seed(19680801)
mu, sigma = 100, 15
x = mu + sigma * np.random.randn(10000)
plt.figure(1)
plt.subplot(211)
n, bins, patches = plt.hist(x, 50, normed=1, facecolor='g', alpha=0.75)
plt.xlabel('x')
plt.ylabel('Probs')
plt.title('Histogram')
# Text
plt.text(60, 0.025, r'$\mu=100, \ \sigma=15$')
plt.axis([40, 160, 0, 0.03])
plt.grid(True)  # show the grid
plt.subplot(212)
t = np.arange(0.0, 5.0, 0.01)
s = np.cos(2*np.pi*t)
line, = plt.plot(t,s, lw=2)
# Annotation text
plt.annotate('local max', xy=(2,1), xytext=(3,1.5),
arrowprops=dict(facecolor='red', shrink=0.05))
plt.ylim(-2, 2)  # y-axis value range
plt.show()
plt.close()
|
zhoujiagen/giant-data-analysis
|
data-computing-giants/python-computing/src/gda/matplotlib/text.py
|
Python
|
mit
| 761
|
#!/usr/bin/env python
import couchdb
import json
import argparse
import logbook
import sys
import os
import yaml
import base64
from Crypto.Cipher import AES
from couchdb import PreconditionFailed
#Set up logging
l = logbook.Logger('CouchDB-Replicator')
class AESDecrypt():
def __init__(self, key):
self.key=key
def decrypt(self, enc):
enc = base64.b64decode(enc)
iv = enc[:AES.block_size]
cipher = AES.new(self.key, AES.MODE_CBC, iv)
return self._unpad(cipher.decrypt(enc[AES.block_size:]))
def _unpad(self, s):
return s[:-ord(s[len(s)-1:])]
class Config(object):
"""Singleton class that holds the confiuration for the CouchDB replicator.
"""
_instance = None
def __new__(self, *args, **kwargs):
if not self._instance:
self._instance = super(Config, self).__new__(self, *args, **kwargs)
return self._instance
def __init__(self, config_file=None):
config= None
try:
if not config_file:
config_file = os.path.join(os.environ['HOME'], '.couchrc')
with open(config_file, 'r') as f:
                config = yaml.safe_load(f)
self.login=config.get('replication').get('login')
decrypt=AESDecrypt("{}_keys".format(self.login))
self.password=decrypt.decrypt(config.get('replication').get('password'))
self.source = "http://{}:{}@{}".format(self.login, self.password, config.get('replication').get('SOURCE'))
self.destination= "http://{}:{}@{}".format(self.login, self.password, config.get('replication').get('DESTINATION'))
except:
l.error("Please make sure you've created your own configuration file \
(i.e: ~/.couchrc), and that it contains a source and a destination servers")
sys.exit(-1)
self.exceptions=config.get('exceptions', [])
self.roles = {"members": config.get("roles",{}).get("members", []),
"admins": config.get("roles",{}).get("members", [])
}
def _get_databases_info(source, destination, skip=None):
"""Returns a tuple containing a python representation of source and destination
couchDB instances. It also returns a list of the databases in both instances
(excluding the _replicator database).
"""
if not skip:
skip=[]
s_couch = couchdb.Server(source)
d_couch = couchdb.Server(destination)
_, _, s_dbs = s_couch.resource.get_json('_all_dbs')
_, _, d_dbs = d_couch.resource.get_json('_all_dbs')
l.info("Databases in the source CouchDB instance: {}".format(', '.join(s_dbs)))
l.info("Databases in the destination CouchDB instance: {}".format(', '.join(d_dbs)))
#We don't want to replicate the replicator DB, and want to skip the databases in skip list
skip.append('_replicator')
for db in skip:
try:
s_dbs.remove(db)
except ValueError:
pass
try:
d_dbs.remove(db)
except ValueError:
pass
return s_couch, d_couch, s_dbs, d_dbs
def _setup_continuous(source, destination, copy_security):
"""Set up a continuous replication of all databases in source to destination.
"""
s_couch, d_couch, s_dbs, d_dbs = _get_databases_info(source, destination)
#For each DB in the source CouchDB instance, create a replication document
#and get its _security object to put it in the destination database
for db in s_dbs:
_, _, security = s_couch[db].resource.get_json('_security')
doc = {
'name': '{}_rep'.format(db),
'source': '{}/{}/'.format(source, db),
'target': '{}/{}/'.format(destination, db),
'continuous': True
}
s_rep = s_couch['_replicator']
#Create the DB in the destination if not present
try:
d_couch.create(db)
l.info("Created {} database in destination".format(db))
except PreconditionFailed:
l.info("Database {} already existing in the destination, not creating it".format(db))
#Put the replicator document in source and set security object in destination
l.info("Putting replicator document in _replicator database of source")
s_rep.create(doc)
if copy_security:
l.info("Copying security object to {} database in destination".format(db))
d_couch[db].resource.put('_security', security)
l.info("DONE!")
def _clone(source, destination, copy_security, with_exceptions=False, skip=[]):
"""Creates a complete clone of source in destination.
WARNING: This action will remove ALL content from destination.
"""
l.info("Performing a complete clone from source to destination")
s_couch, d_couch, s_dbs, d_dbs = _get_databases_info(source, destination, skip)
config = Config()
#Delete all databases in destination
l.info("Removing all databases from destination")
for db in d_dbs:
d_couch.delete(db)
    #Create in the destination all databases available in the source. Copy data and
#permissions
l.info("Re-creating databases from source into destination")
for db in s_dbs:
#The users database is never deleted
if not db == '_users':
d_couch.create(db)
_, _, security = s_couch[db].resource.get_json('_security')
source_db = '/'.join([source, db])
dest_db = '/'.join([destination, db])
l.info("Copying data from {} in source to destination".format(db))
d_couch.replicate(source_db, dest_db)
if copy_security:
l.info("Copying security object to {} database in destination".format(db))
d_couch[db].resource.put('_security', security)
if with_exceptions:
exceptions = config.exceptions
if not exceptions:
l.warn("--with-exceptions option was present, but didn't find " \
"any EXCEPTIONS list in your .couchrc file.")
else:
l.info("--with-exceptions option was present, removing following documents: {}".format(", ".join(exceptions)))
for exception in exceptions:
try:
d_couch[db].delete(d_couch[db].get(exception))
except:
l.warn("Document {} not found, not deleteing".format(exception))
l.info("DONE!")
def _set_roles(server):
"""Apply the list of roles present in .couchrc to all databases in the server.
"""
security_obj = {"admins": {
"names":[],
"roles":[]
},
"members": {
"names":[],
"roles":[]
}
}
config = Config()
security_obj['admins']['roles'] = config.roles['admins']
security_obj['members']['roles'] = config.roles['members']
    s_couch, d_couch, s_dbs, d_dbs = _get_databases_info(source, server, None)
l.info("Setting roles to destination databases: {}".format(str(security_obj)))
for db in d_dbs:
d_couch[db].resource.put('_security', security_obj)
if __name__ == "__main__":
DESCRIPTION = """Set up complete one-way replication for CouchDB.
Use this script if you want to configure a stage database that will have the
exact same content of your production database.
To do so, the script creates a replication document for each database in the
source CouchDB instance that replicates such database (in continuous mode)
to the destination database.
Security object (permissions per database), are put to the destination databases.
"""
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument('action', type=str, help = "Action to perform, either \
configure continuous replication (continuous) or punctual clone (clone)")
parser.add_argument('--source', type=str, help = "Source CouchDB instance, \
with the credentials included in the URL. I.E: http://admin:passw@source_db:5984")
parser.add_argument('--destination', type=str, help = "Destination CouchDB instance, \
with the credentials included in the URL. I.E: http://admin:passw@destination_db:5984")
parser.add_argument('--no-security', action='store_const', const=True, \
help='Do not copy security objects')
parser.add_argument('--with-exceptions', action='store_const', const=True, \
help='List of files to be deleted from the DataBases after being copied. ' \
'To be specified in your .couchrc file')
parser.add_argument('--set-roles', action='store_const', const=True, \
help='List of roles to apply to each database after copied. Only if' \
'--no-security is present.')
parser.add_argument('--skip', nargs="+", type=str,
help=('List of databases to skip during the replication. '
'They will remain intact in the destination database'))
args = parser.parse_args()
source = args.source
destination = args.destination
copy_security = False if args.no_security else True
action = args.action
config = Config()
if not all([source, destination]):
source = config.source
destination = config.destination
actions = ['continuous', 'clone']
if action not in actions:
raise ValueError("Action not recognised, please choose between %s" % \
', '.join(actions))
l.info("Starting replication - source: {}, destination: {}".format( \
source.split('@')[-1], destination.split('@')[-1]))
if action == "continuous":
_setup_continuous(source, destination, copy_security)
else:
_clone(source, destination, copy_security, with_exceptions=args.with_exceptions, skip=args.skip)
if args.set_roles:
if not args.no_security:
l.warn('--set-roles option only takes effect if applied together ' \
'with --no-security. Ignoring it')
else:
_set_roles(destination)
|
vezzi/standalone_scripts
|
couchdb_replication.py
|
Python
|
mit
| 10,270
|
# -*- coding: utf-8 -*-
#
# Doqu is a lightweight schema/query framework for document databases.
# Copyright © 2009—2010 Andrey Mikhaylenko
#
# This file is part of Docu.
#
# Doqu is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Doqu is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Docu. If not, see <http://gnu.org/licenses/>.
"""
WTForms extension
=================
Offers integration with `WTForms`_.
:status: beta
:dependencies: `wtforms`_
.. _WTForms: http://wtforms.simplecodes.com/
The extension provides two new field classes: :class:`QuerySetSelectField` and
:class:`DocumentSelectField` (inspired by `wtforms.ext.django.*`). They connect
the forms with the Doqu API for queries. You can manually create forms with
these fields.
The easiest way to create a :class:`~doqu.document_base.Document`-compliant
form is using the function :func:`document_form_factory`. It returns a form
class based on the document structure::
from doqu import Document
from doqu import validators
from doqu.ext.forms import document_form_factory
class Location(Document):
structure = {'name': unicode}
class Person(Document):
structure = {'name': unicode, 'age': int, 'location': Location}
labels = {'name': 'Full name', 'age': 'Age', 'location': 'Location'}
validators = {'name': [required()]}
PersonForm = document_form_factory(Person)
The last line does the same as this code::
from wtforms import TextField, IntegerField, validators
from doqu.ext.forms import DocumentSelectField
class PersonForm(wtforms.Form):
name = TextField('Full name', [validators.Required()])
age = IntegerField('Age')
location = DocumentSelectField('Location', [], Location)
.. warning:: currently only validators :class:`~doqu.validators.Required` and
:class:`~doqu.validators.Optional` are translated to the form validators;
in the future most of them can be translated automatically.
"""
from doqu import dist
dist.check_dependencies(__name__)
import datetime
try:
import dateutil
except ImportError:
dateutil = None
else:
import wtforms.ext.dateutil.fields
import decimal
import wtforms
import wtforms.ext
from doqu import Document
from doqu.document_base import OneToManyRelation
from doqu.validators import Required, Optional, AnyOf
__all__ = (
'document_form_factory',
'QuerySetSelectField', 'MultiQuerySetSelectField',
'DocumentSelectField', 'MultiDocumentSelectField',
)
TYPE_TO_FORM_FIELD = {
int: wtforms.fields.IntegerField,
float: wtforms.fields.FloatField,
decimal.Decimal: wtforms.fields.DecimalField,
datetime.date: wtforms.fields.DateField,
datetime.datetime: wtforms.fields.DateTimeField,
bool: wtforms.fields.BooleanField,
unicode: wtforms.fields.TextAreaField,
# XXX what to do with wtforms.FileField?
# XXX what about lists?
}
if dateutil is not None:
TYPE_TO_FORM_FIELD.update({
datetime.datetime: wtforms.ext.dateutil.fields.DateTimeField,
datetime.date: wtforms.ext.dateutil.fields.DateField,
})
def document_form_factory(document_class, storage=None):
"""
Expects a :class:`~doqu.document_base.Document` instance, creates and
returns a :class:`wtforms.Form` class for this model.
The form fields are selected depending on the Python type declared by each
property.
:param document_class:
the Doqu document class for which the form should be created
:param storage:
a Docu-compatible storage; we need it to generate lists of choices
for references to other models. If not defined, references will not
appear in the form.
Caveat: the ``unicode`` type can be mapped to TextField and TextAreaField.
It is impossible to guess which one should be used unless maximum length is
defined for the property. TextAreaField is picked by default. It is a good
idea to automatically shrink it with JavaScript so that its size always
matches the contents.
"""
DocumentForm = type(document_class.__name__ + 'Form',
(wtforms.Form,), {})
# XXX should we apply validators, defaults, labels even if structure is not
# provided?
if not document_class.meta.structure:
return DocumentForm
for name, datatype in document_class.meta.structure.iteritems():
defaults = {}
field_validators = document_class.meta.validators.get(name, [])
# XXX private attr used, make it public?
doc_ref = document_class._get_related_document_class(name)
if doc_ref:
if not storage:
# we need a storage to fetch choices for the reference
continue
if isinstance(datatype, OneToManyRelation):
FieldClass = MultiDocumentSelectField
else:
FieldClass = DocumentSelectField
defaults.update(document_class=doc_ref, storage=storage)
else:
skip_field = False
for v in field_validators:
if isinstance(v, AnyOf):
FieldClass = wtforms.fields.SelectField
if 1 == len(v.choices):
# only one "choice" is defined; obviously read-only
skip_field = True
break
# TODO: labels?
defaults.update(choices=zip(v.choices, v.choices))
break
else:
FieldClass = TYPE_TO_FORM_FIELD.get(
datatype, wtforms.fields.TextField)
if skip_field:
continue
label = document_class.meta.labels.get(name, pretty_label(name))
validators = []
required = any(isinstance(x, Required) for x in field_validators)
if required:
if datatype in (bool, float, int, long, decimal.Decimal):
# bool(value) is ok, empty string is not
validators.append(wtforms.validators.NoneOf(['']))
else:
validators.append(wtforms.validators.Required())
else:
validators.append(wtforms.validators.Optional())
if issubclass(FieldClass, QuerySetSelectField):
defaults['allow_blank'] = True
form_field = FieldClass(label, validators, **defaults)
setattr(DocumentForm, name, form_field)
return DocumentForm
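# A generated form class is used like any other WTForms form, e.g. (sketch,
# names hypothetical):
#   form = PersonForm(formdata)
#   if form.validate():
#       ...  # copy form.data back onto the document and save it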
# FIXME this is already in utils, innit?
def pretty_label(string):
return unicode(string).capitalize().replace('_', ' ') + ':'
#
# The code below is a modified version of wtforms.ext.django.fields.*
#
class QuerySetSelectField(wtforms.fields.Field):
"""
Given a QuerySet either at initialization or inside a view, will display a
select drop-down field of choices. The `data` property actually will
store/keep an ORM model instance, not the ID. Submitting a choice which is
not in the queryset will result in a validation error.
Specifying `label_attr` in the constructor will use that property of the
model instance for display in the list, else the model object's `__str__`
or `__unicode__` will be used.
If `allow_blank` is set to `True`, then a blank choice will be added to the
top of the list. Selecting this choice will result in the `data` property
being `None`. The label for the blank choice can be set by specifying the
`blank_text` parameter.
"""
widget = wtforms.widgets.Select()
def __init__(self, label=u'', validators=None, queryset=None,
label_attr='', allow_blank=False, blank_text=u'', **kw):
super(QuerySetSelectField, self).__init__(label, validators, **kw)
self.label_attr = label_attr
self.allow_blank = allow_blank
self.blank_text = blank_text
self._set_data(None)
# TODO:
#if queryset is not None:
# self.queryset = queryset.all() # Make sure the queryset is fresh
self.queryset = queryset
def _get_data(self):
if self._formdata is not None:
for obj in self.queryset:
if obj.pk == self._formdata:
self._set_data(obj)
break
return self._data
def _set_data(self, data):
self._data = data
self._formdata = None
data = property(_get_data, _set_data)
def _is_choice_active(self, obj):
return obj == self.data if self.data else False
def iter_choices(self):
#if self.allow_blank: # <-- will validate on save; must display actual state
yield (u'__None', self.blank_text, self.data is None)
for obj in self.queryset:
label = self.label_attr and getattr(obj, self.label_attr) or obj
yield (obj.pk, label, self._is_choice_active(obj))
def process_formdata(self, valuelist):
if valuelist:
            if valuelist[0] == '__None': # FIXME: this is NOT safe for k/v DBs
self.data = None
else:
self._data = None
self._formdata = valuelist[0]
def pre_validate(self, form):
if not self.allow_blank or self.data is not None:
for obj in self.queryset:
if self.data == obj:
break
else:
raise wtforms.ValidationError('Not a valid choice')
class DocumentSelectField(QuerySetSelectField):
"""
Like a QuerySetSelectField, except takes a document class instead of a
queryset and lists everything in it.
"""
def __init__(self, label=u'', validators=None, document_class=None,
storage=None, **kw):
super(DocumentSelectField, self).__init__(
label, validators, queryset=document_class.objects(storage), **kw
)
class MultiQuerySetSelectField(QuerySetSelectField):
widget = wtforms.widgets.Select(multiple=True)
def _get_data(self):
if self._formdata is not None:
assert hasattr(self._formdata, '__iter__')
data = []
for obj in self.queryset:
if obj.pk in self._formdata:
data.append(obj)
self._set_data(data)
return self._data
def _set_data(self, data):
self._data = data
self._formdata = None
data = property(_get_data, _set_data)
def _is_choice_active(self, obj):
return obj in self.data if self.data else False
def process_formdata(self, valuelist):
if valuelist:
# FIXME: "__None" in NOT safe for k/v DBs
if len(valuelist) == 1 and valuelist[0] == '__None':
self.data = None
else:
self._data = None
self._formdata = [x for x in valuelist if x]
def pre_validate(self, form):
if not self.allow_blank or self.data is not None:
unmatched = dict((x.pk,True) for x in self.data)
for obj in self.queryset:
if obj.pk in unmatched:
unmatched.pop(obj.pk)
if unmatched:
raise wtforms.ValidationError('Invalid choice(s)')
class MultiDocumentSelectField(MultiQuerySetSelectField, DocumentSelectField):
#widget = wtforms.widgets.Select(multiple=True)
pass
|
neithere/doqu
|
doqu/ext/forms.py
|
Python
|
gpl-3.0
| 11,916
|
#!/usr/bin/env python2.7
import cgi
import json
import os
import re
import SimpleHTTPServer
import SocketServer
import subprocess
import sys
import threading
VERSION = 'v0.14.6'
hostname = ''
try:
command = "bash -c '[[ $(dig +short $HOSTNAME) ]] && echo $HOSTNAME || wget -q -O - icanhazip.com'"
hostname = subprocess.check_output(command, shell=True)
if ':' in hostname:
hostname = ''
except subprocess.CalledProcessError:
pass
key_file = os.getenv('KEY_FILE', None)
if os.path.isfile('/home/ec2-user/.ssh/authorized_keys'):
key_file = '/home/ec2-user/.ssh/authorized_keys'
elif os.path.isfile('/home/ubuntu/.ssh/authorized_keys'):
key_file = '/home/ubuntu/.ssh/authorized_keys'
else:
key_file = '/root/.ssh/authorized_keys'
admin_keys = []
if os.path.isfile(key_file):
try:
command = "cat {0}".format(key_file)
admin_keys = subprocess.check_output(command, shell=True).strip().split("\n")
except subprocess.CalledProcessError:
pass
def check_boot():
if 'onboot' not in sys.argv:
return
init_dir = os.getenv('INIT_DIR', '/etc/init')
systemd_dir = os.getenv('SYSTEMD_DIR', '/etc/systemd/system')
nginx_dir = os.getenv('NGINX_DIR', '/etc/nginx/conf.d')
if os.path.exists(init_dir):
with open('{0}/dokku-installer.conf'.format(init_dir), 'w') as f:
f.write("start on runlevel [2345]\n")
f.write("exec {0} selfdestruct\n".format(os.path.abspath(__file__)))
if os.path.exists(systemd_dir):
with open('{0}/dokku-installer.service'.format(systemd_dir), 'w') as f:
f.write("[Unit]\n")
f.write("Description=Dokku web-installer\n")
f.write("\n")
f.write("[Service]\n")
f.write("ExecStart={0} selfdestruct\n".format(os.path.abspath(__file__)))
f.write("\n")
f.write("[Install]\n")
f.write("WantedBy=multi-user.target\n")
f.write("WantedBy=graphical.target\n")
if os.path.exists(nginx_dir):
with open('{0}/dokku-installer.conf'.format(nginx_dir), 'w') as f:
f.write("upstream dokku-installer { server 127.0.0.1:2000; }\n")
f.write("server {\n")
f.write(" listen 80;\n")
f.write(" location / {\n")
f.write(" proxy_pass http://dokku-installer;\n")
f.write(" }\n")
f.write("}\n")
subprocess.call('rm -f /etc/nginx/sites-enabled/*', shell=True)
sys.exit(0)
class GetHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
content = PAGE.replace('{VERSION}', VERSION)
content = content.replace('{HOSTNAME}', hostname)
content = content.replace('{AUTHORIZED_KEYS_LOCATION}', key_file)
content = content.replace('{ADMIN_KEYS}', "\n".join(admin_keys))
self.send_response(200)
self.end_headers()
self.wfile.write(content)
def do_POST(self):
if self.path not in ['/setup', '/setup/']:
return
params = cgi.FieldStorage(fp=self.rfile,
headers=self.headers,
environ={
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': self.headers['Content-Type']})
vhost_enable = 'false'
dokku_root = os.getenv('DOKKU_ROOT', '/home/dokku')
if 'vhost' in params and params['vhost'].value == 'true':
vhost_enable = 'true'
with open('{0}/VHOST'.format(dokku_root), 'w') as f:
f.write(params['hostname'].value)
else:
try:
os.remove('{0}/VHOST'.format(dokku_root))
except OSError:
pass
with open('{0}/HOSTNAME'.format(dokku_root), 'w') as f:
f.write(params['hostname'].value)
for (index, key) in enumerate(params['keys'].value.splitlines(), 1):
user = 'admin'
if self.admin_user_exists() is not None:
user = 'web-admin'
if self.web_admin_user_exists() is not None:
index = int(self.web_admin_user_exists()) + 1
elif self.web_admin_user_exists() is None:
index = 1
elif self.admin_user_exists() is None:
pass
else:
index = int(self.admin_user_exists()) + 1
user = user + str(index)
command = ['sshcommand', 'acl-add', 'dokku', user]
proc = subprocess.Popen(command, stdin=subprocess.PIPE)
proc.stdin.write(key)
proc.stdin.close()
proc.wait()
set_debconf_selection('boolean', 'nginx_enable', 'true')
set_debconf_selection('boolean', 'skip_key_file', 'true')
set_debconf_selection('boolean', 'vhost_enable', vhost_enable)
set_debconf_selection('boolean', 'web_config', 'false')
set_debconf_selection('string', 'hostname', params['hostname'].value)
if 'selfdestruct' in sys.argv:
DeleteInstallerThread()
self.send_response(200)
self.end_headers()
self.wfile.write(json.dumps({'status': 'ok'}))
def web_admin_user_exists(self):
return self.user_exists('web-admin(\d+)')
def admin_user_exists(self):
return self.user_exists('admin(\d+)')
def user_exists(self, name):
command = 'dokku ssh-keys:list'
pattern = re.compile(r'NAME="' + name + '"')
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
max_num = 0
exists = False
for line in proc.stdout:
m = pattern.search(line)
if m:
# User of the form `user` or `user#` exists
exists = True
                max_num = max(max_num, int(m.group(1)))
if exists:
return max_num
else:
return None
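# Preseed dokku's debconf answers (Debian-family systems only) so that a later
# package (re)configuration reuses the values chosen in the web installer.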
def set_debconf_selection(debconf_type, key, value):
found = False
with open('/etc/os-release', 'r') as f:
for line in f:
if 'debian' in line:
found = True
if not found:
return
ps = subprocess.Popen(['echo', 'dokku dokku/{0} {1} {2}'.format(
key, debconf_type, value
)], stdout=subprocess.PIPE)
try:
subprocess.check_output(['debconf-set-selections'], stdin=ps.stdout)
except subprocess.CalledProcessError:
pass
ps.wait()
class DeleteInstallerThread(object):
def __init__(self, interval=1):
thread = threading.Thread(target=self.run, args=())
thread.daemon = True
thread.start()
def run(self):
command = "rm /etc/nginx/conf.d/dokku-installer.conf && /etc/init.d/nginx stop && /etc/init.d/nginx start"
try:
subprocess.call(command, shell=True)
except:
pass
command = "rm -f /etc/init/dokku-installer.conf /etc/systemd/system/dokku-installer.service && (stop dokku-installer || systemctl stop dokku-installer.service)"
try:
subprocess.call(command, shell=True)
except:
pass
def main():
check_boot()
port = int(os.getenv('PORT', 2000))
httpd = SocketServer.TCPServer(("", port), GetHandler)
print "Listening on 0.0.0.0:{0}, CTRL+C to stop".format(port)
httpd.serve_forever()
PAGE = """
<html>
<head>
<meta charset="utf-8" />
<title>Dokku Setup</title>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
<style>
.bd-callout {
padding: 1.25rem;
margin-top: 1.25rem;
margin-bottom: 1.25rem;
border: 1px solid #eee;
border-left-width: .25rem;
border-radius: .25rem;
}
.bd-callout p:last-child {
margin-bottom: 0;
}
.bd-callout-info {
border-left-color: #5bc0de;
}
pre {
font-size: 80%;
margin-bottom: 0;
}
h1 small {
font-size: 50%;
}
h5 {
font-size: 1rem;
}
.container {
width: 640px;
}
.result {
padding-left: 20px;
}
input.form-control, textarea.form-control {
background-color: #fafbfc;
font-size: 14px;
}
input.form-control::placeholder, textarea.form-control::placeholder {
color: #adb2b8
}
</style>
</head>
<body>
<div class="container">
<form id="form" role="form">
<h1 class="pt-3">Dokku Setup <small class="text-muted">{VERSION}</small></h1>
<div class="alert alert-warning small" role="alert">
<strong>Warning:</strong> The SSH key filled out here can grant root access to the server. Please complete the setup as soon as possible.
</div>
<div class="row">
<div class="col">
<h3>Admin Access</h3>
<div class="form-group">
<label for="key">Public SSH Keys</label><br />
<textarea class="form-control" name="keys" rows="5" id="key" placeholder="Begins with 'ssh-rsa', 'ssh-dss', 'ssh-ed25519', 'ecdsa-sha2-nistp256', 'ecdsa-sha2-nistp384', or 'ecdsa-sha2-nistp521'">{ADMIN_KEYS}</textarea>
<small class="form-text text-muted">Public keys allow users to ssh onto the server as the <code>dokku</code> user, as well as remotely execute Dokku commands. They are currently auto-populated from: <code>{AUTHORIZED_KEYS_LOCATION}</code>, and can be changed later via the <a href="http://dokku.viewdocs.io/dokku/deployment/user-management/" target="_blank"><code>dokku ssh-keys</code></a> plugin.</small>
</div>
</div>
</div>
<div class="row">
<div class="col">
<h3>Hostname Configuration</h3>
<div class="form-group">
<label for="hostname">Hostname</label>
<input class="form-control" type="text" id="hostname" name="hostname" value="{HOSTNAME}" placeholder="A hostname or ip address such as {HOSTNAME}" />
<small class="form-text text-muted">This will be used as the default host for all applications, and can be changed later via the <a href="http://dokku.viewdocs.io/dokku/configuration/domains/" target="_blank"><code>dokku domains:set-global</code></a> command.</small>
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" id="vhost" name="vhost" value="true">
<label class="form-check-label" for="vhost">Use virtualhost naming for apps</label>
<small class="form-text text-muted">When enabled, Nginx will be run on port 80 and proxy requests to apps based on hostname.</small>
<small class="form-text text-muted">When disabled, a specific port will be setup for each application on first deploy, and requests to that port will be proxied to the relevant app.</small>
</div>
<div class="bd-callout bd-callout-info">
<h5>What will app URLs look like?</h5>
<pre><code id="example">http://hostname:port</code></pre>
</div>
</div>
</div>
<button type="button" onclick="setup()" class="btn btn-primary">Finish Setup</button> <span class="result"></span>
</form>
</div>
<div id="error-output"></div>
<script>
var $ = document.querySelector.bind(document)
function setup() {
if ($("#key").value.trim() == "") {
alert("Your admin public key cannot be blank.")
return
}
if ($("#hostname").value.trim() == "") {
alert("Your hostname cannot be blank.")
return
}
var data = new FormData($("#form"))
var inputs = [].slice.call(document.querySelectorAll("input, textarea, button"))
inputs.forEach(function (input) {
input.disabled = true
})
var result = $(".result")
fetch("/setup", {method: "POST", body: data})
.then(function(response) {
if (response.ok) {
return response.json()
} else {
throw new Error('Server returned error')
}
})
.then(function(response) {
result.classList.add("text-success");
result.textContent = "Success! Redirecting in 3 seconds. .."
setTimeout(function() {
window.location.href = "http://dokku.viewdocs.io/dokku~{VERSION}/deployment/application-deployment/";
}, 3000);
})
.catch(function (error) {
result.classList.add("text-danger");
result.textContent = "Could not send the request"
})
}
function update() {
if ($("#vhost").matches(":checked") && $("#hostname").value.match(/^(\d{1,3}\.){3}\d{1,3}$/)) {
alert("In order to use virtualhost naming, the hostname must not be an IP but a valid domain name.")
$("#vhost").checked = false;
}
if ($("#vhost").matches(':checked')) {
$("#example").textContent = "http://<app-name>."+$("#hostname").value
} else {
$("#example").textContent = "http://"+$("#hostname").value+":<app-port>"
}
}
$("#vhost").addEventListener("change", update);
$("#hostname").addEventListener("input", update);
update();
</script>
</body>
</html>
"""
if __name__ == "__main__":
main()
|
alexquick/dokku
|
contrib/dokku-installer.py
|
Python
|
mit
| 13,390
|
from mock import MagicMock, patch
from . import unittest
from kafka import SimpleConsumer, KafkaConsumer, MultiProcessConsumer
from kafka.common import (
KafkaConfigurationError, FetchResponsePayload, OffsetFetchResponsePayload,
FailedPayloadsError, OffsetAndMessage,
NotLeaderForPartitionError, UnknownTopicOrPartitionError
)
class TestKafkaConsumer(unittest.TestCase):
def test_non_integer_partitions(self):
with self.assertRaises(AssertionError):
SimpleConsumer(MagicMock(), 'group', 'topic', partitions = [ '0' ])
class TestMultiProcessConsumer(unittest.TestCase):
def test_partition_list(self):
client = MagicMock()
partitions = (0,)
with patch.object(MultiProcessConsumer, 'fetch_last_known_offsets') as fetch_last_known_offsets:
MultiProcessConsumer(client, 'testing-group', 'testing-topic', partitions=partitions)
self.assertEqual(fetch_last_known_offsets.call_args[0], (partitions,) )
self.assertEqual(client.get_partition_ids_for_topic.call_count, 0) # pylint: disable=no-member
class TestSimpleConsumer(unittest.TestCase):
def test_simple_consumer_failed_payloads(self):
client = MagicMock()
consumer = SimpleConsumer(client, group=None,
topic='topic', partitions=[0, 1],
auto_commit=False)
def failed_payloads(payload):
return FailedPayloadsError(payload)
client.send_fetch_request.side_effect = self.fail_requests_factory(failed_payloads)
# This should not raise an exception
consumer.get_messages(5)
def test_simple_consumer_leader_change(self):
client = MagicMock()
consumer = SimpleConsumer(client, group=None,
topic='topic', partitions=[0, 1],
auto_commit=False)
# Mock so that only the first request gets a valid response
def not_leader(request):
return FetchResponsePayload(request.topic, request.partition,
NotLeaderForPartitionError.errno, -1, ())
client.send_fetch_request.side_effect = self.fail_requests_factory(not_leader)
# This should not raise an exception
consumer.get_messages(20)
# client should have updated metadata
self.assertGreaterEqual(client.reset_topic_metadata.call_count, 1)
self.assertGreaterEqual(client.load_metadata_for_topics.call_count, 1)
def test_simple_consumer_unknown_topic_partition(self):
client = MagicMock()
consumer = SimpleConsumer(client, group=None,
topic='topic', partitions=[0, 1],
auto_commit=False)
# Mock so that only the first request gets a valid response
def unknown_topic_partition(request):
return FetchResponsePayload(request.topic, request.partition,
UnknownTopicOrPartitionError.errno, -1, ())
client.send_fetch_request.side_effect = self.fail_requests_factory(unknown_topic_partition)
# This should not raise an exception
with self.assertRaises(UnknownTopicOrPartitionError):
consumer.get_messages(20)
def test_simple_consumer_commit_does_not_raise(self):
client = MagicMock()
client.get_partition_ids_for_topic.return_value = [0, 1]
def mock_offset_fetch_request(group, payloads, **kwargs):
return [OffsetFetchResponsePayload(p.topic, p.partition, 0, b'', 0) for p in payloads]
client.send_offset_fetch_request.side_effect = mock_offset_fetch_request
def mock_offset_commit_request(group, payloads, **kwargs):
raise FailedPayloadsError(payloads[0])
client.send_offset_commit_request.side_effect = mock_offset_commit_request
consumer = SimpleConsumer(client, group='foobar',
topic='topic', partitions=[0, 1],
auto_commit=False)
# Mock internal commit check
consumer.count_since_commit = 10
# This should not raise an exception
self.assertFalse(consumer.commit(partitions=[0, 1]))
def test_simple_consumer_reset_partition_offset(self):
client = MagicMock()
def mock_offset_request(payloads, **kwargs):
raise FailedPayloadsError(payloads[0])
client.send_offset_request.side_effect = mock_offset_request
consumer = SimpleConsumer(client, group='foobar',
topic='topic', partitions=[0, 1],
auto_commit=False)
# This should not raise an exception
self.assertEqual(consumer.reset_partition_offset(0), None)
@staticmethod
def fail_requests_factory(error_factory):
# Mock so that only the first request gets a valid response
def fail_requests(payloads, **kwargs):
responses = [
FetchResponsePayload(payloads[0].topic, payloads[0].partition, 0, 0,
[OffsetAndMessage(
payloads[0].offset + i,
"msg %d" % (payloads[0].offset + i))
for i in range(10)]),
]
for failure in payloads[1:]:
responses.append(error_factory(failure))
return responses
return fail_requests
|
gamechanger/kafka-python
|
test/test_consumer.py
|
Python
|
apache-2.0
| 5,524
|
# 442. Find All Duplicates in an Array
# Given an array of integers, 1 ≤ a[i] ≤ n (n = size of array), some elements appear twice and others appear once.
#
# Find all the elements that appear twice in this array.
#
# Could you do it without extra space and in O(n) runtime?
#
# Example:
# Input:
# [4,3,2,7,8,2,3,1]
#
# Output:
# [2,3]
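#
# Approach used in the solution below: since every value v satisfies 1 <= v <= n, v - 1 is
# always a valid index into the array, so the sign of nums[v - 1] can serve as an in-place
# "seen" marker. This meets the follow-up constraints: O(n) time and no extra space.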
class Solution(object):
def findDuplicates(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
        res = []
        for i, num in enumerate(nums):
            # The value num points at index abs(num) - 1; a negative entry there
            # means this value has already been seen once.
            index = abs(num) - 1
            if nums[index] < 0:
                res.append(abs(num))
            else:
                # First sighting: mark it by negating the element at that index.
                nums[index] = -nums[index]
        return res
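

# Minimal usage sketch (not part of the original solution); the sample input is the one
# from the problem statement above.
if __name__ == "__main__":
    print(Solution().findDuplicates([4, 3, 2, 7, 8, 2, 3, 1]))  # expected: [2, 3]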
|
shawncaojob/LC
|
PY/442_find_all_duplicates_in_an_array.py
|
Python
|
gpl-3.0
| 786
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Test cases for the BibFormat engine. Also test
some utilities function in bibformat_utils module"""
__revision__ = "$Id$"
# pylint: disable=C0301
import unittest
import os
import sys
from invenio import bibformat
from invenio import bibformat_engine
from invenio import bibformat_utils
from invenio import bibformat_config
from invenio import bibformatadminlib
from invenio.config import CFG_TMPDIR
from invenio.testutils import make_test_suite, run_test_suite
#CFG_BIBFORMAT_OUTPUTS_PATH = "..%setc%soutput_formats" % (os.sep, os.sep)
#CFG_BIBFORMAT_TEMPLATES_PATH = "..%setc%sformat_templates" % (os.sep, os.sep)
#CFG_BIBFORMAT_ELEMENTS_PATH = "elements"
CFG_BIBFORMAT_OUTPUTS_PATH = "%s" % (CFG_TMPDIR)
CFG_BIBFORMAT_TEMPLATES_PATH = "%s" % (CFG_TMPDIR)
CFG_BIBFORMAT_ELEMENTS_PATH = "%s%stests_bibformat_elements" % (CFG_TMPDIR, os.sep)
CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = "tests_bibformat_elements"
class FormatTemplateTest(unittest.TestCase):
""" bibformat - tests on format templates"""
def test_get_format_template(self):
"""bibformat - format template parsing and returned structure"""
bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH = CFG_BIBFORMAT_TEMPLATES_PATH
#Test correct parsing and structure
template_1 = bibformat_engine.get_format_template("Test1.bft", with_attributes=True)
self.assert_(template_1 is not None)
self.assertEqual(template_1['code'], "test\n<name>this value should stay as it is</name>\n<description>this one too</description>\n")
self.assertEqual(template_1['attrs']['name'], "name_test")
self.assertEqual(template_1['attrs']['description'], "desc_test")
#Test correct parsing and structure of file without description or name
template_2 = bibformat_engine.get_format_template("Test_2.bft", with_attributes=True)
self.assert_(template_2 is not None)
self.assertEqual(template_2['code'], "test")
self.assertEqual(template_2['attrs']['name'], "Test_2.bft")
self.assertEqual(template_2['attrs']['description'], "")
        #Test non-existing format template
unknown_template = bibformat_engine.get_format_template("test_no_template.test", with_attributes=True)
self.assertEqual(unknown_template, None)
def test_get_format_templates(self):
""" bibformat - loading multiple format templates"""
bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH = CFG_BIBFORMAT_TEMPLATES_PATH
templates = bibformat_engine.get_format_templates(with_attributes=True)
#test correct loading
self.assert_("Test1.bft" in templates.keys())
self.assert_("Test_2.bft" in templates.keys())
self.assert_("Test3.bft" in templates.keys())
self.assert_("Test_no_template.test" not in templates.keys())
        #Test correct parsing and structure
self.assertEqual(templates['Test1.bft']['code'], "test\n<name>this value should stay as it is</name>\n<description>this one too</description>\n")
self.assertEqual(templates['Test1.bft']['attrs']['name'], "name_test")
self.assertEqual(templates['Test1.bft']['attrs']['description'], "desc_test")
def test_get_format_template_attrs(self):
""" bibformat - correct parsing of attributes in format template"""
bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH = CFG_BIBFORMAT_TEMPLATES_PATH
attrs = bibformat_engine.get_format_template_attrs("Test1.bft")
self.assertEqual(attrs['name'], "name_test")
self.assertEqual(attrs['description'], "desc_test")
def test_get_fresh_format_template_filename(self):
""" bibformat - getting fresh filename for format template"""
bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH = CFG_BIBFORMAT_TEMPLATES_PATH
filename_and_name_1 = bibformat_engine.get_fresh_format_template_filename("Test")
self.assert_(len(filename_and_name_1) >= 2)
self.assertEqual(filename_and_name_1[0], "Test.bft")
filename_and_name_2 = bibformat_engine.get_fresh_format_template_filename("Test1")
self.assert_(len(filename_and_name_2) >= 2)
self.assert_(filename_and_name_2[0] != "Test1.bft")
path = bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH + os.sep + filename_and_name_2[0]
self.assert_(not os.path.exists(path))
class FormatElementTest(unittest.TestCase):
""" bibformat - tests on format templates"""
def setUp(self):
# pylint: disable=C0103
"""bibformat - setting python path to test elements"""
sys.path.append('%s' % CFG_TMPDIR)
def test_resolve_format_element_filename(self):
"""bibformat - resolving format elements filename """
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = CFG_BIBFORMAT_ELEMENTS_PATH
#Test elements filename starting without bfe_, with underscore instead of space
filenames = ["test 1", "test 1.py", "bfe_test 1", "bfe_test 1.py", "BFE_test 1",
"BFE_TEST 1", "BFE_TEST 1.py", "BFE_TeST 1.py", "BFE_TeST 1",
"BfE_TeST 1.py", "BfE_TeST 1","test_1", "test_1.py", "bfe_test_1",
"bfe_test_1.py", "BFE_test_1",
"BFE_TEST_1", "BFE_TEST_1.py", "BFE_Test_1.py", "BFE_TeST_1",
"BfE_TeST_1.py", "BfE_TeST_1"]
for i in range(len(filenames)-2):
filename_1 = bibformat_engine.resolve_format_element_filename(filenames[i])
self.assert_(filename_1 is not None)
filename_2 = bibformat_engine.resolve_format_element_filename(filenames[i+1])
self.assertEqual(filename_1, filename_2)
#Test elements filename starting with bfe_, and with underscores instead of spaces
filenames = ["test 2", "test 2.py", "bfe_test 2", "bfe_test 2.py", "BFE_test 2",
"BFE_TEST 2", "BFE_TEST 2.py", "BFE_TeST 2.py", "BFE_TeST 2",
"BfE_TeST 2.py", "BfE_TeST 2","test_2", "test_2.py", "bfe_test_2",
"bfe_test_2.py", "BFE_test_2",
"BFE_TEST_2", "BFE_TEST_2.py", "BFE_TeST_2.py", "BFE_TeST_2",
"BfE_TeST_2.py", "BfE_TeST_2"]
for i in range(len(filenames)-2):
filename_1 = bibformat_engine.resolve_format_element_filename(filenames[i])
self.assert_(filename_1 is not None)
filename_2 = bibformat_engine.resolve_format_element_filename(filenames[i+1])
self.assertEqual(filename_1, filename_2)
#Test non existing element
non_existing_element = bibformat_engine.resolve_format_element_filename("BFE_NON_EXISTING_ELEMENT")
self.assertEqual(non_existing_element, None)
def test_get_format_element(self):
"""bibformat - format elements parsing and returned structure"""
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = CFG_BIBFORMAT_ELEMENTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH
#Test loading with different kind of names, for element with spaces in name, without bfe_
element_1 = bibformat_engine.get_format_element("test 1", with_built_in_params=True)
self.assert_(element_1 is not None)
element_1_bis = bibformat_engine.get_format_element("bfe_tEst_1.py", with_built_in_params=True)
self.assertEqual(element_1, element_1_bis)
        #Test loading with different kind of names, for element without spaces in name, with bfe_
element_2 = bibformat_engine.get_format_element("test 2", with_built_in_params=True)
self.assert_(element_2 is not None)
element_2_bis = bibformat_engine.get_format_element("bfe_tEst_2.py", with_built_in_params=True)
self.assertEqual(element_2, element_2_bis)
#Test loading incorrect elements
element_3 = bibformat_engine.get_format_element("test 3", with_built_in_params=True)
self.assertEqual(element_3, None)
element_4 = bibformat_engine.get_format_element("test 4", with_built_in_params=True)
self.assertEqual(element_4, None)
unknown_element = bibformat_engine.get_format_element("TEST_NO_ELEMENT", with_built_in_params=True)
self.assertEqual(unknown_element, None)
#Test element without docstring
element_5 = bibformat_engine.get_format_element("test_5", with_built_in_params=True)
self.assert_(element_5 is not None)
self.assertEqual(element_5['attrs']['description'], '')
self.assert_({'name':"param1",
'description':"(no description provided)",
'default':""} in element_5['attrs']['params'] )
self.assertEqual(element_5['attrs']['seealso'], [])
#Test correct parsing:
#Test type of element
self.assertEqual(element_1['type'], "python")
#Test name = element filename, with underscore instead of spaces,
#without BFE_ and uppercase
self.assertEqual(element_1['attrs']['name'], "TEST_1")
#Test description parsing
self.assertEqual(element_1['attrs']['description'], "Prints test")
#Test @see: parsing
self.assertEqual(element_1['attrs']['seealso'], ["element2.py", "unknown_element.py"])
#Test @param parsing
self.assert_({'name':"param1",
'description':"desc 1",
'default':""} in element_1['attrs']['params'] )
self.assert_({'name':"param2",
'description':"desc 2",
'default':"default value"} in element_1['attrs']['params'] )
#Test non existing element
non_existing_element = bibformat_engine.get_format_element("BFE_NON_EXISTING_ELEMENT")
self.assertEqual(non_existing_element, None)
def test_get_format_element_attrs_from_function(self):
""" bibformat - correct parsing of attributes in 'format' docstring"""
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = CFG_BIBFORMAT_ELEMENTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH
element_1 = bibformat_engine.get_format_element("test 1", with_built_in_params=True)
function = element_1['code']
attrs = bibformat_engine.get_format_element_attrs_from_function(function,
element_1['attrs']['name'],
with_built_in_params=True)
self.assertEqual(attrs['name'], "TEST_1")
#Test description parsing
self.assertEqual(attrs['description'], "Prints test")
#Test @see: parsing
self.assertEqual(attrs['seealso'], ["element2.py", "unknown_element.py"])
def test_get_format_elements(self):
"""bibformat - multiple format elements parsing and returned structure"""
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = CFG_BIBFORMAT_ELEMENTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH
elements = bibformat_engine.get_format_elements()
self.assert_(isinstance(elements, dict))
self.assertEqual(elements['TEST_1']['attrs']['name'], "TEST_1")
self.assertEqual(elements['TEST_2']['attrs']['name'], "TEST_2")
self.assert_("TEST_3" not in elements.keys())
self.assert_("TEST_4" not in elements.keys())
def test_get_tags_used_by_element(self):
"""bibformat - identification of tag usage inside element"""
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = bibformat_config.CFG_BIBFORMAT_ELEMENTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = bibformat_config.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH
tags = bibformatadminlib.get_tags_used_by_element('bfe_abstract.py')
self.failUnless(len(tags) == 4,
'Could not correctly identify tags used in bfe_abstract.py')
class OutputFormatTest(unittest.TestCase):
""" bibformat - tests on output formats"""
def test_get_output_format(self):
""" bibformat - output format parsing and returned structure """
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
filename_1 = bibformat_engine.resolve_output_format_filename("test1")
output_1 = bibformat_engine.get_output_format(filename_1, with_attributes=True)
self.assertEqual(output_1['attrs']['names']['generic'], "")
self.assert_(isinstance(output_1['attrs']['names']['ln'], dict))
self.assert_(isinstance(output_1['attrs']['names']['sn'], dict))
self.assertEqual(output_1['attrs']['code'], "TEST1")
self.assert_(len(output_1['attrs']['code']) <= 6)
self.assertEqual(len(output_1['rules']), 4)
self.assertEqual(output_1['rules'][0]['field'], '980.a')
self.assertEqual(output_1['rules'][0]['template'], 'Picture_HTML_detailed.bft')
self.assertEqual(output_1['rules'][0]['value'], 'PICTURE ')
self.assertEqual(output_1['rules'][1]['field'], '980.a')
self.assertEqual(output_1['rules'][1]['template'], 'Article.bft')
self.assertEqual(output_1['rules'][1]['value'], 'ARTICLE')
self.assertEqual(output_1['rules'][2]['field'], '980__a')
self.assertEqual(output_1['rules'][2]['template'], 'Thesis_detailed.bft')
self.assertEqual(output_1['rules'][2]['value'], 'THESIS ')
self.assertEqual(output_1['rules'][3]['field'], '980__a')
self.assertEqual(output_1['rules'][3]['template'], 'Pub.bft')
self.assertEqual(output_1['rules'][3]['value'], 'PUBLICATION ')
filename_2 = bibformat_engine.resolve_output_format_filename("TEST2")
output_2 = bibformat_engine.get_output_format(filename_2, with_attributes=True)
self.assertEqual(output_2['attrs']['names']['generic'], "")
self.assert_(isinstance(output_2['attrs']['names']['ln'], dict))
self.assert_(isinstance(output_2['attrs']['names']['sn'], dict))
self.assertEqual(output_2['attrs']['code'], "TEST2")
self.assert_(len(output_2['attrs']['code']) <= 6)
self.assertEqual(output_2['rules'], [])
unknown_output = bibformat_engine.get_output_format("unknow", with_attributes=True)
self.assertEqual(unknown_output, {'rules':[],
'default':"",
'attrs':{'names':{'generic':"", 'ln':{}, 'sn':{}},
'description':'',
'code':"UNKNOW",
'visibility': 1,
'content_type':""}})
def test_get_output_formats(self):
""" bibformat - loading multiple output formats """
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
outputs = bibformat_engine.get_output_formats(with_attributes=True)
self.assert_(isinstance(outputs, dict))
self.assert_("TEST1.bfo" in outputs.keys())
self.assert_("TEST2.bfo" in outputs.keys())
self.assert_("unknow.bfo" not in outputs.keys())
#Test correct parsing
output_1 = outputs["TEST1.bfo"]
self.assertEqual(output_1['attrs']['names']['generic'], "")
self.assert_(isinstance(output_1['attrs']['names']['ln'], dict))
self.assert_(isinstance(output_1['attrs']['names']['sn'], dict))
self.assertEqual(output_1['attrs']['code'], "TEST1")
self.assert_(len(output_1['attrs']['code']) <= 6)
def test_get_output_format_attrs(self):
""" bibformat - correct parsing of attributes in output format"""
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
attrs= bibformat_engine.get_output_format_attrs("TEST1")
self.assertEqual(attrs['names']['generic'], "")
self.assert_(isinstance(attrs['names']['ln'], dict))
self.assert_(isinstance(attrs['names']['sn'], dict))
self.assertEqual(attrs['code'], "TEST1")
self.assert_(len(attrs['code']) <= 6)
def test_resolve_output_format(self):
""" bibformat - resolving output format filename"""
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
filenames = ["test1", "test1.bfo", "TEST1", "TeST1", "TEST1.bfo", "<b>test1"]
for i in range(len(filenames)-2):
filename_1 = bibformat_engine.resolve_output_format_filename(filenames[i])
self.assert_(filename_1 is not None)
filename_2 = bibformat_engine.resolve_output_format_filename(filenames[i+1])
self.assertEqual(filename_1, filename_2)
def test_get_fresh_output_format_filename(self):
""" bibformat - getting fresh filename for output format"""
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
filename_and_name_1 = bibformat_engine.get_fresh_output_format_filename("test")
self.assert_(len(filename_and_name_1) >= 2)
self.assertEqual(filename_and_name_1[0], "TEST.bfo")
filename_and_name_1_bis = bibformat_engine.get_fresh_output_format_filename("<test>")
self.assert_(len(filename_and_name_1_bis) >= 2)
self.assertEqual(filename_and_name_1_bis[0], "TEST.bfo")
filename_and_name_2 = bibformat_engine.get_fresh_output_format_filename("test1")
self.assert_(len(filename_and_name_2) >= 2)
self.assert_(filename_and_name_2[0] != "TEST1.bfo")
path = bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH + os.sep + filename_and_name_2[0]
self.assert_(not os.path.exists(path))
filename_and_name_3 = bibformat_engine.get_fresh_output_format_filename("test1testlong")
self.assert_(len(filename_and_name_3) >= 2)
self.assert_(filename_and_name_3[0] != "TEST1TESTLONG.bft")
self.assert_(len(filename_and_name_3[0]) <= 6 + 1 + len(bibformat_config.CFG_BIBFORMAT_FORMAT_OUTPUT_EXTENSION))
path = bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH + os.sep + filename_and_name_3[0]
self.assert_(not os.path.exists(path))
class PatternTest(unittest.TestCase):
""" bibformat - tests on re patterns"""
def test_pattern_lang(self):
""" bibformat - correctness of pattern 'pattern_lang'"""
text = ''' <h1>Here is my test text</h1>
<p align="center">
<lang><en><b>Some words</b></en><fr>Quelques mots</fr><de>Einige Wörter</de> garbage </lang>
Here ends the middle of my test text
<lang><en><b>English</b></en><fr><b>Français</b></fr><de><b>Deutsch</b></de></lang>
<b>Here ends my test text</b></p>'''
result = bibformat_engine.pattern_lang.search(text)
self.assertEqual(result.group("langs"), "<en><b>Some words</b></en><fr>Quelques mots</fr><de>Einige Wörter</de> garbage ")
text = ''' <h1>Here is my test text</h1>
<BFE_test param="
<lang><en><b>Some words</b></en><fr>Quelques mots</fr><de>Einige Wörter</de> garbage </lang>" />
'''
result = bibformat_engine.pattern_lang.search(text)
self.assertEqual(result.group("langs"), "<en><b>Some words</b></en><fr>Quelques mots</fr><de>Einige Wörter</de> garbage ")
def test_ln_pattern(self):
""" bibformat - correctness of pattern 'ln_pattern'"""
text = "<en><b>Some words</b></en><fr>Quelques mots</fr><de>Einige Wörter</de> garbage "
result = bibformat_engine.ln_pattern.search(text)
self.assertEqual(result.group(1), "en")
self.assertEqual(result.group(2), "<b>Some words</b>")
def test_pattern_format_template_name(self):
""" bibformat - correctness of pattern 'pattern_format_template_name'"""
text = '''
garbage
<name><b>a name</b></name>
<description>a <b>description</b> on
2 lines </description>
<h1>the content of the template</h1>
content
'''
result = bibformat_engine.pattern_format_template_name.search(text)
self.assertEqual(result.group('name'), "<b>a name</b>")
def test_pattern_format_template_desc(self):
""" bibformat - correctness of pattern 'pattern_format_template_desc'"""
text = '''
garbage
<name><b>a name</b></name>
<description>a <b>description</b> on
2 lines </description>
<h1>the content of the template</h1>
content
'''
result = bibformat_engine.pattern_format_template_desc.search(text)
self.assertEqual(result.group('desc'), '''a <b>description</b> on
2 lines ''')
def test_pattern_tag(self):
""" bibformat - correctness of pattern 'pattern_tag'"""
text = '''
garbage but part of content
<name><b>a name</b></name>
<description>a <b>description</b> on
2 lines </description>
<h1>the content of the template</h1>
<BFE_tiTLE param1="<b>value1</b>"
param2=""/>
my content is so nice!
<BFE_title param1="value1"/>
<BFE_title param1="value1"/>
'''
result = bibformat_engine.pattern_tag.search(text)
self.assertEqual(result.group('function_name'), "tiTLE")
self.assertEqual(result.group('params').strip(), '''param1="<b>value1</b>"
param2=""''')
def test_pattern_function_params(self):
""" bibformat - correctness of pattern 'test_pattern_function_params'"""
text = '''
param1="" param2="value2"
param3="<b>value3</b>" garbage
'''
names = ["param1", "param2", "param3"]
values = ["", "value2", "<b>value3</b>"]
results = bibformat_engine.pattern_format_element_params.finditer(text) #TODO
param_i = 0
for match in results:
self.assertEqual(match.group('param'), names[param_i])
self.assertEqual(match.group('value'), values [param_i])
param_i += 1
def test_pattern_format_element_params(self):
""" bibformat - correctness of pattern 'pattern_format_element_params'"""
text = '''
a description for my element
some text
@param param1: desc1
@param param2: desc2
@see: seethis, seethat
'''
names = ["param1", "param2"]
descriptions = ["desc1", "desc2"]
results = bibformat_engine.pattern_format_element_params.finditer(text) #TODO
param_i = 0
for match in results:
self.assertEqual(match.group('name'), names[param_i])
self.assertEqual(match.group('desc'), descriptions[param_i])
param_i += 1
def test_pattern_format_element_seealso(self):
""" bibformat - correctness of pattern 'pattern_format_element_seealso' """
text = '''
a description for my element
some text
@param param1: desc1
@param param2: desc2
@see: seethis, seethat
'''
result = bibformat_engine.pattern_format_element_seealso.search(text)
self.assertEqual(result.group('see').strip(), 'seethis, seethat')
class EscapingAndWashingTest(unittest.TestCase):
""" bibformat - test escaping and washing metadata"""
def test_escaping(self):
""" bibformat - tests escaping HTML characters"""
text = "Is 5 < 6 ? For sure! And what about True && False == True?"
result = bibformat_engine.escape_field(text, mode=0)
self.assertEqual(result, text)
result = bibformat_engine.escape_field(text, mode=1)
        self.assertEqual(result, 'Is 5 &lt; 6 ? For sure! And what about True &amp;&amp; False == True?')
def test_washing(self):
""" bibformat - test washing HTML tags"""
text = '''Hi dude, <br>, <strong>please login</strong>:<br/>
<a onclick="http://www.mycrappywebsite.com" href="login.html">login here</a></a><SCRIPT>alert("XSS");</SCRIPT>'''
# Keep only basic tags
result = bibformat_engine.escape_field(text, mode=2)
self.assert_('script' not in result.lower())
self.assert_('onclick' not in result.lower())
self.assert_('mycrappywebsite' not in result.lower())
self.assert_('<br>' in result.lower())
self.assert_('<br/>' in result.lower().replace(' ', ''))
# Keep only basic tags only if value starts with <!--HTML-->
# directive. Otherwise escape (which is the case here)
result = bibformat_engine.escape_field(text, mode=3)
self.assert_('<script' not in result.lower())
self.assert_('<' not in result.lower())
result = bibformat_engine.escape_field(text, mode=5)
self.assert_('<script' not in result.lower())
self.assert_('<br' in result.lower())
# Remove all HTML tags
result = bibformat_engine.escape_field(text, mode=4)
self.assert_('script' not in result.lower())
self.assert_('onclick' not in result.lower())
self.assert_('mycrappywebsite' not in result.lower())
self.assert_('strong' not in result.lower())
self.assert_('<br>' not in result.lower())
self.assert_('<br/>' not in result.lower().replace(' ', ''))
self.assert_('login here' in result.lower())
# Keep basic tags + some others (like <img>)
result = bibformat_engine.escape_field(text, mode=5)
self.assert_('script' not in result.lower())
self.assert_('onclick' not in result.lower())
self.assert_('mycrappywebsite' not in result.lower())
self.assert_('<br' in result.lower())
self.assert_('login here' in result.lower())
text2 = text + ' <img src="loginicon" alt="login icon"/>'
result = bibformat_engine.escape_field(text2, mode=5)
self.assert_('<img' in result.lower())
self.assert_('src=' in result.lower())
self.assert_('alt="login icon"' in result.lower())
# Keep some tags only if value starts with <!--HTML-->
# directive. Otherwise escape (which is the case here)
result = bibformat_engine.escape_field(text, mode=6)
self.assert_('<script' not in result.lower())
self.assert_('<' not in result.lower())
result = bibformat_engine.escape_field('<!--HTML-->'+text, mode=6)
self.assert_('<script' not in result.lower())
self.assert_('<br>' in result.lower())
self.assert_('mycrappywebsite' not in result.lower())
# When the value cannot be parsed by our not so smart parser,
# just escape everything
text3 = """Ok, let't try with something unparsable < hehe <a onclick="http://www.mycrappywebsite.com" href="login.html">login</a>"""
result = bibformat_engine.escape_field(text3, mode=2)
self.assert_('mycrappywebsite' not in result.lower() or \
'<a' not in result.lower())
result = bibformat_engine.escape_field(text3, mode=3)
self.assert_('<a' not in result.lower())
result = bibformat_engine.escape_field(text3, mode=5)
self.assert_('mycrappywebsite' not in result.lower() or \
'<a' not in result.lower())
result = bibformat_engine.escape_field(text3, mode=6)
self.assert_('<a' not in result.lower())
class MiscTest(unittest.TestCase):
""" bibformat - tests on various functions"""
def test_parse_tag(self):
""" bibformat - result of parsing tags"""
tags_and_parsed_tags = ['245COc', ['245', 'C', 'O', 'c'],
'245C_c', ['245', 'C', '' , 'c'],
'245__c', ['245', '' , '' , 'c'],
'245__$$c', ['245', '' , '' , 'c'],
'245__$c', ['245', '' , '' , 'c'],
'245 $c', ['245', '' , '' , 'c'],
'245 $$c', ['245', '' , '' , 'c'],
'245__.c', ['245', '' , '' , 'c'],
'245 .c', ['245', '' , '' , 'c'],
'245C_$c', ['245', 'C', '' , 'c'],
'245CO$$c', ['245', 'C', 'O', 'c'],
'245CO.c', ['245', 'C', 'O', 'c'],
'245$c', ['245', '' , '' , 'c'],
'245.c', ['245', '' , '' , 'c'],
'245$$c', ['245', '' , '' , 'c'],
'245__%', ['245', '' , '' , '%'],
'245__$$%', ['245', '' , '' , '%'],
'245__$%', ['245', '' , '' , '%'],
'245 $%', ['245', '' , '' , '%'],
'245 $$%', ['245', '' , '' , '%'],
'245$%', ['245', '' , '' , '%'],
'245.%', ['245', '' , '' , '%'],
'245_O.%', ['245', '' , 'O', '%'],
'245.%', ['245', '' , '' , '%'],
'245$$%', ['245', '' , '' , '%'],
'2%5$$a', ['2%5', '' , '' , 'a'],
'2%%%%a', ['2%%', '%', '%', 'a'],
'2%%__a', ['2%%', '' , '' , 'a'],
'2%%a', ['2%%', '' , '' , 'a']]
for i in range(0, len(tags_and_parsed_tags), 2):
parsed_tag = bibformat_utils.parse_tag(tags_and_parsed_tags[i])
self.assertEqual(parsed_tag, tags_and_parsed_tags[i+1])
class FormatTest(unittest.TestCase):
""" bibformat - generic tests on function that do the formatting. Main functions"""
def setUp(self):
# pylint: disable=C0103
""" bibformat - prepare BibRecord objects"""
self.xml_text_1 = '''
<record>
<controlfield tag="001">33</controlfield>
<datafield tag="980" ind1="" ind2="">
<subfield code="a">thesis</subfield>
</datafield>
<datafield tag="950" ind1="" ind2="">
<subfield code="b">Doe1, John</subfield>
</datafield>
<datafield tag="100" ind1="" ind2="">
<subfield code="a">Doe2, John</subfield>
<subfield code="b">editor</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="1">
<subfield code="a">On the foo and bar1</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="2">
<subfield code="a">On the foo and bar2</subfield>
</datafield>
<datafield tag="088" ind1="" ind2="">
<subfield code="a">99999</subfield>
</datafield>
</record>
'''
#rec_1 = bibrecord.create_record(self.xml_text_1)
self.bfo_1 = bibformat_engine.BibFormatObject(recID=None,
ln='fr',
xml_record=self.xml_text_1)
self.xml_text_2 = '''
<record>
<controlfield tag="001">33</controlfield>
<datafield tag="980" ind1="" ind2="">
<subfield code="b">thesis </subfield>
</datafield>
<datafield tag="950" ind1="" ind2="">
<subfield code="b">Doe1, John</subfield>
</datafield>
<datafield tag="100" ind1="" ind2="">
<subfield code="a">Doe2, John</subfield>
<subfield code="b">editor</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="1">
<subfield code="b">On the foo and bar1</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="2">
<subfield code="b">On the foo and bar2</subfield>
</datafield>
</record>
'''
#self.rec_2 = bibrecord.create_record(xml_text_2)
self.bfo_2 = bibformat_engine.BibFormatObject(recID=None,
ln='fr',
xml_record=self.xml_text_2)
self.xml_text_3 = '''
<record>
<controlfield tag="001">33</controlfield>
<datafield tag="041" ind1="" ind2="">
<subfield code="a">eng</subfield>
</datafield>
<datafield tag="100" ind1="" ind2="">
<subfield code="a">Doe1, John</subfield>
</datafield>
<datafield tag="100" ind1="" ind2="">
<subfield code="a">Doe2, John</subfield>
<subfield code="b">editor</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="1">
<subfield code="a">On the foo and bar1</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="2">
<subfield code="a">On the foo and bar2</subfield>
</datafield>
<datafield tag="980" ind1="" ind2="">
<subfield code="a">article</subfield>
</datafield>
</record>
'''
#self.rec_3 = bibrecord.create_record(xml_text_3)
self.bfo_3 = bibformat_engine.BibFormatObject(recID=None,
ln='fr',
xml_record=self.xml_text_3)
self.empty_record_xml = '''
<record>
<controlfield tag="001">555</controlfield>
</record>'''
def test_decide_format_template(self):
""" bibformat - choice made by function decide_format_template"""
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
result = bibformat_engine.decide_format_template(self.bfo_1, "test1")
self.assertEqual(result, "Thesis_detailed.bft")
result = bibformat_engine.decide_format_template(self.bfo_3, "test3")
self.assertEqual(result, "Test3.bft")
#Only default matches
result = bibformat_engine.decide_format_template(self.bfo_2, "test1")
self.assertEqual(result, "Default_HTML_detailed.bft")
#No match at all for record
result = bibformat_engine.decide_format_template(self.bfo_2, "test2")
self.assertEqual(result, None)
#Non existing output format
result = bibformat_engine.decide_format_template(self.bfo_2, "UNKNOW")
self.assertEqual(result, None)
def test_format_record(self):
""" bibformat - correct formatting"""
bibformat_engine.CFG_BIBFORMAT_OUTPUTS_PATH = CFG_BIBFORMAT_OUTPUTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = CFG_BIBFORMAT_ELEMENTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH
bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH = CFG_BIBFORMAT_TEMPLATES_PATH
#use output format that has no match TEST DISABLED DURING MIGRATION
#result = bibformat_engine.format_record(recID=None, of="test2", xml_record=self.xml_text_2)
#self.assertEqual(result.replace("\n", ""),"")
#use output format that link to unknown template
result = bibformat_engine.format_record(recID=None, of="test3", xml_record=self.xml_text_2)
self.assertEqual(result.replace("\n", ""),"")
#Unknown output format TEST DISABLED DURING MIGRATION
#result = bibformat_engine.format_record(recID=None, of="unkno", xml_record=self.xml_text_3)
#self.assertEqual(result.replace("\n", ""),"")
#Default formatting
result = bibformat_engine.format_record(recID=None, ln='fr', of="test3", xml_record=self.xml_text_3)
self.assertEqual(result,'''<h1>hi</h1> this is my template\ntest<bfe_non_existing_element must disappear/><test_1 non prefixed element must stay as any normal tag/>tfrgarbage\n<br/>test me!<b>ok</b>a default valueeditor\n<br/>test me!<b>ok</b>a default valueeditor\n<br/>test me!<b>ok</b>a default valueeditor\n''')
def test_empty_formatting(self):
"""bibformat - formatting empty record"""
result = bibformat_engine.format_record(recID=0,
of='hb',
verbose=9,
xml_record=self.empty_record_xml)
self.assertEqual(result, '')
# FIXME: The commented test below currently fails, since xm
# format is generated from the database
## result = bibformat_engine.format_record(recID=0,
## of='xm',
## verbose=9,
## xml_record=self.empty_record_xml)
## self.assertEqual(result, self.empty_record_xml)
def test_format_with_format_template(self):
""" bibformat - correct formatting with given template"""
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_PATH = CFG_BIBFORMAT_ELEMENTS_PATH
bibformat_engine.CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH = CFG_BIBFORMAT_ELEMENTS_IMPORT_PATH
bibformat_engine.CFG_BIBFORMAT_TEMPLATES_PATH = CFG_BIBFORMAT_TEMPLATES_PATH
template = bibformat_engine.get_format_template("Test3.bft")
result = bibformat_engine.format_with_format_template(format_template_filename = None,
bfo=self.bfo_1,
verbose=0,
format_template_code=template['code'])
self.assert_(isinstance(result, tuple))
self.assertEqual(result[0],'''<h1>hi</h1> this is my template\ntest<bfe_non_existing_element must disappear/><test_1 non prefixed element must stay as any normal tag/>tfrgarbage\n<br/>test me!<b>ok</b>a default valueeditor\n<br/>test me!<b>ok</b>a default valueeditor\n<br/>test me!<b>ok</b>a default valueeditor\n99999''')
class MarcFilteringTest(unittest.TestCase):
""" bibformat - MARC tag filtering tests"""
def setUp(self):
"""bibformat - prepare MARC filtering tests"""
self.xml_text_4 = '''
<record>
<controlfield tag="001">33</controlfield>
<datafield tag="041" ind1="" ind2="">
<subfield code="a">eng</subfield>
</datafield>
<datafield tag="100" ind1="" ind2="">
<subfield code="a">Doe1, John</subfield>
</datafield>
<datafield tag="100" ind1="" ind2="">
<subfield code="a">Doe2, John</subfield>
<subfield code="b">editor</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="1">
<subfield code="a">On the foo and bar1</subfield>
</datafield>
<datafield tag="245" ind1="" ind2="2">
<subfield code="a">On the foo and bar2</subfield>
</datafield>
<datafield tag="595" ind1="" ind2="2">
<subfield code="a">Confidential comment</subfield>
</datafield>
<datafield tag="980" ind1="" ind2="">
<subfield code="a">article</subfield>
</datafield>
</record>
'''
def test_filtering(self):
"""bibformat - filter hidden fields"""
newxml = bibformat.filter_hidden_fields(self.xml_text_4, user_info=None, filter_tags=['595',], force_filtering=True)
numhfields = newxml.count("595")
self.assertEqual(numhfields, 0)
newxml = bibformat.filter_hidden_fields(self.xml_text_4, user_info=None, filter_tags=['595',], force_filtering=False)
numhfields = newxml.count("595")
self.assertEqual(numhfields, 1)
TEST_SUITE = make_test_suite(FormatTemplateTest,
OutputFormatTest,
FormatElementTest,
PatternTest,
MiscTest,
FormatTest,
EscapingAndWashingTest,
MarcFilteringTest)
if __name__ == '__main__':
run_test_suite(TEST_SUITE)
|
pombredanne/invenio
|
modules/bibformat/lib/bibformat_engine_tests.py
|
Python
|
gpl-2.0
| 40,916
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('slides_manager', '0005_slide_staining'),
]
operations = [
migrations.AddField(
model_name='slidequalitycontrol',
name='notes',
field=models.TextField(null=True, blank=True),
),
]
|
lucalianas/ProMort
|
promort/slides_manager/migrations/0006_slidequalitycontrol_notes.py
|
Python
|
mit
| 1,522
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateService
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-service-management
# [START servicemanagement_v1_generated_ServiceManager_CreateService_sync]
from google.cloud import servicemanagement_v1
def sample_create_service():
# Create a client
client = servicemanagement_v1.ServiceManagerClient()
# Initialize request argument(s)
request = servicemanagement_v1.CreateServiceRequest(
)
# Make the request
operation = client.create_service(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END servicemanagement_v1_generated_ServiceManager_CreateService_sync]
|
googleapis/python-service-management
|
samples/generated_samples/servicemanagement_v1_generated_service_manager_create_service_sync.py
|
Python
|
apache-2.0
| 1,564
|
from abc import abstractmethod
from threading import Lock
from typing import TYPE_CHECKING, Dict
from treehopper.api.device_commands import DeviceCommands
from treehopper.api.interfaces import DigitalOut, Pwm, AdcPin, DigitalIn
if TYPE_CHECKING:
from treehopper.api.treehopper_usb import TreehopperUsb, Pin
class PinMode:
"""Enumeration of possible pin modes"""
Reserved, DigitalInput, PushPullOutput, OpenDrainOutput, AnalogInput, SoftPwm, Unassigned = range(7)
## \cond PRIVATE
class PinConfigCommands:
Reserved, MakeDigitalInput, MakePushPullOutput, MakeOpenDrainOutput, MakeAnalogInput, SetDigitalValue = range(6)
## \endcond
class ReferenceLevel:
"""Enumeration of possible ADC reference values"""
Vref_3V3, Vref_1V65, Vref_1V8, Vref_2V4, Vref_3V3Derived, Vref_3V6 = range(6)
class SpiChipSelectPin(DigitalOut):
"""A pin that can be used by an SPI peripheral for chip-select duties"""
@property
@abstractmethod
def spi_module(self):
pass
@property
@abstractmethod
def number(self):
pass
class Pin(AdcPin, DigitalIn, SpiChipSelectPin, Pwm):
"""
Built-in I/O pins
\section quick Quick guide
Once you have connected to a TreehopperUsb board, you can access pins through the \link TreehopperUsb.pins
pins\endlink property of the board.
You can manipulate pins directly:
>>> board = find_boards()[0]
>>> board.connect()
>>> board.pins[3].mode = PinMode.PushPullOutput
>>> board.pins[3].digital_value = True
Or create reference variables:
>>> board = find_boards()[0]
>>> board.connect()
>>> red_led = board.pins[7]
>>> red_led.mode = PinMode.OpenDrainOutput
>>> red_led.digital_value = False
You can choose whether a pin should be a digital input, digital output, analog input, or soft-PWM output by
setting the pin's \link Pin.mode mode\endlink property to one of the values in PinMode.
You can set or retrieve the digital value of a pin by accessing the \link Pin.digital_value digital_value\endlink
property. Note that writing to this property --- even if the pin is an input --- will implicitly change it into
an output.
If the pin is set as an analog input, you can access its data through any of the following properties:
- \link Pin.analog_value analog_value\endlink: retrieve a normalized (0.0 - 1.0) pin value
- \link Pin.analog_voltage analog_voltage\endlink: retrieve the voltage (0.0 - 3.3) on the pin
- \link Pin.adc_value adc_value\endlink: retrieve the raw ADC value (0 - 4095) of the pin
\section more More information
This section dives into more details and electrical characteristics about %Treehopper's pins.
\subsection mode Pin mode
    You can choose whether a pin should be a digital input, output, or analog input by setting the pin's mode property.
\subsection output Digital outputs
All pins on %Treehopper support both push-pull and open-drain outputs. Writing a true or false to the pin's
digital value will flush that value to the pin.
- **Push-Pull**: Push-pull is the most commonly used output mode; when a pin is set to true, %Treehopper will
attempt to drive the pin to logic HIGH (3.3V) — when a pin is set to false, %Treehopper will attempt to drive
the pin to logic LOW (0V — ground).
- **Open-Drain**: Open-drain outputs can only drive a strong logic LOW (0V); in the HIGH state, the pin is
weakly pulled high.
\subsubsection current Output current limitations
    %Treehopper's output impedance is roughly 100 ohm sourcing and 50 ohm sinking when supplying light loads,
    and it increases as the load increases. In the worst-case scenario (when short-circuited), %Treehopper can source
approximately 20 mA of current, and sink approximately 40 mA of current. The pin's drivers are rated for a
maximum of 100 mA of output current, so you cannot damage the board by short-circuiting its output to ground or
3.3V.
While this is plenty of current for peripheral ICs and small indicator LEDs, do not expect to drive large arrays
of LEDs, or low-impedance loads like motors, solenoids, or speakers directly from %Treehopper's pins. There are a
wide variety of peripherals in the Treehopper.Libraries package for your language API that can be used for
interfacing with these peripherals.
\warning **To avoid damaging the device permanently, do not source or sink more than 400 mA of combined current
out of the pins on the board!** Note that these limits have nothing to do with the 3.3V supply pins found on
%Treehopper, which can comfortably source 500 mA --- or the unfused 5V pin, which has no imposed current limit (
other than that of your computer).
\subsection input Digital input
%Treehopper's digital inputs are used to sample digital signals — i.e., signals that have either a <i>LOW</i> or
<i>HIGH</i> state. Logic LOW (false) is considered a voltage less than or equal to 0.6V. Logic HIGH (true) is
considered a voltage greater than or equal to 2.7V.
%Treehopper pins are true 5V-tolerant signals; consequently, you do not need any sort of logic-level conversion
or series-limiting resistor when using the pin as a digital input with a 5V source.
You can access the most recent \link Pin.digital_value digital_value\endlink, or use the \link
Pin.digital_value_changed digital_value_changed\endlink event to subscribe to change notifications.
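    For example (an illustrative sketch; the variable name and pin number are arbitrary):
    >>> button = board.pins[5]
    >>> button.mode = PinMode.DigitalInput
    >>> if button.digital_value:
    >>>     print("pin 5 reads HIGH")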
\subsection analog Analog inputs
Each Treehopper pin can be read using the on-board 12-bit ADC. There is no limit to the total number of analog
pins activated at any time.
\subsubsection out Output Format
When the pin is sampled and sent to the host, the value is simultaneously available to the user in three forms:
- \link Pin.adc_value adc_value\endlink -- the raw, 12-bit result from conversion.
- \link Pin.analog_value analog_value\endlink -- the normalized value of the ADC (from 0-1).
- \link Pin.analog_voltage analog_voltage\endlink -- the actual voltage at the pin (taking into account the
reference level).
There are OnChanged events associated with each of these properties:
- \link Pin.analog_voltage_changed analog_voltage_changed\endlink
- \link Pin.analog_value_changed analog_value_changed\endlink
- \link Pin.adc_value_changed adc_value_changed\endlink
Plus thresholds for each of these events that give you fine-grained control over when the event will fire:
- \link Pin.analog_voltage_threshold analog_voltage_threshold\endlink
- \link Pin.analog_value_threshold analog_value_threshold\endlink
- \link Pin.adc_value_threshold adc_value_threshold\endlink
\subsubsection Reference Levels
Each pin has a configurable \link Pin.reference_level reference_level\endlink that can be used to measure the pin
against. The possible reference levels are:
- 3.3V generated by the on-board LDO, rated at 1.5% accuracy (default).
- 3.7V (effective) reference derived from the on-chip 1.85V reference.
- 2.4V on-chip reference rated at 2.1% accuracy.
- 1.85V on-chip reference.
- 1.65V on-chip reference, 1.8% accurate.
- 3.3V (effective) reference that is derived from the on-chip 1.65V reference.
For most ratiometric applications --- i.e., when measuring a device whose output is ratioed to its power supply
    --- connect the sensor's power supply to the 3.3V supply pin on the %Treehopper and use the default 3.3V reference.
The other reference options are provided for advanced scenarios that involve reading from precision voltage
outputs accurately.
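    As an illustrative sketch (the pin number and reference choice here are arbitrary), selecting a reference and
    reading the resulting voltage might look like:
    >>> sensor = board.pins[10]
    >>> sensor.reference_level = ReferenceLevel.Vref_2V4
    >>> sensor.mode = PinMode.AnalogInput
    >>> print(sensor.analog_voltage)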
\subsection reads A note about pin reads
All of %Treehopper's pins configured as digital or analog inputs are sampled continuously onboard; when any pin
changes, this data is sent to the host device. When you access the digital or one of the analog value properties,
you're accessing the last received data. This makes property reads instantaneous --- keeping your GUI or
application running responsively.
For almost all applications, changes to digital or analog inputs are to be reacted to (like with switches,
interrupt outputs, encoders), or sampled (like with sensor outputs). Care must be taken, however, if you need to
synchronize pin reads with other functions.
For example, consider the case where you electrically short pins 0 and 1 together on the board, and then run this
code:
>>> pin0 = board.pins[0]
>>> pin1 = board.pins[1]
>>> pin0.mode = PinMode.PushPullOutput
>>> pin1.mode = PinMode.DigitalInput
>>> pin0.digital_value = 0
>>> if pin1.digital_value == pin0.digital_value:
>>> # we generally won't get here, since pin1's DigitalValue
>>> # isn't explicitly read from the pin when we access it; it only returns
>>> # the last value read from a separate pin-reading thread
>>> pass
    A workaround is to wait for two consecutive pin updates to be received before checking the pin's value. This can
    be accomplished by calling TreehopperUsb.await_pin_update().
However, pin updates are only sent to the computer when a pin's value changes, so if you wish to synchronously
sample a pin that might not change, you should set an unused pin as an analog input, which will almost certainly
guarantee a constant stream of pin updates:
>>> board.pins[19].mode = PinMode.AnalogInput # this will ensure we get continuous pin updates
>>> pin0 = board.pins[0]
>>> pin1 = board.pins[1]
    >>> pin0.mode = PinMode.PushPullOutput
    >>> pin1.mode = PinMode.DigitalInput
    >>> pin0.digital_value = 0
>>> board.await_pin_update() # this first report may have been captured before the output was written
>>> board.await_pin_update() # this report should have the effects of the digital output in it
    >>> if pin1.digital_value == pin0.digital_value:
>>> # we should always get here
>>> pass
\section softpwm SoftPWM functionality
Each %Treehopper pin can be used as a SoftPWM pin.
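    A minimal sketch follows (the pin number is arbitrary; per the property documentation below, duty_cycle is
    expressed as 0.0-1.0 and pulse_width in milliseconds):
    >>> pwm_pin = board.pins[5]
    >>> pwm_pin.mode = PinMode.SoftPwm   # equivalent to calling enable_pwm()
    >>> pwm_pin.duty_cycle = 0.25        # drive a 25% duty cycle...
    >>> pwm_pin.pulse_width = 1.5        # ...or set the high time directly, e.g. for a hobby servo
    Setting pulse_width switches the pin to pulse-width mode and recomputes the duty cycle; setting duty_cycle
    switches it back and recomputes the pulse width.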
\section performance Performance Considerations
Writing values to (or changing pin modes of) Treehopper pins will flush to the OS's USB layer immediately,
but there is no way of achieving guaranteed latency.
Occasional writes (say, on the order of every 20 ms or more) will usually flush to the port within a few hundred
microseconds. If your application is chatty, or the bus you're operating on has other devices (especially
isochronous devices like webcams), you may see long periods (a millisecond or more) of delay.
    Analog pins take a relatively long time to sample; if you enable many analog inputs, the effective sampling rate
    can drop by as much as half.
"""
## \cond PRIVATE
def __init__(self, board: 'TreehopperUsb', pin_number: int):
AdcPin.__init__(self, 12, 3.3)
DigitalIn.__init__(self)
DigitalOut.__init__(self)
self.name = "Pin {}".format(pin_number)
self._reference_level = ReferenceLevel.Vref_3V3 # type: ReferenceLevel
self._board = board
self._number = pin_number
self._mode = PinMode.Unassigned # type: PinMode
## \endcond
@property
def spi_module(self):
return self._board.spi
@property
def number(self):
"""Get the pin number"""
return self._number
@property
def digital_value(self) -> bool:
"""Digital value for the pin.
:type: bool
:getter: Returns the last digital value received
:setter: Sets the pin's value immediately
"""
return self._digital_value
@digital_value.setter
def digital_value(self, value):
self._digital_value = value
self._board._send_pin_config([self._number, PinConfigCommands.SetDigitalValue, self._digital_value])
@property
def reference_level(self) -> ReferenceLevel:
"""Gets or sets the ReferenceLevel for the pin"""
return self._reference_level
@reference_level.setter
def reference_level(self, reference_level: ReferenceLevel):
if self._reference_level == reference_level:
return
self._reference_level = reference_level
if self.mode == PinMode.AnalogInput:
self.make_analog_in()
def make_analog_in(self):
"""Make the pin an analog input"""
self._board._send_pin_config([self._number, PinConfigCommands.MakeAnalogInput, self._reference_level])
self._mode = PinMode.AnalogInput
def make_digital_open_drain_out(self):
"""Make the pin an open-drain output"""
self._board._send_pin_config([self._number, PinConfigCommands.MakeOpenDrainOutput])
self._mode = PinMode.OpenDrainOutput
def make_digital_push_pull_out(self):
"""Make the pin a push-pull output"""
self._board._send_pin_config([self._number, PinConfigCommands.MakePushPullOutput])
self._mode = PinMode.PushPullOutput
def make_digital_in(self):
"""Make the pin a digital input"""
self._board._send_pin_config([self._number, PinConfigCommands.MakeDigitalInput])
self._mode = PinMode.DigitalInput
def _update_value(self, b0, b1):
if self.mode == PinMode.AnalogInput:
AdcPin._update_value(self, b0 << 8 | b1)
elif self.mode == PinMode.DigitalInput:
DigitalIn._update_value(self, b0)
@property
def mode(self) -> PinMode:
"""The pin's mode.
:type: PinMode
:getter: Returns the current mode
:setter: Sets the pin's mode
"""
return self._mode
@mode.setter
def mode(self, value: PinMode):
if self._mode == value:
return
if self._mode == PinMode.Reserved and value != PinMode.Unassigned:
raise ValueError(
"This pin is reserved; you must disable the peripheral using it before interacting with it")
self._mode = value
if value == PinMode.AnalogInput:
self.make_analog_in()
elif value == PinMode.DigitalInput:
self.make_digital_in()
elif value == PinMode.OpenDrainOutput:
self.make_digital_open_drain_out()
self._digital_value = False
elif value == PinMode.PushPullOutput:
self.make_digital_push_pull_out()
self._digital_value = False
elif value == PinMode.SoftPwm:
self.enable_pwm()
@property
def duty_cycle(self):
"""Gets or sets the duty cycle of the PWM pin, from 0.0-1.0."""
return self._board._soft_pwm_manager.get_duty_cycle(self)
@duty_cycle.setter
def duty_cycle(self, value):
self._board._soft_pwm_manager.set_duty_cycle(self, value)
@property
def pulse_width(self):
"""Gets or sets the pulse width, in ms, of the pin."""
return self._board._soft_pwm_manager.get_pulse_width(self)
@pulse_width.setter
def pulse_width(self, value):
self._board._soft_pwm_manager.set_pulse_width(self, value)
def enable_pwm(self):
"""Enable the PWM functionality of this pin."""
self._mode = PinMode.SoftPwm
        self._board._send_pin_config([self._number, PinConfigCommands.MakePushPullOutput])  # SoftPWM runs on top of a push-pull output
self._board._soft_pwm_manager.start_pin(self)
def __str__(self):
if self.mode == PinMode.AnalogInput:
return "{}: Analog input, {:0.3f} volts".format(self.name, self.analog_voltage)
elif self.mode == PinMode.DigitalInput:
return "{}: Digital input, {}".format(self.name, self.digital_value)
elif self.mode == PinMode.PushPullOutput:
return "{}: Digital push-pull, {}".format(self.name, self.digital_value)
elif self.mode == PinMode.OpenDrainOutput:
return "{}: Digital open-drain, {}".format(self.name, self.digital_value)
### \cond PRIVATE
class SoftPwmPinConfig:
def __init__(self):
self.duty_cycle = 0
self.pulse_width = 0
self.use_pulse_width = True
self.ticks = 0
self.pin = [] # type: Pin
class SoftPwmManager:
def __init__(self, board: "TreehopperUsb"):
self._board = board
self._pins: Dict[int, SoftPwmPinConfig] = dict()
self._lock = Lock()
self._resolution = 0.25
def start_pin(self, pin: "Pin"):
if pin in self._pins.keys():
return
config = SoftPwmPinConfig()
config.pin = pin
self._pins[pin.number] = config
self.update_config()
def stop_pin(self, pin: "Pin"):
del self._pins[pin.number]
self.update_config()
def set_duty_cycle(self, pin: "Pin", duty: float):
with self._lock:
self._pins[pin.number].duty_cycle = duty
self._pins[pin.number].use_pulse_width = False
self.update_config()
def set_pulse_width(self, pin: "Pin", pulse_width: float):
with self._lock:
self._pins[pin.number].pulse_width = pulse_width
self._pins[pin.number].use_pulse_width = True
self.update_config()
def get_duty_cycle(self, pin: "Pin"):
        if pin.number not in self._pins:
return 0
return self._pins[pin.number].duty_cycle
def get_pulse_width(self, pin: "Pin"):
        if pin.number not in self._pins:
return 0
return self._pins[pin.number].pulse_width
def update_config(self):
if len(self._pins) > 0:
for config in self._pins.values():
if config.use_pulse_width:
config.ticks = config.pulse_width / self._resolution
config.duty_cycle = config.ticks / 65535
else:
config.ticks = config.duty_cycle * 65535
config.pulse_width = config.ticks * self._resolution
ordered_values = list(self._pins.values())
ordered_values.sort(key=lambda x: x.ticks)
count = len(ordered_values) + 1
config = [DeviceCommands.SoftPwmConfig, count]
time = 0
for j in range(count):
if j < len(ordered_values):
ticks = ordered_values[j].ticks - time
else:
ticks = 65535 - time
tmr_val = int(65535 - ticks)
if j == 0:
config.append(0)
else:
config.append(ordered_values[j - 1].pin.number)
config.append(tmr_val >> 8)
config.append(tmr_val & 0xff)
time += ticks
self._board._send_peripheral_config_packet(config)
else:
self._board._send_peripheral_config_packet([DeviceCommands.SoftPwmConfig, 0])
### \endcond
|
treehopper-electronics/treehopper-sdk
|
Python/treehopper/api/pin.py
|
Python
|
mit
| 19,107
|
# (C) Copyright 2014-2015 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Wrapper around python keystone client to assist in getting a properly scoped token and the registered service
endpoint for Monasca.
"""
from keystoneclient.v3 import client
from monascaclient import exc
class KSClient(object):
def __init__(self, **kwargs):
"""Get an endpoint and auth token from Keystone.
:param username: name of user
:param password: user's password
:param user_domain_id: unique identifier of domain username resides in (optional)
:param user_domain_name: name of domain for username (optional), if user_domain_id not specified
:param project_id: unique identifier of project
:param project_name: name of project
:param project_domain_name: name of domain project is in
:param project_domain_id: id of domain project is in
:param auth_url: endpoint to authenticate against
:param token: token to use instead of username/password
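        A minimal usage sketch (the endpoint and credential values below are placeholders, not defaults):

            ksclient = KSClient(auth_url='http://keystone.example.com:5000/v3',
                                username='monasca-user',
                                password='password',
                                project_name='monasca-project')
            token = ksclient.token              # project-scoped token
            monasca_url = ksclient.monasca_url  # monitoring endpoint from the service catalog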
"""
kc_args = {'auth_url': kwargs.get('auth_url'),
'insecure': kwargs.get('insecure'),
'timeout': kwargs.get('keystone_timeout')}
if kwargs.get('os_cacert'):
kc_args['cacert'] = kwargs.get('os_cacert')
if kwargs.get('project_id'):
kc_args['project_id'] = kwargs.get('project_id')
elif kwargs.get('project_name'):
kc_args['project_name'] = kwargs.get('project_name')
if kwargs.get('project_domain_name'):
kc_args['project_domain_name'] = kwargs.get('project_domain_name')
elif kwargs.get('domain_name'):
kc_args['project_domain_name'] = kwargs.get('domain_name') # backwards compat to 1.0.30 API
if kwargs.get('project_domain_id'):
kc_args['project_domain_id'] = kwargs.get('project_domain_id')
elif kwargs.get('domain_id'):
kc_args['project_domain_id'] = kwargs.get('domain_id') # backwards compat to 1.0.30 API
if kwargs.get('token'):
kc_args['token'] = kwargs.get('token')
else:
kc_args['username'] = kwargs.get('username')
kc_args['password'] = kwargs.get('password')
# when username not in the default domain (id='default'), supply user domain (as namespace)
if kwargs.get('user_domain_name'):
kc_args['user_domain_name'] = kwargs.get('user_domain_name')
if kwargs.get('user_domain_id'):
kc_args['user_domain_id'] = kwargs.get('user_domain_id')
self._kwargs = kwargs
self._keystone = client.Client(**kc_args)
self._token = None
self._monasca_url = None
@property
def token(self):
"""Token property
        Validate that the token is project-scoped and return it if it is.
        project_id and auth_token were fetched when the keystone client was created.
"""
if self._token is None:
if self._keystone.project_id:
self._token = self._keystone.auth_token
else:
raise exc.CommandError("No project id or project name.")
return self._token
@property
def monasca_url(self):
"""Return the monasca publicURL registered in keystone."""
if self._monasca_url is None:
if self._kwargs.get('region_name'):
self._monasca_url = self._keystone.service_catalog.url_for(
service_type=self._kwargs.get('service_type') or 'monitoring',
attr='region',
filter_value=self._kwargs.get('region_name'),
endpoint_type=self._kwargs.get('endpoint_type') or 'publicURL')
else:
self._monasca_url = self._keystone.service_catalog.url_for(
service_type=self._kwargs.get('service_type') or 'monitoring',
endpoint_type=self._kwargs.get('endpoint_type') or 'publicURL')
return self._monasca_url
|
sapcc/python-monascaclient
|
monascaclient/ksclient.py
|
Python
|
apache-2.0
| 4,615
|
#!/usr/bin/env python
# Copyright 2020 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script needs to be run once to prepare the machine for building SDK.
It will download required python dependencies and also install ccache on mac/linux.
ccache considerably improves the build times.
Please note that this script is agnostic of various desktop configurations.
For example, you can run it once regardless of whether you are following up with a build of x86 or x64.
Run this script from the root of the repository
Usage:
python scripts/gha/install_prereqs_desktop.py
"""
import utils
def main():
# Install protobuf on linux/mac if its not installed already
if not utils.is_command_installed('protoc'):
if utils.is_linux_os():
# sudo apt install protobuf-compiler
utils.run_command(['apt', 'install', '-y','protobuf-compiler'], as_root=True)
elif utils.is_mac_os():
# brew install protobuf
utils.run_command(['brew', 'install', 'protobuf'])
# Install go on linux/mac if its not installed already
if not utils.is_command_installed('go'):
if utils.is_linux_os():
# sudo apt install -y golang
utils.run_command(['apt', 'install', '-y','golang'], as_root=True)
elif utils.is_mac_os():
      # brew install go
utils.run_command(['brew', 'install', 'go'])
  # Install openssl on linux/mac if it's not installed already
  if not utils.is_command_installed('openssl'):
if utils.is_linux_os():
# sudo apt install -y openssl
utils.run_command(['apt', 'install', '-y','openssl'], as_root=True)
elif utils.is_mac_os():
      # brew install openssl
utils.run_command(['brew', 'install', 'openssl'])
# Install ccache on linux/mac if its not installed already
if not utils.is_command_installed('ccache'):
if utils.is_linux_os():
# sudo apt install ccache
utils.run_command(['apt', 'install', '-y', 'ccache'], as_root=True)
elif utils.is_mac_os():
# brew install ccache
utils.run_command(['brew', 'install', 'ccache'])
# Install clang-format on linux/mac if its not installed already
if not utils.is_command_installed('clang-format'):
if utils.is_linux_os():
# sudo apt install clang-format
utils.run_command(['apt', 'install', '-y','clang-format'], as_root=True)
elif utils.is_mac_os():
      # brew install clang-format
utils.run_command(['brew', 'install', 'clang-format'])
# Install required python dependencies.
# On Catalina, python2 in installed as default python.
# Example command:
# python3 -m pip install -r external/pip_requirements.txt --user
utils.run_command(
['python3' if utils.is_command_installed('python3') else 'python', '-m',
'pip', 'install', '-r', 'external/pip_requirements.txt', '--user'] )
if __name__ == '__main__':
main()
|
firebase/firebase-cpp-sdk
|
scripts/gha/install_prereqs_desktop.py
|
Python
|
apache-2.0
| 3,388
|
#!/usr/bin/env python
from setuptools import setup, find_packages
tests_require = [
]
setup(
name='kleenex',
version='0.15.0',
author='David Cramer',
author_email='dcramer@gmail.com',
description='A discovery plugin for Nose which relies on code coverage.',
url='http://github.com/dcramer/kleenex',
packages=find_packages(exclude=["tests"]),
zip_safe=False,
install_requires=[
'coverage>=3.5',
'nose>=0.9',
'simplejson',
'SQLAlchemy>=0.7',
],
entry_points={
'nose.plugins.0.10': [
'kleenex = kleenex.plugin:TestCoveragePlugin'
]
},
license='Apache License 2.0',
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
dcramer/kleenex
|
setup.py
|
Python
|
apache-2.0
| 1,030
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner jbryner@mozilla.com
import os
import sys
from configlib import getConfig, OptionParser
from datetime import datetime, timedelta
import json
import netaddr
from pytx import init
from pytx import ThreatIndicator
def isIPv4(ip):
try:
        # netaddr on its own considers 1 and 0 to be valid_ipv4
# so a little sanity check prior to netaddr.
# Use IPNetwork instead of valid_ipv4 to allow CIDR
if '.' in ip and len(ip.split('.'))==4:
# some ips are quoted
netaddr.IPNetwork(ip.strip("'").strip('"'))
return True
else:
return False
except:
return False
def isIPv6(ip):
try:
return netaddr.valid_ipv6(ip)
except:
return False
class message(object):
def __init__(self):
'''register our criteria for being passed a message
as a list of lower case strings to match with an rest endpoint
(i.e. blockip matches /blockip)
set the priority if you have a preference for order of plugins
0 goes first, 100 is assumed/default if not sent
Plugins will register in Meteor with attributes:
name: (as below)
description: (as below)
priority: (as below)
file: "plugins.filename" where filename.py is the plugin code.
Plugin gets sent main rest options as:
self.restoptions
self.restoptions['configfile'] will be the .conf file
used by the restapi's index.py file.
'''
self.registration = ['blockip']
self.priority = 10
self.name = "ThreatExchange"
self.description = "Facebook ThreatExchange"
# set my own conf file
# relative path to the rest index.py file
self.configfile = './plugins/facebookThreatExchange.conf'
self.options = None
if os.path.exists(self.configfile):
sys.stdout.write('found conf file {0}\n'.format(self.configfile))
self.initConfiguration()
# set up the threat exchange secret
init(self.options.appid, self.options.appsecret)
def initConfiguration(self):
myparser = OptionParser()
# setup self.options by sending empty list [] to parse_args
(self.options, args) = myparser.parse_args([])
# fill self.options with plugin-specific options
# change this to your default zone for when it's not specified
self.options.defaultTimeZone = getConfig('defaulttimezone', 'US/Pacific', self.configfile)
# threat exchange options
self.options.appid = getConfig('appid',
'',
self.configfile)
self.options.appsecret=getConfig('appsecret',
'',
self.configfile)
def sendToThreatExchange(self,
ipaddress=None,
comment='malicious IP'):
try:
if ipaddress is not None and self.options is not None:
maliciousActor=ThreatIndicator()
maliciousActor.indicator= ipaddress
maliciousActor.threat_type="MALICIOUS_IP"
maliciousActor.type="IP_ADDRESS"
maliciousActor.share_level="GREEN"
maliciousActor.status="MALICIOUS"
maliciousActor.privacy_type="VISIBLE"
maliciousActor.description= comment
maliciousActor.save()
sys.stdout.write('Sent {0} to threat exchange server\n'.format(ipaddress))
except Exception as e:
sys.stderr.write('Error while sending to threatexchange %s: %r\n' % (ipaddress, e))
def onMessage(self, request, response):
'''
request: http://bottlepy.org/docs/dev/api.html#the-request-object
response: http://bottlepy.org/docs/dev/api.html#the-response-object
'''
# format/validate request.json:
ipaddress = None
CIDR = None
comment = 'malicious IP'
duration = None
referenceID = None
userid = None
sendToThreatExchange = False
# loop through the fields of the form
# and fill in our values
try:
for i in request.json:
# were we checked?
if self.name in i.keys():
sendToThreatExchange = i.values()[0]
if 'ipaddress' in i.keys():
ipaddress = i.values()[0]
if 'duration' in i.keys():
duration = i.values()[0]
if 'comment' in i.keys():
comment = i.values()[0]
if 'referenceID' in i.keys():
referenceID = i.values()[0]
if 'userid' in i.keys():
userid = i.values()[0]
# are we configured?
if self.options is None:
sys.stderr.write("ThreatExchange requested but not configured\n")
sendToThreatExchange = False
if sendToThreatExchange and ipaddress is not None:
#figure out the CIDR mask
if isIPv4(ipaddress) or isIPv6(ipaddress):
ipcidr=netaddr.IPNetwork(ipaddress)
if not ipcidr.ip.is_loopback() \
and not ipcidr.ip.is_private() \
and not ipcidr.ip.is_reserved():
# split the ip vs cidr mask
# threat exchange can't accept CIDR addresses
# so send the most significant bit
ipaddress, CIDR = str(ipcidr).split('/')
self.sendToThreatExchange(ipaddress, comment)
sys.stdout.write ('Sent {0} to threat exchange\n'.format(ipaddress))
except Exception as e:
sys.stderr.write('Error handling request.json %r \n'% (e))
return (request, response)
|
serbyy/MozDef
|
rest/plugins/facebookThreatExchange.py
|
Python
|
mpl-2.0
| 6,352
|
#!/usr/bin/env zmpython
#
# ***** BEGIN LICENSE BLOCK *****
# Zimbra Collaboration Suite Server
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 Zimbra, Inc.
#
# The contents of this file are subject to the Zimbra Public License
# Version 1.3 ("License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.zimbra.com/license.
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.
# ***** END LICENSE BLOCK *****
#
import fileinput
import subprocess
import re
from datetime import date
# Number of days of log files to read
numdays = 7
# Find the current total number of log files
p = subprocess.Popen('ls /opt/zimbra/log/access_log* | wc -l', shell=True, stdout=subprocess.PIPE)
numlogfiles = int(p.stdout.read())
# Get the list of log files to read
lscmdfmt = 'ls /opt/zimbra/log/access_log* | tail -%d | head -%d'
if numlogfiles > numdays:
lscmd = lscmdfmt % (numdays + 1, numdays)
else:
lscmd = lscmdfmt % (numdays, numlogfiles - 1)
p = subprocess.Popen(lscmd, shell=True, stdout=subprocess.PIPE)
resultmap = {}
for file in p.stdout.readlines():
file = file.rstrip()
subprocess.call('echo Reading %s ..' % file, shell=True)
for line in fileinput.input([file]):
l = line.split('"')
if len(l) < 7:
continue
ua = l[5].rstrip()
if not re.match('(zm.*|ZCS.*|zclient.*|.*ZCB.*|Jakarta.*|curl.*|-)', ua) is None:
continue
requrl = l[1]
if not re.match('(.*/zimbraAdmin.*|.*/service/admin.*)', requrl) is None:
continue
result = l[2].split()[0]
if result != '200':
continue
ip = line.split('-')[0].rstrip()
if (ip == '127.0.0.1' or ip == '::1'):
continue
key = ua, ip
curval = resultmap.get(key)
if curval is None:
resultmap[key] = 1
else:
resultmap[key] = curval + 1
reportfile = '/opt/zimbra/zmstat/client_usage_report_%s.csv' % (date.today().isoformat())
subprocess.call('rm -f %s' % reportfile, shell=True)
csv = open(reportfile, 'w')
subprocess.call('echo Writing %s ..' % reportfile, shell=True)
csv.write('"user_agent","client_IP","req_count"\n')
for key, value in resultmap.iteritems():
csv.write('"%s","%s","%s"\n' % (key[0], key[1], value))
csv.close()
|
nico01f/z-pec
|
ZimbraServer/src/libexec/client_usage_report.py
|
Python
|
mit
| 2,449
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('judge', '0026_auto_20151014_0813'),
]
operations = [
migrations.AlterField(
model_name='problem',
name='maxScore',
field=models.DecimalField(default=0, decimal_places=4, max_digits=8),
),
migrations.AlterField(
model_name='solution',
name='score',
field=models.DecimalField(default=0, decimal_places=4, max_digits=8),
),
migrations.AlterField(
model_name='userproblemdata',
name='maxScore',
field=models.DecimalField(default=0, decimal_places=4, max_digits=8),
),
]
|
Alaxe/judgeSystem
|
judge/migrations/0027_auto_20151014_0817.py
|
Python
|
gpl-2.0
| 814
|
'''
This illustrates the NonUniformImage class, which still needs
an axes method interface; either a separate interface, or a
generalization of imshow.
'''
from matplotlib.pyplot import figure, show
import numpy as np
from matplotlib.image import NonUniformImage
from matplotlib import cm
interp = 'nearest'
x = np.linspace(-4, 4, 9)
x2 = x**3
y = np.linspace(-4, 4, 9)
#print('Size %d points' % (len(x) * len(y)))
z = np.sqrt(x[np.newaxis, :]**2 + y[:, np.newaxis]**2)
fig = figure()
fig.suptitle('NonUniformImage class')
ax = fig.add_subplot(221)
im = NonUniformImage(ax, interpolation=interp, extent=(-4, 4, -4, 4),
cmap=cm.Purples)
im.set_data(x, y, z)
ax.images.append(im)
ax.set_xlim(-4, 4)
ax.set_ylim(-4, 4)
ax.set_title(interp)
ax = fig.add_subplot(222)
im = NonUniformImage(ax, interpolation=interp, extent=(-64, 64, -4, 4),
cmap=cm.Purples)
im.set_data(x2, y, z)
ax.images.append(im)
ax.set_xlim(-64, 64)
ax.set_ylim(-4, 4)
ax.set_title(interp)
interp = 'bilinear'
ax = fig.add_subplot(223)
im = NonUniformImage(ax, interpolation=interp, extent=(-4, 4, -4, 4),
cmap=cm.Purples)
im.set_data(x, y, z)
ax.images.append(im)
ax.set_xlim(-4, 4)
ax.set_ylim(-4, 4)
ax.set_title(interp)
ax = fig.add_subplot(224)
im = NonUniformImage(ax, interpolation=interp, extent=(-64, 64, -4, 4),
cmap=cm.Purples)
im.set_data(x2, y, z)
ax.images.append(im)
ax.set_xlim(-64, 64)
ax.set_ylim(-4, 4)
ax.set_title(interp)
show()
|
bundgus/python-playground
|
matplotlib-playground/examples/pylab_examples/image_nonuniform.py
|
Python
|
mit
| 1,509
|
# -*- coding: utf-8 -*-
import pyvabamorf.vabamorf as vm
import atexit
if not vm.FSCInit():
raise Exception('Could not initiate pyvabamorf library. FSCInit() returned false!')
@atexit.register
def terminate():
vm.FSCTerminate()
from morf import analyze, synthesize
from morf import PyVabamorf
|
estnltk/pyvabamorf
|
pyvabamorf/__init__.py
|
Python
|
lgpl-3.0
| 304
|
# Copyright 2015 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import hashlib
import inspect
import six
from oslo_versionedobjects import fields as object_fields
from ironic.common import utils
class IntegerField(object_fields.IntegerField):
pass
class UUIDField(object_fields.UUIDField):
pass
class StringField(object_fields.StringField):
pass
class StringAcceptsCallable(object_fields.String):
@staticmethod
def coerce(obj, attr, value):
if callable(value):
value = value()
return super(StringAcceptsCallable, StringAcceptsCallable).coerce(
obj, attr, value)
class StringFieldThatAcceptsCallable(object_fields.StringField):
"""Custom StringField object that allows for functions as default
In some cases we need to allow for dynamic defaults based on configuration
options, this StringField object allows for a function to be passed as a
default, and will only process it at the point the field is coerced
"""
AUTO_TYPE = StringAcceptsCallable()
def __repr__(self):
default = self._default
if (self._default != object_fields.UnspecifiedDefault and
callable(self._default)):
default = "%s-%s" % (
self._default.__name__,
hashlib.md5(inspect.getsource(
self._default).encode()).hexdigest())
return '%s(default=%s,nullable=%s)' % (self._type.__class__.__name__,
default, self._nullable)
class DateTimeField(object_fields.DateTimeField):
pass
class BooleanField(object_fields.BooleanField):
pass
class ListOfStringsField(object_fields.ListOfStringsField):
pass
class ObjectField(object_fields.ObjectField):
pass
class FlexibleDict(object_fields.FieldType):
@staticmethod
def coerce(obj, attr, value):
if isinstance(value, six.string_types):
value = ast.literal_eval(value)
return dict(value)
class FlexibleDictField(object_fields.AutoTypedField):
AUTO_TYPE = FlexibleDict()
# TODO(lucasagomes): In our code we've always translated None to {},
# this method makes this field to work like this. But probably won't
# be accepted as-is in the oslo_versionedobjects library
def _null(self, obj, attr):
if self.nullable:
return {}
super(FlexibleDictField, self)._null(obj, attr)
class EnumField(object_fields.EnumField):
pass
class NotificationLevel(object_fields.Enum):
DEBUG = 'debug'
INFO = 'info'
WARNING = 'warning'
ERROR = 'error'
CRITICAL = 'critical'
ALL = (DEBUG, INFO, WARNING, ERROR, CRITICAL)
def __init__(self):
super(NotificationLevel, self).__init__(
valid_values=NotificationLevel.ALL)
class NotificationLevelField(object_fields.BaseEnumField):
AUTO_TYPE = NotificationLevel()
class NotificationStatus(object_fields.Enum):
START = 'start'
END = 'end'
ERROR = 'error'
SUCCESS = 'success'
ALL = (START, END, ERROR, SUCCESS)
def __init__(self):
super(NotificationStatus, self).__init__(
valid_values=NotificationStatus.ALL)
class NotificationStatusField(object_fields.BaseEnumField):
AUTO_TYPE = NotificationStatus()
class MACAddress(object_fields.FieldType):
@staticmethod
def coerce(obj, attr, value):
return utils.validate_and_normalize_mac(value)
class MACAddressField(object_fields.AutoTypedField):
AUTO_TYPE = MACAddress()
|
ruyang/ironic
|
ironic/objects/fields.py
|
Python
|
apache-2.0
| 4,120
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from melange.common import exception
from melange.db import db_api
from melange import ipam
class DbBasedIpGenerator(object):
def __init__(self, ip_block):
self.ip_block = ip_block
def next_ip(self):
allocatable_address = db_api.pop_allocatable_address(
ipam.models.AllocatableIp, ip_block_id=self.ip_block.id)
if allocatable_address is not None:
return allocatable_address
ips = netaddr.IPNetwork(self.ip_block.cidr)
allocatable_ip_counter = (self.ip_block.allocatable_ip_counter
or int(ips[0]))
if(allocatable_ip_counter > int(ips[-1])):
raise exception.NoMoreAddressesError
address = str(netaddr.IPAddress(allocatable_ip_counter))
self.ip_block.update(allocatable_ip_counter=allocatable_ip_counter + 1)
return address
|
rcbops/melange-buildpackage
|
melange/ipv4/db_based_ip_generator.py
|
Python
|
apache-2.0
| 1,576
|
"""
Wrapper for loading templates from "static" templates directories in INSTALLED_APPS
packages. (Code borrowed from django.template.loaders.app_directories.py)
"""
import os
import sys
from django.apps import apps
from django.conf import settings
from django.template import TemplateDoesNotExist
from django.template.loaders.base import Loader
from django.utils._os import safe_join
from django.utils import six
def calculate_app_template_dirs():
if six.PY2:
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
app_template_dirs = []
for app_config in apps.get_app_configs():
if not app_config.path:
continue
template_dir = os.path.join(app_config.path, 'static', app_config.name, 'templates')
if os.path.isdir(template_dir):
if six.PY2:
template_dir = template_dir.decode(fs_encoding)
app_template_dirs.append(template_dir)
return tuple(app_template_dirs)
# At compile time, cache the directories to search.
app_template_dirs = calculate_app_template_dirs()
class AppStaticDirectoriesLoader(Loader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not template_dirs:
template_dirs = app_template_dirs
for template_dir in template_dirs:
try:
yield safe_join(template_dir, template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of template_dir.
pass
def load_template_source(self, template_name, template_dirs=None):
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
pass
raise TemplateDoesNotExist(template_name)
|
storeys/django-storeys
|
storeys/templateloaders.py
|
Python
|
bsd-2-clause
| 2,341
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
'''
Bi-directional JsonRPC Server and Client for Kamaelia.
Copyright (c) 2009 Rasjid Wilcox and CDG Computer Services.
Licensed to the BBC under a Contributor Agreement
'''
import Axon
from Axon.Handle import Handle
from Axon.background import background
from Axon.Ipc import shutdownMicroprocess, producerFinished
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Chassis.Graphline import Graphline
from Kamaelia.Internet.TCPClient import TCPClient
from jsonrpc import JsonRpc20, RPCFault, METHOD_NOT_FOUND, INTERNAL_ERROR, ERROR_MESSAGE, REQUEST, RESPONSE, ERROR, json_split
from traceback import format_exc
from collections import defaultdict
import types, inspect, Queue
# FIXME: add protection from Denial of Service
# decorators to mark function args as either
# callback requests or callback notifications
def cb_request(arg_name, response_func, convert_args = False):
def cb_request_dec(func):
if not hasattr(func, '_callbacks_'):
func._callbacks_ = {}
if response_func:
func._callbacks_[arg_name] = ResponseCallback(response_func, convert_args)
else:
func._callbacks_[arg_name] = None
return func
return cb_request_dec
def cb_notification(arg_name):
return cb_request(arg_name, None)
class ResponseCallback(object):
def __init__(self, callback_func, convert_args = False):
'''if convert_args then convert a list, tuple or dict to args in standard jsonrpc way'''
self.callback_func = callback_func
self.convert_args = convert_args
class RequestOrNotification(object):
'If response_callback is None, then this is a notification'
def __init__(self, method, params = None, response_callback = None):
if response_callback: assert isinstance(response_callback, ResponseCallback)
self.method = method
self.params = params
self.response_callback = response_callback
class JsonRpcProtocol(object):
'Protocol Factory for JsonRpc over TCP'
def __init__(self, task_runner, id_prefix = 'server', debug = 0):
self.task_runner = task_runner
self.id_prefix = id_prefix
self.debug = debug
self.dispatch_table = {}
self.callback_table = defaultdict(dict) # try key on actual function
self.requests_on_connect = []
self.requests_on_connect_wait = None # id of request to wait for before sending next
self.requests_sent = {}
self._request_id_num = 1
self.connections = []
def get_request_id(self, request):
req_num = self._request_id_num
if self.id_prefix:
request_id = '%s-%s' % (self.id_prefix, req_num)
else:
request_id = req_num
assert isinstance(request, RequestOrNotification)
self.requests_sent[request_id] = request.response_callback
if request.response_callback:
self.add_callbacks(request.response_callback)
self._request_id_num += 1
return request_id
def add_callbacks(self, function):
if function in self.callback_table:
# already in callback table, so just return
return
if hasattr(function, '_callbacks_'): # 'response_callback'):
for arg_name, response_callback in function._callbacks_.items():
name = function.__name__
self.callback_table[function][arg_name] = response_callback
print 'Added callback for method %s, argument %s' % (name, arg_name)
try:
# args by position - offset needed for instance methods etc
offset = 1 if (hasattr(function, 'im_self') and function.im_self) else 0
arg_num = inspect.getargspec(function)[0].index(arg_name) - offset
self.callback_table[function][arg_num] = response_callback
print 'Added callback for method %s, arg_num %s' % (name, arg_num)
except ValueError:
print 'WARNING: unable to determine argument position for callback on method %s, argument %s.\n' \
'Automatic callback conversion will not occur if called by position.' % (name, arg_name)
def add_function(self, function, name = None):
if name is None:
name = function.__name__
if name in self.dispatch_table:
raise ValueError('rpc method %s already exists!' % name)
self.dispatch_table[name] = function
print 'Added rpc method %s' % name
self.add_callbacks(function)
def add_instance(self, instance, prefix = None):
'''Add all callable attributes of an instance not starting with '_'.
If prefix is none, then the rpc name is just <method_name>,
otherwise it is '<prefix>.<method_name>
'''
for name in dir(instance):
if name[0] != '_':
func = getattr(instance, name, None)
if type(func) == types.MethodType:
if prefix:
rpcname = '%s.%s' % (prefix, func.__name__)
else:
rpcname = func.__name__
self.add_function(func, name = rpcname)
def add_request_on_connect(self, req_or_notification, wait = True):
self.requests_on_connect.append( (req_or_notification, wait) )
def __call__(self, **kwargs):
if self.debug >= 1:
print 'Creating new Protocol Factory: ', str(kwargs)
connection = Graphline( SPLITTER = JsonSplitter(debug = self.debug, factory = self, **kwargs),
DESERIALIZER = Deserializer(debug = self.debug, factory = self, **kwargs),
DISPATCHER = Dispatcher(debug = self.debug, factory = self, **kwargs),
RESPONSESERIALIZER = ResponseSerializer(debug = self.debug, factory = self, **kwargs),
REQUESTSERIALIZER = RequestSerializer(debug = self.debug, factory = self, **kwargs),
FINALIZER = Finalizer(debug = self.debug, factory = self, **kwargs),
TASKRUNNER = self.task_runner,
linkages = { ('self', 'inbox') : ('SPLITTER', 'inbox'),
('self', 'request') : ('REQUESTSERIALIZER', 'request'),
('SPLITTER', 'outbox') : ('DESERIALIZER', 'inbox'),
('DESERIALIZER', 'outbox'): ('DISPATCHER', 'inbox'),
('DESERIALIZER', 'error'): ('RESPONSESERIALIZER', 'inbox'),
('DISPATCHER', 'outbox') : ('TASKRUNNER', 'inbox'),
('DISPATCHER', 'result_out') : ('RESPONSESERIALIZER', 'inbox'),
('DISPATCHER', 'request_out') : ('REQUESTSERIALIZER', 'request'),
('RESPONSESERIALIZER', 'outbox') : ('self', 'outbox'),
('REQUESTSERIALIZER', 'outbox'): ('self', 'outbox'),
('self', 'control') : ('SPLITTER', 'control'),
('SPLITTER', 'signal') : ('DESERIALIZER', 'control'),
('DESERIALIZER', 'signal'): ('DISPATCHER', 'control'),
('DISPATCHER', 'signal') : ('RESPONSESERIALIZER', 'control'),
('RESPONSESERIALIZER', 'signal') : ('REQUESTSERIALIZER', 'control'),
('REQUESTSERIALIZER', 'signal') : ('FINALIZER', 'control'),
('FINALIZER', 'signal') : ('self', 'signal'),
('DISPATCHER', 'wake_requester') : ('REQUESTSERIALIZER', 'control'),
} )
self.connections.append(connection)
return connection
class JsonSplitter(Axon.Component.component):
Inboxes = { 'inbox': 'accepts arbitrary (sequential) pieces of json stings',
'control': 'incoming shutdown requests' }
Outboxes = { 'outbox': 'a single complete json string',
'signal': 'outgoing shutdown requests' }
def __init__(self, **kwargs):
super(JsonSplitter, self).__init__(**kwargs)
self.partial_data = ''
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if self.debug >= 4: print 'Got data: <<%s>>' % data
Json_strings, self.partial_data = json_split(self.partial_data + data)
yield 1
# send to dispatch
for message in Json_strings:
if self.debug >= 3: print 'Sent to deserializer: %s' % message
self.send(message, 'outbox')
yield 1
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class Deserializer(Axon.Component.component):
Inboxes = {'inbox': 'complete json strings',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'the deserialized request/notification or result',
'error': 'the exception if there was an error deserializing',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(Deserializer, self).__init__(**kwargs)
self.serializer = JsonRpc20() # FIXME: make this a paramater
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if self.debug >=1: print '--> %s' % data
try:
request = self.serializer.loads_request_response(data)
self.send(request, 'outbox')
except RPCFault, error:
self.send( (error, None), 'error')
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class CallbackProxy(object):
def __init__(self, method_name, response_callback):
self.method_name = method_name
self.response_callback = response_callback
self.params = None
self.component = None
self.outbox_name = None
def set_outbox(self, component, outbox_name):
self.component = component
self.outbox_name = outbox_name
def __call__(self, params = None):
if not self.component or not self.outbox_name:
raise ValueError('component or outbox_name not set')
req = RequestOrNotification(self.method_name, params, self.response_callback)
self.component.send(req, self.outbox_name)
class Dispatcher(Axon.Component.component):
Inboxes = {'inbox': 'rpc request/notification or response objects',
'result_in': 'the function/method result or RequestOrNotification',
'control': 'shutdown messages',
}
Outboxes = {'outbox': '(return_component, method, args, id) tuple for the worker. NOTE: return_component == (self, <boxname>)',
'result_out': 'the result of the request (relayed from result_in)',
'request_out': 'requests from callback functions',
'signal': 'shutdown messages',
'wake_requester': 'wake up RequestSerializer',
}
def __init__(self, **kwargs):
super(Dispatcher, self).__init__(**kwargs)
if self.debug >= 3: print 'Created %s' % repr(self)
def _do_dispatch(self, dispatch_func, args, id, notification, convert_args = True):
'Assumes args is always a list, tuple or dict'
kwargs = {}
if convert_args:
if isinstance(args, dict):
# args by name
args, kwargs = [], args
# find any callback args and replace with callback proxy
for arg_name in set(self.factory.callback_table[dispatch_func].keys()).intersection(set(kwargs.keys())):
kwargs[arg_name] = CallbackProxy(kwargs[arg_name], self.factory.callback_table[dispatch_func][arg_name])
else:
arg_nums = range(len(args))
for arg_num in set(self.factory.callback_table[dispatch_func].keys()).intersection(set(arg_nums)):
args[arg_num] = CallbackProxy(args[arg_num], self.factory.callback_table[dispatch_func][arg_num])
else:
args = [args]
return_box = (self, 'result_in')
dispatch_info = (dispatch_func, args, kwargs)
return_info = (id, notification)
if self.debug >= 3: print 'Sending: %r\n%r\n%r' % (return_box, dispatch_info, return_info)
self.send( (return_box, dispatch_info, return_info), 'outbox')
def _process_request(self, request):
if self.debug >= 3: print 'Got dispatch request: %s' % repr(request)
notification = False
if len(request) == 2:
notification = True
method, args = request
id = None
else:
method, args, id = request
if not notification and method not in self.factory.dispatch_table:
response = ( RPCFault(METHOD_NOT_FOUND, ERROR_MESSAGE[METHOD_NOT_FOUND]), id)
self.send(response, 'result_out')
else:
dispatch_func = self.factory.dispatch_table[method]
self._do_dispatch(dispatch_func, args, id, notification)
def _process_response(self, response):
print '=== Response: %s ===' % repr(response)
result, id = response
response_callback = None
if id == self.factory.requests_on_connect_wait:
self.factory.requests_on_connect_wait = None # clear waiting on this request
if len(self.factory.requests_on_connect):
self.send(Axon.Ipc.notify(self, id), 'wake_requester') # wake requester so it can send pending requests
# look up response callback
try:
response_callback = self.factory.requests_sent.pop(id)
assert isinstance(response_callback, ResponseCallback)
except KeyError:
print 'ERROR: Invalid response id %s' % id
if result is None:
return
if response_callback.convert_args and type(result) not in (types.ListType, types.TupleType, types.DictionaryType):
            print "ERROR: Can't convert response result to procedure arguments - must be List, Tuple or Dict"
return
if not response_callback:
print 'ERROR: Got result for a notification or request with no callback defined'
else:
self._do_dispatch(response_callback.callback_func, result, id, True, convert_args = response_callback.convert_args) # not really a notification - but we don't return a response to a response
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if data[0] == REQUEST:
request = data[1]
self._process_request(request)
elif data[0] == RESPONSE:
# got a response to a request we sent
response = data[1]
self._process_response(response)
elif data[0] == ERROR:
# FIXME: handle error responses
print '!!! GOT ERROR RESPONSE: %s' % repr(data[1])
else:
# FIXME
print 'INTERNAL ERROR: Unexpected message type'
if self.dataReady('result_in'):
data = self.recv('result_in')
result, (id, notification) = data
if isinstance(result, RequestOrNotification):
if self.debug >= 3: print 'Got RequestOrNotification: %s' % result
self.send(result, 'request_out')
else:
if self.debug >= 2: print 'Got result for id %s:\n %s' % (id, repr(result))
if not notification:
self.send((result, id), 'result_out')
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class ResponseSerializer(Axon.Component.component):
Inboxes = {'inbox': '(result, id) tuple',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'the json-rpc response',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(ResponseSerializer, self).__init__(**kwargs)
self.serializer = JsonRpc20() # FIXME: make this a paramater
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
result, id = self.recv('inbox')
if self.debug >= 3: print 'Got result. Id: %r, Value: %r' % (id, result)
if isinstance(result, RPCFault):
response = self.serializer.dumps_error( result, id)
elif isinstance(result, Exception):
# procedure exception - FIXME: log to logger!
print format_exc()
response = self.serializer.dumps_error( RPCFault(INTERNAL_ERROR, ERROR_MESSAGE[INTERNAL_ERROR]), id )
else:
try:
response = self.serializer.dumps_response(result, id)
except RPCFault, e:
response = self.serializer.dumps_error( e, id)
# serialization error - log to logger!
print format_exc()
response = self.serializer.dumps_error( RPCFault(INTERNAL_ERROR, ERROR_MESSAGE[INTERNAL_ERROR]), id )
response += '\r\n' # make things easier to read if testing with telnet or netcat
if self.debug >= 1:
print '<-- %s' % response
self.send(response, 'outbox')
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class RequestSerializer(Axon.Component.component):
Inboxes = {'inbox': 'not used',
'request' : 'incoming RequestOrNotification objects',
'control': 'wakeup & shutdown messages',
}
Outboxes = {'outbox': 'the json-rpc request / notification',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(RequestSerializer, self).__init__(**kwargs)
self.serializer = JsonRpc20() # FIXME: make this a paramater
if self.debug >= 3: print 'Created %s' % repr(self)
def _send_req_or_notification(self, req, wait = False):
assert isinstance(req, RequestOrNotification)
if req.response_callback:
id = self.factory.get_request_id(req) # this adds the id to self.requests_sent
if wait:
self.factory.requests_on_connect_wait = id
output = self.serializer.dumps_request(req.method, req.params, id) if req.params \
else self.serializer.dumps_request(req.method, id = id)
else:
output = self.serializer.dumps_notification(req.method, req.params) if req.params \
else self.serializer.dumps_notification(req.method)
output += '\r\n' # make things easier to read if testing with telnet or netcat
if self.debug >= 1: print '<-- %s' % output
self.send(output, 'outbox')
def main(self):
while not self.shutdown():
if len(self.factory.requests_on_connect) and not self.factory.requests_on_connect_wait:
request, wait = self.factory.requests_on_connect.pop(0)
self._send_req_or_notification(request, wait)
if self.dataReady('request'):
req = self.recv('request')
self._send_req_or_notification(req)
if not self.anyReady() and (len(self.factory.requests_on_connect) == 0 or self.factory.requests_on_connect_wait) :
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class Finalizer(Axon.Component.component):
Inboxes = {'inbox': 'not used',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'not used',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(Finalizer, self).__init__(**kwargs)
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
# FIXME: Log any outstanding request reponses missing
print 'Connection is being closed'
for req_id in self.factory.requests_sent:
print 'WARNING: No response seen to request %s' % req_id
self.send(msg, 'signal')
return True
return False
# -------------------------------------------
def ThreadedTaskRunner(num_workers = 5, debug = 0):
worker_list = []
for dummy in range(num_workers):
worker = ThreadedWorker(debug = debug)
worker.activate()
worker_list.append(worker)
manager = TaskManager(worker_list, debug = debug)
return manager
class ThreadedWorker(Axon.ThreadedComponent.threadedcomponent):
Inboxes = {'inbox': '(function, args, kwargs) tuple',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'the result or exception or callback request',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(ThreadedWorker, self).__init__(**kwargs)
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
func, args, kwargs = self.recv('inbox')
for arg in args:
if isinstance(arg, CallbackProxy):
arg.set_outbox(self, 'outbox')
for arg_name in kwargs:
if isinstance(kwargs[arg_name], CallbackProxy):
kwargs[arg_name].set_outbox(self, 'outbox')
if self.debug >= 3: print 'Worker %s got data: %r, %r, %r' % (id(self), func, args, kwargs)
try:
result = func(*args, **kwargs)
except Exception, error:
result = error
if self.debug >= 3: print 'Worker %s got result: %r' % (id(self), result)
self.send(result, 'outbox')
if not self.anyReady():
self.pause()
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class TaskManager(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent):
Inboxes = {'inbox': '(return_box, dispatch_info, return_info) tuple',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'not used',
'signal': 'shutdown messages',
}
'''
return_box = (<sending_component>, <return_box_name>)
dispatch_info = (self.factory.dispatch_table[method], args, kwargs)
return_info = (id, notification)
'''
def __init__(self, workers, debug = 0):
super(TaskManager, self).__init__()
self.debug = debug
self.workers = workers # a list of worker components
self.task_data = [ None for x in range(len(workers)) ] # an available worker has None here
self.work_queue = []
self.worker_box_names = []
self.links = []
# make connections to the workers
for worker_num in range(len(self.workers)):
outbox_name = self.addOutbox('to_worker_%s' % worker_num)
inbox_name = self.addInbox('from_worker_%s' % worker_num)
signal_name = self.addOutbox('signal_worker_%s' % worker_num)
boxnames = {'to': outbox_name, 'from': inbox_name, 'signal': signal_name}
self.worker_box_names.append(boxnames)
outlink = self.link((self, outbox_name), (self.workers[worker_num], 'inbox'))
control_link = self.link((self, signal_name), (self.workers[worker_num], 'control'))
inlink = self.link((self.workers[worker_num], 'outbox'), (self, inbox_name))
self.links.append((outlink, control_link, inlink))
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if self.debug >= 3: print 'Task Manager got data: %s' % repr(data)
self.work_queue.append(data)
if len(self.work_queue) != 0 and None in self.task_data:
return_box, dispatch_info, return_info = self.work_queue.pop(0)
result_box_name = self.addOutbox('%s-%s-%s' % (id(return_box), id(dispatch_info), id(return_info)))
self.link((self, result_box_name), return_box)
worker_num = self.task_data.index(None) # pick the first free worker
self.task_data[worker_num] = (result_box_name, return_box, return_info)
if self.debug >= 3:
print 'Sending task data to worker %s (box %s)' % (worker_num, self.worker_box_names[worker_num]['to'])
print 'Dispatch:', dispatch_info
self.send(dispatch_info, self.worker_box_names[worker_num]['to'])
if self.anyReady():
for worker_num in range(len(self.workers)):
boxname = self.worker_box_names[worker_num]['from']
if self.dataReady(boxname):
data = self.recv(boxname)
if self.debug >= 3: print 'TaskManager got data %r on boxname %s' % (data, boxname)
result_box_name, return_box, return_info = self.task_data[worker_num]
self.send( (data, return_info), result_box_name) # post the result
if not isinstance(data, RequestOrNotification):
if self.debug >= 3: print '** Doing unlink ** on %s' % result_box_name
self.unlink( (self, result_box_name), return_box)
self.deleteOutbox(result_box_name)
self.task_data[worker_num] = None # mark that worker as done
yield 1
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
for boxnames in self.worker_box_names:
self.send(msg, boxnames['signal'])
self.send(msg, 'signal')
return True
return False
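# A sketch of the work item a client sends to the TaskManager's 'inbox', following the
# tuple documented above (the concrete names here are illustrative assumptions, not
# values used elsewhere in this file):
#   return_box    = (requesting_component, 'result_inbox_name')
#   dispatch_info = (self.factory.dispatch_table['some_method'], args, kwargs)
#   return_info   = (request_id, notification)
#   requesting_component.send((return_box, dispatch_info, return_info), 'to_task_manager')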
class JsonRPCBase(object):
'Base class for JsonRPC clients and servers'
def __init__(self, workers, debug):
self.workers = workers
self.debug = debug
taskrunner = ThreadedTaskRunner(num_workers = self.workers, debug = self.debug)
self.jsonprotocol = JsonRpcProtocol(taskrunner, debug = self.debug)
def add_function(self, func):
self.jsonprotocol.add_function(func)
def add_instance(self, instance):
self.jsonprotocol.add_instance(instance)
def add_request_on_connect(self, req_or_notification, wait = True):
self.jsonprotocol.add_request_on_connect(req_or_notification, wait)
class JsonRpcTCPServer(JsonRPCBase):
def __init__(self, portnumber, workers = 5, debug = 1):
JsonRPCBase.__init__(self, workers = workers, debug = debug)
self.portnumber = portnumber
self.server = None
def start(self):
if self.debug: print 'Starting JSON-RPC server on port %s' % self.portnumber
self.server = ServerCore( protocol = self.jsonprotocol, port = self.portnumber )
self.server.run()
#FIXME: some way to stop!
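# A minimal usage sketch for the server above (assumed typical use, not taken from
# this file): expose one function over JSON-RPC on TCP port 8000.
#   def echo(text):
#       return text
#   server = JsonRpcTCPServer(8000, workers = 2, debug = 0)
#   server.add_function(echo)
#   server.start()   # blocks; as the FIXME notes, there is no stop yet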
class JsonRpcTCPClient(JsonRPCBase):
def __init__(self, host, portnumber, delay = 0, workers = 5, debug = 1):
JsonRPCBase.__init__(self, workers = workers, debug = debug)
self.host = host
self.portnumber = portnumber
self.delay = delay
self.client = Graphline(
TCPCLIENT = TCPClient(self.host, self.portnumber, self.delay),
PROTOCOL = self.jsonprotocol(),
linkages = { ('TCPCLIENT', 'outbox') : ('PROTOCOL', 'inbox'),
('PROTOCOL', 'outbox') : ('TCPCLIENT', 'inbox'),
('TCPCLIENT', 'signal') : ('PROTOCOL', 'control'),
('PROTOCOL', 'signal') : ('TCPCLIENT', 'control'),
} )
self.handle = Handle(self.client)
def start(self):
if self.debug: print 'Starting TCP Client - connecting to %s on port %s' % (self.host, self.portnumber)
##self.client.run()
try:
background().start()
except:
pass # assume already running
self.client.activate()
class Proxy(object):
def __init__(self, host, portnumber, delay = 0, threaded = True, workers = 5, debug = 1):
self.host = host
self.portnumber = portnumber
self.delay = delay
self.threaded = threaded
self.workers = workers
self.debug = debug
        self.client = JsonRpcTCPClient(host = host, portnumber = portnumber, delay = delay, workers = workers,
                                       debug = debug)
self.request = RequestProxy(self.client, True)
self.notification = RequestProxy(self.client, False)
class RequestProxy(object):
def __init__(self, client, request = True):
self.client = client
self.request = request
def _remote_call(self, name, params):
client = self.client
|
sparkslabs/kamaelia_
|
Sketches/MPS/BugReports/FixTests/Kamaelia/Kamaelia/Apps/JsonRPC/BDJsonRPC.py
|
Python
|
apache-2.0
| 35,300
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""This module contains a Google Cloud KMS hook"""
import base64
from typing import Optional, Sequence, Tuple, Union
from google.api_core.retry import Retry
from google.cloud.kms_v1 import KeyManagementServiceClient
from airflow.providers.google.common.hooks.base_google import GoogleBaseHook
def _b64encode(s: bytes) -> str:
"""Base 64 encodes a bytes object to a string"""
return base64.b64encode(s).decode("ascii")
def _b64decode(s: str) -> bytes:
"""Base 64 decodes a string to bytes"""
return base64.b64decode(s.encode("utf-8"))
class CloudKMSHook(GoogleBaseHook):
"""
Hook for Google Cloud Key Management service.
:param gcp_conn_id: The connection ID to use when fetching connection info.
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account.
"""
def __init__(
self,
gcp_conn_id: str = "google_cloud_default",
delegate_to: Optional[str] = None,
impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
) -> None:
super().__init__(
gcp_conn_id=gcp_conn_id,
delegate_to=delegate_to,
impersonation_chain=impersonation_chain,
)
self._conn = None # type: Optional[KeyManagementServiceClient]
def get_conn(self) -> KeyManagementServiceClient:
"""
Retrieves connection to Cloud Key Management service.
:return: Cloud Key Management service object
:rtype: google.cloud.kms_v1.KeyManagementServiceClient
"""
if not self._conn:
self._conn = KeyManagementServiceClient(
credentials=self._get_credentials(), client_info=self.client_info
)
return self._conn
def encrypt(
self,
key_name: str,
plaintext: bytes,
authenticated_data: Optional[bytes] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> str:
"""
Encrypts a plaintext message using Google Cloud KMS.
:param key_name: The Resource Name for the key (or key version)
to be used for encryption. Of the form
``projects/*/locations/*/keyRings/*/cryptoKeys/**``
:param plaintext: The message to be encrypted.
:param authenticated_data: Optional additional authenticated data that
must also be provided to decrypt the message.
:param retry: A retry object used to retry requests. If None is specified, requests will not be
retried.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
retry is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
:return: The base 64 encoded ciphertext of the original message.
:rtype: str
"""
response = self.get_conn().encrypt(
request={
'name': key_name,
'plaintext': plaintext,
'additional_authenticated_data': authenticated_data,
},
retry=retry,
timeout=timeout,
metadata=metadata,
)
ciphertext = _b64encode(response.ciphertext)
return ciphertext
def decrypt(
self,
key_name: str,
ciphertext: str,
authenticated_data: Optional[bytes] = None,
retry: Optional[Retry] = None,
timeout: Optional[float] = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> bytes:
"""
Decrypts a ciphertext message using Google Cloud KMS.
:param key_name: The Resource Name for the key to be used for decryption.
Of the form ``projects/*/locations/*/keyRings/*/cryptoKeys/**``
:param ciphertext: The message to be decrypted.
:param authenticated_data: Any additional authenticated data that was
provided when encrypting the message.
:param retry: A retry object used to retry requests. If None is specified, requests will not be
retried.
:param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
retry is specified, the timeout applies to each individual attempt.
:param metadata: Additional metadata that is provided to the method.
:return: The original message.
:rtype: bytes
"""
response = self.get_conn().decrypt(
request={
'name': key_name,
'ciphertext': _b64decode(ciphertext),
'additional_authenticated_data': authenticated_data,
},
retry=retry,
timeout=timeout,
metadata=metadata,
)
return response.plaintext
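# A minimal round-trip sketch for the hook above. It assumes a working
# "google_cloud_default" Airflow connection and an existing CryptoKey; the key path
# below is a placeholder, not a value from this module.
if __name__ == "__main__":
    hook = CloudKMSHook()
    key = "projects/example-project/locations/global/keyRings/example-ring/cryptoKeys/example-key"
    ciphertext = hook.encrypt(key_name=key, plaintext=b"a secret")  # base64-encoded str
    assert hook.decrypt(key_name=key, ciphertext=ciphertext) == b"a secret"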
|
Acehaidrey/incubator-airflow
|
airflow/providers/google/cloud/hooks/kms.py
|
Python
|
apache-2.0
| 6,513
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from __future__ import print_function, unicode_literals
import os
import platform
import sys
from distutils.spawn import find_executable
from subprocess import Popen
import shutil
from tempfile import TemporaryFile
SEARCH_PATHS = [
os.path.join("python", "tidy"),
]
# Individual files providing mach commands.
MACH_MODULES = [
os.path.join('python', 'servo', 'bootstrap_commands.py'),
os.path.join('python', 'servo', 'build_commands.py'),
os.path.join('python', 'servo', 'testing_commands.py'),
os.path.join('python', 'servo', 'post_build_commands.py'),
os.path.join('python', 'servo', 'package_commands.py'),
os.path.join('python', 'servo', 'devenv_commands.py'),
]
CATEGORIES = {
'bootstrap': {
'short': 'Bootstrap Commands',
'long': 'Bootstrap the build system',
'priority': 90,
},
'build': {
'short': 'Build Commands',
'long': 'Interact with the build system',
'priority': 80,
},
'post-build': {
'short': 'Post-build Commands',
'long': 'Common actions performed after completing a build.',
'priority': 70,
},
'testing': {
'short': 'Testing',
'long': 'Run tests.',
'priority': 60,
},
'devenv': {
'short': 'Development Environment',
'long': 'Set up and configure your development environment.',
'priority': 50,
},
'build-dev': {
'short': 'Low-level Build System Interaction',
'long': 'Interact with specific parts of the build system.',
'priority': 20,
},
'package': {
'short': 'Package',
'long': 'Create objects to distribute',
'priority': 15,
},
'misc': {
'short': 'Potpourri',
'long': 'Potent potables and assorted snacks.',
'priority': 10,
},
'disabled': {
'short': 'Disabled',
'long': 'The disabled commands are hidden by default. Use -v to display them. These commands are unavailable '
'for your current context, run "mach <command>" to see why.',
'priority': 0,
}
}
# Possible names of executables
# NOTE: Windows Python doesn't provide versioned executables, so we must use
# the plain names. On MSYS, we still use Windows Python.
PYTHON_NAMES = ["python-2.7", "python2.7", "python2", "python"]
def _get_exec_path(names, is_valid_path=lambda _path: True):
for name in names:
path = find_executable(name)
if path and is_valid_path(path):
return path
return None
def _get_virtualenv_script_dir():
# Virtualenv calls its scripts folder "bin" on linux/OSX/MSYS64 but "Scripts" on Windows
if os.name == "nt" and os.sep != "/":
return "Scripts"
return "bin"
def _process_exec(args):
with TemporaryFile() as out:
with TemporaryFile() as err:
process = Popen(args, stdout=out, stderr=err)
process.wait()
if process.returncode:
print('"%s" failed with error code %d:' % ('" "'.join(args), process.returncode))
if sys.version_info >= (3, 0):
stdout = sys.stdout.buffer
else:
stdout = sys.stdout
print('Output:')
out.seek(0)
stdout.flush()
shutil.copyfileobj(out, stdout)
stdout.flush()
print('Error:')
err.seek(0)
stdout.flush()
shutil.copyfileobj(err, stdout)
stdout.flush()
sys.exit(1)
def wpt_path(is_firefox, topdir, *paths):
if is_firefox:
rel = os.path.join("..", "testing", "web-platform")
else:
rel = os.path.join("tests", "wpt")
return os.path.join(topdir, rel, *paths)
def wptrunner_path(is_firefox, topdir, *paths):
wpt_root = wpt_path(is_firefox, topdir)
if is_firefox:
rel = os.path.join(wpt_root, "tests", "tools", "wptrunner")
else:
rel = os.path.join(wpt_root, "web-platform-tests", "tools", "wptrunner")
return os.path.join(topdir, rel, *paths)
def wptserve_path(is_firefox, topdir, *paths):
wpt_root = wpt_path(is_firefox, topdir)
if is_firefox:
rel = os.path.join(wpt_root, "tests", "tools", "wptserve")
else:
rel = os.path.join(wpt_root, "web-platform-tests", "tools", "wptserve")
return os.path.join(topdir, rel, *paths)
def _activate_virtualenv(topdir, is_firefox):
virtualenv_path = os.path.join(topdir, "python", "_virtualenv%d.%d" % (sys.version_info[0], sys.version_info[1]))
check_exec_path = lambda path: path.startswith(virtualenv_path)
python = sys.executable # If there was no python, mach wouldn't have run at all!
if not python:
sys.exit('Failed to find python executable for starting virtualenv.')
script_dir = _get_virtualenv_script_dir()
activate_path = os.path.join(virtualenv_path, script_dir, "activate_this.py")
need_pip_upgrade = False
if not (os.path.exists(virtualenv_path) and os.path.exists(activate_path)):
import imp
try:
imp.find_module('virtualenv')
except ImportError:
sys.exit("Python virtualenv is not installed. Please install it prior to running mach.")
_process_exec([python, "-m", "virtualenv", "-p", python, "--system-site-packages", virtualenv_path])
# We want to upgrade pip when virtualenv created for the first time
need_pip_upgrade = True
exec(compile(open(activate_path).read(), activate_path, 'exec'), dict(__file__=activate_path))
python = _get_exec_path(PYTHON_NAMES, is_valid_path=check_exec_path)
if not python:
sys.exit("Python executable in virtualenv failed to activate.")
# TODO: Right now, we iteratively install all the requirements by invoking
# `pip install` each time. If it were the case that there were conflicting
# requirements, we wouldn't know about them. Once
# https://github.com/pypa/pip/issues/988 is addressed, then we can just
# chain each of the requirements files into the same `pip install` call
# and it will check for conflicts.
requirements_paths = [
os.path.join("python", "requirements.txt"),
wptrunner_path(is_firefox, topdir, "requirements.txt",),
wptrunner_path(is_firefox, topdir, "requirements_firefox.txt"),
wptrunner_path(is_firefox, topdir, "requirements_servo.txt"),
]
if need_pip_upgrade:
# Upgrade pip when virtualenv is created to fix the issue
# https://github.com/servo/servo/issues/11074
_process_exec([python, "-m", "pip", "install", "-I", "-U", "pip"])
for req_rel_path in requirements_paths:
req_path = os.path.join(topdir, req_rel_path)
marker_file = req_rel_path.replace(os.path.sep, '-')
marker_path = os.path.join(virtualenv_path, marker_file)
try:
if os.path.getmtime(req_path) + 10 < os.path.getmtime(marker_path):
continue
except OSError:
pass
_process_exec([python, "-m", "pip", "install", "-I", "-r", req_path])
open(marker_path, 'w').close()
def _ensure_case_insensitive_if_windows():
# The folder is called 'python'. By deliberately checking for it with the wrong case, we determine if the file
# system is case sensitive or not.
if _is_windows() and not os.path.exists('Python'):
print('Cannot run mach in a path on a case-sensitive file system on Windows.')
print('For more details, see https://github.com/pypa/virtualenv/issues/935')
sys.exit(1)
def _is_windows():
return sys.platform == 'win32'
class DummyContext(object):
pass
def is_firefox_checkout(topdir):
parentdir = os.path.normpath(os.path.join(topdir, '..'))
is_firefox = os.path.isfile(os.path.join(parentdir,
'build/mach_bootstrap.py'))
return is_firefox
def bootstrap_command_only(topdir):
    # we should activate the venv before importing servo.bootstrap
# because the module requires non-standard python packages
_activate_virtualenv(topdir, is_firefox_checkout(topdir))
from servo.bootstrap import bootstrap
context = DummyContext()
context.topdir = topdir
force = False
if len(sys.argv) == 3 and sys.argv[2] == "-f":
force = True
bootstrap(context, force)
return 0
def bootstrap(topdir):
_ensure_case_insensitive_if_windows()
topdir = os.path.abspath(topdir)
len(sys.argv) > 1 and sys.argv[1] == "bootstrap"
# We don't support paths with Unicode characters for now
# https://github.com/servo/servo/issues/10002
try:
# Trick to support both python2 and python3
topdir.encode().decode('ascii')
except UnicodeDecodeError:
print('Cannot run mach in a path with Unicode characters.')
print('Current path:', topdir)
sys.exit(1)
# We don't support paths with spaces for now
# https://github.com/servo/servo/issues/9442
if ' ' in topdir:
print('Cannot run mach in a path with spaces.')
print('Current path:', topdir)
sys.exit(1)
# Ensure we are running Python 2.7+ or Python 3.5+. We put this check here so we generate a
# user-friendly error message rather than a cryptic stack trace on module import.
if sys.version_info < (2, 7) or (sys.version_info >= (3, 0) and sys.version_info < (3, 5)):
print('Python2 (>=2.7) or Python3 (>=3.5) is required to run mach.')
print('You are running Python', platform.python_version())
sys.exit(1)
is_firefox = is_firefox_checkout(topdir)
_activate_virtualenv(topdir, is_firefox)
def populate_context(context, key=None):
if key is None:
return
if key == 'topdir':
return topdir
raise AttributeError(key)
sys.path[0:0] = [os.path.join(topdir, path) for path in SEARCH_PATHS]
sys.path[0:0] = [wpt_path(is_firefox, topdir),
wptrunner_path(is_firefox, topdir),
wptserve_path(is_firefox, topdir)]
import mach.main
mach = mach.main.Mach(os.getcwd())
mach.populate_context_handler = populate_context
for category, meta in CATEGORIES.items():
mach.define_category(category, meta['short'], meta['long'], meta['priority'])
for path in MACH_MODULES:
# explicitly provide a module name
# workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=1549636
file = os.path.basename(path)
module_name = os.path.splitext(file)[0]
mach.load_commands_from_file(os.path.join(topdir, path), module_name)
return mach
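# A minimal sketch of the calling convention (an assumption about how a top-level
# `mach` script would use this module, not something defined in this file): pass the
# repository root to bootstrap() and hand the command line to the returned Mach object.
if __name__ == '__main__':
    _topdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))  # assumed repo root
    sys.exit(bootstrap(_topdir).run(sys.argv[1:]))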
|
DominoTree/servo
|
python/mach_bootstrap.py
|
Python
|
mpl-2.0
| 10,973
|
#!/usr/bin/env python
# Calculate the 1000th element of the Fibonacci series. Fast.
# (Another popular tech interview question.)
import numpy;
# Definition of Fibonacci numbers:
# F(1) = 1
# F(2) = 1
# For n = 3, 4, 5, ...: F(n) = F(n-2) + F(n-1).
# Method one: recursion.
# Very inefficient: F(n) is called once, it calls F(n-1) once,
# F(n-2) is called twice (once by F(n) and once by F(n-1)),
# F(n-3) is called thrice (once by F(n-1) and twice by F(n-2)),
# F(n-k) is called F(k+1) times, that is an insane number of calls.
fibonaccirecursion = lambda n: 1 if n <=2 else fibonaccirecursion(n-2) + fibonaccirecursion(n-1);
# Method two: dual recursion. Returns the list [F(n-1),F(n)].
# Calling it with n triggers one call for each of 2, 3, ..., n-1: that is only O(n) calls.
def fibonaccidualrecursion(n):
if n >= 3:
a, b = fibonaccidualrecursion(n-1);
# F(n-2) = a, F(n-1) = b, F(n) = a+b.
return b, a+b;
elif n == 2:
return 1, 1;
elif n == 1:
# F(0) = 0.
return 0, 1;
else:
raise NotImplementedError;
# Method three: loop. Nothing fancy.
# Should be much like dual recursion without function call overhead.
def fibonacciloop(n):
a = 1;
b = 1;
for i in xrange(n-2):
c = a+b;
a = b;
b = c;
return b;
# Method four: even loop. Do two iterations at once to avoid moving around values.
# Slightly faster than simple loop.
def fibonaccievenloop(n):
a = 1;
b = 1;
for i in xrange(int(0.5*(n-2))):
a = a+b;
b = a+b;
if n % 2 == 1:
if n == 1:
return 1;
else:
return a+b;
else:
return b;
# Method five: direct formula.
# This is not faster if we need to calculate all Fibonacci numbers up to F(n),
# but much-much faster if we only need F(n), especially if n is large.
# This is how we solve second order homogeneous linear recursions in general:
# The characteristic polynomial of the recursion is x^2 = 1 + x.
# It has two distinct roots, x_12 = (1 pm sqrt(5)) / 2.
# Therefore a general series is alpha*x_1^n + beta*x_2^n.
# Two initial values, two coefficients, two degrees of freedom.
# (We would use alpha*x^n + beta*n*x^n if x was a multiple root.)
# Turns out |x_2| < 1, so we can omit this term and round.
# Note that if n >= 1475, F(n) cannot be represented as long int any more,
# but the float approximation is still valid.
sqrt5 = numpy.sqrt(5);
fibonaccipower = lambda n: int(numpy.power(0.5*(1.0+sqrt5), n)/sqrt5 + 0.5);
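# Quick sanity check of the closed form against the loop (a small illustrative helper,
# not part of the original timing experiment): for these n the omitted |x_2|^n term and
# the double-precision rounding stay harmless, so the two methods must agree exactly.
# e.g. checkfibonaccipower() -> True
def checkfibonaccipower(upto=60):
    for n in range(1, upto):
        assert fibonaccipower(n) == fibonacciloop(n), n
    return True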
print;
print "Testing.";
print [fibonaccirecursion(n) for n in xrange(1,15)];
print [fibonaccidualrecursion(n)[1] for n in xrange(1,15)];
print [fibonacciloop(n) for n in xrange(1,15)];
print [fibonaccievenloop(n) for n in xrange(1,15)];
print [fibonaccipower(n) for n in xrange(1,15)];
if __name__ == "__main__":
import timeit;
number = 20;
n = 25;
print;
print "Timing n={0:d}.".format(n);
for i in ["fibonaccirecursion", "fibonaccidualrecursion", "fibonacciloop", "fibonaccievenloop", "fibonaccipower"]:
print "{0:s}: {1:f} us".format(i, 1e6*timeit.timeit("{0:s}({1:d})".format(i,n), setup="from __main__ import {0:s}".format(i), number=number));
number = 20;
n = 500;
print;
print "Timing n={0:d} (simple recursion would fill up stack).".format(n);
for i in ["fibonaccidualrecursion", "fibonacciloop", "fibonaccievenloop", "fibonaccipower"]:
print "{0:s}: {1:f} us".format(i, 1e6*timeit.timeit("{0:s}({1:d})".format(i,n), setup="from __main__ import {0:s}".format(i), number=number));
number = 20;
n = 1000;
print;
print "Timing n={0:d} (even dual recursion would fill up stack).".format(n);
for i in ["fibonacciloop", "fibonaccievenloop", "fibonaccipower"]:
print "{0:s}: {1:f} us".format(i, 1e6*timeit.timeit("{0:s}({1:d})".format(i,n), setup="from __main__ import {0:s}".format(i), number=number));
|
bencebeky/etudes
|
fibonacci.py
|
Python
|
gpl-3.0
| 3,792
|
import binascii
import base64
import dateutil.parser
from datetime import timezone
from rest_framework.decorators import api_view
from rest_framework import status
from rest_framework.response import Response
from appliances.models import Appliance
from appliances.reading import (new_reading_from_data,
ReadingException, )
from .crypto import sha256_check_mac, CryptoException
@api_view(['POST'])
def token_collect(request):
if any([x not in request.data
for x in ['id', 'token', 'sensors', 'timestamp']]):
return Response(status=status.HTTP_400_BAD_REQUEST)
id = request.data['id']
token = request.data['token']
sensors = request.data['sensors']
timestamp_str = request.data['timestamp']
appliances = Appliance.objects.filter(id=id, authentication_model='token',
authentication_value=token,
is_active=True)
if appliances.count() != 1:
return Response(status=status.HTTP_400_BAD_REQUEST)
appliance = appliances.first()
try:
timestamp = (dateutil.parser.parse(timestamp_str)
.replace(tzinfo=timezone.utc))
except ValueError:
return Response(status=status.HTTP_400_BAD_REQUEST)
except TypeError:
return Response(status=status.HTTP_400_BAD_REQUEST)
try:
new_reading_from_data(appliance, sensors, timestamp)
except ReadingException:
return Response(status=status.HTTP_400_BAD_REQUEST)
return Response(status=status.HTTP_202_ACCEPTED)
@api_view(['POST'])
def sha_hmac_collect(request):
if any([x not in request.data for x in ['id', 'data', 'mac']]):
return Response(status=status.HTTP_400_BAD_REQUEST)
int_id = request.data['id']
b64_mac = request.data['mac']
payload = request.data['data']
appliances = Appliance.objects.filter(authentication_model='sha_hmac',
pk=int_id)
if appliances.count() != 1:
return Response(status=status.HTTP_400_BAD_REQUEST)
appliance = appliances.first()
try:
binkey = base64.b64decode(appliance.authentication_value)
expected_mac = base64.b64decode(b64_mac)
except binascii.Error:
return Response(status=status.HTTP_400_BAD_REQUEST)
try:
sha256_check_mac(payload.encode(), binkey, expected_mac)
except CryptoException:
return Response(status=status.HTTP_400_BAD_REQUEST)
try:
sensors = payload.split(',')[:-1]
dtstr = payload.split(',')[-1]
sensors = {keqv.split("=")[0]: keqv.split("=")[1] for keqv in sensors}
except IndexError:
return Response(status=status.HTTP_400_BAD_REQUEST)
try:
timestamp = dateutil.parser.parse(dtstr).replace(tzinfo=timezone.utc)
except ValueError:
return Response(status=status.HTTP_400_BAD_REQUEST)
try:
new_reading_from_data(appliance, sensors, timestamp)
return Response(status=status.HTTP_202_ACCEPTED)
except ReadingException:
return Response(status=status.HTTP_400_BAD_REQUEST)
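# Example request bodies for the two endpoints above (inferred from the fields they
# read; the values and sensor names are illustrative, not taken from this project):
#
# token_collect:
#   {"id": 3, "token": "<appliance token>", "timestamp": "2020-01-01T12:00:00",
#    "sensors": {"temperature": "21.5"}}
#
# sha_hmac_collect:
#   {"id": 3, "mac": "<base64 MAC of the data string>",
#    "data": "temperature=21.5,humidity=40,2020-01-01T12:00:00"}
# i.e. `data` is a comma-separated list of key=value readings followed by the timestamp;
# the MAC must verify via crypto.sha256_check_mac against the appliance's stored key
# (that MAC construction lives in the local .crypto module and is not shown here).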
|
piotrb5e3/Dyeus
|
collect/views.py
|
Python
|
gpl-3.0
| 3,159
|
import websocket
import json
import time
from threading import Thread
class OBSRemoteSwitcher():
"""Handler to talk to OBSRemote by websocket.
Handles authentication, SceneChanges and SceneUpdates
"""
def switch_to_scene(self, scene):
# Set the current scene
data = {"request-type":'SetCurrentScene', 'scene-name':scene}
self.send(data)
def update_scenes(self):
data = {"request-type":'GetSceneList'}
self.send(data)
def send(self, data):
if not type(data) == dict or not data:
return False
data = self.json_encoder.encode(data)
try:
self.ws.send(data)
except:
pass
def authenticate(self):
#TODO: Authentication
print 'authenticate not yet implemented'
def start(self):
self.ws = websocket.WebSocketApp("ws://{0}/".format(self.obsurl),
on_message=self.on_message,
on_error = self.on_error,
on_open = self.on_open,
header = ['Sec-WebSocket-Protocol: obsapi'])
websocket.setdefaulttimeout(5)
        self.thread = Thread(target=self.ws.run_forever, name='thread-overlayswitcher.sceneswitcher.obsremote.ws.run_forever')
self.thread.start()
def stop(self):
self.connected = False
self.ws.close()
self.thread._Thread__stop()
def on_message(self, ws, message):
""" Store new information for the overlayswitcher"""
data = self.json_decoder.decode(message)
if data.get('authRequired','False') == 'True':
self.authenticate()
if data.get('update-type','') == 'StreamStatus':
self.stats = data
if data.has_key('streaming'):
pass
if type(data.get('scenes',None)) == list:
pass
# print data.get('current-scene','')
# print '\n'.join(i['name'] for i in data['scenes'])
if data.has_key('current-scene'):
current_scene = data.get('current-scene')
self._overlayswitcher.active_scene = current_scene
def on_error(self, ws, error):
print "Error in the OBS Remote Handler:", error
self.stop()
def on_open(self, ws):
if ws is None or ws.sock is None:
print 'OBSRemote Socket Error!'
return
self.connected = ws.sock.connected
if not self.connected:
print 'Could not establish a connection to OBSRemote! Aborting'
return
else:
print 'Websocket created'
self.update_scenes()
data = {"request-type":'GetAuthRequired'}
self.send(data)
def __init__(self, settings, overlayswitcher):
self.json_encoder = json.JSONEncoder()
self.json_decoder = json.JSONDecoder()
self.password = settings.OBS_REMOTE_PASS
self.obsurl = settings.OBS_REMOTE_URL
self._overlayswitcher = overlayswitcher
self.obs_streaming = 0
self.connected = False #have we got a message yet?
#websocket.enableTrace(True)
self.start()
if __name__ == '__main__':
import settings
import time
import sys
class _(): #Dummy Overlayswitcher to avoid exceptions in updates()
active_scene=None
dummy_ols = _()
if len(sys.argv) > 1:
settings.OBS_REMOTE_URL = sys.argv[1]
handler = OBSRemoteSwitcher(settings, dummy_ols)
print 'OBS Remote works!'
handler.ws.close()
|
sistason/dota2_overlayswitcher
|
utils/sceneswitcher_obsremote.py
|
Python
|
gpl-2.0
| 3,613
|
#!/usr/bin/env python
"""
orfTest.py
Author: Tony Papenfuss
Date: Tue Aug 22 20:14:57 EST 2006
"""
import os, sys
import fasta, sequence
header,seq = fasta.load('NKC.fa')
orfIterator = fasta.load_iter('ORFs.fa')
writer = fasta.MfaWriter('ORFs2.fa')
for h,orf in orfIterator:
chrom,block,orfId,limits = h.split()[0].split('.')
start,end = limits.split('-')
start = int(start)
end = int(end)
if start>end:
strand = '-'
start,end = end,start
s = sequence.translate(sequence.reverseComplement(seq[start-1:end]))
else:
strand = '+'
s = sequence.translate(seq[start-1:end])
if s!=orf: print h
writer.write(h,s + '\n')
writer.close()
|
PapenfussLab/Mungo
|
snippets/orfTest.py
|
Python
|
artistic-2.0
| 721
|
from flask import Blueprint
posts = Blueprint('posts', __name__)
from . import routes
|
goalong/flask-demo
|
app/posts/__init__.py
|
Python
|
mit
| 89
|
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <geoffroy.gueguen@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import defaultdict
from androguard.decompiler.dad.basic_blocks import (build_node_from_block,
StatementBlock, CondBlock)
from androguard.decompiler.dad.instruction import Variable
logger = logging.getLogger('dad.graph')
class Graph(object):
def __init__(self):
self.entry = None
self.exit = None
self.nodes = list()
self.rpo = []
self.edges = defaultdict(list)
self.catch_edges = defaultdict(list)
self.reverse_edges = defaultdict(list)
self.reverse_catch_edges = defaultdict(list)
self.loc_to_ins = None
self.loc_to_node = None
def sucs(self, node):
return self.edges.get(node, [])
def all_sucs(self, node):
return self.edges.get(node, []) + self.catch_edges.get(node, [])
def preds(self, node):
return [n for n in self.reverse_edges.get(node, [])
if not n.in_catch]
def all_preds(self, node):
return (self.reverse_edges.get(node, []) +
self.reverse_catch_edges.get(node, []))
def add_node(self, node):
self.nodes.append(node)
def add_edge(self, e1, e2):
lsucs = self.edges[e1]
if e2 not in lsucs:
lsucs.append(e2)
lpreds = self.reverse_edges[e2]
if e1 not in lpreds:
lpreds.append(e1)
def add_catch_edge(self, e1, e2):
lsucs = self.catch_edges[e1]
if e2 not in lsucs:
lsucs.append(e2)
lpreds = self.reverse_catch_edges[e2]
if e1 not in lpreds:
lpreds.append(e1)
def remove_node(self, node):
preds = self.reverse_edges.get(node, [])
for pred in preds:
self.edges[pred].remove(node)
succs = self.edges.get(node, [])
for suc in succs:
self.reverse_edges[suc].remove(node)
exc_preds = self.reverse_catch_edges.pop(node, [])
for pred in exc_preds:
self.catch_edges[pred].remove(node)
exc_succs = self.catch_edges.pop(node, [])
for suc in exc_succs:
self.reverse_catch_edges[suc].remove(node)
self.nodes.remove(node)
if node in self.rpo:
self.rpo.remove(node)
del node
def number_ins(self):
self.loc_to_ins = {}
self.loc_to_node = {}
num = 0
for node in self.rpo:
start_node = num
num = node.number_ins(num)
end_node = num - 1
self.loc_to_ins.update(node.get_loc_with_ins())
self.loc_to_node[(start_node, end_node)] = node
def get_ins_from_loc(self, loc):
return self.loc_to_ins.get(loc)
def get_node_from_loc(self, loc):
for (start, end), node in self.loc_to_node.iteritems():
if start <= loc <= end:
return node
def remove_ins(self, loc):
ins = self.get_ins_from_loc(loc)
self.get_node_from_loc(loc).remove_ins(loc, ins)
self.loc_to_ins.pop(loc)
def split_if_nodes(self):
'''
        Split IfNodes into two nodes: the first node is the header node, the
        second one is only composed of the jump condition.
'''
node_map = {n: n for n in self.nodes}
to_update = set()
for node in self.nodes[:]:
if node.type.is_cond:
if len(node.get_ins()) > 1:
pre_ins = node.get_ins()[:-1]
last_ins = node.get_ins()[-1]
pre_node = StatementBlock('%s-pre' % node.name, pre_ins)
cond_node = CondBlock('%s-cond' % node.name, [last_ins])
node_map[node] = pre_node
node_map[pre_node] = pre_node
node_map[cond_node] = cond_node
pre_node.copy_from(node)
cond_node.copy_from(node)
for var in node.var_to_declare:
pre_node.add_variable_declaration(var)
pre_node.type.is_stmt = True
cond_node.true = node.true
cond_node.false = node.false
for pred in self.all_preds(node):
pred_node = node_map[pred]
# Verify that the link is not an exception link
if node not in self.sucs(pred):
self.add_catch_edge(pred_node, pre_node)
continue
if pred is node:
pred_node = cond_node
if pred.type.is_cond: # and not (pred is node):
if pred.true is node:
pred_node.true = pre_node
if pred.false is node:
pred_node.false = pre_node
self.add_edge(pred_node, pre_node)
for suc in self.sucs(node):
self.add_edge(cond_node, node_map[suc])
# We link all the exceptions to the pre node instead of the
# condition node, which should not trigger any of them.
for suc in self.catch_edges.get(node, []):
self.add_catch_edge(pre_node, node_map[suc])
if node is self.entry:
self.entry = pre_node
self.add_node(pre_node)
self.add_node(cond_node)
self.add_edge(pre_node, cond_node)
pre_node.update_attribute_with(node_map)
cond_node.update_attribute_with(node_map)
self.remove_node(node)
else:
to_update.add(node)
for node in to_update:
node.update_attribute_with(node_map)
def simplify(self):
'''
Simplify the CFG by merging/deleting statement nodes when possible:
If statement B follows statement A and if B has no other predecessor
besides A, then we can merge A and B into a new statement node.
We also remove nodes which do nothing except redirecting the control
        flow (nodes which only contain a goto).
'''
redo = True
while redo:
redo = False
node_map = {}
to_update = set()
for node in self.nodes[:]:
if node.type.is_stmt and node in self.nodes:
sucs = self.all_sucs(node)
if len(sucs) != 1:
continue
suc = sucs[0]
if len(node.get_ins()) == 0:
if any(pred.type.is_switch
for pred in self.all_preds(node)):
continue
if node is suc:
continue
node_map[node] = suc
for pred in self.all_preds(node):
pred.update_attribute_with(node_map)
if node not in self.sucs(pred):
self.add_catch_edge(pred, suc)
continue
self.add_edge(pred, suc)
redo = True
if node is self.entry:
self.entry = suc
self.remove_node(node)
elif (suc.type.is_stmt and
len(self.all_preds(suc)) == 1 and
not (suc in self.catch_edges) and
not ((node is suc) or (suc is self.entry))):
ins_to_merge = suc.get_ins()
node.add_ins(ins_to_merge)
for var in suc.var_to_declare:
node.add_variable_declaration(var)
new_suc = self.sucs(suc)[0]
if new_suc:
self.add_edge(node, new_suc)
for exception_suc in self.catch_edges.get(suc, []):
self.add_catch_edge(node, exception_suc)
redo = True
self.remove_node(suc)
else:
to_update.add(node)
for node in to_update:
node.update_attribute_with(node_map)
def compute_rpo(self):
'''
Number the nodes in reverse post order.
        An RPO traversal visits as many predecessors of a node as possible
before visiting the node itself.
'''
nb = len(self.nodes) + 1
for node in self.post_order():
node.num = nb - node.po
self.rpo = sorted(self.nodes, key=lambda n: n.num)
def post_order(self):
'''
        Return the nodes of the graph in post-order, i.e. we visit all the
children of a node before visiting the node itself.
'''
def _visit(n, cnt):
visited.add(n)
for suc in self.all_sucs(n):
if not suc in visited:
for cnt, s in _visit(suc, cnt):
yield cnt, s
n.po = cnt
yield cnt + 1, n
visited = set()
for _, node in _visit(self.entry, 1):
yield node
def draw(self, name, dname, draw_branches=True):
from pydot import Dot, Edge
g = Dot()
g.set_node_defaults(color='lightgray', style='filled', shape='box',
fontname='Courier', fontsize='10')
for node in sorted(self.nodes, key=lambda x: x.num):
if draw_branches and node.type.is_cond:
g.add_edge(Edge(str(node), str(node.true), color='green'))
g.add_edge(Edge(str(node), str(node.false), color='red'))
else:
for suc in self.sucs(node):
g.add_edge(Edge(str(node), str(suc), color='blue'))
for except_node in self.catch_edges.get(node, []):
g.add_edge(Edge(str(node), str(except_node),
color='black', style='dashed'))
g.write_png('%s/%s.png' % (dname, name))
def immediate_dominators(self):
return dom_lt(self)
def __len__(self):
return len(self.nodes)
def __repr__(self):
return str(self.nodes)
def __iter__(self):
for node in self.nodes:
yield node
def dom_lt(graph):
    '''Dominator algorithm from Lengauer-Tarjan'''
def _dfs(v, n):
semi[v] = n = n + 1
vertex[n] = label[v] = v
ancestor[v] = 0
for w in graph.all_sucs(v):
if not semi[w]:
parent[w] = v
n = _dfs(w, n)
pred[w].add(v)
return n
def _compress(v):
u = ancestor[v]
if ancestor[u]:
_compress(u)
if semi[label[u]] < semi[label[v]]:
label[v] = label[u]
ancestor[v] = ancestor[u]
def _eval(v):
if ancestor[v]:
_compress(v)
return label[v]
return v
def _link(v, w):
ancestor[w] = v
parent, ancestor, vertex = {}, {}, {}
label, dom = {}, {}
pred, bucket = defaultdict(set), defaultdict(set)
# Step 1:
semi = {v: 0 for v in graph.nodes}
n = _dfs(graph.entry, 0)
for i in xrange(n, 1, -1):
w = vertex[i]
# Step 2:
for v in pred[w]:
u = _eval(v)
y = semi[w] = min(semi[w], semi[u])
bucket[vertex[y]].add(w)
pw = parent[w]
_link(pw, w)
# Step 3:
bpw = bucket[pw]
while bpw:
v = bpw.pop()
u = _eval(v)
dom[v] = u if semi[u] < semi[v] else pw
# Step 4:
for i in range(2, n + 1):
w = vertex[i]
dw = dom[w]
if dw != vertex[semi[w]]:
dom[w] = dom[dw]
dom[graph.entry] = None
return dom
def bfs(start):
to_visit = [start]
visited = set([start])
while to_visit:
node = to_visit.pop(0)
yield node
if node.exception_analysis:
for _, _, exception in node.exception_analysis.exceptions:
if exception not in visited:
to_visit.append(exception)
visited.add(exception)
for _, _, child in node.childs:
if child not in visited:
to_visit.append(child)
visited.add(child)
class GenInvokeRetName(object):
def __init__(self):
self.num = 0
self.ret = None
def new(self):
self.num += 1
self.ret = Variable('tmp%d' % self.num)
return self.ret
def set_to(self, ret):
self.ret = ret
def last(self):
return self.ret
def make_node(graph, block, block_to_node, vmap, gen_ret):
node = block_to_node.get(block)
if node is None:
node = build_node_from_block(block, vmap, gen_ret)
block_to_node[block] = node
if block.exception_analysis:
for _type, _, exception_target in block.exception_analysis.exceptions:
exception_node = block_to_node.get(exception_target)
if exception_node is None:
exception_node = build_node_from_block(exception_target,
vmap, gen_ret, _type)
exception_node.in_catch = True
block_to_node[exception_target] = exception_node
graph.add_catch_edge(node, exception_node)
for _, _, child_block in block.childs:
child_node = block_to_node.get(child_block)
if child_node is None:
child_node = build_node_from_block(child_block, vmap, gen_ret)
block_to_node[child_block] = child_node
graph.add_edge(node, child_node)
if node.type.is_switch:
node.add_case(child_node)
if node.type.is_cond:
if_target = ((block.end / 2) - (block.last_length / 2) +
node.off_last_ins)
child_addr = child_block.start / 2
if if_target == child_addr:
node.true = child_node
else:
node.false = child_node
# Check that both branch of the if point to something
# It may happen that both branch point to the same node, in this case
# the false branch will be None. So we set it to the right node.
# TODO: In this situation, we should transform the condition node into
# a statement node
if node.type.is_cond and node.false is None:
node.false = node.true
return node
def construct(start_block, vmap, exceptions):
bfs_blocks = bfs(start_block)
graph = Graph()
gen_ret = GenInvokeRetName()
# Construction of a mapping of basic blocks into Nodes
block_to_node = {}
exceptions_start_block = []
for exception in exceptions:
for _, _, block in exception.exceptions:
exceptions_start_block.append(block)
for block in bfs_blocks:
node = make_node(graph, block, block_to_node, vmap, gen_ret)
graph.add_node(node)
graph.entry = block_to_node[start_block]
del block_to_node, bfs_blocks
graph.compute_rpo()
graph.number_ins()
for node in graph.rpo:
preds = [pred for pred in graph.all_preds(node)
if pred.num < node.num]
if preds and all(pred.in_catch for pred in preds):
node.in_catch = True
# Create a list of Node which are 'return' node
# There should be one and only one node of this type
# If this is not the case, try to continue anyway by setting the exit node
# to the one which has the greatest RPO number (not necessarily the case)
lexit_nodes = [node for node in graph if node.type.is_return]
if len(lexit_nodes) > 1:
# Not sure that this case is possible...
logger.error('Multiple exit nodes found !')
graph.exit = graph.rpo[-1]
elif len(lexit_nodes) < 1:
# A method can have no return if it has throw statement(s) or if its
        # body is a while(1) without break/return.
logger.debug('No exit node found !')
else:
graph.exit = lexit_nodes[0]
return graph
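# An illustrative sketch (not part of androguard): dom_lt() only relies on an object
# exposing `nodes`, `entry` and `all_sucs(node)`, so immediate dominators can be
# checked on a toy CFG without building full basic-block Nodes.
class _ToyGraph(object):
    def __init__(self, entry, edges):
        self.entry = entry
        self._edges = edges
        self.nodes = sorted(set(edges) | {s for succs in edges.values() for s in succs})
    def all_sucs(self, node):
        return self._edges.get(node, [])
if __name__ == '__main__':
    # Diamond CFG: A -> B, A -> C, B -> D, C -> D. Every other node is immediately
    # dominated by the entry A, and dom(A) is None.
    toy = _ToyGraph('A', {'A': ['B', 'C'], 'B': ['D'], 'C': ['D']})
    print(dom_lt(toy))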
|
0x0mar/androguard
|
androguard/decompiler/dad/graph.py
|
Python
|
apache-2.0
| 17,086
|
# coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Experiments with Language Models.
Train languagemodel_lm1b32k_packed and measure log-ppl/token (dev).
These numbers need to be multiplied by 1.107893 to get log-ppl/word
for comparison with published results.
Basic training regimen is 300k steps * 8 cores * batch_size=4096
= about 10 epochs
Make sure to eval on CPU or GPU using a large number of steps (1000), since the
TPU eval code doesn't know how to stop at the end of the dev data. Also need
to set activation_type=float32 for eval, since there is currently a conflict
between daisy_chain_getter and activation_type=bfloat16.
RESULTS:
lmx_base: log-ppl/tok=3.40 PPL/word=43.2 (10 hours*8 cores)
lmx_h1k_f4k:
lmx_h2k_f8k:
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.models import transformer
from tensor2tensor.utils import registry
@registry.register_hparams
def lmx_base():
"""Transformer on languagemodel_lm1b32k_packed. 50M Params."""
hparams = transformer.transformer_tpu()
# sharing is counterproductive when underparameterized
hparams.shared_embedding_and_softmax_weights = False
# we judge by log-ppl, so label smoothing hurts.
hparams.label_smoothing = 0.0
# This makes the batch size on GPU the same as on TPU for a packed problem
# with sequence length 256.
# TODO(noam): fix the mess that is the data reading pipeline.
hparams.max_length = 256
# larger batch since we only have a decoder
hparams.batch_size = 4096
# save some memory so we can have a larger model
hparams.activation_dtype = "bfloat16"
return hparams
@registry.register_hparams
def lmx_h1k_f4k():
"""Transformer on languagemodel_lm1b32k_packed. 140M Params."""
hparams = lmx_base()
hparams.hidden_size = 1024
hparams.filter_size = 4096
return hparams
@registry.register_hparams
def lmx_h2k_f8k():
"""HParams for training languagemodel_lm1b32k_packed. 430M Params."""
hparams = lmx_base()
hparams.hidden_size = 2048
hparams.filter_size = 8192
return hparams
@registry.register_hparams
def lmx_h3k_f12k():
"""HParams for training languagemodel_lm1b32k_packed. 880M Params."""
hparams = lmx_base()
hparams.hidden_size = 3072
hparams.filter_size = 12288
hparams.batch_size = 2048
hparams.weight_dtype = "bfloat16"
return hparams
@registry.register_hparams
def lmx_h4k_f16k():
"""HParams for training languagemodel_lm1b32k_packed. 1470M Params."""
hparams = lmx_base()
hparams.hidden_size = 4096
hparams.filter_size = 16384
hparams.batch_size = 1024
hparams.weight_dtype = "bfloat16"
return hparams
@registry.register_hparams
def lmx_relative():
"""Language model using relative attention."""
hparams = lmx_base()
hparams.self_attention_type = "dot_product_relative_v2"
hparams.activation_dtype = "float32"
hparams.weight_dtype = "float32"
return hparams
@registry.register_hparams
def lmx_relative_nopos():
"""Language model using relative attention and no positional encoding."""
hparams = lmx_relative()
hparams.pos = "none"
return hparams
@registry.register_hparams
def lmx_moe():
"""Transformer with mixture of experts. 140M Params."""
hparams = lmx_base()
hparams.ffn_layer = "local_moe_tpu"
return hparams
@registry.register_hparams
def lmx_moe_h1k_f4k_x32():
"""Transformer with mixture of experts. 890M Params."""
hparams = lmx_h1k_f4k()
hparams.ffn_layer = "local_moe_tpu"
hparams.moe_num_experts = 32
hparams.weight_dtype = "bfloat16"
hparams.batch_size = 8192
return hparams
@registry.register_hparams
def lmx_moe_h1k_f8k_x16():
"""Transformer with mixture of experts. 890M Params."""
hparams = lmx_h1k_f4k()
hparams.filter_size = 8192
hparams.ffn_layer = "local_moe_tpu"
hparams.moe_num_experts = 16
hparams.weight_dtype = "bfloat16"
hparams.batch_size = 8192
return hparams
@registry.register_hparams
def lmx_h1k_f64k():
"""HParams for training languagemodel_lm1b32k_packed. 880M Params."""
hparams = lmx_base()
hparams.hidden_size = 1024
hparams.filter_size = 65536
hparams.batch_size = 2048
return hparams
|
vthorsteinsson/tensor2tensor
|
tensor2tensor/models/research/lm_experiments.py
|
Python
|
apache-2.0
| 4,751
|
import numpy as np
import pandas
import os
import pdb
#pdb.set_trace()
dircount = 0
rootdirs = os.walk('.').next()[1]
mean_mat = np.zeros((193,len(rootdirs)))
se_mat = np.zeros((193,len(rootdirs)))
N_mat = np.zeros((3,len(rootdirs)))
#for each directory found, iterate round...
for myrootdir in rootdirs:
myfileList = []
usedfilelist = []
print(myrootdir)
# make list of files
for subdir, dirs, files in os.walk(myrootdir):
for file in files:
#print os.path.join(subdir, file)
filepath = subdir + os.sep + file
if filepath.endswith("-0000.csv"):
myfileList.append (filepath)
mat2=np.zeros((193,len(myfileList)))
file_success_count = 0
file_obscured_count = 0
file_unresponsive_count = 0
for filepath in myfileList:
print (' ' + filepath)
#pdb.set_trace()
        if ('Unr' in filepath) or ('unr' in filepath): # eliminate 'Unresponsive' and "Didn't respond" files
print(' deemed unresponsive ' + filepath)
file_unresponsive_count = file_unresponsive_count + 1
elif 'scu' in filepath: # eliminate Obscured
print(' found to be obscured ' + filepath)
file_obscured_count = file_obscured_count + 1
else:
df = pandas.read_csv(filepath, header=0,
names=['Summary','genotype', 'file', 'initial area', 'area spline Max', 'MaxAt', 'actual area max', 'actual area MaxAt', 'residual'])
x = df[['area spline Max']].iloc[2:195,[0]].astype(np.float)
#ignore traces where the max is less than 5.0
ss = x['area spline Max'].iloc[0] # starting value
mm = x.max()[0]
ii = mm-ss
if ii > 5.0 :
y = x['area spline Max'] - ss # subtract starting area, results in data in rows
mat2 [:,[file_success_count]] = y.values.reshape(193,1)
file_success_count = file_success_count + 1
usedfilelist.append(filepath)
else:
print(' found to be unresponsive ' + filepath)
file_unresponsive_count = file_unresponsive_count + 1
#pdb.set_trace()
#mat2=np.delete(mat2, slice(file_success_count,2000), 1) # 1 means delete columns
# Create a Pandas Excel writer using XlsxWriter as the engine.
writer = pandas.ExcelWriter(myrootdir + 'out.xlsx', engine='xlsxwriter')
out_df = pandas.DataFrame(data = mat2[:,:file_success_count])
out_df.columns=usedfilelist
out_df.to_excel (writer, sheet_name=myrootdir)
if file_success_count > 0 :
# now get the workbook etc
workbook = writer.book
workbook.set_size(2000, 1400)
worksheet = writer.sheets[myrootdir]
chart = workbook.add_chart({'type': 'scatter'})
# add the data...
max_row = 193
for i in range(file_success_count):
col = i + 1
chart.add_series({
'name': [myrootdir, 0, col],
'categories': [myrootdir, 1, 0, max_row, 0],
'values': [myrootdir, 1, col, max_row, col],
'marker': {'type': 'none'},
'line': {'width': 1.25},
})
chart.set_x_axis({ 'name': 'frames'})
chart.set_y_axis({ 'name': 'extension'})
chart.set_y_axis({'min': 0, 'max': 15})
chart.set_x_axis({'min': 0, 'max': 190})
chart.set_size({'x_scale': 3, 'y_scale': 3})
worksheet.insert_chart('K2', chart)
        #now write a sheet to calculate (in Excel) the 75% latency
worksheet2 = workbook.add_worksheet('seventyFive')
worksheet2.write_string('B1', myrootdir)
worksheet2.write_string('A6', 'max')
worksheet2.write_string('A7', 'max pos')
worksheet2.write_string('A8', 'frames to 75%')
worksheet2.write_string('A9', 'time to 75%')
for i in range (1,file_success_count+1):
worksheet2.write_formula(4,i, '=INDIRECT("\'"&$B$1&"\'!"&ADDRESS(1,COLUMN()))')
worksheet2.write_formula(5,i, '=MAX(INDIRECT("\'"&$B$1&"\'!"&ADDRESS(2,COLUMN())&":"&ADDRESS(200,COLUMN())))')
worksheet2.write_formula(6,i, '=IF(INDIRECT(ADDRESS(ROW()-1,COLUMN()))>0,MATCH(INDIRECT(ADDRESS(ROW()-1,COLUMN())),INDIRECT("\'"&$B$1&"\'!"&ADDRESS(2,COLUMN())&":"&ADDRESS(200,COLUMN())),0),"")')
worksheet2.write_formula(7,i, '=IF(INDIRECT(ADDRESS(ROW()-2,COLUMN()))>0,MATCH(INDIRECT(ADDRESS(ROW()-2,COLUMN()))*0.75,INDIRECT("\'"&$B$1&"\'!"&ADDRESS(2,COLUMN())&":"&ADDRESS(INDIRECT(ADDRESS(ROW()-1,COLUMN())),COLUMN())),1),"")')
worksheet2.write_formula(8,i, '=IF(INDIRECT(ADDRESS(ROW()-3,COLUMN()))>0,(INDIRECT(ADDRESS(ROW()-1,COLUMN()))+7)/200,"")')
writer.save()
#Now calculate the mean
mean_mat[:,dircount] = out_df.mean(axis=1)
se_mat [:,dircount] = out_df.sem(axis=1)
N_mat [0,dircount] = file_success_count
N_mat [1,dircount] = file_unresponsive_count
N_mat [2,dircount] = file_obscured_count
#zero the data
#mean_mat[:,dircount] = mean_mat[:,dircount] - mean_mat[0,dircount]
dircount = dircount + 1
#pdb.set_trace()
mean_df = pandas.DataFrame(data = mean_mat)
mean_df.columns=rootdirs
sd_df = pandas.DataFrame(data = se_mat)
sd_df.columns=rootdirs
N_df = pandas.DataFrame(data = N_mat)
N_df.columns=rootdirs
N_df.rename(index={0:'responded',1:'NR', 2:'Obscured'}, inplace=True)
# Create a Pandas Excel writer using XlsxWriter as the engine.
writer = pandas.ExcelWriter('mean_se.xlsx', engine='xlsxwriter')
# Position the dataframes in the worksheet.
mean_df.to_excel(writer, sheet_name='mean') # Default position, cell A1.
sd_df.to_excel (writer, sheet_name='se')
N_df.to_excel (writer, sheet_name='N')
workbook = writer.book
workbook.set_size(2000, 1400)
worksheet = writer.sheets['N']
nCol = len(rootdirs)
for i in range (1, nCol+1):
worksheet.write_formula(4,i, '=SUM(INDIRECT(ADDRESS(ROW()-3,COLUMN())&":"&(ADDRESS(ROW()-1,COLUMN()))))')
chart = workbook.add_chart({'type': 'column', 'subtype': 'percent_stacked'})
for i in range (1, 4):
chart.add_series({
'name': ['N', i, 0],
'categories': ['N', 0, 1, 0, nCol],
'values': ['N', i, 1, i, nCol],
})
chart.set_size({'x_scale': 2, 'y_scale': 2})
worksheet.insert_chart('K2', chart)
#df2.to_excel(writer, sheet_name='Sheet1', startcol=3)
#df3.to_excel(writer, sheet_name='Sheet1', startrow=6)
writer.save()
|
biol75/PER
|
video_analysis/align_graph.py
|
Python
|
mit
| 7,276
|
# Copyright 2022. ThingsBoard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
from time import sleep
from ssl import CERT_REQUIRED, PROTOCOL_TLSv1_2
from thingsboard_gateway.tb_client.tb_gateway_mqtt import TBGatewayMqttClient
log = logging.getLogger("tb_connection")
class TBClient(threading.Thread):
def __init__(self, config, config_folder_path):
super().__init__()
self.setName('Connection thread.')
self.daemon = True
self.__config_folder_path = config_folder_path
self.__config = config
self.__host = config["host"]
self.__port = config.get("port", 1883)
self.__default_quality_of_service = config.get("qos", 1)
credentials = config["security"]
self.__min_reconnect_delay = 1
self.__tls = bool(credentials.get('tls', False) or credentials.get('caCert', False))
self.__ca_cert = None
self.__private_key = None
self.__cert = None
self.__token = None
self.__is_connected = False
self.__stopped = False
self.__paused = False
if credentials.get("accessToken") is not None:
self.__token = str(credentials["accessToken"])
self.client = TBGatewayMqttClient(self.__host, self.__port, self.__token, self, quality_of_service=self.__default_quality_of_service)
if self.__tls:
self.__ca_cert = self.__config_folder_path + credentials.get("caCert") if credentials.get("caCert") is not None else None
self.__private_key = self.__config_folder_path + credentials.get("privateKey") if credentials.get("privateKey") is not None else None
self.__cert = self.__config_folder_path + credentials.get("cert") if credentials.get("cert") is not None else None
self.client._client.tls_set(ca_certs=self.__ca_cert,
certfile=self.__cert,
keyfile=self.__private_key,
tls_version=PROTOCOL_TLSv1_2,
cert_reqs=CERT_REQUIRED,
ciphers=None)
if credentials.get("insecure", False):
self.client._client.tls_insecure_set(True)
# pylint: disable=protected-access
# Adding callbacks
self.client._client._on_connect = self._on_connect
self.client._client._on_disconnect = self._on_disconnect
# self.client._client._on_log = self._on_log
self.start()
# def _on_log(self, *args):
# if "exception" in args[-1]:
# log.exception(args)
# else:
# log.debug(args)
def pause(self):
self.__paused = True
def unpause(self):
self.__paused = False
def is_connected(self):
return self.__is_connected
def _on_connect(self, client, userdata, flags, result_code, *extra_params):
log.debug('TB client %s connected to ThingsBoard', str(client))
if result_code == 0:
self.__is_connected = True
# pylint: disable=protected-access
self.client._on_connect(client, userdata, flags, result_code, *extra_params)
def _on_disconnect(self, client, userdata, result_code):
# pylint: disable=protected-access
if self.client._client != client:
log.info("TB client %s has been disconnected. Current client for connection is: %s", str(client), str(self.client._client))
client.disconnect()
client.loop_stop()
else:
self.__is_connected = False
self.client._on_disconnect(client, userdata, result_code)
def stop(self):
# self.disconnect()
self.client.stop()
self.__stopped = True
def disconnect(self):
self.__paused = True
self.unsubscribe('*')
self.client.disconnect()
    def unsubscribe(self, subscription_id):
        self.client.gw_unsubscribe(subscription_id)
        self.client.unsubscribe_from_attribute(subscription_id)
def connect(self, min_reconnect_delay=10):
self.__paused = False
self.__stopped = False
self.__min_reconnect_delay = min_reconnect_delay
def run(self):
keep_alive = self.__config.get("keep_alive", 120)
try:
while not self.client.is_connected() and not self.__stopped:
if not self.__paused:
if self.__stopped:
break
log.debug("connecting to ThingsBoard")
try:
self.client.connect(keepalive=keep_alive,
min_reconnect_delay=self.__min_reconnect_delay)
except ConnectionRefusedError:
pass
except Exception as e:
log.exception(e)
sleep(1)
except Exception as e:
log.exception(e)
sleep(10)
while not self.__stopped:
try:
if not self.__stopped:
sleep(.2)
else:
break
except KeyboardInterrupt:
self.__stopped = True
except Exception as e:
log.exception(e)
def get_config_folder_path(self):
return self.__config_folder_path
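# A minimal example of the configuration mapping TBClient expects (inferred from the
# keys read in __init__ above; the values are placeholders, not gateway defaults beyond
# those visible in the code):
#   {
#       "host": "thingsboard.example.com",       # required
#       "port": 1883,                             # optional, defaults to 1883
#       "qos": 1,                                 # optional, defaults to 1
#       "keep_alive": 120,                        # optional, defaults to 120
#       "security": {"accessToken": "<token>"}    # token auth; caCert/cert/privateKey/
#   }                                             # insecure enable and tune TLS
# TBClient(config, "/etc/thingsboard-gateway/config/") then starts a background thread
# that keeps trying to connect with those credentials.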
|
thingsboard/thingsboard-gateway
|
thingsboard_gateway/gateway/tb_client.py
|
Python
|
apache-2.0
| 5,963
|
# encoding=utf-8
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2016 Bahtiar `kalkin-` Gadimov <bahtiar@gadimov.de>
# Copyright (C) 2016 Marek Marczykowski-Górecki
# <marmarek@invisiblethingslab.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Qubes volume and block device managment"""
import argparse
import os
import sys
import qubesadmin
import qubesadmin.exc
import qubesadmin.tools
import qubesadmin.devices
def prepare_table(dev_list):
""" Converts a list of :py:class:`qubes.devices.DeviceInfo` objects to a
    list of tuples for the :py:func:`qubes.tools.print_table`.
    If :program:`qvm-devices` is running in a TTY, it will omit duplicate
data.
:param iterable dev_list: List of :py:class:`qubes.devices.DeviceInfo`
objects.
    :returns: list of tuples
"""
output = []
header = []
if sys.stdout.isatty():
header += [('BACKEND:DEVID', 'DESCRIPTION', 'USED BY')] # NOQA
for line in dev_list:
output += [(
line.ident,
line.description,
str(line.assignments),
)]
return header + sorted(output)
class Line(object):
"""Helper class to hold single device info for listing"""
# pylint: disable=too-few-public-methods
def __init__(self, device: qubesadmin.devices.DeviceInfo, attached_to=None):
self.ident = "{!s}:{!s}".format(device.backend_domain, device.ident)
self.description = device.description
self.attached_to = attached_to if attached_to else ""
self.frontends = []
@property
def assignments(self):
"""list of frontends the device is assigned to"""
return ', '.join(self.frontends)
def list_devices(args):
""" Called by the parser to execute the qubes-devices list
subcommand. """
app = args.app
devices = set()
try:
if hasattr(args, 'domains') and args.domains:
for domain in args.domains:
for dev in domain.devices[args.devclass].attached():
devices.add(dev)
for dev in domain.devices[args.devclass].available():
devices.add(dev)
else:
for domain in app.domains:
try:
for dev in domain.devices[args.devclass].available():
devices.add(dev)
except qubesadmin.exc.QubesVMNotFoundError:
continue
except qubesadmin.exc.QubesDaemonAccessError:
raise qubesadmin.exc.QubesException(
"Failed to list '%s' devices, this device type either "
"does not exist or you do not have access to it.", args.devclass)
result = {dev: Line(dev) for dev in devices}
for dev in result:
for domain in app.domains:
if domain == dev.backend_domain:
continue
try:
for assignment in domain.devices[args.devclass].assignments():
if dev != assignment:
continue
if assignment.options:
result[dev].frontends.append('{!s} ({})'.format(
domain, ', '.join('{}={}'.format(key, value)
for key, value in
assignment.options.items())))
else:
result[dev].frontends.append(str(domain))
except qubesadmin.exc.QubesVMNotFoundError:
continue
qubesadmin.tools.print_table(prepare_table(result.values()))
def attach_device(args):
""" Called by the parser to execute the :program:`qvm-devices attach`
subcommand.
"""
device_assignment = args.device_assignment
vm = args.domains[0]
options = dict(opt.split('=', 1) for opt in args.option or [])
if args.ro:
options['read-only'] = 'yes'
device_assignment.persistent = args.persistent
device_assignment.options = options
vm.devices[args.devclass].attach(device_assignment)
def detach_device(args):
""" Called by the parser to execute the :program:`qvm-devices detach`
subcommand.
"""
vm = args.domains[0]
if args.device_assignment:
vm.devices[args.devclass].detach(args.device_assignment)
else:
for device_assignment in vm.devices[args.devclass].assignments():
vm.devices[args.devclass].detach(device_assignment)
def init_list_parser(sub_parsers):
""" Configures the parser for the :program:`qvm-devices list` subcommand """
# pylint: disable=protected-access
list_parser = sub_parsers.add_parser('list', aliases=('ls', 'l'),
help='list devices')
vm_name_group = qubesadmin.tools.VmNameGroup(
list_parser, required=False, vm_action=qubesadmin.tools.VmNameAction,
help='list devices assigned to specific domain(s)')
list_parser._mutually_exclusive_groups.append(vm_name_group)
list_parser.set_defaults(func=list_devices)
class DeviceAction(qubesadmin.tools.QubesAction):
""" Action for argument parser that gets the
:py:class:``qubesadmin.device.DeviceAssignment`` from a
BACKEND:DEVICE_ID string.
""" # pylint: disable=too-few-public-methods
def __init__(self, help='A backend & device id combination',
required=True, allow_unknown=False, **kwargs):
# pylint: disable=redefined-builtin
self.allow_unknown = allow_unknown
super().__init__(help=help, required=required,
**kwargs)
def __call__(self, parser, namespace, values, option_string=None):
""" Set ``namespace.device_assignment`` to ``values`` """
setattr(namespace, self.dest, values)
def parse_qubes_app(self, parser, namespace):
app = namespace.app
backend_device_id = getattr(namespace, self.dest)
devclass = namespace.devclass
if backend_device_id is None:
return
try:
vmname, device_id = backend_device_id.split(':', 1)
vm = None
try:
vm = app.domains[vmname]
except KeyError:
parser.error_runtime("no backend vm {!r}".format(vmname))
try:
dev = vm.devices[devclass][device_id]
if not self.allow_unknown and \
isinstance(dev, qubesadmin.devices.UnknownDevice):
raise KeyError(device_id)
except KeyError:
parser.error_runtime(
"backend vm {!r} doesn't expose device {!r}".format(
vmname, device_id))
device_assignment = qubesadmin.devices.DeviceAssignment(
vm, device_id)
setattr(namespace, self.dest, device_assignment)
except ValueError:
parser.error(
'expected a backend vm & device id combination like foo:bar '
'got %s' % backend_device_id)
def get_parser(device_class=None):
"""Create :py:class:`argparse.ArgumentParser` suitable for
:program:`qvm-block`.
"""
parser = qubesadmin.tools.QubesArgumentParser(description=__doc__)
parser.register('action', 'parsers',
qubesadmin.tools.AliasedSubParsersAction)
parser.allow_abbrev = False
if device_class:
parser.add_argument('devclass', const=device_class,
action='store_const',
help=argparse.SUPPRESS)
else:
parser.add_argument('devclass', metavar='DEVICE_CLASS', action='store',
help="Device class to manage ('pci', 'usb', etc)")
# default action
parser.set_defaults(func=list_devices)
sub_parsers = parser.add_subparsers(
title='commands',
description="For more information see qvm-device command -h",
dest='command')
init_list_parser(sub_parsers)
attach_parser = sub_parsers.add_parser(
'attach', help="Attach device to domain", aliases=('at', 'a'))
detach_parser = sub_parsers.add_parser(
"detach", help="Detach device from domain", aliases=('d', 'dt'))
attach_parser.add_argument('VMNAME', nargs=1,
action=qubesadmin.tools.VmNameAction)
detach_parser.add_argument('VMNAME', nargs=1,
action=qubesadmin.tools.VmNameAction)
attach_parser.add_argument(metavar='BACKEND:DEVICE_ID',
dest='device_assignment',
action=DeviceAction)
detach_parser.add_argument(metavar='BACKEND:DEVICE_ID',
dest='device_assignment',
nargs=argparse.OPTIONAL,
action=DeviceAction, allow_unknown=True)
attach_parser.add_argument('--option', '-o', action='append',
help="Set option for the device in opt=value "
"form (can be specified "
"multiple times), see man qvm-device for "
"details")
attach_parser.add_argument('--ro', action='store_true', default=False,
help="Attach device read-only (alias for "
"read-only=yes option, "
"takes precedence)")
attach_parser.add_argument('--persistent', '-p', action='store_true',
default=False,
help="Attach device persistently (so it will "
"be automatically "
"attached at qube startup)")
attach_parser.set_defaults(func=attach_device)
detach_parser.set_defaults(func=detach_device)
parser.add_argument('--list-device-classes', action='store_true',
default=False)
return parser
def main(args=None, app=None):
"""Main routine of :program:`qvm-block`."""
basename = os.path.basename(sys.argv[0])
devclass = None
if basename.startswith('qvm-') and basename != 'qvm-device':
devclass = basename[4:]
parser = get_parser(devclass)
args = parser.parse_args(args, app=app)
if args.list_device_classes:
print('\n'.join(qubesadmin.Qubes().list_deviceclass()))
return 0
try:
args.func(args)
except qubesadmin.exc.QubesException as e:
parser.print_error(str(e))
return 1
return 0
if __name__ == '__main__':
# Special treatment for '--list-device-classes' (alias --list-classes)
curr_action = sys.argv[1:]
if set(curr_action).intersection(
{'--list-device-classes', '--list-classes'}):
sys.exit(main(args=['', '--list-device-classes']))
sys.exit(main())
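# --- Illustrative usage (not part of the original file) ---
# Based on the parser defined above, typical invocations would look like the
# following; the qube and device names are placeholders:
#   qvm-device pci list                          # list PCI devices across all domains
#   qvm-device block list work                   # list block devices for the qube "work"
#   qvm-device block attach --ro work dom0:sda   # attach dom0:sda read-only to "work"
#   qvm-device block detach work dom0:sda        # detach it again
# When installed under a name like qvm-block or qvm-usb, main() derives the device
# class from the executable name and the DEVICE_CLASS argument is omitted.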
|
marmarek/qubes-core-mgmt-client
|
qubesadmin/tools/qvm_device.py
|
Python
|
lgpl-2.1
| 11,687
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-19 20:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('experiments_manager', '0006_experiment_public'),
]
operations = [
migrations.AddField(
model_name='experiment',
name='publish_url_zip',
field=models.URLField(null=True),
),
]
|
MOOCworkbench/MOOCworkbench
|
experiments_manager/migrations/0007_experiment_publish_url_zip.py
|
Python
|
mit
| 468
|
import pytest
from unittest import mock
from django.core.urlresolvers import reverse
from django.core import mail
from taiga.base.utils import json
from taiga.hooks.github import event_hooks
from taiga.hooks.github.api import GitHubViewSet
from taiga.hooks.exceptions import ActionSyntaxException
from taiga.projects.issues.models import Issue
from taiga.projects.tasks.models import Task
from taiga.projects.userstories.models import UserStory
from taiga.projects.models import Membership
from taiga.projects.history.services import get_history_queryset_by_model_instance, take_snapshot
from taiga.projects.notifications.choices import NotifyLevel
from taiga.projects.notifications.models import NotifyPolicy
from taiga.projects import services
from .. import factories as f
pytestmark = pytest.mark.django_db
def test_bad_signature(client):
project = f.ProjectFactory()
url = reverse("github-hook-list")
url = "%s?project=%s" % (url, project.id)
data = {}
response = client.post(url, json.dumps(data),
HTTP_X_HUB_SIGNATURE="sha1=badbadbad",
content_type="application/json")
response_content = json.loads(response.content.decode("utf-8"))
assert response.status_code == 400
assert "Bad signature" in response_content["_error_message"]
def test_ok_signature(client):
project = f.ProjectFactory()
f.ProjectModulesConfigFactory(project=project, config={
"github": {
"secret": "tpnIwJDz4e"
}
})
url = reverse("github-hook-list")
url = "%s?project=%s" % (url, project.id)
data = {"test:": "data"}
response = client.post(url, json.dumps(data),
HTTP_X_HUB_SIGNATURE="sha1=3c8e83fdaa266f81c036ea0b71e98eb5e054581a",
content_type="application/json")
assert response.status_code == 204
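# --- Illustrative sketch (not part of the original tests) ---
# The X-Hub-Signature header used above presumably follows GitHub's standard webhook
# scheme: an HMAC-SHA1 of the raw request body keyed with the project's configured
# secret. A minimal way to derive such a header (an assumption for illustration):
import hashlib
import hmac

def github_signature(secret: str, body: bytes) -> str:
    """Return a GitHub-style 'sha1=<hexdigest>' signature for a webhook payload."""
    mac = hmac.new(secret.encode("utf-8"), msg=body, digestmod=hashlib.sha1)
    return "sha1=" + mac.hexdigest()

# e.g. github_signature("tpnIwJDz4e", json.dumps({"test:": "data"}).encode("utf-8"))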
def test_push_event_detected(client):
project = f.ProjectFactory()
url = reverse("github-hook-list")
url = "%s?project=%s" % (url, project.id)
data = {"commits": [
{"message": "test message"},
]}
GitHubViewSet._validate_signature = mock.Mock(return_value=True)
with mock.patch.object(event_hooks.PushEventHook, "process_event") as process_event_mock:
response = client.post(url, json.dumps(data),
HTTP_X_GITHUB_EVENT="push",
content_type="application/json")
assert process_event_mock.call_count == 1
assert response.status_code == 204
def test_push_event_issue_processing(client):
creation_status = f.IssueStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_issues"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.IssueStatusFactory(project=creation_status.project)
issue = f.IssueFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
bye!
""" % (issue.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(issue.project, payload)
ev_hook.process_event()
issue = Issue.objects.get(id=issue.id)
assert issue.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_task_processing(client):
creation_status = f.TaskStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_tasks"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.TaskStatusFactory(project=creation_status.project)
task = f.TaskFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
bye!
""" % (task.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(task.project, payload)
ev_hook.process_event()
task = Task.objects.get(id=task.id)
assert task.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_user_story_processing(client):
creation_status = f.UserStoryStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_us"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.UserStoryStatusFactory(project=creation_status.project)
user_story = f.UserStoryFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test TG-%s #%s ok
bye!
""" % (user_story.ref, new_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(user_story.project, payload)
ev_hook.process_event()
user_story = UserStory.objects.get(id=user_story.id)
assert user_story.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_processing_case_insensitive(client):
creation_status = f.TaskStatusFactory()
role = f.RoleFactory(project=creation_status.project, permissions=["view_tasks"])
f.MembershipFactory(project=creation_status.project, role=role, user=creation_status.project.owner)
new_status = f.TaskStatusFactory(project=creation_status.project)
task = f.TaskFactory.create(status=creation_status, project=creation_status.project, owner=creation_status.project.owner)
payload = {"commits": [
{"message": """test message
test tg-%s #%s ok
bye!
""" % (task.ref, new_status.slug.upper())},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(task.project, payload)
ev_hook.process_event()
task = Task.objects.get(id=task.id)
assert task.status.id == new_status.id
assert len(mail.outbox) == 1
def test_push_event_task_bad_processing_non_existing_ref(client):
issue_status = f.IssueStatusFactory()
payload = {"commits": [
{"message": """test message
test TG-6666666 #%s ok
bye!
""" % (issue_status.slug)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(issue_status.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "The referenced element doesn't exist"
assert len(mail.outbox) == 0
def test_push_event_us_bad_processing_non_existing_status(client):
user_story = f.UserStoryFactory.create()
payload = {"commits": [
{"message": """test message
test TG-%s #non-existing-slug ok
bye!
""" % (user_story.ref)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(user_story.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "The status doesn't exist"
assert len(mail.outbox) == 0
def test_push_event_bad_processing_non_existing_status(client):
issue = f.IssueFactory.create()
payload = {"commits": [
{"message": """test message
test TG-%s #non-existing-slug ok
bye!
""" % (issue.ref)},
]}
mail.outbox = []
ev_hook = event_hooks.PushEventHook(issue.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "The status doesn't exist"
assert len(mail.outbox) == 0
def test_issues_event_opened_issue(client):
issue = f.IssueFactory.create()
issue.project.default_issue_status = issue.status
issue.project.default_issue_type = issue.type
issue.project.default_severity = issue.severity
issue.project.default_priority = issue.priority
issue.project.save()
Membership.objects.create(user=issue.owner, project=issue.project, role=f.RoleFactory.create(project=issue.project), is_owner=True)
notify_policy = NotifyPolicy.objects.get(user=issue.owner, project=issue.project)
notify_policy.notify_level = NotifyLevel.watch
notify_policy.save()
payload = {
"action": "opened",
"issue": {
"title": "test-title",
"body": "test-body",
"html_url": "http://github.com/test/project/issues/11",
},
"assignee": {},
"label": {},
"repository": {
"html_url": "test",
},
}
mail.outbox = []
ev_hook = event_hooks.IssuesEventHook(issue.project, payload)
ev_hook.process_event()
assert Issue.objects.count() == 2
assert len(mail.outbox) == 1
def test_issues_event_other_than_opened_issue(client):
issue = f.IssueFactory.create()
issue.project.default_issue_status = issue.status
issue.project.default_issue_type = issue.type
issue.project.default_severity = issue.severity
issue.project.default_priority = issue.priority
issue.project.save()
payload = {
"action": "closed",
"issue": {
"title": "test-title",
"body": "test-body",
"html_url": "http://github.com/test/project/issues/11",
},
"assignee": {},
"label": {},
}
mail.outbox = []
ev_hook = event_hooks.IssuesEventHook(issue.project, payload)
ev_hook.process_event()
assert Issue.objects.count() == 1
assert len(mail.outbox) == 0
def test_issues_event_bad_issue(client):
issue = f.IssueFactory.create()
issue.project.default_issue_status = issue.status
issue.project.default_issue_type = issue.type
issue.project.default_severity = issue.severity
issue.project.default_priority = issue.priority
issue.project.save()
payload = {
"action": "opened",
"issue": {},
"assignee": {},
"label": {},
}
mail.outbox = []
ev_hook = event_hooks.IssuesEventHook(issue.project, payload)
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "Invalid issue information"
assert Issue.objects.count() == 1
assert len(mail.outbox) == 0
def test_issue_comment_event_on_existing_issue_task_and_us(client):
project = f.ProjectFactory()
role = f.RoleFactory(project=project, permissions=["view_tasks", "view_issues", "view_us"])
f.MembershipFactory(project=project, role=role, user=project.owner)
user = f.UserFactory()
issue = f.IssueFactory.create(external_reference=["github", "http://github.com/test/project/issues/11"], owner=project.owner, project=project)
take_snapshot(issue, user=user)
task = f.TaskFactory.create(external_reference=["github", "http://github.com/test/project/issues/11"], owner=project.owner, project=project)
take_snapshot(task, user=user)
us = f.UserStoryFactory.create(external_reference=["github", "http://github.com/test/project/issues/11"], owner=project.owner, project=project)
take_snapshot(us, user=user)
payload = {
"action": "created",
"issue": {
"html_url": "http://github.com/test/project/issues/11",
},
"comment": {
"body": "Test body",
},
"repository": {
"html_url": "test",
},
}
mail.outbox = []
assert get_history_queryset_by_model_instance(issue).count() == 0
assert get_history_queryset_by_model_instance(task).count() == 0
assert get_history_queryset_by_model_instance(us).count() == 0
ev_hook = event_hooks.IssueCommentEventHook(issue.project, payload)
ev_hook.process_event()
issue_history = get_history_queryset_by_model_instance(issue)
assert issue_history.count() == 1
assert issue_history[0].comment == "From GitHub:\n\nTest body"
task_history = get_history_queryset_by_model_instance(task)
assert task_history.count() == 1
assert task_history[0].comment == "From GitHub:\n\nTest body"
us_history = get_history_queryset_by_model_instance(us)
assert us_history.count() == 1
assert us_history[0].comment == "From GitHub:\n\nTest body"
assert len(mail.outbox) == 3
def test_issue_comment_event_on_not_existing_issue_task_and_us(client):
issue = f.IssueFactory.create(external_reference=["github", "10"])
take_snapshot(issue, user=issue.owner)
task = f.TaskFactory.create(project=issue.project, external_reference=["github", "10"])
take_snapshot(task, user=task.owner)
us = f.UserStoryFactory.create(project=issue.project, external_reference=["github", "10"])
take_snapshot(us, user=us.owner)
payload = {
"action": "created",
"issue": {
"html_url": "http://github.com/test/project/issues/11",
},
"comment": {
"body": "Test body",
},
"repository": {
"html_url": "test",
},
}
mail.outbox = []
assert get_history_queryset_by_model_instance(issue).count() == 0
assert get_history_queryset_by_model_instance(task).count() == 0
assert get_history_queryset_by_model_instance(us).count() == 0
ev_hook = event_hooks.IssueCommentEventHook(issue.project, payload)
ev_hook.process_event()
assert get_history_queryset_by_model_instance(issue).count() == 0
assert get_history_queryset_by_model_instance(task).count() == 0
assert get_history_queryset_by_model_instance(us).count() == 0
assert len(mail.outbox) == 0
def test_issues_event_bad_comment(client):
issue = f.IssueFactory.create(external_reference=["github", "10"])
take_snapshot(issue, user=issue.owner)
payload = {
"action": "other",
"issue": {},
"comment": {},
"repository": {
"html_url": "test",
},
}
ev_hook = event_hooks.IssueCommentEventHook(issue.project, payload)
mail.outbox = []
with pytest.raises(ActionSyntaxException) as excinfo:
ev_hook.process_event()
assert str(excinfo.value) == "Invalid issue comment information"
assert Issue.objects.count() == 1
assert len(mail.outbox) == 0
def test_api_get_project_modules(client):
project = f.create_project()
f.MembershipFactory(project=project, user=project.owner, is_owner=True)
url = reverse("projects-modules", args=(project.id,))
client.login(project.owner)
response = client.get(url)
assert response.status_code == 200
content = json.loads(response.content.decode("utf-8"))
assert "github" in content
assert content["github"]["secret"] != ""
assert content["github"]["webhooks_url"] != ""
def test_api_patch_project_modules(client):
project = f.create_project()
f.MembershipFactory(project=project, user=project.owner, is_owner=True)
url = reverse("projects-modules", args=(project.id,))
client.login(project.owner)
data = {
"github": {
"secret": "test_secret",
"url": "test_url",
}
}
response = client.patch(url, json.dumps(data), content_type="application/json")
assert response.status_code == 204
config = services.get_modules_config(project).config
assert "github" in config
assert config["github"]["secret"] == "test_secret"
assert config["github"]["webhooks_url"] != "test_url"
def test_replace_github_references():
assert event_hooks.replace_github_references("project-url", "#2") == "[GitHub#2](project-url/issues/2)"
assert event_hooks.replace_github_references("project-url", "#2 ") == "[GitHub#2](project-url/issues/2) "
assert event_hooks.replace_github_references("project-url", " #2 ") == " [GitHub#2](project-url/issues/2) "
assert event_hooks.replace_github_references("project-url", " #2") == " [GitHub#2](project-url/issues/2)"
assert event_hooks.replace_github_references("project-url", "#test") == "#test"
assert event_hooks.replace_github_references("project-url", None) == ""
|
19kestier/taiga-back
|
tests/integration/test_hooks_github.py
|
Python
|
agpl-3.0
| 16,224
|
#!/usr/bin/env python3
# Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
import json
import argparse
import os, sys
# Parses Elasticsearch documents into Vespa documents
# Example of usage: python ES_Vespa_parser.py my_index.json my_index_mapping.json
__author__ = 'henrhoi'
class ElasticSearchParser:
document_file = None
mapping_file = None
application_name = None
schemas = {}
path = ""
_all = True
all_mappings = {}
no_index = []
types = []
def __init__(self):
parser = argparse.ArgumentParser()
parser.add_argument("documents_path", help="location of file with documents to be parsed", type=str)
parser.add_argument("mappings_path", help="location of file with mappings", type=str)
parser.add_argument("--application_name", help="name of application", default="application_name", type=str)
args = parser.parse_args()
self.document_file = args.documents_path
self.mapping_file = args.mappings_path
self.application_name = args.application_name
def main(self):
self.path = os.getcwd() + "/" + self.application_name + "/"
try:
os.mkdir(self.path, 0o777)
print(" > Created folder '" + self.path + "'")
except OSError:
print(" > Folder '" + self.path + "' already existed")
try:
os.makedirs(self.path + "schemas/", 0o777)
print(" > Created folder '" + self.path + "schemas/" + "'")
except OSError:
print(" > Folder '" + self.path + "schemas/" + "' already existed")
self.parse()
self.createServices_xml()
self.createHosts_xml()
def getMapping(self, type):
unparsed_mapping_file = open(self.mapping_file, "r")
type_mapping = {}
for line in unparsed_mapping_file:
data = json.loads(line)
index = list(data.keys())[0]
mappings = data[index]["mappings"]["properties"]
# Checking if some fields could be no-index
try:
_all_enabled = data[index]["mappings"]["_all"]["enabled"]
if not _all_enabled:
self._all = False
print(" > Not all fields in the document type '" + type + "' are searchable. Edit " + self.path + "schemas/" + type + ".sd to control which fields are searchable")
except KeyError:
print(" > All fields in the document type '" + type + "' is searchable")
self.walk(mappings, type_mapping, "properties")
unparsed_mapping_file.close()
if type not in self.schemas:
self.schemas[type] = True
self.types.append(type)
self.createSchema(type, type_mapping)
# Adding mapping to global map with mappings
self.all_mappings[type] = type_mapping
return type_mapping
def parse(self):
file_path = self.path + "documents" + ".json"
unparsed_document_file = open(self.document_file, "r")
vespa_docs = open(file_path, "w")
for line in unparsed_document_file:
data = json.loads(line)
type = data["_type"]
parsed_data = {
"put": "id:"+self.application_name+":" + type + "::" + data["_id"],
"fields": {}
}
# Checking for already existing mapping for a type, if not create a new
if type in self.all_mappings:
mapping = self.all_mappings[type]
else:
mapping = self.getMapping(type)
for key, item in mapping.items():
try:
parsed_data["fields"][key] = data["_source"][key]
except KeyError:
continue
json.dump(parsed_data, vespa_docs)
vespa_docs.write("\n")
vespa_docs.close()
unparsed_document_file.close()
print(" > Parsed all documents '" + ", ".join(self.types) + "' at '" + file_path + "'")
def createSchema(self, type, type_mapping):
file_path = self.path + "schemas/" + type + ".sd"
new_sd = open(file_path, "w")
new_sd.write("search " + type + " {\n")
new_sd.write(" document " + type + " {\n")
for key, item in type_mapping.items():
# use a separate name so the document type passed in as 'type' is not overwritten
field_type = self.get_type(item)
if field_type == "nested":
print(" > SKIPPING FIELD " + key + ", this tool is not yet able to convert nested fields")
continue
new_sd.write(" field " + key + " type " + field_type + " {\n")
new_sd.write(" indexing: " + self.get_indexing(key, field_type) + "\n")
new_sd.write(" }\n")
new_sd.write(" }\n")
new_sd.write("}\n")
new_sd.close()
print(" > Created schema for '" + type + "' at '" + file_path + "'")
def createServices_xml(self):
file_path = self.path + "services.xml"
new_services = open(file_path, "w")
template = ("<?xml version='1.0' encoding='UTF-8'?>"
"<services version='1.0'>\n\n"
" <container id='default' version='1.0'>\n"
" <search/>\n"
" <document-api/>\n"
" <nodes>\n"
" <node hostalias='node1'/>\n"
" </nodes>\n"
" </container>\n\n"
" <content id='content' version='1.0'>\n"
" <redundancy>1</redundancy>\n"
" <search>\n"
" <visibility-delay>1.0</visibility-delay>\n"
" </search>\n"
" <documents>\n")
for i in range(0, len(self.types)):
template += " <document mode='index' type='" + self.types[i] + "'/>\n"
template += (" </documents>\n"
" <nodes>\n"
" <node hostalias='node1' distribution-key=\"0\"/>\n"
" </nodes>\n"
" <engine>\n"
" <proton>\n"
" <searchable-copies>1</searchable-copies>\n"
" </proton>\n"
" </engine>\n"
" </content>\n\n"
"</services>")
new_services.write(template)
new_services.close()
print(" > Created services.xml at '" + file_path + "'")
def createHosts_xml(self):
file_path = self.path + "hosts.xml"
new_hosts = open(file_path, "w")
template = ("<?xml version=\"1.0\" encoding=\"utf-8\" ?>\n"
"<hosts>\n"
" <host name=\"localhost\">\n"
" <alias>node1</alias>\n"
" </host>\n"
"</hosts>")
new_hosts.write(template)
new_hosts.close()
print(" > Created hosts.xml at '" + file_path + "'")
def get_type(self, type):
return {
"integer": "int",
"string": "string", # for compatability with older ES versions
"text": "string",
"keyword": "string",
"date": "string",
"long": "long",
"double": "double",
"boolean": "string",
"ip": "text",
"byte": "byte",
"float": "float",
"nested": "nested"
}[type]
def get_indexing(self, key, key_type):
if not self._all:
return "summary"
if key not in self.no_index:
if key_type == "string":
return "summary | index"
else:
return "summary | attribute"
return "summary"
def walk(self, node, mapping, parent):
for key, item in node.items():
if isinstance(item, dict):
self.walk(item, mapping, key)
elif key == "type":
mapping[parent] = item
elif key == "include_in_all":
if not item: # Field should not be searchable
self.no_index.append(parent)
elif key == "index" and parent != "properties":
if item == "no": # Field should not be searchable
self.no_index.append(parent)
if __name__ == '__main__':
ElasticSearchParser().main()
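# --- Illustrative example (not part of the original file) ---
# parse() above rewrites each Elasticsearch document line into a Vespa put operation.
# For a hypothetical input line
#   {"_index": "my_index", "_type": "tweet", "_id": "1",
#    "_source": {"message": "hello", "user": "alice"}}
# with a matching mapping, the generated documents.json line would look roughly like
#   {"put": "id:application_name:tweet::1",
#    "fields": {"message": "hello", "user": "alice"}}
# The index, type and field names here are made up for illustration.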
|
vespa-engine/vespa
|
config-model/src/main/python/ES_Vespa_parser.py
|
Python
|
apache-2.0
| 8,528
|
# Copyright: (c) 2013, James Cammarata <jcammarata@ansible.com>
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os.path
import re
import shutil
import textwrap
import time
import yaml
from jinja2 import BaseLoader, Environment, FileSystemLoader
from yaml.error import YAMLError
import ansible.constants as C
from ansible import context
from ansible.cli import CLI
from ansible.cli.arguments import option_helpers as opt_help
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import Galaxy, get_collections_galaxy_meta_info
from ansible.galaxy.api import GalaxyAPI
from ansible.galaxy.collection import (
build_collection,
install_collections,
publish_collection,
validate_collection_name,
validate_collection_path,
)
from ansible.galaxy.login import GalaxyLogin
from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken, NoTokenSentinel
from ansible.module_utils.ansible_release import __version__ as ansible_version
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils import six
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.playbook.role.requirement import RoleRequirement
from ansible.utils.display import Display
from ansible.utils.plugin_docs import get_versioned_doclink
display = Display()
urlparse = six.moves.urllib.parse.urlparse
class GalaxyCLI(CLI):
'''command to manage Ansible roles in shared repositories, the default of which is Ansible Galaxy *https://galaxy.ansible.com*.'''
SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url")
def __init__(self, args):
# Inject role into sys.argv[1] as a backwards compatibility step
if len(args) > 1 and args[1] not in ['-h', '--help', '--version'] and 'role' not in args and 'collection' not in args:
# TODO: Should we add a warning here and eventually deprecate the implicit role subcommand choice
# Remove this in Ansible 2.13 when we also remove -v as an option on the root parser for ansible-galaxy.
idx = 2 if args[1].startswith('-v') else 1
args.insert(idx, 'role')
self.api_servers = []
self.galaxy = None
super(GalaxyCLI, self).__init__(args)
def init_parser(self):
''' create an options parser for bin/ansible '''
super(GalaxyCLI, self).init_parser(
desc="Perform various Role and Collection related operations.",
)
# Common arguments that apply to more than 1 action
common = opt_help.argparse.ArgumentParser(add_help=False)
common.add_argument('-s', '--server', dest='api_server', help='The Galaxy API server URL')
common.add_argument('--token', '--api-key', dest='api_key',
help='The Ansible Galaxy API key which can be found at '
'https://galaxy.ansible.com/me/preferences. You can also use ansible-galaxy login to '
'retrieve this key or set the token for the GALAXY_SERVER_LIST entry.')
common.add_argument('-c', '--ignore-certs', action='store_true', dest='ignore_certs',
default=C.GALAXY_IGNORE_CERTS, help='Ignore SSL certificate validation errors.')
opt_help.add_verbosity_options(common)
force = opt_help.argparse.ArgumentParser(add_help=False)
force.add_argument('-f', '--force', dest='force', action='store_true', default=False,
help='Force overwriting an existing role or collection')
github = opt_help.argparse.ArgumentParser(add_help=False)
github.add_argument('github_user', help='GitHub username')
github.add_argument('github_repo', help='GitHub repository')
offline = opt_help.argparse.ArgumentParser(add_help=False)
offline.add_argument('--offline', dest='offline', default=False, action='store_true',
help="Don't query the galaxy API when creating roles")
default_roles_path = C.config.get_configuration_definition('DEFAULT_ROLES_PATH').get('default', '')
roles_path = opt_help.argparse.ArgumentParser(add_help=False)
roles_path.add_argument('-p', '--roles-path', dest='roles_path', type=opt_help.unfrack_path(pathsep=True),
default=C.DEFAULT_ROLES_PATH, action=opt_help.PrependListAction,
help='The path to the directory containing your roles. The default is the first '
'writable one configured via DEFAULT_ROLES_PATH: %s ' % default_roles_path)
default_collections_path = C.config.get_configuration_definition('COLLECTIONS_PATHS').get('default', '')
collections_path = opt_help.argparse.ArgumentParser(add_help=False)
collections_path.add_argument('-p', '--collections-path', dest='collections_path', type=opt_help.unfrack_path(pathsep=True),
default=C.COLLECTIONS_PATHS, action=opt_help.PrependListAction,
help='The path to the directory containing your collections. The default is the first '
'writable one configured via COLLECTIONS_PATHS: %s ' % default_collections_path)
# Add sub parser for the Galaxy role type (role or collection)
type_parser = self.parser.add_subparsers(metavar='TYPE', dest='type')
type_parser.required = True
# Add sub parser for the Galaxy collection actions
collection = type_parser.add_parser('collection', help='Manage an Ansible Galaxy collection.')
collection_parser = collection.add_subparsers(metavar='COLLECTION_ACTION', dest='action')
collection_parser.required = True
self.add_init_options(collection_parser, parents=[common, force])
self.add_build_options(collection_parser, parents=[common, force])
self.add_publish_options(collection_parser, parents=[common])
self.add_install_options(collection_parser, parents=[common, force])
# Add sub parser for the Galaxy role actions
role = type_parser.add_parser('role', help='Manage an Ansible Galaxy role.')
role_parser = role.add_subparsers(metavar='ROLE_ACTION', dest='action')
role_parser.required = True
self.add_init_options(role_parser, parents=[common, force, offline])
self.add_remove_options(role_parser, parents=[common, roles_path])
self.add_delete_options(role_parser, parents=[common, github])
self.add_list_options(role_parser, parents=[common, roles_path])
self.add_search_options(role_parser, parents=[common])
self.add_import_options(role_parser, parents=[common, github])
self.add_setup_options(role_parser, parents=[common, roles_path])
self.add_login_options(role_parser, parents=[common])
self.add_info_options(role_parser, parents=[common, roles_path, offline])
self.add_install_options(role_parser, parents=[common, force, roles_path])
def add_init_options(self, parser, parents=None):
galaxy_type = 'collection' if parser.metavar == 'COLLECTION_ACTION' else 'role'
init_parser = parser.add_parser('init', parents=parents,
help='Initialize new {0} with the base structure of a '
'{0}.'.format(galaxy_type))
init_parser.set_defaults(func=self.execute_init)
init_parser.add_argument('--init-path', dest='init_path', default='./',
help='The path in which the skeleton {0} will be created. The default is the '
'current working directory.'.format(galaxy_type))
init_parser.add_argument('--{0}-skeleton'.format(galaxy_type), dest='{0}_skeleton'.format(galaxy_type),
default=C.GALAXY_ROLE_SKELETON,
help='The path to a {0} skeleton that the new {0} should be based '
'upon.'.format(galaxy_type))
obj_name_kwargs = {}
if galaxy_type == 'collection':
obj_name_kwargs['type'] = validate_collection_name
init_parser.add_argument('{0}_name'.format(galaxy_type), help='{0} name'.format(galaxy_type.capitalize()),
**obj_name_kwargs)
if galaxy_type == 'role':
init_parser.add_argument('--type', dest='role_type', action='store', default='default',
help="Initialize using an alternate role type. Valid types include: 'container', "
"'apb' and 'network'.")
def add_remove_options(self, parser, parents=None):
remove_parser = parser.add_parser('remove', parents=parents, help='Delete roles from roles_path.')
remove_parser.set_defaults(func=self.execute_remove)
remove_parser.add_argument('args', help='Role(s)', metavar='role', nargs='+')
def add_delete_options(self, parser, parents=None):
delete_parser = parser.add_parser('delete', parents=parents,
help='Removes the role from Galaxy. It does not remove or alter the actual '
'GitHub repository.')
delete_parser.set_defaults(func=self.execute_delete)
def add_list_options(self, parser, parents=None):
list_parser = parser.add_parser('list', parents=parents,
help='Show the name and version of each role installed in the roles_path.')
list_parser.set_defaults(func=self.execute_list)
list_parser.add_argument('role', help='Role', nargs='?', metavar='role')
def add_search_options(self, parser, parents=None):
search_parser = parser.add_parser('search', parents=parents,
help='Search the Galaxy database by tags, platforms, author and multiple '
'keywords.')
search_parser.set_defaults(func=self.execute_search)
search_parser.add_argument('--platforms', dest='platforms', help='list of OS platforms to filter by')
search_parser.add_argument('--galaxy-tags', dest='galaxy_tags', help='list of galaxy tags to filter by')
search_parser.add_argument('--author', dest='author', help='GitHub username')
search_parser.add_argument('args', help='Search terms', metavar='searchterm', nargs='*')
def add_import_options(self, parser, parents=None):
import_parser = parser.add_parser('import', parents=parents, help='Import a role')
import_parser.set_defaults(func=self.execute_import)
import_parser.add_argument('--no-wait', dest='wait', action='store_false', default=True,
help="Don't wait for import results.")
import_parser.add_argument('--branch', dest='reference',
help='The name of a branch to import. Defaults to the repository\'s default branch '
'(usually master)')
import_parser.add_argument('--role-name', dest='role_name',
help='The name the role should have, if different than the repo name')
import_parser.add_argument('--status', dest='check_status', action='store_true', default=False,
help='Check the status of the most recent import request for given github_'
'user/github_repo.')
def add_setup_options(self, parser, parents=None):
setup_parser = parser.add_parser('setup', parents=parents,
help='Manage the integration between Galaxy and the given source.')
setup_parser.set_defaults(func=self.execute_setup)
setup_parser.add_argument('--remove', dest='remove_id', default=None,
help='Remove the integration matching the provided ID value. Use --list to see '
'ID values.')
setup_parser.add_argument('--list', dest="setup_list", action='store_true', default=False,
help='List all of your integrations.')
setup_parser.add_argument('source', help='Source')
setup_parser.add_argument('github_user', help='GitHub username')
setup_parser.add_argument('github_repo', help='GitHub repository')
setup_parser.add_argument('secret', help='Secret')
def add_login_options(self, parser, parents=None):
login_parser = parser.add_parser('login', parents=parents,
help="Login to api.github.com server in order to use ansible-galaxy role sub "
"command such as 'import', 'delete', 'publish', and 'setup'")
login_parser.set_defaults(func=self.execute_login)
login_parser.add_argument('--github-token', dest='token', default=None,
help='Identify with github token rather than username and password.')
def add_info_options(self, parser, parents=None):
info_parser = parser.add_parser('info', parents=parents, help='View more details about a specific role.')
info_parser.set_defaults(func=self.execute_info)
info_parser.add_argument('args', nargs='+', help='role', metavar='role_name[,version]')
def add_install_options(self, parser, parents=None):
galaxy_type = 'collection' if parser.metavar == 'COLLECTION_ACTION' else 'role'
args_kwargs = {}
if galaxy_type == 'collection':
args_kwargs['help'] = 'The collection(s) name or path/url to a tar.gz collection artifact. This is ' \
'mutually exclusive with --requirements-file.'
ignore_errors_help = 'Ignore errors during installation and continue with the next specified ' \
'collection. This will not ignore dependency conflict errors.'
else:
args_kwargs['help'] = 'Role name, URL or tar file'
ignore_errors_help = 'Ignore errors and continue with the next specified role.'
install_parser = parser.add_parser('install', parents=parents,
help='Install {0}(s) from file(s), URL(s) or Ansible '
'Galaxy'.format(galaxy_type))
install_parser.set_defaults(func=self.execute_install)
install_parser.add_argument('args', metavar='{0}_name'.format(galaxy_type), nargs='*', **args_kwargs)
install_parser.add_argument('-i', '--ignore-errors', dest='ignore_errors', action='store_true', default=False,
help=ignore_errors_help)
install_exclusive = install_parser.add_mutually_exclusive_group()
install_exclusive.add_argument('-n', '--no-deps', dest='no_deps', action='store_true', default=False,
help="Don't download {0}s listed as dependencies.".format(galaxy_type))
install_exclusive.add_argument('--force-with-deps', dest='force_with_deps', action='store_true', default=False,
help="Force overwriting an existing {0} and its "
"dependencies.".format(galaxy_type))
if galaxy_type == 'collection':
install_parser.add_argument('-p', '--collections-path', dest='collections_path',
default=C.COLLECTIONS_PATHS[0],
help='The path to the directory containing your collections.')
install_parser.add_argument('-r', '--requirements-file', dest='requirements',
help='A file containing a list of collections to be installed.')
else:
install_parser.add_argument('-r', '--role-file', dest='role_file',
help='A file containing a list of roles to be imported.')
install_parser.add_argument('-g', '--keep-scm-meta', dest='keep_scm_meta', action='store_true',
default=False,
help='Use tar instead of the scm archive option when packaging the role.')
def add_build_options(self, parser, parents=None):
build_parser = parser.add_parser('build', parents=parents,
help='Build an Ansible collection artifact that can be published to Ansible '
'Galaxy.')
build_parser.set_defaults(func=self.execute_build)
build_parser.add_argument('args', metavar='collection', nargs='*', default=('.',),
help='Path to the collection(s) directory to build. This should be the directory '
'that contains the galaxy.yml file. The default is the current working '
'directory.')
build_parser.add_argument('--output-path', dest='output_path', default='./',
help='The path in which the collection is built. The default is the current '
'working directory.')
def add_publish_options(self, parser, parents=None):
publish_parser = parser.add_parser('publish', parents=parents,
help='Publish a collection artifact to Ansible Galaxy.')
publish_parser.set_defaults(func=self.execute_publish)
publish_parser.add_argument('args', metavar='collection_path',
help='The path to the collection tarball to publish.')
publish_parser.add_argument('--no-wait', dest='wait', action='store_false', default=True,
help="Don't wait for import validation results.")
publish_parser.add_argument('--import-timeout', dest='import_timeout', type=int, default=0,
help="The time to wait for the collection import process to finish.")
def post_process_args(self, options):
options = super(GalaxyCLI, self).post_process_args(options)
display.verbosity = options.verbosity
return options
def run(self):
super(GalaxyCLI, self).run()
self.galaxy = Galaxy()
def server_config_def(section, key, required):
return {
'description': 'The %s of the %s Galaxy server' % (key, section),
'ini': [
{
'section': 'galaxy_server.%s' % section,
'key': key,
}
],
'env': [
{'name': 'ANSIBLE_GALAXY_SERVER_%s_%s' % (section.upper(), key.upper())},
],
'required': required,
}
server_def = [('url', True), ('username', False), ('password', False), ('token', False),
('auth_url', False)]
config_servers = []
# Need to filter out empty strings or non truthy values as an empty server list env var is equal to [''].
server_list = [s for s in C.GALAXY_SERVER_LIST or [] if s]
for server_key in server_list:
# Config definitions are looked up dynamically based on the C.GALAXY_SERVER_LIST entry. We look up the
# section [galaxy_server.<server>] for the values url, username, password, and token.
config_dict = dict((k, server_config_def(server_key, k, req)) for k, req in server_def)
defs = AnsibleLoader(yaml.safe_dump(config_dict)).get_single_data()
C.config.initialize_plugin_configuration_definitions('galaxy_server', server_key, defs)
server_options = C.config.get_plugin_options('galaxy_server', server_key)
# auth_url is used to create the token, but not directly by GalaxyAPI, so
# it doesn't need to be passed as kwarg to GalaxyApi
auth_url = server_options.pop('auth_url', None)
token_val = server_options['token'] or NoTokenSentinel
username = server_options['username']
# default case if no auth info is provided.
server_options['token'] = None
if username:
server_options['token'] = BasicAuthToken(username,
server_options['password'])
else:
if token_val:
if auth_url:
server_options['token'] = KeycloakToken(access_token=token_val,
auth_url=auth_url,
validate_certs=not context.CLIARGS['ignore_certs'])
else:
# The galaxy v1 / github / django / 'Token'
server_options['token'] = GalaxyToken(token=token_val)
config_servers.append(GalaxyAPI(self.galaxy, server_key, **server_options))
cmd_server = context.CLIARGS['api_server']
cmd_token = GalaxyToken(token=context.CLIARGS['api_key'])
if cmd_server:
# Cmd args take precedence over the config entry but first check if the arg was a name and use that config
# entry, otherwise create a new API entry for the server specified.
config_server = next((s for s in config_servers if s.name == cmd_server), None)
if config_server:
self.api_servers.append(config_server)
else:
self.api_servers.append(GalaxyAPI(self.galaxy, 'cmd_arg', cmd_server, token=cmd_token))
else:
self.api_servers = config_servers
# Default to C.GALAXY_SERVER if no servers were defined
if len(self.api_servers) == 0:
self.api_servers.append(GalaxyAPI(self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token))
context.CLIARGS['func']()
@property
def api(self):
return self.api_servers[0]
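# --- Illustrative example (not part of the original file) ---
# Given server_config_def() in run() above, a GALAXY_SERVER_LIST entry named "my_org"
# could be configured in ansible.cfg roughly as follows (host and token are placeholders):
#
#   [galaxy]
#   server_list = my_org
#
#   [galaxy_server.my_org]
#   url = https://galaxy.example.com/api/
#   token = <api token>
#
# or via environment variables such as ANSIBLE_GALAXY_SERVER_MY_ORG_URL and
# ANSIBLE_GALAXY_SERVER_MY_ORG_TOKEN, matching the ini/env definitions built above.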
def _parse_requirements_file(self, requirements_file, allow_old_format=True):
"""
Parses an Ansible requirements.yml file and returns all the roles and/or collections defined in it. There are two
requirements file formats:
# v1 (roles only)
- src: The source of the role, required if include is not set. Can be a Galaxy role name, or a URL to an SCM repo or tarball.
name: Downloads the role to the specified name; defaults to the name from Galaxy, or the name of the repo if src is a URL.
scm: If src is a URL, specify the SCM. Only git or hg are supported; defaults to git.
version: The version of the role to download. Can also be tag, commit, or branch name and defaults to master.
include: Path to additional requirements.yml files.
# v2 (roles and collections)
---
roles:
# Same as v1 format just under the roles key
collections:
- namespace.collection
- name: namespace.collection
version: version identifier, multiple identifiers are separated by ','
source: the URL or a predefined source name that relates to C.GALAXY_SERVER_LIST
:param requirements_file: The path to the requirements file.
:param allow_old_format: Will fail if a v1 requirements file is found and this is set to False.
:return: a dict containing roles and collections found in the requirements file.
"""
requirements = {
'roles': [],
'collections': [],
}
b_requirements_file = to_bytes(requirements_file, errors='surrogate_or_strict')
if not os.path.exists(b_requirements_file):
raise AnsibleError("The requirements file '%s' does not exist." % to_native(requirements_file))
display.vvv("Reading requirement file at '%s'" % requirements_file)
with open(b_requirements_file, 'rb') as req_obj:
try:
file_requirements = yaml.safe_load(req_obj)
except YAMLError as err:
raise AnsibleError(
"Failed to parse the requirements yml at '%s' with the following error:\n%s"
% (to_native(requirements_file), to_native(err)))
if file_requirements is None:
raise AnsibleError("No requirements found in file '%s'" % to_native(requirements_file))
def parse_role_req(requirement):
if "include" not in requirement:
role = RoleRequirement.role_yaml_parse(requirement)
display.vvv("found role %s in yaml file" % to_text(role))
if "name" not in role and "src" not in role:
raise AnsibleError("Must specify name or src for role")
return [GalaxyRole(self.galaxy, self.api, **role)]
else:
b_include_path = to_bytes(requirement["include"], errors="surrogate_or_strict")
if not os.path.isfile(b_include_path):
raise AnsibleError("Failed to find include requirements file '%s' in '%s'"
% (to_native(b_include_path), to_native(requirements_file)))
with open(b_include_path, 'rb') as f_include:
try:
return [GalaxyRole(self.galaxy, self.api, **r) for r in
(RoleRequirement.role_yaml_parse(i) for i in yaml.safe_load(f_include))]
except Exception as e:
raise AnsibleError("Unable to load data from include requirements file: %s %s"
% (to_native(requirements_file), to_native(e)))
if isinstance(file_requirements, list):
# Older format that contains only roles
if not allow_old_format:
raise AnsibleError("Expecting requirements file to be a dict with the key 'collections' that contains "
"a list of collections to install")
for role_req in file_requirements:
requirements['roles'] += parse_role_req(role_req)
else:
# Newer format with a collections and/or roles key
extra_keys = set(file_requirements.keys()).difference(set(['roles', 'collections']))
if extra_keys:
raise AnsibleError("Expecting only 'roles' and/or 'collections' as base keys in the requirements "
"file. Found: %s" % (to_native(", ".join(extra_keys))))
for role_req in file_requirements.get('roles', []):
requirements['roles'] += parse_role_req(role_req)
for collection_req in file_requirements.get('collections', []):
if isinstance(collection_req, dict):
req_name = collection_req.get('name', None)
if req_name is None:
raise AnsibleError("Collections requirement entry should contain the key name.")
req_version = collection_req.get('version', '*')
req_source = collection_req.get('source', None)
if req_source:
# Try and match up the requirement source with our list of Galaxy API servers defined in the
# config, otherwise create a server with that URL without any auth.
req_source = next(iter([a for a in self.api_servers if req_source in [a.name, a.api_server]]),
GalaxyAPI(self.galaxy, "explicit_requirement_%s" % req_name, req_source))
requirements['collections'].append((req_name, req_version, req_source))
else:
requirements['collections'].append((collection_req, '*', None))
return requirements
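# --- Illustrative example (not part of the original file) ---
# A v2 requirements file accepted by _parse_requirements_file() above could look like
# the following; the role/collection names and the server URL are placeholders:
#
#   roles:
#     - name: example_user.example_role
#       version: master
#   collections:
#     - my_namespace.my_collection            # shorthand entry, version defaults to '*'
#     - name: my_namespace.other_collection
#       version: '>=1.0.0'
#       source: https://galaxy.example.com/api/   # URL or a GALAXY_SERVER_LIST name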
@staticmethod
def exit_without_ignore(rc=1):
"""
Exits with the specified return code unless the
option --ignore-errors was specified
"""
if not context.CLIARGS['ignore_errors']:
raise AnsibleError('- you can use --ignore-errors to skip failed roles and finish processing the list.')
@staticmethod
def _display_role_info(role_info):
text = [u"", u"Role: %s" % to_text(role_info['name'])]
text.append(u"\tdescription: %s" % role_info.get('description', ''))
for k in sorted(role_info.keys()):
if k in GalaxyCLI.SKIP_INFO_KEYS:
continue
if isinstance(role_info[k], dict):
text.append(u"\t%s:" % (k))
for key in sorted(role_info[k].keys()):
if key in GalaxyCLI.SKIP_INFO_KEYS:
continue
text.append(u"\t\t%s: %s" % (key, role_info[k][key]))
else:
text.append(u"\t%s: %s" % (k, role_info[k]))
return u'\n'.join(text)
@staticmethod
def _resolve_path(path):
return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))
@staticmethod
def _get_skeleton_galaxy_yml(template_path, inject_data):
with open(to_bytes(template_path, errors='surrogate_or_strict'), 'rb') as template_obj:
meta_template = to_text(template_obj.read(), errors='surrogate_or_strict')
galaxy_meta = get_collections_galaxy_meta_info()
required_config = []
optional_config = []
for meta_entry in galaxy_meta:
config_list = required_config if meta_entry.get('required', False) else optional_config
value = inject_data.get(meta_entry['key'], None)
if not value:
meta_type = meta_entry.get('type', 'str')
if meta_type == 'str':
value = ''
elif meta_type == 'list':
value = []
elif meta_type == 'dict':
value = {}
meta_entry['value'] = value
config_list.append(meta_entry)
link_pattern = re.compile(r"L\(([^)]+),\s+([^)]+)\)")
const_pattern = re.compile(r"C\(([^)]+)\)")
def comment_ify(v):
if isinstance(v, list):
v = ". ".join([l.rstrip('.') for l in v])
v = link_pattern.sub(r"\1 <\2>", v)
v = const_pattern.sub(r"'\1'", v)
return textwrap.fill(v, width=117, initial_indent="# ", subsequent_indent="# ", break_on_hyphens=False)
def to_yaml(v):
return yaml.safe_dump(v, default_flow_style=False).rstrip()
env = Environment(loader=BaseLoader)
env.filters['comment_ify'] = comment_ify
env.filters['to_yaml'] = to_yaml
template = env.from_string(meta_template)
meta_value = template.render({'required_config': required_config, 'optional_config': optional_config})
return meta_value
############################
# execute actions
############################
def execute_role(self):
"""
Perform the action on an Ansible Galaxy role. Must be combined with a further action like delete/install/init
as listed below.
"""
# To satisfy doc build
pass
def execute_collection(self):
"""
Perform the action on an Ansible Galaxy collection. Must be combined with a further action like init/install as
listed below.
"""
# To satisfy doc build
pass
def execute_build(self):
"""
Build an Ansible Galaxy collection artifact that can be stored in a central repository like Ansible Galaxy.
By default, this command builds from the current working directory. You can optionally pass in the
collection input path (where the ``galaxy.yml`` file is).
"""
force = context.CLIARGS['force']
output_path = GalaxyCLI._resolve_path(context.CLIARGS['output_path'])
b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
if not os.path.exists(b_output_path):
os.makedirs(b_output_path)
elif os.path.isfile(b_output_path):
raise AnsibleError("- the output collection directory %s is a file - aborting" % to_native(output_path))
for collection_path in context.CLIARGS['args']:
collection_path = GalaxyCLI._resolve_path(collection_path)
build_collection(collection_path, output_path, force)
def execute_init(self):
"""
Creates the skeleton framework of a role or collection that complies with the Galaxy metadata format.
Requires a role or collection name. The collection name must be in the format ``<namespace>.<collection>``.
"""
galaxy_type = context.CLIARGS['type']
init_path = context.CLIARGS['init_path']
force = context.CLIARGS['force']
obj_skeleton = context.CLIARGS['{0}_skeleton'.format(galaxy_type)]
obj_name = context.CLIARGS['{0}_name'.format(galaxy_type)]
inject_data = dict(
description='your {0} description'.format(galaxy_type),
ansible_plugin_list_dir=get_versioned_doclink('plugins/plugins.html'),
)
if galaxy_type == 'role':
inject_data.update(dict(
author='your name',
company='your company (optional)',
license='license (GPL-2.0-or-later, MIT, etc)',
role_name=obj_name,
role_type=context.CLIARGS['role_type'],
issue_tracker_url='http://example.com/issue/tracker',
repository_url='http://example.com/repository',
documentation_url='http://docs.example.com',
homepage_url='http://example.com',
min_ansible_version=ansible_version[:3], # x.y
))
obj_path = os.path.join(init_path, obj_name)
elif galaxy_type == 'collection':
namespace, collection_name = obj_name.split('.', 1)
inject_data.update(dict(
namespace=namespace,
collection_name=collection_name,
version='1.0.0',
readme='README.md',
authors=['your name <example@domain.com>'],
license=['GPL-2.0-or-later'],
repository='http://example.com/repository',
documentation='http://docs.example.com',
homepage='http://example.com',
issues='http://example.com/issue/tracker',
build_ignore=[],
))
obj_path = os.path.join(init_path, namespace, collection_name)
b_obj_path = to_bytes(obj_path, errors='surrogate_or_strict')
if os.path.exists(b_obj_path):
if os.path.isfile(obj_path):
raise AnsibleError("- the path %s already exists, but is a file - aborting" % to_native(obj_path))
elif not force:
raise AnsibleError("- the directory %s already exists. "
"You can use --force to re-initialize this directory,\n"
"however it will reset any main.yml files that may have\n"
"been modified there already." % to_native(obj_path))
if obj_skeleton is not None:
own_skeleton = False
skeleton_ignore_expressions = C.GALAXY_ROLE_SKELETON_IGNORE
else:
own_skeleton = True
obj_skeleton = self.galaxy.default_role_skeleton_path
skeleton_ignore_expressions = ['^.*/.git_keep$']
obj_skeleton = os.path.expanduser(obj_skeleton)
skeleton_ignore_re = [re.compile(x) for x in skeleton_ignore_expressions]
if not os.path.exists(obj_skeleton):
raise AnsibleError("- the skeleton path '{0}' does not exist, cannot init {1}".format(
to_native(obj_skeleton), galaxy_type)
)
template_env = Environment(loader=FileSystemLoader(obj_skeleton))
# create role directory
if not os.path.exists(b_obj_path):
os.makedirs(b_obj_path)
for root, dirs, files in os.walk(obj_skeleton, topdown=True):
rel_root = os.path.relpath(root, obj_skeleton)
rel_dirs = rel_root.split(os.sep)
rel_root_dir = rel_dirs[0]
if galaxy_type == 'collection':
# A collection can contain templates in playbooks/*/templates and roles/*/templates
in_templates_dir = rel_root_dir in ['playbooks', 'roles'] and 'templates' in rel_dirs
else:
in_templates_dir = rel_root_dir == 'templates'
dirs[:] = [d for d in dirs if not any(r.match(d) for r in skeleton_ignore_re)]
for f in files:
filename, ext = os.path.splitext(f)
if any(r.match(os.path.join(rel_root, f)) for r in skeleton_ignore_re):
continue
elif galaxy_type == 'collection' and own_skeleton and rel_root == '.' and f == 'galaxy.yml.j2':
# Special use case for galaxy.yml.j2 in our own default collection skeleton. We build the options
# dynamically which requires special options to be set.
# The templated data's keys must match the key name but the inject data contains collection_name
# instead of name. We just make a copy and change the key back to name for this file.
template_data = inject_data.copy()
template_data['name'] = template_data.pop('collection_name')
meta_value = GalaxyCLI._get_skeleton_galaxy_yml(os.path.join(root, rel_root, f), template_data)
b_dest_file = to_bytes(os.path.join(obj_path, rel_root, filename), errors='surrogate_or_strict')
with open(b_dest_file, 'wb') as galaxy_obj:
galaxy_obj.write(to_bytes(meta_value, errors='surrogate_or_strict'))
elif ext == ".j2" and not in_templates_dir:
src_template = os.path.join(rel_root, f)
dest_file = os.path.join(obj_path, rel_root, filename)
template_env.get_template(src_template).stream(inject_data).dump(dest_file, encoding='utf-8')
else:
f_rel_path = os.path.relpath(os.path.join(root, f), obj_skeleton)
shutil.copyfile(os.path.join(root, f), os.path.join(obj_path, f_rel_path))
for d in dirs:
b_dir_path = to_bytes(os.path.join(obj_path, rel_root, d), errors='surrogate_or_strict')
if not os.path.exists(b_dir_path):
os.makedirs(b_dir_path)
display.display("- %s %s was created successfully" % (galaxy_type.title(), obj_name))
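    # Editor's note (illustrative, hypothetical names): typical invocations that end
    # up in this method are `ansible-galaxy role init my_role` and
    # `ansible-galaxy collection init my_namespace.my_collection`.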
def execute_info(self):
"""
prints out detailed information about an installed role as well as info available from the galaxy API.
"""
roles_path = context.CLIARGS['roles_path']
data = ''
for role in context.CLIARGS['args']:
role_info = {'path': roles_path}
gr = GalaxyRole(self.galaxy, self.api, role)
install_info = gr.install_info
if install_info:
if 'version' in install_info:
install_info['installed_version'] = install_info['version']
del install_info['version']
role_info.update(install_info)
remote_data = False
if not context.CLIARGS['offline']:
remote_data = self.api.lookup_role_by_name(role, False)
if remote_data:
role_info.update(remote_data)
if gr.metadata:
role_info.update(gr.metadata)
req = RoleRequirement()
role_spec = req.role_yaml_parse({'role': role})
if role_spec:
role_info.update(role_spec)
data = self._display_role_info(role_info)
# FIXME: This is broken in both 1.9 and 2.0 as
# _display_role_info() always returns something
if not data:
data = u"\n- the role %s was not found" % role
self.pager(data)
def execute_install(self):
"""
        Install one or more roles (``ansible-galaxy role install``), or one or more collections (``ansible-galaxy collection install``).
You can pass in a list (roles or collections) or use the file
option listed below (these are mutually exclusive). If you pass in a list, it
can be a name (which will be downloaded via the galaxy API and github), or it can be a local tar archive file.
"""
if context.CLIARGS['type'] == 'collection':
collections = context.CLIARGS['args']
force = context.CLIARGS['force']
output_path = context.CLIARGS['collections_path']
ignore_certs = context.CLIARGS['ignore_certs']
ignore_errors = context.CLIARGS['ignore_errors']
requirements_file = context.CLIARGS['requirements']
no_deps = context.CLIARGS['no_deps']
force_deps = context.CLIARGS['force_with_deps']
if collections and requirements_file:
raise AnsibleError("The positional collection_name arg and --requirements-file are mutually exclusive.")
elif not collections and not requirements_file:
raise AnsibleError("You must specify a collection name or a requirements file.")
if requirements_file:
requirements_file = GalaxyCLI._resolve_path(requirements_file)
requirements = self._parse_requirements_file(requirements_file, allow_old_format=False)['collections']
else:
requirements = []
for collection_input in collections:
requirement = None
if os.path.isfile(to_bytes(collection_input, errors='surrogate_or_strict')) or \
urlparse(collection_input).scheme.lower() in ['http', 'https']:
# Arg is a file path or URL to a collection
name = collection_input
else:
name, dummy, requirement = collection_input.partition(':')
requirements.append((name, requirement or '*', None))
output_path = GalaxyCLI._resolve_path(output_path)
collections_path = C.COLLECTIONS_PATHS
if len([p for p in collections_path if p.startswith(output_path)]) == 0:
display.warning("The specified collections path '%s' is not part of the configured Ansible "
"collections paths '%s'. The installed collection won't be picked up in an Ansible "
"run." % (to_text(output_path), to_text(":".join(collections_path))))
output_path = validate_collection_path(output_path)
b_output_path = to_bytes(output_path, errors='surrogate_or_strict')
if not os.path.exists(b_output_path):
os.makedirs(b_output_path)
install_collections(requirements, output_path, self.api_servers, (not ignore_certs), ignore_errors,
no_deps, force, force_deps)
return 0
role_file = context.CLIARGS['role_file']
if not context.CLIARGS['args'] and role_file is None:
            # the user needs to specify either --role-file or a single user/role name
raise AnsibleOptionsError("- you must specify a user/role name or a roles file")
no_deps = context.CLIARGS['no_deps']
force_deps = context.CLIARGS['force_with_deps']
force = context.CLIARGS['force'] or force_deps
roles_left = []
if role_file:
if not (role_file.endswith('.yaml') or role_file.endswith('.yml')):
raise AnsibleError("Invalid role requirements file, it must end with a .yml or .yaml extension")
roles_left = self._parse_requirements_file(role_file)['roles']
else:
# roles were specified directly, so we'll just go out grab them
# (and their dependencies, unless the user doesn't want us to).
for rname in context.CLIARGS['args']:
role = RoleRequirement.role_yaml_parse(rname.strip())
roles_left.append(GalaxyRole(self.galaxy, self.api, **role))
for role in roles_left:
            # when both a roles file and role names were given, only process the roles whose names were passed as arguments
if role_file and context.CLIARGS['args'] and role.name not in context.CLIARGS['args']:
display.vvv('Skipping role %s' % role.name)
continue
display.vvv('Processing role %s ' % role.name)
# query the galaxy API for the role data
if role.install_info is not None:
if role.install_info['version'] != role.version or force:
if force:
display.display('- changing role %s from %s to %s' %
(role.name, role.install_info['version'], role.version or "unspecified"))
role.remove()
else:
display.warning('- %s (%s) is already installed - use --force to change version to %s' %
(role.name, role.install_info['version'], role.version or "unspecified"))
continue
else:
if not force:
display.display('- %s is already installed, skipping.' % str(role))
continue
try:
installed = role.install()
except AnsibleError as e:
display.warning(u"- %s was NOT installed successfully: %s " % (role.name, to_text(e)))
self.exit_without_ignore()
continue
# install dependencies, if we want them
if not no_deps and installed:
if not role.metadata:
display.warning("Meta file %s is empty. Skipping dependencies." % role.path)
else:
role_dependencies = role.metadata.get('dependencies') or []
for dep in role_dependencies:
display.debug('Installing dep %s' % dep)
dep_req = RoleRequirement()
dep_info = dep_req.role_yaml_parse(dep)
dep_role = GalaxyRole(self.galaxy, self.api, **dep_info)
if '.' not in dep_role.name and '.' not in dep_role.src and dep_role.scm is None:
# we know we can skip this, as it's not going to
# be found on galaxy.ansible.com
continue
if dep_role.install_info is None:
if dep_role not in roles_left:
display.display('- adding dependency: %s' % to_text(dep_role))
roles_left.append(dep_role)
else:
display.display('- dependency %s already pending installation.' % dep_role.name)
else:
if dep_role.install_info['version'] != dep_role.version:
if force_deps:
                                    display.display('- changing dependent role %s from %s to %s' %
(dep_role.name, dep_role.install_info['version'], dep_role.version or "unspecified"))
dep_role.remove()
roles_left.append(dep_role)
else:
display.warning('- dependency %s (%s) from role %s differs from already installed version (%s), skipping' %
(to_text(dep_role), dep_role.version, role.name, dep_role.install_info['version']))
else:
if force_deps:
roles_left.append(dep_role)
else:
display.display('- dependency %s is already installed, skipping.' % dep_role.name)
if not installed:
display.warning("- %s was NOT installed successfully." % role.name)
self.exit_without_ignore()
return 0
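    # Editor's note (illustrative sketch of the requirements file consumed above;
    # the names are hypothetical). _parse_requirements_file() returns a dict keyed
    # by 'roles' and 'collections':
    #
    #   # requirements.yml
    #   roles:
    #     - name: my_namespace.my_role
    #   collections:
    #     - name: my_namespace.my_collection
    #       version: '1.0.0'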
def execute_remove(self):
"""
removes the list of roles passed as arguments from the local system.
"""
if not context.CLIARGS['args']:
raise AnsibleOptionsError('- you must specify at least one role to remove.')
for role_name in context.CLIARGS['args']:
role = GalaxyRole(self.galaxy, self.api, role_name)
try:
if role.remove():
display.display('- successfully removed %s' % role_name)
else:
display.display('- %s is not installed, skipping.' % role_name)
except Exception as e:
raise AnsibleError("Failed to remove role %s: %s" % (role_name, to_native(e)))
return 0
def execute_list(self):
"""
lists the roles installed on the local system or matches a single role passed as an argument.
"""
def _display_role(gr):
install_info = gr.install_info
version = None
if install_info:
version = install_info.get("version", None)
if not version:
version = "(unknown version)"
display.display("- %s, %s" % (gr.name, version))
if context.CLIARGS['role']:
# show the requested role, if it exists
name = context.CLIARGS['role']
gr = GalaxyRole(self.galaxy, self.api, name)
if gr.metadata:
display.display('# %s' % os.path.dirname(gr.path))
_display_role(gr)
else:
display.display("- the role %s was not found" % name)
else:
# show all valid roles in the roles_path directory
roles_path = context.CLIARGS['roles_path']
path_found = False
warnings = []
for path in roles_path:
role_path = os.path.expanduser(path)
if not os.path.exists(role_path):
warnings.append("- the configured path %s does not exist." % role_path)
continue
elif not os.path.isdir(role_path):
                warnings.append("- the configured path %s exists, but it is not a directory." % role_path)
continue
display.display('# %s' % role_path)
path_files = os.listdir(role_path)
path_found = True
for path_file in path_files:
gr = GalaxyRole(self.galaxy, self.api, path_file, path=path)
if gr.metadata:
_display_role(gr)
for w in warnings:
display.warning(w)
if not path_found:
raise AnsibleOptionsError("- None of the provided paths was usable. Please specify a valid path with --roles-path")
return 0
def execute_publish(self):
"""
Publish a collection into Ansible Galaxy. Requires the path to the collection tarball to publish.
"""
collection_path = GalaxyCLI._resolve_path(context.CLIARGS['args'])
wait = context.CLIARGS['wait']
timeout = context.CLIARGS['import_timeout']
publish_collection(collection_path, self.api, wait, timeout)
def execute_search(self):
''' searches for roles on the Ansible Galaxy server'''
page_size = 1000
search = None
if context.CLIARGS['args']:
search = '+'.join(context.CLIARGS['args'])
if not search and not context.CLIARGS['platforms'] and not context.CLIARGS['galaxy_tags'] and not context.CLIARGS['author']:
raise AnsibleError("Invalid query. At least one search term, platform, galaxy tag or author must be provided.")
response = self.api.search_roles(search, platforms=context.CLIARGS['platforms'],
tags=context.CLIARGS['galaxy_tags'], author=context.CLIARGS['author'], page_size=page_size)
if response['count'] == 0:
display.display("No roles match your search.", color=C.COLOR_ERROR)
return True
data = [u'']
if response['count'] > page_size:
data.append(u"Found %d roles matching your search. Showing first %s." % (response['count'], page_size))
else:
data.append(u"Found %d roles matching your search:" % response['count'])
max_len = []
for role in response['results']:
max_len.append(len(role['username'] + '.' + role['name']))
name_len = max(max_len)
format_str = u" %%-%ds %%s" % name_len
data.append(u'')
data.append(format_str % (u"Name", u"Description"))
data.append(format_str % (u"----", u"-----------"))
for role in response['results']:
data.append(format_str % (u'%s.%s' % (role['username'], role['name']), role['description']))
data = u'\n'.join(data)
self.pager(data)
return True
def execute_login(self):
"""
        verify the user's identity via GitHub and retrieve an auth token from Ansible Galaxy.
"""
# Authenticate with github and retrieve a token
if context.CLIARGS['token'] is None:
if C.GALAXY_TOKEN:
github_token = C.GALAXY_TOKEN
else:
login = GalaxyLogin(self.galaxy)
github_token = login.create_github_token()
else:
github_token = context.CLIARGS['token']
galaxy_response = self.api.authenticate(github_token)
if context.CLIARGS['token'] is None and C.GALAXY_TOKEN is None:
# Remove the token we created
login.remove_github_token()
# Store the Galaxy token
token = GalaxyToken()
token.set(galaxy_response['token'])
display.display("Successfully logged into Galaxy as %s" % galaxy_response['username'])
return 0
def execute_import(self):
""" used to import a role into Ansible Galaxy """
colors = {
'INFO': 'normal',
'WARNING': C.COLOR_WARN,
'ERROR': C.COLOR_ERROR,
'SUCCESS': C.COLOR_OK,
'FAILED': C.COLOR_ERROR,
}
github_user = to_text(context.CLIARGS['github_user'], errors='surrogate_or_strict')
github_repo = to_text(context.CLIARGS['github_repo'], errors='surrogate_or_strict')
if context.CLIARGS['check_status']:
task = self.api.get_import_task(github_user=github_user, github_repo=github_repo)
else:
# Submit an import request
task = self.api.create_import_task(github_user, github_repo,
reference=context.CLIARGS['reference'],
role_name=context.CLIARGS['role_name'])
if len(task) > 1:
# found multiple roles associated with github_user/github_repo
display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user, github_repo),
color='yellow')
display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED)
for t in task:
display.display('%s.%s' % (t['summary_fields']['role']['namespace'], t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
color=C.COLOR_CHANGED)
return 0
# found a single role as expected
display.display("Successfully submitted import request %d" % task[0]['id'])
if not context.CLIARGS['wait']:
display.display("Role name: %s" % task[0]['summary_fields']['role']['name'])
display.display("Repo: %s/%s" % (task[0]['github_user'], task[0]['github_repo']))
if context.CLIARGS['check_status'] or context.CLIARGS['wait']:
# Get the status of the import
msg_list = []
finished = False
while not finished:
task = self.api.get_import_task(task_id=task[0]['id'])
for msg in task[0]['summary_fields']['task_messages']:
if msg['id'] not in msg_list:
display.display(msg['message_text'], color=colors[msg['message_type']])
msg_list.append(msg['id'])
if task[0]['state'] in ['SUCCESS', 'FAILED']:
finished = True
else:
time.sleep(10)
return 0
    def execute_setup(self):
        """ Set up an integration from GitHub or Travis for Ansible Galaxy roles"""
if context.CLIARGS['setup_list']:
# List existing integration secrets
secrets = self.api.list_secrets()
if len(secrets) == 0:
# None found
display.display("No integrations found.")
return 0
display.display(u'\n' + "ID Source Repo", color=C.COLOR_OK)
display.display("---------- ---------- ----------", color=C.COLOR_OK)
for secret in secrets:
display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'],
secret['github_repo']), color=C.COLOR_OK)
return 0
if context.CLIARGS['remove_id']:
# Remove a secret
self.api.remove_secret(context.CLIARGS['remove_id'])
            display.display("Secret removed. Integrations using this secret will no longer work.", color=C.COLOR_OK)
return 0
source = context.CLIARGS['source']
github_user = context.CLIARGS['github_user']
github_repo = context.CLIARGS['github_repo']
secret = context.CLIARGS['secret']
resp = self.api.add_secret(source, github_user, github_repo, secret)
display.display("Added integration for %s %s/%s" % (resp['source'], resp['github_user'], resp['github_repo']))
return 0
def execute_delete(self):
""" Delete a role from Ansible Galaxy. """
github_user = context.CLIARGS['github_user']
github_repo = context.CLIARGS['github_repo']
resp = self.api.delete_role(github_user, github_repo)
if len(resp['deleted_roles']) > 1:
display.display("Deleted the following roles:")
display.display("ID User Name")
display.display("------ --------------- ----------")
for role in resp['deleted_roles']:
display.display("%-8s %-15s %s" % (role.id, role.namespace, role.name))
display.display(resp['status'])
return True
| anryko/ansible | lib/ansible/cli/galaxy.py | Python | gpl-3.0 | 60,443 |
from django.db import models
from sigma_core.models.custom_field import CustomField
from sigma_core.models.group_field import GroupField
class Group(models.Model):
#########################
# Constants and choices #
#########################
##########
# Fields #
##########
name = models.CharField(max_length=254)
is_private = models.BooleanField(default=False)
description = models.TextField(blank=True)
is_protected = models.BooleanField(default=False) # if True, the Group cannot be deleted
    can_anyone_join = models.BooleanField(default=False)  # if True, people don't need an invitation
need_validation_to_join = models.BooleanField(default=False)
# Related fields:
# - invited_users (model User)
# - memberships (model GroupMember)
# - users (model User)
# - fields (model GroupField)
# - subgroups (model Group)
# - group_parents (model Group)
    # TODO: Determine whether the 'memberships' field needs to be retrieved every time or not...
@property
def subgroups_list(self):
return [ga.subgroup for ga in self.subgroups.filter(validated=True).select_related('subgroup')]
@property
def group_parents_list(self):
return [ga.parent_group for ga in self.group_parents.filter(validated=True).select_related('parent_group')]
@property
def members_count(self):
return self.memberships.count()
#################
# Model methods #
#################
    # NOTE: this accessor shares its name with the 'can_anyone_join' field declared
    # above; on model instances the field value set in __init__ shadows this method.
    def can_anyone_join(self):
        return self.can_anyone_join
def __str__(self): # pragma: no cover
return self.name
###############
# Permissions #
###############
# Perms for admin site
def has_perm(self, perm, obj=None): # pragma: no cover
return True
def has_module_perms(self, app_label): # pragma: no cover
return True
class GroupAcknowledgment(models.Model):
subgroup = models.ForeignKey(Group, related_name='group_parents')
parent_group = models.ForeignKey(Group, related_name='subgroups')
validated = models.BooleanField(default=False)
delegate_admin = models.BooleanField(default=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
def __str__(self): # pragma: no cover
if self.validated:
return "Group %s acknowledged by Group %s" % (self.subgroup.__str__(), self.parent_group.__str__())
else:
            return "Group %s awaiting acknowledgment by Group %s since %s" % (self.subgroup.__str__(), self.parent_group.__str__(), self.created.strftime("%Y-%m-%d %H:%M"))
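# Editor's note (illustrative sketch, hypothetical data): a validated
# GroupAcknowledgment is what makes a group show up in subgroups_list above.
#
#   parent = Group.objects.create(name="Parent")
#   child = Group.objects.create(name="Child")
#   GroupAcknowledgment.objects.create(subgroup=child, parent_group=parent, validated=True)
#   assert child in parent.subgroups_list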
| ProjetSigma/backend | sigma_core/models/group.py | Python | agpl-3.0 | 2,650 |
fib = [1, 2]
while fib[-1] < 4000000:
fib.append(fib[-1] + fib[-2])
print(sum((el for el in fib if el % 2 == 0)))
| maxiimou/project_euler | exercice2.py | Python | lgpl-3.0 | 119 |
import sys
import redis
import logging
import threading
from polling_engine.src import config
from polling_engine.src import core
class Listener(threading.Thread):
def __init__(self, rds, channels):
threading.Thread.__init__(self)
self.redis = rds
self.pubsub = self.redis.pubsub()
self.pubsub.subscribe(channels)
def work(self, item):
print "received {data} from {channel}".format(**item)
try:
params = core.normalise(item['data'])
message = core.Message(str(params))
message.is_valid()
if message.valid:
sentiment = message.get_sentiment()
if sentiment:
message.update(sentiment)
message.count(sentiment)
else:
print "invalid message - %s" % item
except Exception, err:
print "ERROR: %s -- %s" % (str(err), item)
def run(self):
for item in self.pubsub.listen():
if item['data'] == config.REDIS_CONFIG['KILL_CODE']:
self.pubsub.unsubscribe()
print self, "unsubscribed and finished"
break
else:
####
# use twisted deferreds here
####
self.work(item)
if __name__ == "__main__":
rds = redis.Redis(**config.REDIS_CONFIG['CONN'])
client = Listener(rds, [config.REDIS_CONFIG['CHANNEL']])
client.start()
| pythias-io/polling_engine | src/main.py | Python | apache-2.0 | 1,498 |
import random
from axelrod import Actions
def random_choice(p=0.5):
"""
Return 'C' with probability `p`, else return 'D'
Emulates Python's random.choice(['C', 'D']) since it is not consistent
across Python 2.7 to Python 3.4"""
r = random.random()
if r < p:
return Actions.C
return Actions.D
def randrange(a, b):
    """Python 2 / 3 compatible randrange. Returns a random integer uniformly
    between a and b (excluding b)"""
c = b - a
r = c * random.random()
return a + int(r)
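# Editor's note (illustrative): random_choice(0.9) returns Actions.C roughly 90% of
# the time, and randrange(2, 5) returns one of 2, 3 or 4.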
| marcharper/Axelrod | axelrod/random_.py | Python | mit | 528 |
from bcbio.ngsalign.bowtie2 import filter_multimappers
def clean_chipseq_alignment(data):
config = data["config"]
aligner = config["algorithm"].get("aligner", None)
assert aligner == "bowtie2", "ChIP-seq only supported for bowtie2."
if aligner == "bowtie2":
work_bam = filter_multimappers(data["work_bam"])
data["work_bam"] = work_bam
return [[data]]
| hjanime/bcbio-nextgen | bcbio/chipseq/__init__.py | Python | mit | 393 |
from django.contrib.auth.models import User
from django.db import models
from django.core.urlresolvers import reverse_lazy
import datetime
MAX_CHAR_LENGTH = 50
COUNTY_OPTIONS = (
("Alamance", "Alamance"),
("Forsyth", "Forsyth"),
("Guilford", "Guilford"),
("Montgomery", "Montgomery"),
("Randolph", "Randolph"),
("Rockingham", "Rockingham"),
("Other", "Other")
)
JA_PROGRAM_OPTIONS = (
("JA Ourselves", "JA Ourselves"),
("JA Our Families", "JA Our Families"),
("JA Our Community", "JA Our Community"),
("JA Our City", "JA Our City"),
("JA Our Region", "JA Our Region"),
("JA Our Nation", "JA Our Nation"),
("JA Economics for Success", "JA Economics for Success"),
("JA Global Marketplace - Kit Based", "JA Global Marketplace - Kit Based"),
("JA Global Marketplace - Blended Model", "JA Global Marketplace - Blended Model"),
("JA It's My Future", "JA It's My Future"),
("JA Be Entrepreneurial", "JA Be Entrepreneurial"),
("JA Career", "JA Career"),
("JA Company Program - Blended Model", "JA Company Program - Blended Model"),
("JA Economics", "JA Economics"),
("JA Exploring Economics", "JA Exploring Economics"),
("JA Job Shadow", "JA Job Shadow"),
("JA Personal Financial", "JA Personal Financial"),
("JA Personal Financial - Blended Model", "JA Personal Financial - Blended Model"),
("JA Titan", "JA Titan")
)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
company = models.CharField(max_length=MAX_CHAR_LENGTH, blank=True, null=True)
profile_pic = models.ImageField(upload_to="uploads/profile_pics/", null=True, default=None)
activation_key = models.CharField(max_length=40, default="")
key_expires = models.DateTimeField(default=datetime.date.today)
featured = models.BooleanField(default=False)
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse_lazy('ja_social:user_view', kwargs={'pk': self.pk})
class VolunteerRecord(models.Model):
user_profile = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
county = models.CharField(max_length=MAX_CHAR_LENGTH, choices=COUNTY_OPTIONS)
school_name = models.CharField(max_length=MAX_CHAR_LENGTH, blank=False, null=False)
JA_program = models.CharField(max_length=MAX_CHAR_LENGTH, blank=False, null=False, default="", choices=JA_PROGRAM_OPTIONS)
hours = models.PositiveIntegerField(blank=False, null=False, default=0)
date = models.DateField(blank=False, null=False, default=datetime.date.today)
verified = models.BooleanField(blank=False, null=False, default=False)
def __str__(self):
return "%s: %s %s (%d)" % (self.user_profile.user.get_full_name(), self.school_name, self.JA_program, self.hours)
class PhotoRecord(models.Model):
user_profile = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
photo = models.ImageField(upload_to="uploads/gallery_pictures/", blank=False, null=False)
date_uploaded = models.DateField(default=datetime.date.today, null=False)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
models.signals.post_save.connect(create_user_profile, sender=User)
| Bryconc/JA-Social | ja_social/models.py | Python | mit | 3,327 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import testcommon
import backtrader as bt
import backtrader.indicators as btind
chkdatas = 1
chkvals = [
['101.654375', '99.052251', '101.904990'],
]
chkmin = 13
chkind = btind.MomentumOscillator
def test_run(main=False):
datas = [testcommon.getdata(i) for i in range(chkdatas)]
testcommon.runtest(datas,
testcommon.TestStrategy,
main=main,
plot=main,
chkind=chkind,
chkmin=chkmin,
chkvals=chkvals)
if __name__ == '__main__':
test_run(main=True)
| nicoddemus/backtrader | tests/test_ind_momentumoscillator.py | Python | gpl-3.0 | 1,621 |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('misc',parent_package,top_path)
config.add_data_files('*.dat')
config.add_data_dir('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| WarrenWeckesser/scipy | scipy/misc/setup.py | Python | bsd-3-clause | 375 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
def allSame(s):
return not any(filter(lambda x: x != s[0], s))
def hasDigit(s):
return any(char.isdigit() for char in s)
def getVersion(data):
"""
Parse version from changelog written in RST format.
"""
data = data.splitlines()
return next((
v
for v, u in zip(data, data[1:]) # v = version, u = underline
if len(v) == len(u) and allSame(u) and hasDigit(v) and "." in v
))
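# Editor's note (illustrative): for a changelog fragment such as
#
#   1.2.1
#   =====
#   - bug fixes
#
# getVersion() returns "1.2.1" -- the first heading whose underline matches its
# length and repeats a single character, and which contains a digit and a dot.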
| edeposit/edeposit.amqp.models | docs/__init__.py | Python | mit | 520 |
# -*- coding: utf-8 -*-
# (c) 2017 Diagram Software S.L.
# (c) 2017 Consultoría Informática Studio 73 S.L.
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, models, fields, _
class L10nEsAeatSii(models.Model):
_name = 'l10n.es.aeat.sii'
name = fields.Char(string="Name")
state = fields.Selection([
('draft', 'Draft'),
('active', 'Active')
], string="State", default="draft")
file = fields.Binary(string="File", required=True)
folder = fields.Char(string="Folder Name", required=True)
date_start = fields.Date(string="Start Date")
date_end = fields.Date(string="End Date")
public_key = fields.Char(string="Public Key", readonly=True)
private_key = fields.Char(string="Private Key", readonly=True)
company_id = fields.Many2one(
comodel_name="res.company",
string="Compañía",
required=True,
default=lambda self: self.env.user.company_id.id
)
@api.multi
def load_password_wizard(self):
self.ensure_one()
return {
'type': 'ir.actions.act_window',
'name': _('Insert Password'),
'res_model': 'l10n.es.aeat.sii.password',
'view_mode': 'form',
'view_type': 'form',
'views': [(False, 'form')],
'target': 'new',
}
@api.multi
def action_active(self):
self.ensure_one()
other_configs = self.search([('id', '!=', self.id),
('company_id', '=', self.company_id.id)])
for config_id in other_configs:
config_id.state = 'draft'
self.state = 'active'
| acysos/odoo-addons | l10n_es_aeat_sii/models/aeat_sii.py | Python | agpl-3.0 | 1,679 |
import os
import commands
import sys
import json
from optparse import OptionParser
global CROSSWALK_VERSION
with open("../../tools/VERSION", "rt") as pkg_version_file:
pkg_version_raw = pkg_version_file.read()
pkg_version_file.close()
pkg_version_json = json.loads(pkg_version_raw)
CROSSWALK_VERSION = pkg_version_json["main-version"]
try:
usage = "Usage: ./test.py -u [http://host/XWalkRuntimeLib.apk]"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-u",
"--url",
dest="url",
help="specify the url, e.g. http://host/XWalkRuntimeLib.apk")
global BUILD_PARAMETERS
(BUILD_PARAMETERS, args) = opts_parser.parse_args()
except Exception as e:
print "Got wrong options: %s, exit ..." % e
sys.exit(1)
if not BUILD_PARAMETERS.url:
print "Please add the -u parameter for the url of XWalkRuntimeLib.apk"
sys.exit(1)
version_parts = CROSSWALK_VERSION.split('.')
if len(version_parts) < 4:
print "The crosswalk version is not configured exactly!"
sys.exit(1)
versionType = version_parts[3]
if versionType == '0':
username = commands.getoutput("echo $USER")
repository_aar_path = "/home/%s/.m2/repository/org/xwalk/xwalk_shared_library/%s/" \
"xwalk_shared_library-%s.aar" % \
(username, CROSSWALK_VERSION, CROSSWALK_VERSION)
repository_pom_path = "/home/%s/.m2/repository/org/xwalk/xwalk_shared_library/%s/" \
"xwalk_shared_library-%s.pom" % \
(username, CROSSWALK_VERSION, CROSSWALK_VERSION)
if not os.path.exists(repository_aar_path) or not os.path.exists(repository_pom_path):
wget_cmd = "wget https://download.01.org/crosswalk/releases/crosswalk/" \
"android/canary/%s/crosswalk-shared-%s.aar" % \
(CROSSWALK_VERSION, CROSSWALK_VERSION)
install_cmd = "mvn install:install-file -DgroupId=org.xwalk " \
"-DartifactId=xwalk_shared_library -Dversion=%s -Dpackaging=aar " \
"-Dfile=crosswalk-shared-%s.aar -DgeneratePom=true" % \
(CROSSWALK_VERSION, CROSSWALK_VERSION)
os.system(wget_cmd)
os.system(install_cmd)
library_url = BUILD_PARAMETERS.url
library_url = library_url.replace("/", "\\/")
if os.path.exists("SharedModeLibraryDownload"):
os.system("rm -rf SharedModeLibraryDownload")
os.system("cordova create SharedModeLibraryDownload com.example.sharedModeLibraryDownload SharedModeLibraryDownload")
os.chdir("./SharedModeLibraryDownload")
os.system('sed -i "s/<widget/<widget android-activityName=\\"SharedModeLibraryDownload\\"/g" config.xml')
os.system('sed -i "s/<\/widget>/ <allow-navigation href=\\"*\\" \/>\\n<\/widget>/g" config.xml')
os.system("cordova platform add android")
add_plugin_cmd = "cordova plugin add ../../../tools/cordova-plugin-crosswalk-webview" \
" --variable XWALK_VERSION=\"%s\" --variable XWALK_MODE=\"shared\"" % CROSSWALK_VERSION
print add_plugin_cmd
os.system(add_plugin_cmd)
os.system('sed -i "s/android:supportsRtl=\\"true\\">/android:supportsRtl=\\"true\\">\\n <meta-data android:name=\\"xwalk_apk_url\\" android:value=\\"' + library_url + '\\" \\/>/g" platforms/android/AndroidManifest.xml')
os.system("cordova build android")
os.system("cordova run")
lsstatus = commands.getstatusoutput("ls ./platforms/android/build/outputs/apk/*.apk")
if lsstatus[0] == 0:
print "Build Package Successfully"
else:
print "Build Package Error"
pmstatus = commands.getstatusoutput("adb shell pm list packages |grep com.example.sharedModeLibraryDownload")
if pmstatus[0] == 0:
print "Package Name Consistent"
else:
print "Package Name Inconsistent"
| JianfengXu/crosswalk-test-suite | usecase/usecase-cordova-android-tests/samples/SharedModeLibraryDownload4.x/res/test.py | Python | bsd-3-clause | 3,685 |
#!/usr/bin/python
import kudzu
devices = kudzu.probe (kudzu.CLASS_MOUSE,
kudzu.BUS_UNSPEC,
kudzu.PROBE_ONE);
for device in devices:
# (dev, driver, desc) = device
print device
| fullstory-morgue/kudzu-kanotix | testit.py | Python | gpl-2.0 | 232 |
"""
Copyright (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import socket
import logging
import struct
_COLLAPSE_TLOGS = 0x14
_SET_INTERVAL = 0x17
_DOWNLOAD_DB = 0x1b
_OPTIMIZE_DB = 0x25
_DEFRAG_DB = 0x26
_DROP_MASTER = 0x30
_FLUSH_STORE = 0x42
_COPY_DB_TO_HEAD = 0x44
_MAGIC = 0xb1ff0000
_VERSION = 0x00000001
def _int_to(i):
r = struct.pack("I", i)
return r
def _int_from(buff,pos):
r = struct.unpack_from("I",buff, pos)
return r[0], pos + 4
def _int64_from(buff,pos):
r = struct.unpack_from("Q",buff,pos)
return r[0], pos + 8
def _string_to(s):
size = len(s)
r = struct.pack("I%ds" % size, size, s)
return r
def _prologue(clusterId, sock):
m = _int_to(_MAGIC)
m += _int_to(_VERSION)
m += _string_to(clusterId)
sock.sendall(m)
def _receive_all(sock,n):
todo = n
r = ""
while todo:
chunk = sock.recv(todo)
if chunk == "" :
raise RuntimeError("Not enough data on socket. Aborting...")
todo -= len(chunk)
r += chunk
return r
def _receive_int(sock):
sizes = _receive_all(sock,4)
i,_ = _int_from(sizes,0)
return i
def _receive_int64(sock):
buf = _receive_all(sock, 8)
i64,_ = _int64_from(buf,0)
return i64
def _receive_string(sock):
size = _receive_int(sock)
s = _receive_all(sock,size)
return s
def check_error_code(s):
rc = _receive_int(s)
if rc:
msg = _receive_string(s)
raise Exception(rc, msg)
def make_socket(ip, port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sa = (ip, port)
s.connect(sa)
return s
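# Editor's note (illustrative, assumes a little-endian platform): the wire framing
# produced by the helpers above.
#
#   _int_to(5)        -> '\x05\x00\x00\x00'          (4-byte unsigned int)
#   _string_to("ab")  -> '\x02\x00\x00\x00ab'        (length-prefixed string)
#
# _prologue() sends _MAGIC, then _VERSION, then the cluster id, in that order.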
| Incubaid/arakoon | pylabs/extensions/arakoon_ext/server/RemoteControlProtocol.py | Python | apache-2.0 | 2,133 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.networkconnectivity_v1.services.hub_service import pagers
from google.cloud.networkconnectivity_v1.types import common
from google.cloud.networkconnectivity_v1.types import hub
from google.cloud.networkconnectivity_v1.types import hub as gcn_hub
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import HubServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import HubServiceGrpcAsyncIOTransport
from .client import HubServiceClient
class HubServiceAsyncClient:
"""Network Connectivity Center is a hub-and-spoke abstraction
for network connectivity management in Google Cloud. It reduces
operational complexity through a simple, centralized
connectivity management model.
"""
_client: HubServiceClient
DEFAULT_ENDPOINT = HubServiceClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = HubServiceClient.DEFAULT_MTLS_ENDPOINT
hub_path = staticmethod(HubServiceClient.hub_path)
parse_hub_path = staticmethod(HubServiceClient.parse_hub_path)
instance_path = staticmethod(HubServiceClient.instance_path)
parse_instance_path = staticmethod(HubServiceClient.parse_instance_path)
interconnect_attachment_path = staticmethod(
HubServiceClient.interconnect_attachment_path
)
parse_interconnect_attachment_path = staticmethod(
HubServiceClient.parse_interconnect_attachment_path
)
network_path = staticmethod(HubServiceClient.network_path)
parse_network_path = staticmethod(HubServiceClient.parse_network_path)
spoke_path = staticmethod(HubServiceClient.spoke_path)
parse_spoke_path = staticmethod(HubServiceClient.parse_spoke_path)
vpn_tunnel_path = staticmethod(HubServiceClient.vpn_tunnel_path)
parse_vpn_tunnel_path = staticmethod(HubServiceClient.parse_vpn_tunnel_path)
common_billing_account_path = staticmethod(
HubServiceClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
HubServiceClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(HubServiceClient.common_folder_path)
parse_common_folder_path = staticmethod(HubServiceClient.parse_common_folder_path)
common_organization_path = staticmethod(HubServiceClient.common_organization_path)
parse_common_organization_path = staticmethod(
HubServiceClient.parse_common_organization_path
)
common_project_path = staticmethod(HubServiceClient.common_project_path)
parse_common_project_path = staticmethod(HubServiceClient.parse_common_project_path)
common_location_path = staticmethod(HubServiceClient.common_location_path)
parse_common_location_path = staticmethod(
HubServiceClient.parse_common_location_path
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
HubServiceAsyncClient: The constructed client.
"""
return HubServiceClient.from_service_account_info.__func__(HubServiceAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
HubServiceAsyncClient: The constructed client.
"""
return HubServiceClient.from_service_account_file.__func__(HubServiceAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
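    # Editor's note (illustrative, hypothetical key path):
    #
    #   client = HubServiceAsyncClient.from_service_account_file("service-account.json")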
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return HubServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> HubServiceTransport:
"""Returns the transport used by the client instance.
Returns:
HubServiceTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(HubServiceClient).get_transport_class, type(HubServiceClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, HubServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the hub service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.HubServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = HubServiceClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_hubs(
self,
request: Union[hub.ListHubsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListHubsAsyncPager:
r"""Lists hubs in a given project.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_list_hubs():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.ListHubsRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_hubs(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.ListHubsRequest, dict]):
The request object. Request for
[HubService.ListHubs][google.cloud.networkconnectivity.v1.HubService.ListHubs]
method.
parent (:class:`str`):
Required. The parent resource's name.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.services.hub_service.pagers.ListHubsAsyncPager:
Response for
[HubService.ListHubs][google.cloud.networkconnectivity.v1.HubService.ListHubs]
method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.ListHubsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_hubs,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListHubsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_hub(
self,
request: Union[hub.GetHubRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> hub.Hub:
r"""Gets details about the specified hub.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_get_hub():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.GetHubRequest(
name="name_value",
)
# Make the request
response = client.get_hub(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.GetHubRequest, dict]):
The request object. Request for
[HubService.GetHub][google.cloud.networkconnectivity.v1.HubService.GetHub]
method.
name (:class:`str`):
Required. The name of the hub
resource to get.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.types.Hub:
A hub is a collection of spokes. A
single hub can contain spokes from
multiple regions. However, if any of a
hub's spokes use the data transfer
feature, the resources associated with
those spokes must all reside in the same
VPC network. Spokes that do not use data
transfer can be associated with any VPC
network in your project.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.GetHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_hub,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_hub(
self,
request: Union[gcn_hub.CreateHubRequest, dict] = None,
*,
parent: str = None,
hub: gcn_hub.Hub = None,
hub_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a new hub in the specified project.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_create_hub():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.CreateHubRequest(
parent="parent_value",
hub_id="hub_id_value",
)
# Make the request
operation = client.create_hub(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.CreateHubRequest, dict]):
The request object. Request for
[HubService.CreateHub][google.cloud.networkconnectivity.v1.HubService.CreateHub]
method.
parent (:class:`str`):
Required. The parent resource.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
hub (:class:`google.cloud.networkconnectivity_v1.types.Hub`):
Required. The initial values for a
new hub.
This corresponds to the ``hub`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
hub_id (:class:`str`):
Required. A unique identifier for the
hub.
This corresponds to the ``hub_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Hub` A hub is a collection of spokes. A single hub can contain spokes from
multiple regions. However, if any of a hub's spokes
use the data transfer feature, the resources
associated with those spokes must all reside in the
same VPC network. Spokes that do not use data
transfer can be associated with any VPC network in
your project.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, hub, hub_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcn_hub.CreateHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if hub is not None:
request.hub = hub
if hub_id is not None:
request.hub_id = hub_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_hub,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gcn_hub.Hub,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def update_hub(
self,
request: Union[gcn_hub.UpdateHubRequest, dict] = None,
*,
hub: gcn_hub.Hub = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the description and/or labels of the
specified hub.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_update_hub():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.UpdateHubRequest(
)
# Make the request
operation = client.update_hub(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.UpdateHubRequest, dict]):
The request object. Request for
[HubService.UpdateHub][google.cloud.networkconnectivity.v1.HubService.UpdateHub]
method.
hub (:class:`google.cloud.networkconnectivity_v1.types.Hub`):
Required. The state that the hub
should be in after the update.
This corresponds to the ``hub`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Optional. In the case of an update to an existing hub,
field mask is used to specify the fields to be
overwritten. The fields specified in the update_mask are
relative to the resource, not the full request. A field
is overwritten if it is in the mask. If the user does
not provide a mask, then all fields are overwritten.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Hub` A hub is a collection of spokes. A single hub can contain spokes from
multiple regions. However, if any of a hub's spokes
use the data transfer feature, the resources
associated with those spokes must all reside in the
same VPC network. Spokes that do not use data
transfer can be associated with any VPC network in
your project.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([hub, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = gcn_hub.UpdateHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if hub is not None:
request.hub = hub
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_hub,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("hub.name", request.hub.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
gcn_hub.Hub,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def delete_hub(
self,
request: Union[hub.DeleteHubRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes the specified hub.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_delete_hub():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.DeleteHubRequest(
name="name_value",
)
# Make the request
operation = client.delete_hub(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.DeleteHubRequest, dict]):
The request object. The request for
[HubService.DeleteHub][google.cloud.networkconnectivity.v1.HubService.DeleteHub].
name (:class:`str`):
Required. The name of the hub to
delete.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.DeleteHubRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_hub,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def list_spokes(
self,
request: Union[hub.ListSpokesRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListSpokesAsyncPager:
r"""Lists the spokes in the specified project and
location.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_list_spokes():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.ListSpokesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_spokes(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.ListSpokesRequest, dict]):
The request object. The request for
[HubService.ListSpokes][google.cloud.networkconnectivity.v1.HubService.ListSpokes].
parent (:class:`str`):
Required. The parent resource.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.services.hub_service.pagers.ListSpokesAsyncPager:
The response for
[HubService.ListSpokes][google.cloud.networkconnectivity.v1.HubService.ListSpokes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.ListSpokesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_spokes,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListSpokesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
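    # Illustrative note (an addition, not generated sample code): because this
    # coroutine returns a ListSpokesAsyncPager, callers typically iterate the
    # result with ``async for`` rather than the plain ``for`` shown in the
    # docstring above. The parent value here is a placeholder.
    #
    #   pager = await client.list_spokes(parent="projects/my-project/locations/global")
    #   async for spoke in pager:
    #       print(spoke.name)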
async def get_spoke(
self,
request: Union[hub.GetSpokeRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> hub.Spoke:
r"""Gets details about the specified spoke.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_get_spoke():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.GetSpokeRequest(
name="name_value",
)
# Make the request
response = client.get_spoke(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.GetSpokeRequest, dict]):
The request object. The request for
[HubService.GetSpoke][google.cloud.networkconnectivity.v1.HubService.GetSpoke].
name (:class:`str`):
Required. The name of the spoke
resource.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.networkconnectivity_v1.types.Spoke:
A spoke represents a connection between your Google Cloud network resources
and a non-Google-Cloud network.
When you create a spoke, you associate it with a hub.
You must also identify a value for exactly one of the
following fields:
- linked_vpn_tunnels
- linked_interconnect_attachments
- linked_router_appliance_instances
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.GetSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_spoke,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_spoke(
self,
request: Union[hub.CreateSpokeRequest, dict] = None,
*,
parent: str = None,
spoke: hub.Spoke = None,
spoke_id: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a spoke in the specified project and
location.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_create_spoke():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.CreateSpokeRequest(
parent="parent_value",
spoke_id="spoke_id_value",
)
# Make the request
operation = client.create_spoke(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.CreateSpokeRequest, dict]):
The request object. The request for
[HubService.CreateSpoke][google.cloud.networkconnectivity.v1.HubService.CreateSpoke].
parent (:class:`str`):
Required. The parent resource.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
spoke (:class:`google.cloud.networkconnectivity_v1.types.Spoke`):
Required. The initial values for a
new spoke.
This corresponds to the ``spoke`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
spoke_id (:class:`str`):
Required. Unique id for the spoke to
create.
This corresponds to the ``spoke_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Spoke` A spoke represents a connection between your Google Cloud network resources
and a non-Google-Cloud network.
When you create a spoke, you associate it with a hub.
You must also identify a value for exactly one of the
following fields:
- linked_vpn_tunnels
- linked_interconnect_attachments
- linked_router_appliance_instances
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, spoke, spoke_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.CreateSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if spoke is not None:
request.spoke = spoke
if spoke_id is not None:
request.spoke_id = spoke_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_spoke,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
hub.Spoke,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def update_spoke(
self,
request: Union[hub.UpdateSpokeRequest, dict] = None,
*,
spoke: hub.Spoke = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates the parameters of the specified spoke.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_update_spoke():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.UpdateSpokeRequest(
)
# Make the request
operation = client.update_spoke(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.UpdateSpokeRequest, dict]):
The request object. Request for
[HubService.UpdateSpoke][google.cloud.networkconnectivity.v1.HubService.UpdateSpoke]
method.
spoke (:class:`google.cloud.networkconnectivity_v1.types.Spoke`):
Required. The state that the spoke
should be in after the update.
This corresponds to the ``spoke`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Optional. In the case of an update to an existing spoke,
field mask is used to specify the fields to be
overwritten. The fields specified in the update_mask are
relative to the resource, not the full request. A field
is overwritten if it is in the mask. If the user does
not provide a mask, then all fields are overwritten.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.networkconnectivity_v1.types.Spoke` A spoke represents a connection between your Google Cloud network resources
and a non-Google-Cloud network.
When you create a spoke, you associate it with a hub.
You must also identify a value for exactly one of the
following fields:
- linked_vpn_tunnels
- linked_interconnect_attachments
- linked_router_appliance_instances
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([spoke, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.UpdateSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if spoke is not None:
request.spoke = spoke
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_spoke,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("spoke.name", request.spoke.name),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
hub.Spoke,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def delete_spoke(
self,
request: Union[hub.DeleteSpokeRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes the specified spoke.
.. code-block:: python
from google.cloud import networkconnectivity_v1
def sample_delete_spoke():
# Create a client
client = networkconnectivity_v1.HubServiceClient()
# Initialize request argument(s)
request = networkconnectivity_v1.DeleteSpokeRequest(
name="name_value",
)
# Make the request
operation = client.delete_spoke(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
Args:
request (Union[google.cloud.networkconnectivity_v1.types.DeleteSpokeRequest, dict]):
The request object. The request for
[HubService.DeleteSpoke][google.cloud.networkconnectivity.v1.HubService.DeleteSpoke].
name (:class:`str`):
Required. The name of the spoke to
delete.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = hub.DeleteSpokeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_spoke,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=common.OperationMetadata,
)
# Done; return the response.
return response
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-networkconnectivity",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("HubServiceAsyncClient",)
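# Illustrative usage sketch (an addition, not part of the generated client).
# It assumes Application Default Credentials are available and that the spoke
# name below is replaced with a real resource; both are assumptions, not
# something this module documents.
if __name__ == "__main__":
    import asyncio

    async def _demo_get_spoke():
        # Use the client as an async context manager so the transport is closed.
        async with HubServiceAsyncClient() as client:
            spoke = await client.get_spoke(
                name="projects/my-project/locations/us-central1/spokes/my-spoke"
            )
            print(spoke.name)

    asyncio.run(_demo_get_spoke())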
|
googleapis/python-network-connectivity
|
google/cloud/networkconnectivity_v1/services/hub_service/async_client.py
|
Python
|
apache-2.0
| 55,234
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('virtualfolder', '0003_drop_virtualfolder_ordering'),
]
operations = [
migrations.RemoveField(
model_name='virtualfolder',
name='is_browsable',
),
migrations.AddField(
model_name='virtualfolder',
name='is_public',
field=models.BooleanField(default=True, help_text='Whether this virtual folder is public or not.', verbose_name='Is public?'),
preserve_default=True,
),
]
|
Avira/pootle
|
pootle/apps/virtualfolder/migrations/0004_rename_is_browsable_to_is_public.py
|
Python
|
gpl-3.0
| 664
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
from docs_server_utils import ToUnicode
from file_system import FileNotFoundError
from future import Future
from path_util import AssertIsDirectory, AssertIsFile, ToDirectory
from third_party.json_schema_compiler import json_parse
from third_party.json_schema_compiler.memoize import memoize
from third_party.motemplate import Motemplate
_CACHEABLE_FUNCTIONS = set()
_SINGLE_FILE_FUNCTIONS = set()
def _GetUnboundFunction(fn):
'''Functions bound to an object are separate from the unbound
  definition. This causes issues when checking for cache membership,
so always get the unbound function, if possible.
'''
return getattr(fn, 'im_func', fn)
def Cache(fn):
'''A decorator which can be applied to the compilation function
passed to CompiledFileSystem.Create, indicating that file/list data
should be cached.
This decorator should be listed first in any list of decorators, along
with the SingleFile decorator below.
'''
_CACHEABLE_FUNCTIONS.add(_GetUnboundFunction(fn))
return fn
def SingleFile(fn):
'''A decorator which can be optionally applied to the compilation function
passed to CompiledFileSystem.Create, indicating that the function only
needs access to the file which is given in the function's callback. When
this is the case some optimisations can be done.
Note that this decorator must be listed first in any list of decorators to
have any effect.
'''
_SINGLE_FILE_FUNCTIONS.add(_GetUnboundFunction(fn))
return fn
def Unicode(fn):
'''A decorator which can be optionally applied to the compilation function
passed to CompiledFileSystem.Create, indicating that the function processes
the file's data as Unicode text.
'''
# The arguments passed to fn can be (self, path, data) or (path, data). In
# either case the last argument is |data|, which should be converted to
# Unicode.
def convert_args(args):
args = list(args)
args[-1] = ToUnicode(args[-1])
return args
return lambda *args: fn(*convert_args(args))
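# Illustrative sketch (an addition): the three decorators above are meant to be
# composed around the compilation function handed to Factory.Create, mirroring
# the ForJson helper further down. The owner class and category names here are
# placeholders.
#
#   compiled_fs = factory.Create(
#       file_system,
#       Cache(SingleFile(Unicode(lambda path, text: text.splitlines()))),
#       SomeCacheOwner,
#       category='lines')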
class _CacheEntry(object):
def __init__(self, cache_data, version):
self.cache_data = cache_data
self.version = version
class CompiledFileSystem(object):
'''This class caches FileSystem data that has been processed.
'''
class Factory(object):
'''A class to build a CompiledFileSystem backed by |file_system|.
'''
def __init__(self, object_store_creator):
self._object_store_creator = object_store_creator
def Create(self, file_system, compilation_function, cls, category=None):
'''Creates a CompiledFileSystem view over |file_system| that populates
its cache by calling |compilation_function| with (path, data), where
|data| is the data that was fetched from |path| in |file_system|.
The namespace for the compiled file system is derived similar to
ObjectStoreCreator: from |cls| along with an optional |category|.
'''
assert isinstance(cls, type)
assert not cls.__name__[0].islower() # guard against non-class types
full_name = [cls.__name__, file_system.GetIdentity()]
if category is not None:
full_name.append(category)
def create_object_store(my_category):
# The read caches can start populated (start_empty=False) because file
# updates are picked up by the stat - but only if the compilation
# function is affected by a single file. If the compilation function is
# affected by other files (e.g. compiling a list of APIs available to
# extensions may be affected by both a features file and the list of
# files in the API directory) then this optimisation won't work.
return self._object_store_creator.Create(
CompiledFileSystem,
category='/'.join(full_name + [my_category]),
start_empty=compilation_function not in _SINGLE_FILE_FUNCTIONS)
return CompiledFileSystem(file_system,
compilation_function,
create_object_store('file'),
create_object_store('list'))
@memoize
def ForJson(self, file_system):
'''A CompiledFileSystem specifically for parsing JSON configuration data.
These are memoized over file systems tied to different branches.
'''
return self.Create(file_system,
Cache(SingleFile(lambda _, data:
json_parse.Parse(ToUnicode(data)))),
CompiledFileSystem,
category='json')
@memoize
def ForTemplates(self, file_system):
'''Creates a CompiledFileSystem for parsing templates.
'''
return self.Create(
file_system,
SingleFile(lambda path, text: Motemplate(ToUnicode(text), name=path)),
CompiledFileSystem)
@memoize
def ForUnicode(self, file_system):
'''Creates a CompiledFileSystem for Unicode text processing.
'''
return self.Create(
file_system,
SingleFile(lambda _, text: ToUnicode(text)),
CompiledFileSystem,
category='text')
def __init__(self,
file_system,
compilation_function,
file_object_store,
list_object_store):
self._file_system = file_system
self._compilation_function = compilation_function
self._file_object_store = file_object_store
self._list_object_store = list_object_store
def _Get(self, store, key):
if _GetUnboundFunction(self._compilation_function) in _CACHEABLE_FUNCTIONS:
return store.Get(key)
return Future(value=None)
def _Set(self, store, key, value):
if _GetUnboundFunction(self._compilation_function) in _CACHEABLE_FUNCTIONS:
store.Set(key, value)
def _RecursiveList(self, path):
'''Returns a Future containing the recursive directory listing of |path| as
a flat list of paths.
'''
def split_dirs_from_files(paths):
'''Returns a tuple (dirs, files) where |dirs| contains the directory
names in |paths| and |files| contains the files.
'''
result = [], []
for path in paths:
result[0 if path.endswith('/') else 1].append(path)
return result
def add_prefix(prefix, paths):
return [prefix + path for path in paths]
# Read in the initial list of files. Do this eagerly (i.e. not part of the
# asynchronous Future contract) because there's a greater chance to
# parallelise fetching with the second layer (can fetch multiple paths).
try:
first_layer_dirs, first_layer_files = split_dirs_from_files(
self._file_system.ReadSingle(path).Get())
except FileNotFoundError:
return Future(exc_info=sys.exc_info())
if not first_layer_dirs:
return Future(value=first_layer_files)
def get_from_future_listing(listings):
'''Recursively lists files from directory listing |futures|.
'''
dirs, files = [], []
for dir_name, listing in listings.iteritems():
new_dirs, new_files = split_dirs_from_files(listing)
# |dirs| are paths for reading. Add the full prefix relative to
# |path| so that |file_system| can find the files.
dirs += add_prefix(dir_name, new_dirs)
# |files| are not for reading, they are for returning to the caller.
# This entire function set (i.e. GetFromFileListing) is defined to
# not include the fetched-path in the result, however, |dir_name|
# will be prefixed with |path|. Strip it.
assert dir_name.startswith(path)
files += add_prefix(dir_name[len(path):], new_files)
if dirs:
files += self._file_system.Read(dirs).Then(
get_from_future_listing).Get()
return files
return self._file_system.Read(add_prefix(path, first_layer_dirs)).Then(
lambda results: first_layer_files + get_from_future_listing(results))
def GetFromFile(self, path, skip_not_found=False):
'''Calls |compilation_function| on the contents of the file at |path|.
If |skip_not_found| is True, then None is passed to |compilation_function|.
'''
AssertIsFile(path)
try:
version = self._file_system.Stat(path).version
except FileNotFoundError:
if skip_not_found:
version = None
else:
return Future(exc_info=sys.exc_info())
cache_entry = self._Get(self._file_object_store, path).Get()
if (cache_entry is not None) and (version == cache_entry.version):
return Future(value=cache_entry.cache_data)
def compile_(files):
cache_data = self._compilation_function(path, files)
self._Set(self._file_object_store, path, _CacheEntry(cache_data, version))
return cache_data
return self._file_system.ReadSingle(
path, skip_not_found=skip_not_found).Then(compile_)
def GetFromFileListing(self, path):
'''Calls |compilation_function| on the listing of the files at |path|.
Assumes that the path given is to a directory.
'''
AssertIsDirectory(path)
try:
version = self._file_system.Stat(path).version
except FileNotFoundError:
return Future(exc_info=sys.exc_info())
cache_entry = self._Get(self._list_object_store, path).Get()
if (cache_entry is not None) and (version == cache_entry.version):
return Future(value=cache_entry.cache_data)
def compile_(files):
cache_data = self._compilation_function(path, files)
self._Set(self._list_object_store, path, _CacheEntry(cache_data, version))
return cache_data
return self._RecursiveList(path).Then(compile_)
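  # Illustrative sketch (an addition): typical call sites obtain a compiled view
  # from the Factory and then block on the returned Future. The path below is a
  # placeholder.
  #
  #   json_fs = compiled_fs_factory.ForJson(file_system)
  #   config = json_fs.GetFromFile('docs/templates/json/config.json').Get()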
# _GetFileVersionFromCache and _GetFileListingVersionFromCache are exposed
# *only* so that ChainedCompiledFileSystem can optimise its caches. *Do not*
# use these methods otherwise, they don't do what you want. Use
# FileSystem.Stat on the FileSystem that this CompiledFileSystem uses.
def _GetFileVersionFromCache(self, path):
cache_entry = self._Get(self._file_object_store, path).Get()
if cache_entry is not None:
return Future(value=cache_entry.version)
stat_future = self._file_system.StatAsync(path)
return Future(callback=lambda: stat_future.Get().version)
def _GetFileListingVersionFromCache(self, path):
path = ToDirectory(path)
cache_entry = self._Get(self._list_object_store, path).Get()
if cache_entry is not None:
return Future(value=cache_entry.version)
stat_future = self._file_system.StatAsync(path)
return Future(callback=lambda: stat_future.Get().version)
def GetIdentity(self):
return self._file_system.GetIdentity()
|
s20121035/rk3288_android5.1_repo
|
external/chromium_org/chrome/common/extensions/docs/server2/compiled_file_system.py
|
Python
|
gpl-3.0
| 10,792
|
#
# (c) 2015, Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
      - Specifies the port to use when building the connection to the remote
device. This value applies to either I(cli) or I(nxapi). The port
        value will default to the appropriate transport common port if
none is provided in the task. (cli=22, http=80, https=443).
required: false
default: 0 (use common port)
username:
description:
      - Configures the username to use to authenticate the connection to
the remote device. The value of I(username) is used to authenticate
either the CLI login or the nxapi authentication depending on which
transport is used.
required: true
password:
description:
      - Specifies the password to use when authenticating the connection to
the remote device. This is a common argument used for either I(cli)
or I(nxapi) transports.
required: false
default: null
transport:
description:
- Configures the transport connection to use when connecting to the
remote device. The transport argument supports connectivity to the
device over cli (ssh) or nxapi.
required: true
default: cli
use_ssl:
description:
- Configures the I(transport) to use SSL if set to true only when the
I(transport) argument is configured as nxapi. If the transport
argument is not nxapi, this value is ignored
required: false
default: no
choices: ['yes', 'no']
provider:
description:
      - Convenience method that allows all M(nxos) arguments to be passed as
a dict object. All constraints (required, choices, etc) must be
met either by individual arguments or values in this dict.
required: false
default: null
"""
|
yesbox/ansible
|
lib/ansible/utils/module_docs_fragments/nxos.py
|
Python
|
gpl-3.0
| 2,822
|
"""Support for Spider Smart devices."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import (
CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
REQUIREMENTS = ['spiderpy==1.3.1']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'spider'
SPIDER_COMPONENTS = [
'climate',
'switch'
]
SCAN_INTERVAL = timedelta(seconds=120)
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL):
cv.time_period,
})
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
"""Set up Spider Component."""
from spiderpy.spiderapi import SpiderApi
from spiderpy.spiderapi import UnauthorizedException
username = config[DOMAIN][CONF_USERNAME]
password = config[DOMAIN][CONF_PASSWORD]
refresh_rate = config[DOMAIN][CONF_SCAN_INTERVAL]
try:
api = SpiderApi(username, password, refresh_rate.total_seconds())
hass.data[DOMAIN] = {
'controller': api,
'thermostats': api.get_thermostats(),
'power_plugs': api.get_power_plugs()
}
for component in SPIDER_COMPONENTS:
load_platform(hass, component, DOMAIN, {}, config)
_LOGGER.debug("Connection with Spider API succeeded")
return True
except UnauthorizedException:
_LOGGER.error("Can't connect to the Spider API")
return False
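# Illustrative sketch (an addition): a minimal configuration.yaml entry that
# satisfies CONFIG_SCHEMA above; the credentials are placeholders.
#
#   spider:
#     username: user@example.com
#     password: !secret spider_password
#     scan_interval: 120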
|
HydrelioxGitHub/home-assistant
|
homeassistant/components/spider/__init__.py
|
Python
|
apache-2.0
| 1,643
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'State'
db.create_table(u'analytics_state', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(default=None, unique=True, max_length=50)),
('region', self.gf('django.db.models.fields.CharField')(max_length=12)),
))
db.send_create_signal(u'analytics', ['State'])
# Adding unique constraint on 'State', fields ['name', 'region']
db.create_unique(u'analytics_state', ['name', 'region'])
# Adding model 'City'
db.create_table(u'analytics_city', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(default=None, max_length=50)),
('state', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['analytics.State'])),
))
db.send_create_signal(u'analytics', ['City'])
# Adding unique constraint on 'City', fields ['name', 'state']
db.create_unique(u'analytics_city', ['name', 'state_id'])
# Adding model 'SupplyBase'
db.create_table(u'analytics_supplybase', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('year', self.gf('django.db.models.fields.IntegerField')()),
('city', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['analytics.City'])),
('occupation', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['admin.Occupation'])),
('institution', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['admin.Institution'])),
('degree', self.gf('django.db.models.fields.CharField')(default=None, max_length=3)),
('supply', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal(u'analytics', ['SupplyBase'])
# Adding unique constraint on 'SupplyBase', fields ['year', 'city', 'occupation', 'institution', 'degree']
db.create_unique(u'analytics_supplybase', ['year', 'city_id', 'occupation_id', 'institution_id', 'degree'])
# Adding model 'DemandData'
db.create_table(u'analytics_demanddata', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('year', self.gf('django.db.models.fields.IntegerField')()),
('city', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['analytics.City'])),
('occupation', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['admin.Occupation'])),
('company', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['admin.Company'])),
('demand', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal(u'analytics', ['DemandData'])
# Adding unique constraint on 'DemandData', fields ['year', 'city', 'occupation', 'company']
db.create_unique(u'analytics_demanddata', ['year', 'city_id', 'occupation_id', 'company_id'])
def backwards(self, orm):
# Removing unique constraint on 'DemandData', fields ['year', 'city', 'occupation', 'company']
db.delete_unique(u'analytics_demanddata', ['year', 'city_id', 'occupation_id', 'company_id'])
# Removing unique constraint on 'SupplyBase', fields ['year', 'city', 'occupation', 'institution', 'degree']
db.delete_unique(u'analytics_supplybase', ['year', 'city_id', 'occupation_id', 'institution_id', 'degree'])
# Removing unique constraint on 'City', fields ['name', 'state']
db.delete_unique(u'analytics_city', ['name', 'state_id'])
# Removing unique constraint on 'State', fields ['name', 'region']
db.delete_unique(u'analytics_state', ['name', 'region'])
# Deleting model 'State'
db.delete_table(u'analytics_state')
# Deleting model 'City'
db.delete_table(u'analytics_city')
# Deleting model 'SupplyBase'
db.delete_table(u'analytics_supplybase')
# Deleting model 'DemandData'
db.delete_table(u'analytics_demanddata')
models = {
'admin.company': {
'Meta': {'object_name': 'Company'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'nasscom_membership_number': ('django.db.models.fields.CharField', [], {'default': "'N/A'", 'max_length': '20'}),
'training_provider': ('django.db.models.fields.CharField', [], {'default': "'NO'", 'max_length': '3'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '100'})
},
'admin.institution': {
'Meta': {'object_name': 'Institution'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'international': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'unique': 'True', 'max_length': '100', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '100'})
},
'admin.occupation': {
'Meta': {'object_name': 'Occupation'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'sub_sector': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['admin.SubSector']"}),
'tracks': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['admin.Track']", 'null': 'True', 'blank': 'True'})
},
'admin.sector': {
'Meta': {'object_name': 'Sector', 'index_together': "[['name']]"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'unique': 'True', 'max_length': '9', 'db_index': 'True'})
},
'admin.subsector': {
'Meta': {'unique_together': "(('sector', 'name'),)", 'object_name': 'SubSector', 'index_together': "[['name', 'sector']]"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50', 'db_index': 'True'}),
'sector': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['admin.Sector']"})
},
'admin.track': {
'Meta': {'object_name': 'Track'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
u'analytics.city': {
'Meta': {'unique_together': "(('name', 'state'),)", 'object_name': 'City'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '50'}),
'state': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['analytics.State']"})
},
u'analytics.demanddata': {
'Meta': {'unique_together': "(('year', 'city', 'occupation', 'company'),)", 'object_name': 'DemandData'},
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['analytics.City']"}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['admin.Company']"}),
'demand': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'occupation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['admin.Occupation']"}),
'year': ('django.db.models.fields.IntegerField', [], {})
},
u'analytics.state': {
'Meta': {'unique_together': "(('name', 'region'),)", 'object_name': 'State'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': 'None', 'unique': 'True', 'max_length': '50'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '12'})
},
u'analytics.supplybase': {
'Meta': {'unique_together': "(('year', 'city', 'occupation', 'institution', 'degree'),)", 'object_name': 'SupplyBase'},
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['analytics.City']"}),
'degree': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '3'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institution': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['admin.Institution']"}),
'occupation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['admin.Occupation']"}),
'supply': ('django.db.models.fields.IntegerField', [], {}),
'year': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['analytics']
|
arpitprogressive/arpittest
|
apps/analytics/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 9,867
|