repo_name
stringlengths
5
100
path
stringlengths
4
231
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
6
947k
score
float64
0
0.34
prefix
stringlengths
0
8.16k
middle
stringlengths
3
512
suffix
stringlengths
0
8.17k
pavlenko-volodymyr/codingmood
codemood/social/admin.py
Python
mit
270
0.003704
from django.contrib import admin from .models import Post class PostAdmin(admin.ModelAdmin): list_display = ('user', 'mood', 'mood_positive', 'mood_negative', 'mood_neutral', 'created'
) list_filter = ('mood', 'created') admin.site.register(Post, P
ostAdmin)
sdague/home-assistant
homeassistant/components/wink/fan.py
Python
apache-2.0
3,014
0
"""Support for Wink fans.""" import pywink from homeassistant.components.fan import ( SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, SUPPORT_DIRECTION, SUPPORT_SET_SPEED, FanEntity, ) from . import DOMAIN, WinkDevice SPEED_AUTO = "auto" SPEED_LOWEST = "lowest" SUPPORTED_FEATURES = SUPPORT_DIRECTION + SUPPORT_SET_SPEED def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Wink platform.""" for fan in pywink.get_fans(): if fan.object_id() + fan.name() not in hass.data[DOMAIN]["unique_ids"]: add_entities([WinkFanDevice(fan, hass)]) class WinkFanDevice(WinkDevice, FanEntity): """Representation of a Wink fan.""" async def async_added_to_hass(self): """Call when entity is added to hass.""" self.hass.data[DOMAIN]["entities"]["fan"].append(self) def set_direction(self, direction: str) -> None: """Set the direction of the fan.""" self.wink.set_fan_direction(direction) def set_speed(self, speed: str) -> None: """Set the speed of the fan.""" self.wink.set_state(True, speed) def turn_on(self, speed: str = None, **kwargs) -> None: """Turn on the fan.""" self.wink.set_state(True, speed) def turn_off(self, **kwargs) -> None: """Turn off the fan.""" self.wink.set_state(False) @property def is_on(self): """Return true if the entity is on.""" return self.wink.state() @property def speed(self) -> str: """Return the current speed.""" current_wink_speed = self.wink.current_fan_speed() if SPEED_AUTO == current_wink_speed: return SPEED_AUTO if SPEED_LOWEST == current_wink_speed: return SPEED_LOWEST if SPEED_LOW == current_wink_speed: return SPEED_LOW if SPEED_MEDIUM == current_wink_speed:
return SPEED_MEDIUM if SPEED_HIGH == current_wink_speed: return SPEED_HIGH return None @property def current_direction(self): """Return direction of the fan [forward, reverse].""" return self.wink.current_fan_direction() @property def speed_list(self) -> list: """Get the list of available speeds.""" wink_supported_speeds = self.wink.
fan_speeds() supported_speeds = [] if SPEED_AUTO in wink_supported_speeds: supported_speeds.append(SPEED_AUTO) if SPEED_LOWEST in wink_supported_speeds: supported_speeds.append(SPEED_LOWEST) if SPEED_LOW in wink_supported_speeds: supported_speeds.append(SPEED_LOW) if SPEED_MEDIUM in wink_supported_speeds: supported_speeds.append(SPEED_MEDIUM) if SPEED_HIGH in wink_supported_speeds: supported_speeds.append(SPEED_HIGH) return supported_speeds @property def supported_features(self) -> int: """Flag supported features.""" return SUPPORTED_FEATURES
dannyroberts/eulxml
eulxml/__init__.py
Python
apache-2.0
903
0
# file eulxml/__init__.py # # Copyright 2010,2011 Emory University Libraries # # License
d under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distrib
uted under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version_info__ = (0, 22, 0, None) # Dot-connect all but the last. Last is dash-connected if not None. __version__ = '.'.join([str(i) for i in __version_info__[:-1]]) if __version_info__[-1] is not None: __version__ += ('-%s' % (__version_info__[-1],))
aspc/mainsite
aspc/courses/migrations/0014_auto_20160904_2350.py
Python
mit
1,605
0
# -*- coding: utf-8 -*- # Generated by Django 1.9.9 on 2016-09-04 23:50 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('courses', '0013_auto_20160903_0212'), ] operations = [ migrations.RenameField( model_name='section', old_name='approachable_rating', n
ew_name='cached_approachable_rating', ), migrations.RenameField( model_name='section', old_name='competency_rating', new_name='cached_competency_rating', ), migrations.RenameField( model_name='section', old_name='difficulty_rati
ng', new_name='cached_difficulty_rating', ), migrations.RenameField( model_name='section', old_name='engagement_rating', new_name='cached_engagement_rating', ), migrations.RenameField( model_name='section', old_name='enthusiasm_rating', new_name='cached_enthusiasm_rating', ), migrations.RenameField( model_name='section', old_name='lecturing_rating', new_name='cached_lecturing_rating', ), migrations.RenameField( model_name='section', old_name='rating', new_name='cached_rating', ), migrations.RenameField( model_name='section', old_name='useful_rating', new_name='cached_useful_rating', ), ]
EricSchles/veyepar
dj/volunteers/urls.py
Python
mit
1,412
0.010623
# volunteers/urls.py from django.conf.urls import * from django.contrib.auth.decorators import login_required from volunteers.views import * urlpatterns = patterns('', #(r'^$', login_required(ShowsInProcessing.as_view()), {}, 'volunteer_show_list'), #(r'^(?P<show_slug>\[-\w]+)/$', login_required(ShowReview.as_view()), {}, 'volunteer_show_review'), (r'^more_videos/(?P<episode_id>\d+)/(?P<slop>\d+)/$', login_required(ExpandCutList.as_view()), {}, 'volunteer_expand_cutlist'), (r'^more_videos/(?P<episode_id>\d+)/(?P<slop>\
d+)/(?P<edit_key>\w+)/$', ExpandCutList.as_view(), {}, 'guest_expand_cutlist'), (r'^reopen/(?P<episode_id>\d+)/$', login_required(ReopenEpisode.as_view()), {}, 'volunteer_reopen'), (r'^reopen/(?P<episode_id>\d+)/(?P<edit_key>\w+)/$', ReopenEpisode.as_view(), {}, 'guest_reopen'), (r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', login_required(EpisodeReview.as_view())
, {}, 'volunteer_episode_review'), (r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/(?P<edit_key>\w+)/$', EpisodeReview.as_view(), {}, 'guest_episode_review'), (r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', login_required(EpisodeReview.as_view()), {'advanced': True}, 'volunteer_episode_review_advanced'), (r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/(?P<edit_key>\w+)/$', EpisodeReview.as_view(), {'advanced': True}, 'guest_episode_review_advanced'), )
okfish/django-oscar-shipping
oscar_shipping/packers.py
Python
bsd-3-clause
4,871
0.006775
from decimal import Decimal as D from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.utils.translation import ugettext_lazy as _ from oscar.core import loading Scale = loading.get_class('shipping.scales', 'Scale') weight_precision = getattr(settings, 'OSCAR_SHIPPING_WEIGHT_PRECISION', D('0.000')) volume_precision = getattr(settings, 'OSCAR_SHIPPING_VOLUME_PRECISION', D('0.000')) # per product defaults # 0.1m x 0.1m x 0.1m DEFAULT_BOX = getattr(settings, 'OSCAR_SHIPPING_DEFAULT_BOX', {'width': float('0.1'), 'height': float('0.1'), 'length': float('0.1')}) # 1 Kg DEFAULT_WEIGHT = getattr(settings, 'OSCAR_SHIPPING_DEFAULT_WEIGHT', 1) # basket volue * VOLUME_RATIO = estimated container(s) volume # very simple method VOLUME_RATIO = getattr(settings, 'OSCAR_SHIPPING_VOLUME_RATIO', D('1.3')) class Box(object): height = 0 width = 0 length = 0 def __init__(self, h, w, l): self.height, self.width, self.length = h, w, l @property
def volume(self): return D(self.height*self.width*self.length).quantize(volume_precision) class Container(Box): name = '' def __init__(self, h, w, l, name): self.name = name super(Container, self).__init__(h, w, l) class ProductBox(Box):
""" 'Packs' given product to the virtual box and scale it. Takes size and weight from product attributes (if present) """ weight = 0 def __init__(self, product, size_codes=('width', 'height', 'length'), weight_code='weight', default_weight=DEFAULT_WEIGHT): self.attributes = size_codes attr_vals = {} scale = Scale(attribute_code=weight_code, default_weight=default_weight) try: for attr in self.attributes: attr_vals[attr] = product.attribute_values.get( attribute__code=attr).value except ObjectDoesNotExist: attr_vals = DEFAULT_BOX self.weight = scale.weigh_product(product) for attr in attr_vals.keys(): setattr(self, attr, attr_vals[attr]) class Packer(object): """ To calculate shipping charge the set of containers required. That set should be enough for all items of basket which shoud have appropriate attributes (height,width,lenght) And this is the problem known as Bin Packing Problem """ def __init__(self, containers, **kwargs): self.containers = containers self.attributes = kwargs.get('attribute_codes', ('width', 'height', 'length')) self.weight_code = kwargs.get('weight_code', 'weight') self.default_weight = kwargs.get('default_weight', DEFAULT_WEIGHT) def get_default_container(self, volume): """Generates _virtual_ cube container which does not exists in the db but enough to calculate estimated shipping charge for the basket's volume given """ side = float(volume) ** (1 / 3.0) return Container(side, side, side, _('virtual volume (%s)') % volume) def box_product(self, product): return ProductBox(product, self.attributes, self.weight_code, self.default_weight) def pack_basket(self, basket): # First attempt but very weird volume = 0 weight = 0 box = container = matched = None for line in basket.lines.all(): box = self.box_product(line.product) volume += box.volume * line.quantity weight += box.weight * line.quantity del box volume = volume * VOLUME_RATIO # Calc container volume during DB query excution # source: 
http://stackoverflow.com/questions/1652577/django-ordering-queryset-by-a-calculated-field # as we can't use computed values in the WHERE clause # we will filter containers as python list # container = self.containers.extra(select={'volume': 'height*width*lenght'})\ # .extra(order_by=['volume'])\ # .extra(where=['"volume">%s'], params=[volume])[0] # select containers which volumes greater than summarized basket volume matched = [c for c in self.containers.all() if c.volume >= volume] if len(matched) > 0: container = matched[0] # TODO: count container's weight - add it to model else: container = self.get_default_container(volume) return [{'weight': D(weight).quantize(weight_precision), 'container': container}]
Danielweber7624/pybuilder
src/main/python/pybuilder/plugins/exec_plugin.py
Python
apache-2.0
3,101
0.00258
# -*- coding: utf-8 -*- # # This file is part of PyBuilder # # Copyright 2011-2015 PyBuilder Team # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from pybuilder.core import task, use_plugin from pybuilder.errors import BuildFailedException from subprocess import PIPE, Popen import sys use_plugin("core") @task def run_unit_tests(project, logger): run_command('run_unit_tests', project, logger) @task def run_integration_tests(project, logger): run_command('run_integration_tests', project, logger) @task def analyze(project, logger): run_command('analyze', project, logger) @task def package(project, logger): run_command('package', project, logger) @task def publish(project, logger): run_command('publish', project, logger) def _write_command_report(project, stdout, stderr, command_line, phase, process_return_code): project.write_report('exec_%s' % phase, stdout) project.write_report('exec_%s.err' % phase, stderr) def _log_quoted_output(logger, output_type, output, phase): separator = '-' * 5 logger.info('{0} verbatim {1} output of {2} {0}'.format(separator, output_type, phase)) for line in ou
tput.split('\n'): logger.info(line) logger.info('{0} end of verbatim {1} output {0}'.format(separator, output_type)) def run_command(phase, project, logger): command_line = project.get_property('%s_command' % phase) if not command_line: return process_handle = Popen(command_line, stdout=PIPE, stderr=PIPE, shell=True) stdout, stderr = pr
ocess_handle.communicate() stdout, stderr = stdout.decode(sys.stdout.encoding or 'utf-8'), stderr.decode(sys.stderr.encoding or 'utf-8') process_return_code = process_handle.returncode _write_command_report(project, stdout, stderr, command_line, phase, process_return_code) if project.get_property('%s_propagate_stdout' % phase) and stdout: _log_quoted_output(logger, '', stdout, phase) if project.get_property('%s_propagate_stderr' % phase) and stderr: _log_quoted_output(logger, 'error', stderr, phase) if process_return_code != 0: raise BuildFailedException( 'exec plugin command {0} for {1} exited with nonzero code {2}'.format(command_line, phase, process_return_code))
a-krebs/finances
finances/django_registration/urls.py
Python
gpl-3.0
3,598
0.005837
# Copyright (C) 2012 Aaron Krebs akrebs@ualberta.ca # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> from django.views.generic.simple import direct_to_template from django.contrib.auth import views as auth_views from django.conf.urls import patterns, url from django.core.urlresolvers import reverse_lazy from registration.views import register urlpatterns = patterns('', # urls for simple one-step registration url(r'^register/$', register, {'backend': 'registration.backends.simple.SimpleBackend', 'template_name': 'registration/registration_form.hamlpy', }, name='registration_register' ), url(r'^register/closed/$', direct_to_template, {'template': 'registration/registration_closed.hamlpy'}, name='registration_disallowed' ), url(r'^login/$', auth_views.login, {'template_name': 'registration/login.hamlpy'}, name='auth_login' ), url(r'^logout/$', auth_views.logout, {'template_name': 'registration/logout.hamlpy'}, name='auth_logout' ), url(r'^password/change/$', auth_views.password_change, {'template_name': 'registration/password_change_form.hamlpy', # ugh, this is tied to the namespace; needs to be namespace-agnostic
# since the namspace is determined by the importing app # TODO: see Issue #1 'post_change_redirect': reverse_lazy('r
egistration:auth_password_change_done') }, name='auth_password_change' ), url(r'^password/change/done/$', auth_views.password_change_done, {'template_name': 'registration/password_change_done.hamlpy'}, name='auth_password_change_done' ), url(r'^password/reset/$', auth_views.password_reset, {'template_name': 'registration/password_reset_form.hamlpy', # same issue as above 'post_reset_redirect': reverse_lazy('registration:auth_password_reset_done'), 'email_template_name': 'registration/password_reset_email.hamlpy', 'subject_template_name': 'registration/password_reset_subject.hamlpy', }, name='auth_password_reset' ), url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', auth_views.password_reset_confirm, {'template_name': 'registration/password_reset_confirm.hamlpy', # same issue as above 'post_reset_redirect': reverse_lazy('registration:auth_password_reset_complete'), }, name='auth_password_reset_confirm' ), url(r'^password/reset/complete/$', auth_views.password_reset_complete, {'template_name': 'registration/password_reset_complete.hamlpy'}, name='auth_password_reset_complete' ), url(r'^password/reset/done/$', auth_views.password_reset_done, {'template_name': 'registration/password_reset_done.hamlpy'}, name='auth_password_reset_done' ), )
satyarth/pixelsort
pixelsort/util.py
Python
mit
1,262
0.001585
from colorsys import rgb_to_hsv import time def id_generator(): timestr = time.strftime("%Y%m%d-%H%M%S") return timestr def lightness(pixel): # For backwards compatibility with python2 return rgb_to_hsv(pixel[0], pixel[1], pixel[2])[2] / 255.0 def hue(pixel): return rgb_to_hsv(pixel[0], pixel[1], pixel[2
])[0] / 255.0
def saturation(pixel): return rgb_to_hsv(pixel[0], pixel[1], pixel[2])[1] / 255.0 def crop_to(image_to_crop, reference_image): """ Crops image to the size of a reference image. This function assumes that the relevant image is located in the center and you want to crop away equal sizes on both the left and right as well on both the top and bottom. :param image_to_crop :param reference_image :return: image cropped to the size of the reference image """ reference_size = reference_image.size current_size = image_to_crop.size dx = current_size[0] - reference_size[0] dy = current_size[1] - reference_size[1] left = dx / 2 upper = dy / 2 right = dx / 2 + reference_size[0] lower = dy / 2 + reference_size[1] return image_to_crop.crop( box=( int(left), int(upper), int(right), int(lower)))
HEPData/hepdata
tests/converter_test.py
Python
gpl-2.0
4,511
0.00133
# This file is part of HEPData. # Copyright (C) 2016 CERN. # # HEPData is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # HEPData is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have receive
d a copy of the GNU General Public License # along with HEPData; if not, write to the # Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307, USA. # # In applying this license, CERN does not # waive the privileges and immunities granted to it by virtue of its statu
s # as an Intergovernmental Organization or submit itself to any jurisdiction. import os.path import responses import zipfile from hepdata.config import CFG_CONVERTER_URL from hepdata.modules.converter.tasks import convert_and_store from hepdata.modules.records.utils.old_hepdata import mock_import_old_record def test_convert_and_store_invalid(app, capsys): with app.app_context(): convert_and_store('12345678', 'test_format', True) captured = capsys.readouterr() assert(captured.out == "Unable to find a matching submission for 12345678\n") @responses.activate def test_convert_and_store_valid_yaml(app, capsys, load_submission): with app.app_context(): # Open a .tar.gz file to mock the call to the converter base_dir = os.path.dirname(os.path.realpath(__file__)) test_tar_gz_file = os.path.join(base_dir, 'test_data', '1396331.tar.gz') with open(test_tar_gz_file, "rb") as stream: responses.add(responses.GET, CFG_CONVERTER_URL + '/convert', status=200, headers={'mimetype': 'application/x-gzip'}, body=stream.read(), stream=True) capsys.readouterr() convert_and_store('1487726', 'yaml', True) captured_lines = capsys.readouterr().out.splitlines() assert(captured_lines[0] == "Creating yaml conversion for ins1487726") print(captured_lines) assert(captured_lines[1].startswith("File for ins1487726 created successfully")) file_path = captured_lines[1].split()[-1] assert(file_path.endswith("HEPData-ins1487726-v1-yaml.tar.gz")) assert(os.path.isfile(file_path)) def test_convert_and_store_valid_original(app, capsys, load_submission): with app.app_context(): capsys.readouterr() convert_and_store('1487726', 'original', True) captured_lines = capsys.readouterr().out.splitlines() assert(captured_lines[0] == "Creating original conversion for ins1487726") assert(captured_lines[1].startswith("File created at ")) file_path = captured_lines[1].split()[-1] assert(file_path.endswith("HEPData-ins1487726-v1.zip")) assert(os.path.isfile(file_path)) def 
test_convert_and_store_valid_original_with_old_resources(app, capsys): with app.app_context(): # Create submission with resources mock_import_old_record() capsys.readouterr() convert_and_store('1299143', 'original', True) captured_lines = capsys.readouterr().out.splitlines() assert(captured_lines[0] == 'Creating original conversion for ins1299143') assert(captured_lines[1].startswith("Creating archive at ")) file_path = captured_lines[1].split()[-1] assert('/converted/' in file_path) assert(file_path.endswith("HEPData-ins1299143-v1.zip")) assert(captured_lines[2] == 'File created at %s' % file_path) assert(os.path.isfile(file_path)) # Check contents of zip with zipfile.ZipFile(file_path) as zip: contents = zip.namelist() assert(len(contents) == 99) # Check for a sample of filenames from yaml and resources for f in ['submission.yaml', 'Table_1.yaml', 'figFigure7a.png']: assert(f in contents) # Check submission file has been updated with new resource location with zip.open('submission.yaml') as f: for line in f.readlines(): line_str = line.decode() if 'location' in line_str: assert('/resource/' not in line_str)
UT-Austin-FIS/django-coverage
django_coverage/utils/coverage_report/templates/default_module_exceptions.py
Python
apache-2.0
2,058
0.00243
""" Copyright 2009 55 Minutes (http://www.55minutes.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import time test_timestamp = time.strftime('%a %Y-%m-%d %H:%M %Z') TOP = """\ <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w
3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="Content-type" content="text/html;charset=UTF-8" /> <title>Test coverage report: %(title)s</title> <style type="text/css" media="screen"> b
ody { font-family: "Lucida Sans Unicode", "Lucida Grande", sans-serif; font-size: 13px; } #content-header { margin-left: 50px; } #content-header h1 { font-size: 18px; margin-bottom: 0; } #content-header p { font-size: 13px; margin: 0; color: #909090; } #result-list { margin: 0 50px; } #result-list ul { padding-left: 13px; list-style-position: inside; } </style> </head> <body> """ CONTENT_HEADER = """\ <div id="content-header"> <h1>Test Coverage Report: %(title)s</h1>""" CONTENT_HEADER += "<p>Generated: %(test_timestamp)s</p>" %vars() CONTENT_HEADER += "</div>" CONTENT_BODY = """\ <div id="result-list"> <p>%(long_desc)s</p> <ul> %(exception_list)s </ul> Back to <a href="index.html">index</a>. </div> """ EXCEPTION_LINE = "<li>%(module_name)s</li>" BOTTOM = """\ </body> </html> """
jbedorf/tensorflow
tensorflow/python/util/tf_inspect.py
Python
apache-2.0
13,772
0.01002
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """TFDecorator-aware replacements for the inspect module.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from collections import namedtuple import functools import inspect as _inspect import six from tensorflow.python.util import tf_decorator ArgSpec = _inspect.ArgSpec if hasattr(_inspect, 'FullArgSpec'): FullArgS
pec = _inspect.FullArgSpec # pylint: disable=invalid-name else: FullArgSpec = namedtuple('FullArgSpec', [ 'args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', 'kwonlydefaults', 'annotations' ]) def _convert_maybe_argspec_to_fullargspec(argspec): if isinstance(argspec, FullArgSpec): return argspec return FullArgSpec( args=
argspec.args, varargs=argspec.varargs, varkw=argspec.keywords, defaults=argspec.defaults, kwonlyargs=[], kwonlydefaults=None, annotations={}) if hasattr(_inspect, 'getfullargspec'): _getfullargspec = _inspect.getfullargspec # pylint: disable=invalid-name def _getargspec(target): """A python3 version of getargspec. Calls `getfullargspec` and assigns args, varargs, varkw, and defaults to a python 2/3 compatible `ArgSpec`. The parameter name 'varkw' is changed to 'keywords' to fit the `ArgSpec` struct. Args: target: the target object to inspect. Returns: An ArgSpec with args, varargs, keywords, and defaults parameters from FullArgSpec. """ fullargspecs = getfullargspec(target) argspecs = ArgSpec( args=fullargspecs.args, varargs=fullargspecs.varargs, keywords=fullargspecs.varkw, defaults=fullargspecs.defaults) return argspecs else: _getargspec = _inspect.getargspec def _getfullargspec(target): """A python2 version of getfullargspec. Args: target: the target object to inspect. Returns: A FullArgSpec with empty kwonlyargs, kwonlydefaults and annotations. """ return _convert_maybe_argspec_to_fullargspec(getargspec(target)) def currentframe(): """TFDecorator-aware replacement for inspect.currentframe.""" return _inspect.stack()[1][0] def getargspec(obj): """TFDecorator-aware replacement for `inspect.getargspec`. Note: `getfullargspec` is recommended as the python 2/3 compatible replacement for this function. Args: obj: A function, partial function, or callable object, possibly decorated. Returns: The `ArgSpec` that describes the signature of the outermost decorator that changes the callable's signature, or the `ArgSpec` that describes the object if not decorated. Raises: ValueError: When callable's signature can not be expressed with ArgSpec. TypeError: For objects of unsupported types. 
""" if isinstance(obj, functools.partial): return _get_argspec_for_partial(obj) decorators, target = tf_decorator.unwrap(obj) spec = next((d.decorator_argspec for d in decorators if d.decorator_argspec is not None), None) if spec: return spec try: # Python3 will handle most callables here (not partial). return _getargspec(target) except TypeError: pass if isinstance(target, type): try: return _getargspec(target.__init__) except TypeError: pass try: return _getargspec(target.__new__) except TypeError: pass # The `type(target)` ensures that if a class is received we don't return # the signature of it's __call__ method. return _getargspec(type(target).__call__) def _get_argspec_for_partial(obj): """Implements `getargspec` for `functools.partial` objects. Args: obj: The `functools.partial` obeject Returns: An `inspect.ArgSpec` Raises: ValueError: When callable's signature can not be expressed with ArgSpec. """ # When callable is a functools.partial object, we construct its ArgSpec with # following strategy: # - If callable partial contains default value for positional arguments (ie. # object.args), then final ArgSpec doesn't contain those positional arguments. # - If callable partial contains default value for keyword arguments (ie. # object.keywords), then we merge them with wrapped target. Default values # from callable partial takes precedence over those from wrapped target. # # However, there is a case where it is impossible to construct a valid # ArgSpec. Python requires arguments that have no default values must be # defined before those with default values. ArgSpec structure is only valid # when this presumption holds true because default values are expressed as a # tuple of values without keywords and they are always assumed to belong to # last K arguments where K is number of default values present. # # Since functools.partial can give default value to any argument, this # presumption may no longer hold in some cases. 
For example: # # def func(m, n): # return 2 * m + n # partialed = functools.partial(func, m=1) # # This example will result in m having a default value but n doesn't. This is # usually not allowed in Python and can not be expressed in ArgSpec correctly. # # Thus, we must detect cases like this by finding first argument with default # value and ensures all following arguments also have default values. When # this is not true, a ValueError is raised. n_prune_args = len(obj.args) partial_keywords = obj.keywords or {} args, varargs, keywords, defaults = getargspec(obj.func) # Pruning first n_prune_args arguments. args = args[n_prune_args:] # Partial function may give default value to any argument, therefore length # of default value list must be len(args) to allow each argument to # potentially be given a default value. all_defaults = [None] * len(args) if defaults: all_defaults[-len(defaults):] = defaults # Fill in default values provided by partial function in all_defaults. for kw, default in six.iteritems(partial_keywords): idx = args.index(kw) all_defaults[idx] = default # Find first argument with default value set. first_default = next((idx for idx, x in enumerate(all_defaults) if x), None) # If no default values are found, return ArgSpec with defaults=None. if first_default is None: return ArgSpec(args, varargs, keywords, None) # Checks if all arguments have default value set after first one. invalid_default_values = [ args[i] for i, j in enumerate(all_defaults) if j is None and i > first_default ] if invalid_default_values: raise ValueError('Some arguments %s do not have default value, but they ' 'are positioned after those with default values. This can ' 'not be expressed with ArgSpec.' % invalid_default_values) return ArgSpec(args, varargs, keywords, tuple(all_defaults[first_default:])) def getfullargspec(obj): """TFDecorator-aware replacement for `inspect.getfullargspec`. This wrapper emulates `inspect.getfullargspec` in[^)]* Python2. 
Args: obj: A callable, possibly decorated. Returns: The `FullArgSpec` that describes the signature of the outermost decorator that changes the callable's signature. If the callable is not decorated, `inspect.getfullargspec()` will be called directly on the callable. """ decorators, target = tf_decorator.unwrap(obj) return next((_convert_maybe_argspec_to_fullargspec(d.decorator_argspec)
hariharaselvam/djangotraining
products/migrations/0002_stat.py
Python
apache-2.0
566
0.001767
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-10-16 04:54 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies
= [ ('products', '0001_initial'), ] operations = [ migrations.CreateModel( name='Stat', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('category_count', models.IntegerField()), ], ), ]
UNICT-DMI/Telegram-DMI-Bot
module/utils/__init__.py
Python
gpl-3.0
24
0
"""Var
ious ut
ilities"""
Ultimaker/Uranium
plugins/FileHandlers/OBJReader/OBJReader.py
Python
lgpl-3.0
5,356
0.003547
# Copyright (c) 2020 Ultimaker B.V. # Copyright (c) 2013 David Braam # Uranium is released under the terms of the LGPLv3 or higher. import os from UM.Job import Job from UM.Logger import Logger from UM.Mesh.MeshReader import MeshReader from UM.Mesh.MeshBuilder import MeshBuilder from UM.Scene.SceneNode import SceneNode class OBJReader(MeshReader): def __init__(self) -> None: super().__init__() self._supported_extensions = [".obj"] def _toAbsoluteIndex(self, max, data): """ Handle negative indices (those are relative to the position, so -2 is the second one before the face). """ return [index if index > 0 else 1 + max + index for index in data] def _read(self, file_name): scene_node = None extension = os.path.splitext(file_name)[1] if extension.lower() in self._supported_extensions: vertex_list = [] normal_list = [] uv_list = [] face_list = [] scene_node = SceneNode() mesh_builder = MeshBuilder() mesh_builder.setFileName(file_name) previous_line_parts = [] f = open(file_name, "rt", encoding = "utf-8") for line in f: parts = previous_line_parts + line.split() previous_line_parts = [] if len(parts) < 1: continue if parts[-1] == "\\": del parts[-1] previous_line_parts = parts continue if parts[0] == "f": parts = [i for i in map(lambda p: p.split("/"), parts)] for idx in range(1, len(parts) - 2): data = self._toAbsoluteIndex(len(vertex_list), [int(parts[1][0]), int(parts[idx + 1][0]), int(parts[idx + 2][0])]) if len(parts[1]) > 1 and parts[1][1] and parts[idx + 1][1] and parts[idx + 2][1]: data += self._toAbsoluteIndex(len(normal_list), [int(parts[1][1]), int(parts[idx + 1][1]), int(parts[idx + 2][1])]) else: data += [0, 0, 0] if len(parts[1]) > 2: data += self._toAbsoluteIndex(len(uv_list), [int(parts[1][2]), int(parts[idx + 1][2]), int(parts[idx + 2][2])]) else: data += [0, 0, 0] face_list.append(data) elif parts[0] == "v": vertex_list.append([float(parts[1]), float(parts[3]), -float(parts[2])]) elif parts[0] == "vn": normal_list.append([float(parts[1]), 
float(parts[3]), -float(parts[2])]) elif parts[0] == "vt": uv_list.append([float(parts[1]), float(parts[2])]) Job.yieldThread() f.close() mesh_builder.reserv
eVertexCount(3 * len(face_list)) num_vertices = len(vertex_list) for face in face_list: # Substract 1 from index, as obj starts counting at 1 instead of 0 i = face[0] - 1 j = face[1] - 1 k = face[2] - 1 ui = face[3] - 1
uj = face[4] - 1 uk = face[5] - 1 ni = face[6] - 1 nj = face[7] - 1 nk = face[8] - 1 if i < 0 or i >= num_vertices: i = 0 if j < 0 or j >= num_vertices: j = 0 if k < 0 or k >= num_vertices: k = 0 if ni != -1 and nj != -1 and nk != -1: mesh_builder.addFaceWithNormals(vertex_list[i][0], vertex_list[i][1], vertex_list[i][2], normal_list[ni][0], normal_list[ni][1], normal_list[ni][2], vertex_list[j][0], vertex_list[j][1], vertex_list[j][2], normal_list[nj][0], normal_list[nj][1], normal_list[nj][2], vertex_list[k][0], vertex_list[k][1], vertex_list[k][2],normal_list[nk][0], normal_list[nk][1], normal_list[nk][2]) else: mesh_builder.addFaceByPoints(vertex_list[i][0], vertex_list[i][1], vertex_list[i][2], vertex_list[j][0], vertex_list[j][1], vertex_list[j][2], vertex_list[k][0], vertex_list[k][1], vertex_list[k][2]) if ui != -1 and len(uv_list) > ui: mesh_builder.setVertexUVCoordinates(mesh_builder.getVertexCount() - 3, uv_list[ui][0], uv_list[ui][1]) if uj != -1 and len(uv_list) > uj: mesh_builder.setVertexUVCoordinates(mesh_builder.getVertexCount() - 2, uv_list[uj][0], uv_list[uj][1]) if uk != -1 and len(uv_list) > uk: mesh_builder.setVertexUVCoordinates(mesh_builder.getVertexCount() - 1, uv_list[uk][0], uv_list[uk][1]) Job.yieldThread() if not mesh_builder.hasNormals(): mesh_builder.calculateNormals(fast = True) # make sure that the mesh data is not empty if mesh_builder.getVertexCount() == 0: Logger.log("d", "File did not contain valid data, unable to read.") return None # We didn't load anything. scene_node.setMeshData(mesh_builder.build()) return scene_node
audreyr/opencomparison
apiv1/tests/data.py
Python
mit
6,264
0.000958
from grid.models import Grid from django.contrib.auth.models import Group, User, Permission from package.models import Category, PackageExample, Package from grid.models import Element, Feature, GridPackage from core.tests import datautil def load(): category, created = Category.objects.get_or_create( pk=1, slug=u'apps', title=u'App', description=u'Small components used to build projects.', ) package1, created = Package.objects.get_or_create( pk=1, category=category, repo_watchers=0, title=u'Testability', pypi_url='', participants=u'malcomt,jacobian', pypi_downloads=0, repo_url=u'https://github.com/pydanny/django-la-facebook', repo_commits=0, repo_forks=0, slug=u'testability', repo_description=u'Increase your testing ability with this steroid free supplement.', ) package2, created = Package.objects.get_or_create( pk=2, category=category, repo_watchers=0, title=u'Supertester', pypi_url='', participants=u'thetestman', pypi_downloads=0, repo_url=u'https://github.com/pydanny/django-uni-form', repo_commits=0, repo_forks=0, slug=u'supertester', repo_description=u'Test everything under the sun with one command!', ) package3, created = Package.objects.get_or_create( pk=3, category=category, repo_watchers=0, title=u'Serious Testing', pypi_url='', participants=u'pydanny', pypi_downloads=0, repo_url=u'https://github.com/cartwheelweb/packaginator', repo_commits=0, repo_forks=0, slug=u'serious-testing', repo_description=u'Make testing as painless as waxing your legs.', ) package4, created = Package.objects.get_or_create( pk=4, category=category, repo_watchers=0, title=u'Another Test', pypi_url='', participants=u'pydanny', pypi_downloads=0, repo_url=u'https://github.com/djangopackages/djangopackages', repo_commits=0, repo_forks=0, slug=u'another-test', repo_description=u'Yet another test package, with no grid affiliation.', ) grid1, created = Grid.objects.get_or_create( pk=1, description=u'A grid for testing.', title=u'Testing', is_locked=False, slug=u'testing', ) grid2, created 
= Grid.objects.get_or_create( pk=2, description=u'Another grid for testing.', title=u'Another Testing', is_locked=False, slug=u'another-testing', ) gridpackage1, created = GridPackage.objects.get_or_create( pk=1, package=package1, grid=grid1, ) gridpackage2, created = GridPackage.objects.get_or_create( pk=2, package=package1, grid=grid1, ) gridpackage3, created = GridPackage.objects.get_or_create( pk=3, package=package3, grid=grid1, ) gridpackage4, created = GridPackage.objects.get_or_create( pk=4, package=package3, grid=grid2, ) gridpackage5, created = GridPackage.objects.get_or_create( pk=5, package=package2, grid=grid1, ) feature1, created = Feature.objects.get_or_create( pk=1, title=u'Has tests?', grid=grid1, description=u'Does this package come with tests?', ) feature2, created = Feature.objects.get_or_create( pk=2, title=u'Coolness?', grid=grid1, description=u'Is this package cool?', ) element, created = Element.objects.get_or_create( pk=1, text=u'Yes', feature=feature1, grid_package=gridpackage1, ) group1, created = Group.objects.get_or_create( pk=1, name=u'Moderators', #permissions=[[u'delete_gridpackage', u'grid', u'gridpackage'], [u'delete_feature', u'grid', u'feature']], ) group1.permissions.clear() group1.permissions = [ Permission.objects.get(codename='delete_gridpackage'), Permission.objects.get(codename='delete_feature') ] user1, created = User.objects.get_or_create( pk=1, username=u'user', first_name='', last_name='', is_active=True, is_superuser=False, is_staff=False, last_login=u'2010-01-01 12:00:00', password=u'sha1$644c9$347f3dd85fb609a5745ebe33d0791929bf08f22e', email='', date_joined=u'2010-01-01 12:00:00', ) user2, created = User.objects.get_or_create( pk=2, username=u'cleaner', first_name='', last_name='', is_active=True, is_superuser=False, is_staff=False,
last_login=u'2010-01-01 12:00:00', #groups=[group1], password=u'sha1$e6fe2$78b744e21cddb39117997709218f4c6db4e91894', email='', date_joined=u'2010-01-01 12:00:00', ) user2.groups = [group1] user3, created = User.objects.get_or_create( pk=3, username=u'staff',
first_name='', last_name='', is_active=True, is_superuser=False, is_staff=True, last_login=u'2010-01-01 12:00:00', password=u'sha1$8894d$c4814980edd6778f0ab1632c4270673c0fd40efe', email='', date_joined=u'2010-01-01 12:00:00', ) user4, created = User.objects.get_or_create( pk=4, username=u'admin', first_name='', last_name='', is_active=True, is_superuser=True, is_staff=True, last_login=u'2010-01-01 12:00:00', password=u'sha1$52c7f$59b4f64ffca593e6abd23f90fd1f95cf71c367a4', email='', date_joined=u'2010-01-01 12:00:00', ) packageexample, created = PackageExample.objects.get_or_create( pk=1, package=package1, url=u'http://www.example.com/', active=True, title=u'www.example.com', ) datautil.reset_sequences(Grid, Group, User, Permission, Category, PackageExample, Package, Element, Feature, GridPackage)
modera/mcloud
tests/test_remote.py
Python
apache-2.0
6,741
0.005192
import sys from flexmock import flexmock import inject from mcloud.events import EventBus from mcloud.txdocker import IDockerClient, DockerTwistedClient from mcloud.util import txtimeout import pytest from mcloud.remote import Server, Client, ApiError, Task, ApiRpcServer from twisted.internet import reactor, defer from twisted.python import log import txredisapi as redis class MockServer(Server): message = None def on_message(self, client, message, isBinary=False): self.message = message class MockClient(Client): message = None def on_message(self, message, isBinary=False): self.message = message def sleep(secs): d = defer.Deferred() reactor.callLater(secs, d.callback, None) return d #@pytest.inlineCallbacks #def test_exchange(): # inject.clear() # # #log.startLogging(sys.stdout) # # server = MockServer(port=9999) # server.bind() # # assert len(server.clients) == 0 # # client = MockClient(port=9999) # yield client.connect() # # assert len(server.clients) == 1 # # log.msg('Sending data') # yield client.send('boo') # # yield sleep(0.1) # # assert server.message == 'boo' # # yield server.clients[0].sendMessage('baz') # # yield sleep(0.1) # # assert client.message == 'baz' # # client.shutdown() # server.shutdown() # # yield sleep(0.1) @pytest.inlineCallbacks def test_request_response(): #----------------------------------- # preparations #----------------------------------- # cleanup a bit inject.clear() def my_config(binder): binder.bind('settings', None) inject.configure(my_config) # log.startLogging(sys.stdout) server = Server(port=9998, no_ssl=True) server.bind() client = Client(port=9998, no_ssl=True) yield client.connect() response = yield client.call_sync('ping') assert response == 'pong' client.shutdown() server.shutdown() @pytest.inlineCallbacks def test_request_response_no_such_command(): #----------------------------------- # preparations #----------------------------------- # cleanup a bit inject.clear() def my_config(binder): binder.bind('settings', None) 
inject.configure(my_config) log.startLogging(sys.stdout) server = Server(port=9996, no_ssl=True) server.bind() client = Client(port=9996, no_ssl=True) yield client.connect() with pytest.raises(ApiError): yield client.call_sync('hoho') client.shutdown() server.shutdown() @pytest.inlineCallbacks def test_tasks(): #----------------------------------- # preparations #----------------------------------- # cleanup a bit inject.clear() rc = yield redis.Connection(dbid=2) eb = EventBus(rc) yield eb.connect() def my_config(binder): binder.bind(redis.Connection, rc) binder.bind(EventBus, eb) binder.bind('settings', None) inject.configure(my_config) yield rc.flushdb() api = inject.instance(ApiRpcServer) #----------------------------------- # Test itself #----------------------------------- # this will emulate some long-running process task_defered = defer.Deferred() # this is mock that will execute our long-running process task = flexmock() task.should_receive('foo').with_args(int, 123, 'test').once().and_return(task_defered) # register our task api.tasks['baz'] = task.foo # start server -> real server on tcp port server = Server(port=9997, no_ssl=True) server.bind() # real client connecton here client = Client(port=9997, no_ssl=True) yield client.connect() # client calls a task task = Task('baz') yield client.call(task, 123, 'test') yield sleep(0.1) assert task.id > 0 assert task.name == 'baz' assert task.is_running is True assert len(server.rpc_server.tasks_running) == 1 assert server.rpc_server.tasks_running[task.id]['name'] == 'baz' assert len(server.rpc_server.task_list()) == 1 # no data should be on client yield sleep(0.1) assert task.data == [] assert task.response is None # now server sends some progress yield server.clients[0].send_event('task.progress.%s' % task.id, 'nami-nami') # and client should receive this data yield sleep(0.1) assert task.data == ['nami-nami'] assert task.is_running is True assert task.response is None # now our long-running proces
s stopped and returned some result yield task_defered.callback('this is respnse') # and client should recieve this resul yield sleep(0.1) assert task.data == ['nami-nam
i'] assert task.is_running == False assert task.response == 'this is respnse' assert len(server.rpc_server.tasks_running) == 0 assert len(server.rpc_server.task_list()) == 0 #----------------------------------- # Cleanup #----------------------------------- client.shutdown() server.shutdown() yield sleep(0.1) @pytest.inlineCallbacks def test_task_terminate(): #----------------------------------- # preparations #----------------------------------- # cleanup a bit inject.clear() rc = yield redis.Connection(dbid=2) eb = EventBus(rc) yield eb.connect() def my_config(binder): binder.bind(redis.Connection, rc) binder.bind(EventBus, eb) binder.bind('settings', None) inject.configure(my_config) yield rc.flushdb() api = inject.instance(ApiRpcServer) #----------------------------------- # Test itself #----------------------------------- # this will emulate some long-running process task_defered = defer.Deferred() # this is mock that will execute our long-running process task = flexmock() task.should_receive('foo').with_args(int, 123, 'test').once().and_return(task_defered) # register our task api.tasks['baz'] = task.foo # start server -> real server on tcp port server = Server(port=9987, no_ssl=True) server.bind() # real client connecton here client = Client(port=9987, no_ssl=True) yield client.connect() # client calls a task task = Task('baz') yield client.call(task, 123, 'test') yield sleep(0.1) assert task.id > 0 assert task.name == 'baz' assert task.is_running is True # now client terminates the task yield sleep(0.1) client.terminate_task(task.id) yield sleep(0.1) assert task.is_running is False #----------------------------------- # Cleanup #----------------------------------- client.shutdown() server.shutdown() yield sleep(0.1)
alfa-addon/addon
plugin.video.alfa/channels/freeporn.py
Python
gpl-3.0
5,059
0.016024
# -*- coding: utf-8 -*- #------------------------------------------------------------ import sys PY3 = False if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int if PY3: import urllib.parse as urlparse # Es muy lento en PY2. En PY3 es nativo else: import urlparse # Usamos el nativo de PY2 que es más rápido import re from platformcode import config, logger from core import scrapertools from core.item import Item from core import servertools from core import httptools host = 'https://frprn.com' def mainlist(item): logger.info() itemlist = [] itemlist.append(item.clone(title="Nuevas" , action="lista", url=host)) itemlist.append(item.c
lone(title="Mejor valorada" , action="lista", url=host + "/top-rated/")) itemlist.append(item.clone(title="Mas largo" , action="lista", url=host + "/longest/")) itemlist.append(item.clone(title="Modelos" , action="categorias", url=host + "/models/most-popular/"))
itemlist.append(item.clone(title="Categorias" , action="categorias", url=host + "/categories/")) itemlist.append(item.clone(title="Buscar", action="search")) return itemlist def search(item, texto): logger.info() texto = texto.replace(" ", "%20") item.url = "%s/search/%s/?mode=async&action=get_block&block_id=list_videos_videos&from2=%s&fromStart=1&fromEnd=%s" % (host, texto,1,1) try: return lista(item) except: import sys for line in sys.exc_info(): logger.error("%s" % line) return [] def categorias(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|&nbsp;|<br>", "", data) patron = '<li class="thumb thumb-\w+">.*?' patron += '<a href="([^"]+)">.*?' patron += '<img class="lazy" data-original="([^"]+)".*?' patron += '<div class="title">(.*?)</a>' matches = re.compile(patron,re.DOTALL).findall(data) for scrapedurl,scrapedthumbnail,scrapedtitle in matches: scrapedplot = "" title = scrapertools.find_single_match(scrapedtitle,'<div class="text">([^<]+)<') if "/categories/" in item.url: cantidad = scrapertools.find_single_match(scrapedtitle,'<div class="count">(\d+)</div>') scrapedtitle = scrapertools.find_single_match(scrapedtitle,'<div class="name">([^<]+)</div>') title = "%s (%s)" %(scrapedtitle, cantidad) scrapedurl = urlparse.urljoin(item.url,scrapedurl) itemlist.append(item.clone(action="lista", title=title, url=scrapedurl, fanart=scrapedthumbnail, thumbnail=scrapedthumbnail, plot=scrapedplot) ) next_page = scrapertools.find_single_match(data,'<li class="pagination-next"><a href="([^"]+)">') if next_page!="": next_page = urlparse.urljoin(item.url,next_page) itemlist.append(item.clone(action="categorias", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) ) return itemlist def lista(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|&nbsp;|<br>", "", data) patron = '<div class="thumb">.*?' patron += '<a href="([^"]+)".*?' 
patron += '<img class="lazy" data-original="([^"]+)" alt="([^"]+)".*?' matches = re.compile(patron,re.DOTALL).findall(data) for scrapedurl,scrapedthumbnail,scrapedtitle in matches: url = urlparse.urljoin(item.url,scrapedurl) duracion = "" title = "[COLOR yellow]%s[/COLOR] %s" % (duracion, scrapedtitle) contentTitle = title thumbnail = scrapedthumbnail plot = "" year = "" action = "play" if logger.info() == False: action = "findvideos" itemlist.append(item.clone(action=action, title=title, url=url, thumbnail=thumbnail, fanart=thumbnail, plot=plot, contentTitle = contentTitle)) patron = 'data-from="([^"]+)" data-id="([^"]+)" data-total="([^"]+)" data-page="([^"]+)" data-url="([^"]+)"' matches = re.compile(patron,re.DOTALL).findall(data) for dfrom,id,total,page,purl in matches: page = int(page) page += page next_page = "%s?action=get_block&block_id=%s&%s=%s" %(purl, id, dfrom, page) itemlist.append(item.clone(action="lista", title="[COLOR blue]Página Siguiente >>[/COLOR]", url=next_page) ) return itemlist def findvideos(item): logger.info() itemlist = [] itemlist.append(item.clone(action="play", title= "%s", contentTitle = item.title, url=item.url)) itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize()) return itemlist def play(item): logger.info() itemlist = [] itemlist.append(item.clone(action="play", title= "%s", contentTitle = item.title, url=item.url)) itemlist = servertools.get_servers_itemlist(itemlist, lambda i: i.title % i.server.capitalize()) return itemlist
mmasaki/trove
trove/tests/unittests/guestagent/test_pkg.py
Python
apache-2.0
21,209
0
# Copyright (c) 2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless require
d by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import re import subprocess from mock import Mock, MagicMock, patch import pexpect from trove.common import exception from trove.commo
n import utils from trove.guestagent import pkg from trove.tests.unittests import trove_testtools """ Unit tests for the classes and functions in pkg.py. """ class PkgDEBInstallTestCase(trove_testtools.TestCase): def setUp(self): super(PkgDEBInstallTestCase, self).setUp() self.pkg = pkg.DebianPackagerMixin() self.pkg_fix = self.pkg._fix self.pkg_fix_package_selections = self.pkg._fix_package_selections p0 = patch('pexpect.spawn') p0.start() self.addCleanup(p0.stop) p1 = patch('trove.common.utils.execute') p1.start() self.addCleanup(p1.stop) self.pkg._fix = Mock(return_value=None) self.pkg._fix_package_selections = Mock(return_value=None) self.pkgName = 'packageName' def tearDown(self): super(PkgDEBInstallTestCase, self).tearDown() self.pkg._fix = self.pkg_fix self.pkg._fix_package_selections = self.pkg_fix_package_selections def test_pkg_is_installed_no_packages(self): packages = [] self.assertTrue(self.pkg.pkg_is_installed(packages)) def test_pkg_is_installed_yes(self): packages = ["package1=1.0", "package2"] self.pkg.pkg_version = MagicMock(side_effect=["1.0", "2.0"]) self.assertTrue(self.pkg.pkg_is_installed(packages)) def test_pkg_is_installed_no(self): packages = ["package1=1.0", "package2", "package3=3.1"] self.pkg.pkg_version = MagicMock(side_effect=["1.0", "2.0", "3.0"]) self.assertFalse(self.pkg.pkg_is_installed(packages)) def test_success_install(self): # test pexpect.spawn.return_value.expect.return_value = 7 pexpect.spawn.return_value.match = False self.assertTrue(self.pkg.pkg_install(self.pkgName, {}, 5000) is None) def test_success_install_with_config_opts(self): # test config_opts = {'option': 'some_opt'} pexpect.spawn.return_value.expect.return_value = 7 pexpect.spawn.return_value.match = False self.assertTrue( self.pkg.pkg_install(self.pkgName, config_opts, 5000) is None) def test_permission_error(self): # test pexpect.spawn.return_value.expect.return_value = 0 pexpect.spawn.return_value.match = False # test and verify 
self.assertRaises(pkg.PkgPermissionError, self.pkg.pkg_install, self.pkgName, {}, 5000) def test_package_not_found_1(self): # test pexpect.spawn.return_value.expect.return_value = 1 pexpect.spawn.return_value.match = re.match('(.*)', self.pkgName) # test and verify self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_install, self.pkgName, {}, 5000) def test_package_not_found_2(self): # test pexpect.spawn.return_value.expect.return_value = 2 pexpect.spawn.return_value.match = re.match('(.*)', self.pkgName) # test and verify self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_install, self.pkgName, {}, 5000) def test_run_DPKG_bad_State(self): # test _fix method is called and PackageStateError is thrown pexpect.spawn.return_value.expect.return_value = 4 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_install, self.pkgName, {}, 5000) self.assertTrue(self.pkg._fix.called) def test_admin_lock_error(self): # test 'Unable to lock the administration directory' error pexpect.spawn.return_value.expect.return_value = 5 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgAdminLockError, self.pkg.pkg_install, self.pkgName, {}, 5000) def test_package_broken_error(self): pexpect.spawn.return_value.expect.return_value = 6 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgBrokenError, self.pkg.pkg_install, self.pkgName, {}, 5000) def test_timeout_error(self): # test timeout error pexpect.spawn.return_value.expect.side_effect = ( pexpect.TIMEOUT('timeout error')) # test and verify self.assertRaises(pkg.PkgTimeout, self.pkg.pkg_install, self.pkgName, {}, 5000) class PkgDEBRemoveTestCase(trove_testtools.TestCase): def setUp(self): super(PkgDEBRemoveTestCase, self).setUp() self.pkg = pkg.DebianPackagerMixin() self.pkg_version = self.pkg.pkg_version self.pkg_install = self.pkg._install self.pkg_fix = self.pkg._fix p0 = patch('pexpect.spawn') p0.start() 
self.addCleanup(p0.stop) p1 = patch('trove.common.utils.execute') p1.start() self.addCleanup(p1.stop) self.pkg.pkg_version = Mock(return_value="OK") self.pkg._install = Mock(return_value=None) self.pkg._fix = Mock(return_value=None) self.pkgName = 'packageName' def tearDown(self): super(PkgDEBRemoveTestCase, self).tearDown() self.pkg.pkg_version = self.pkg_version self.pkg._install = self.pkg_install self.pkg._fix = self.pkg_fix def test_remove_no_pkg_version(self): # test pexpect.spawn.return_value.expect.return_value = 6 pexpect.spawn.return_value.match = False with patch.object(self.pkg, 'pkg_version', return_value=None): self.assertTrue(self.pkg.pkg_remove(self.pkgName, 5000) is None) def test_success_remove(self): # test pexpect.spawn.return_value.expect.return_value = 6 pexpect.spawn.return_value.match = False self.assertTrue(self.pkg.pkg_remove(self.pkgName, 5000) is None) def test_permission_error(self): # test pexpect.spawn.return_value.expect.return_value = 0 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgPermissionError, self.pkg.pkg_remove, self.pkgName, 5000) def test_package_not_found(self): # test pexpect.spawn.return_value.expect.return_value = 1 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgNotFoundError, self.pkg.pkg_remove, self.pkgName, 5000) def test_package_reinstall_first_1(self): # test pexpect.spawn.return_value.expect.return_value = 2 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_remove, self.pkgName, 5000) self.assertTrue(self.pkg._install.called) self.assertFalse(self.pkg._fix.called) def test_package_reinstall_first_2(self): # test pexpect.spawn.return_value.expect.return_value = 3 pexpect.spawn.return_value.match = False # test and verify self.assertRaises(pkg.PkgPackageStateError, self.pkg.pkg_remove, self.pkgName, 5000) self.assertTrue(self.pkg._install.called) 
self.assertFalse(self.pkg._fix.called) def test_package_DPKG_fi
SnowWalkerJ/quantlib
quant/data/wind/tables/ccommodityfutureseodprices.py
Python
gpl-3.0
1,944
0.015819
from ....common.db.sql import VARCHAR, Numeric as NUMBER, DateTime as DATETIME, Column, BaseModel, CLOB, DATE VARCHAR2 = VARCHAR class CCommodityFuturesEODPrices(BaseModel): """ 4.182 中国商品期货日行情 Attributes ---------- object_id: VARCHAR2(100) 对象ID s_info_windcode: VARCHAR2(40) Wind代码 trade_dt: VARCHAR2(8) 交易日期 s_dq_presettle: NUMBER(20,4) 前结算价(元)
s_dq_open: NUMBER(20,4) 开盘价(元) s_dq_high: NUMBER(20,4) 最高价(元) s_dq_low: NUMBER(20,4) 最低价(元) s_dq_close: NUMBER(20,4) 收盘价(元) s_dq_settle: NUMBER(20,4) 结算价(元) s_dq_volume: NUMBER(20,4) 成交量(手) s_dq_amount: NUMBER(20,4) 成交金额(万元) s_dq_oi: NUMBER(20,4) 持仓量(手) s_dq_change: NUMBER(20,4) 涨跌(元) 收盘价-前结算价 s_dq_oichange: NUMBER(20,4) 持仓量变化 fs_info_type: VARCH
AR2(10) 合约类型 1:主力合约2:真实合约3:连续合约 opdate: DATETIME opdate opmode: VARCHAR(1) opmode """ __tablename__ = "CCommodityFuturesEODPrices" object_id = Column(VARCHAR2(100), primary_key=True) s_info_windcode = Column(VARCHAR2(40)) trade_dt = Column(VARCHAR2(8)) s_dq_presettle = Column(NUMBER(20,4)) s_dq_open = Column(NUMBER(20,4)) s_dq_high = Column(NUMBER(20,4)) s_dq_low = Column(NUMBER(20,4)) s_dq_close = Column(NUMBER(20,4)) s_dq_settle = Column(NUMBER(20,4)) s_dq_volume = Column(NUMBER(20,4)) s_dq_amount = Column(NUMBER(20,4)) s_dq_oi = Column(NUMBER(20,4)) s_dq_change = Column(NUMBER(20,4)) s_dq_oichange = Column(NUMBER(20,4)) fs_info_type = Column(VARCHAR2(10)) opdate = Column(DATETIME) opmode = Column(VARCHAR(1))
bilbeyt/ituro
ituro/results/models.py
Python
mit
11,573
0.000432
from django.db import models from django.dispatch import receiver from django.utils.encoding import python_2_unicode_compatible from django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.core.validators import MaxValueValidator, MinValueValidator from projects.models import Project from orders.models import LineFollowerStage, LineFollowerJuniorStage class BaseResult(models.Model): score = models.FloatField(verbose_name=_('Score'), blank=True) minutes = models.PositiveSmallIntegerField(verbose_name=_("Minutes")) seconds = models.PositiveSmallIntegerField(verbose_name=_("Seconds")) milliseconds = models.PositiveSmallIntegerField( verbose_name=_("Milliseconds")) disqualification = models.BooleanField( verbose_name=_('Disqualification'), default=False) is_best = models.BooleanField( verbose_name=_("Is best result?"), default=True) created_at = models.DateTimeField(auto_now_add=True) class Meta: abstract = True @property def duration(self): return self.minutes * 60 + self.seconds + self.milliseconds * 0.01 @property def duration_pretty(self): return "{} minutes, {} seconds, {} milliseconds".format( self.minutes, self.seconds, self.milliseconds) @python_2_unicode_compatible class LineFollowerResult(BaseResult): project = models.ForeignKey( Project, limit_choices_to={"category": "line_follower"}) stage = models.ForeignKey( LineFollowerStage, verbose_name=_("Line Follower Stage")) runway_out = models.PositiveSmallIntegerField( verbose_name=_("Runway Out Count"), default=0) class Meta: verbose_name = _("Line Follower Result") verbose_name_plural = _("Line Follower Results") ordering = ['disqualification', 'score'] def __str__(self): return self.project.name @receiver(models.signals.pre_save, sender=LineFollowerResult) def line_follower_result_calculate_score(sender, instance, *args, **kwargs): instance.score = instance.duration * (1 + 0.2 * instance.runway_out) @python_2_unicode_compatible class LineFollowerJuniorResult(BaseResult): 
project = models.ForeignKey( Project, limit_choices_to={"category": "line_follower_junior"}) stage = models.ForeignKey( LineFollowerJuniorStage, verbose_name=_("Line Follower Junior Stage")) runway_out = models.PositiveSmallIntegerField( verbose_name=_("Runway Out Count"), default=0) class Meta: verbose_name = _("Line Follower Junior Result") verbose_name_plural = _("Line Follower Junior Results") ordering = ['disqualification', 'score'] def __str__(self): return self.project.name @receiver(models.signals.pre_save, s
ender=LineFollowerJuniorResult) def line_follower_junior_result_calculate_score(sender, instance, *args, **kwargs): instance.score = instance.duration * (1 + 0.2 * instance.runway_out) @python_2_unicode_compatible class ConstructionResult(BaseResult): project = m
odels.ForeignKey( Project, limit_choices_to={"category": "construction"}) class Meta: verbose_name = _("Construction Result") verbose_name_plural = _("Construction Results") ordering = [ "disqualification", "-score", "minutes", "seconds", "milliseconds"] def __str__(self): return self.project.name @python_2_unicode_compatible class DroneResult(models.Model): project = models.ForeignKey( Project, limit_choices_to={"category": "drone"}) score = models.FloatField(verbose_name=_('Score'), blank=True) disqualification = models.BooleanField( verbose_name=_('Disqualification'), default=False) is_best = models.BooleanField( verbose_name=_("Is best result?"), default=True) created_at = models.DateTimeField(auto_now_add=True) laps = models.FloatField(verbose_name=_("Laps"), default=0) shortcuts = models.PositiveSmallIntegerField( verbose_name=_("Shortcuts"), default=0) class Meta: verbose_name = _("Drone Result") verbose_name_plural = _("Drone Results") ordering = [ "disqualification", "-score"] def __str__(self): return self.project.name @receiver(models.signals.pre_save, sender=DroneResult) def drone_result_calculate_score(sender, instance, *args, **kwargs): instance.score = instance.laps * 100 - instance.shortcuts * 50 @python_2_unicode_compatible class StairClimbingResult(BaseResult): project = models.ForeignKey( Project, limit_choices_to={"category": "stair_climbing"}) stair1 = models.BooleanField(verbose_name=_("Stair #1"), default=False) stair2 = models.BooleanField(verbose_name=_("Stair #2"), default=False) stair3 = models.BooleanField(verbose_name=_("Stair #3"), default=False) stair4 = models.BooleanField(verbose_name=_("Stair #4"), default=False) stair5 = models.BooleanField(verbose_name=_("Stair #5"), default=False) stair6 = models.BooleanField(verbose_name=_("Stair #6"), default=False) stair7 = models.BooleanField(verbose_name=_("Stair #7"), default=False) down6 = models.BooleanField(verbose_name=_("Down #6"), default=False) down5 = 
models.BooleanField(verbose_name=_("Down #5"), default=False) down4 = models.BooleanField(verbose_name=_("Down #4"), default=False) down3 = models.BooleanField(verbose_name=_("Down #3"), default=False) down2 = models.BooleanField(verbose_name=_("Down #2"), default=False) down1 = models.BooleanField(verbose_name=_("Down #1"), default=False) plexi_touch = models.PositiveSmallIntegerField( verbose_name=_("Plexi Touch Count"), default=0) is_complete = models.BooleanField( verbose_name=_("Is finish?"), default=False) class Meta: verbose_name = _("Stair Climbing Result") verbose_name_plural = _("Stair Climbing Results") ordering = [ "disqualification", "-score", "minutes", "seconds", "milliseconds"] def __str__(self): return self.project.name @receiver(models.signals.pre_save, sender=StairClimbingResult) def stair_climbing_result_calculate_score(sender, instance, *args, **kwargs): instance.score = sum(( (int(instance.stair1) + int(instance.stair2) + int(instance.stair3)) * 10, int(instance.stair4) * 40, int(instance.stair5) * 80, int(instance.stair6) * 40, int(instance.stair7) * 50, (int(instance.down6) + int(instance.down5) + int(instance.down4)) * 30, (int(instance.down3)) * 50, (int(instance.down1) + int(instance.down2)) * 20, (int(instance.is_complete)) * 40, instance.plexi_touch * (-10) )) @python_2_unicode_compatible class ColorSelectingResult(BaseResult): project = models.ForeignKey( Project, limit_choices_to={"category": "color_selecting"}) obtain = models.PositiveSmallIntegerField( verbose_name=_("Cylinder Obtain Count")) place_success = models.PositiveSmallIntegerField( verbose_name=_("Cylinder Successful Placement Count")) place_failure = models.PositiveSmallIntegerField( verbose_name=_("Cylinder Unsuccessful Placement Count")) class Meta: verbose_name = _("Color Selecting Result") verbose_name_plural = _("Color Selecting Results") ordering = [ "disqualification", "-score", "minutes", "seconds", "milliseconds"] def __str__(self): return self.project.name 
@receiver(models.signals.pre_save, sender=ColorSelectingResult) def color_selecting_result_calculate_score(sender, instance, *args, **kwargs): instance.score = sum(( instance.obtain * 100, instance.place_success * 200, instance.place_failure * (-50))) @python_2_unicode_compatible class ScenarioResult(BaseResult): project = models.ForeignKey( Project, limit_choices_to={"category": "scenario"}) is_stopped = models.BooleanField( verbose_name=_("Is parked wrongly?"), default=False) is_parked
seibert/blaze-core
blaze/blir/passes.py
Python
bsd-2-clause
6,473
0.006334
import sys import time import lexer import parser import cfg import typecheck import codegen import errors import exc from threading import Lock compilelock = Lock() #------------------------------------------------------------------------ # Pipeline #------------------------------------------------------------------------ class CompileError(RuntimeError): pass class Pipeline(object): def __init__(self, name, passes): self.name = name self.__name__ = name self.passes = passes def __call__(self, ast, env): for ppass in self.passes: ast, env = ppass(ast, env) if errors.occurred(): errors.reset() raise CompileError, ppass.__name__ return ast, env #------------------------------------------------------------------------ # Passes #------------------------------------------------------------------------ def ppass(name): def wrapper(fn): fn.__name__ = name return fn return wrapper # ------------------------------ @ppass("Syntax Parser") def parse_pass(ast, env): parse = parser.make_parser() ast = parse(ast) return ast, env # ------------------------------ @ppass("Type checker") def typecheck_pass(ast, env): symtab = typecheck.typecheck(ast) env['symtab'] = symtab return ast, env # ------------------------------ @ppass("Rewriter") def rewrite_pass(ast, env): return ast, env # ------------------------------ @ppass("Single static assignment") def ssa_pass(ast, env): functions = cfg.ssa_pass(ast) env['functions'] = functions return ast, env # ------------------------------ @ppass("Code generation") def codegen_pass(ast, env): cgen = codegen.LLVMEmitter() blockgen = codegen.BlockEmitter(cgen) env['cgen'] = cgen env['blockgen'] = blockgen functions = env['functions'] lfunctions = [] for name, retty, argtys, start_block in functions: function = blockgen.generate_function( name, retty, argtys, start_block ) function.verify() lfunctions.append(function) env['lfunctions'] = lfunctions return ast, env # ------------------------------ @ppass("LLVM Optimizer") def optimizer_pass(ast, 
env): cgen = env['cgen'] lfunctions = env['lfunctions'] opt_level = env['args']['O'] optimizer = codegen.LLVMOptimizer(cgen.module, opt_level) # function-level optimize #for lfunc in lfunctions: #optimizer.run(lfunc) #lfunc.verify() # module-level optimization optimizer.runmodule(cgen.module) cgen.module.verify() env['lmodule'] = cgen.module return ast, env # ------------------------------ @ppass("Linker") def linker_pass(ast, env): return ast, env #------------------------------------------------------------------------ # Pipeline Structure #------------------------------------------------------------------------ frontend = Pipeline('frontend', [parse_pass, typecheck_pass, rewrite_pass ]) backend = Pipeline('backend', [ssa_pass, codegen_pass, optimizer_pass, linker_pass, ]) compiler = Pipeline('compile', [frontend, backend ]) #------------------------------------------------------------------------ # Toplevel #------------------------------------------------------------------------ def compile(source, **opts): opts.setdefault('O', 2) env = {'args': opts} with compilelock: ast, env = compiler(source, env) return ast, env #------------------------------------------------------------------------ # Command Line Interface #------------------------------------------------------------------------ def main(): import argparse argp = argparse.ArgumentParser('blirc') argp.add_argument('file', metavar="file", nargs='?', help='Source file') argp.add_argument('-O', metavar="opt", nargs='?', type=int, help='Optimization level', default=2) argp.add_argument('--ddump-parse', action='store_true', help='Dump parse tree') argp.add_argument('--ddump-lex', action='store_true', help='Dump token stream') argp.add_argument('--ddump-blocks', action='store_true', help='Dump the block structure') argp.add_argument('--ddump-tc', action='store_true', help='Dump the type checker state') argp.add_argument('--ddump-optimizer', action='store_true', help='Dump diff of the LLVM optimizer pass') 
argp.add_argument('--noprelude', action='store_true', help='Don\'t link against the prelude') argp.add_argument('--nooptimize', action='store_true', help='Don\'t run LLVM optimization pass') argp.add_argument('--emit-llvm', action='store_true', help=' Generate output files in LLVM formats ') argp.add_argument('--emit-x86', action='store_true', help=' Generate output files in x86 assembly ') argp.add_argument('--run', action='store_true', help='Execute generated code ') args = argp.parse_args() if args.file: source = open(args.file).read() else: sys.stderr.write('No input\n') sys.exit(1) i
f args.ddump_lex: lexer.ddump_lex(source) if args.ddump_parse: parse
r.ddump_parse(source) if args.ddump_blocks: cfg.ddump_blocks(source) if args.ddump_optimizer: codegen.ddump_optimizer(source) if args.ddump_tc: typecheck.ddump_tc(source) try: # ===================================== start = time.time() with errors.listen(): opts = vars(args) ast, env = compile(source, **opts) timing = time.time() - start # ===================================== if args.emit_llvm: print env['lmodule'] elif args.emit_x86: print env['lmodule'].to_native_assembly() elif args.run: ctx = exc.Context(env) exc.execute(ctx, fname='main') else: print 'Compile time %.3fs' % timing except CompileError as e: sys.stderr.write('FAIL: Failure in compiler phase: %s\n' % e.args[0]) sys.exit(1) errors.reset() if __name__ == '__main__': main()
wasit7/visionmarker
beta/wl_auth/urls.py
Python
mit
244
0.016393
from django.conf.urls import url from . import views urlpatterns = [ url(r'^signin/',
views.signin, name='signin'), url(r'^signout/', views.signout, name='signout'), url(r'^change_password/', views.change_password, name='cha
nge_password'), ]
mytliulei/DCNRobotInstallPackages
windows/win32/scapy-2/scapy/arch/windows/__init__.py
Python
apache-2.0
19,506
0.009382
## This file is part of Scapy ## See http://www.secdev.org/projects/scapy for more informations ## Copyright (C) Philippe Biondi <phil@secdev.org> ## This program is published under a GPLv2 license """ Customizations needed to support Microsoft Windows. """ import os,re,sys,socket,time from glob import glob from scapy.config import conf,ConfClass from scapy.error import Scapy_Exception,log_loading,log_runtime from scapy.utils import atol, inet_aton, inet_ntoa, PcapReader from scapy.base_classes import Gen, Net, SetGen import scapy.plist as plist from scapy.sendrecv import debug, srp1 from scapy.layers.l2 import Ether, ARP from scapy.data import MTU, ETHER_BROADCAST, ETH_P_ARP conf.use_pcap = 1 conf.use_dnet = 1 from scapy.arch import pcapdnet from scapy.arch.pcapdnet import * LOOPBACK_NAME="lo0" WINDOWS = True def _where(filename, dirs=[], env="PATH"): """Find file in current dir or system path""" if not isinstance(dirs, list): dirs = [dirs] if glob(filename): return filename paths = [os.curdir] + os.environ[env].split(os.path.pathsep) + dirs for path in paths: for match in glob(os.path.join(path, filename)): if match: return os.path.normpath(match) raise IOError("File not found: %s" % filename) def win_find_exe(filename, installsubdir=None, env="ProgramFiles"): """Find executable in current dir, system path or given ProgramFiles subdir""" for fn in [filename, filename+".exe"]: try: if installsubdir is None: path = _where(fn) else: path = _where(fn, dirs=[os.path.join(os.environ[env], installsubdir)]) except IOError: path = filename else: break return path class WinProgPath(ConfClass): _default = "<System default>" # We try some magic to find the appropriate executables pdfreader = win_find_exe("AcroRd32") psreader = win_find_exe("gsview32.exe", "Ghostgum/gsview") dot = win_find_exe("dot", "ATT/Graphviz/bin") tcpdump = win_find_exe("windump") tcpreplay = win_find_exe("tcpreplay") display = _default hexedit = win_find_exe("hexer") wireshark = 
win_find_exe("wireshark", "wireshark") conf.prog = WinProgPath() import _winreg class PcapNameNotFoundError(Scapy_Exception): pass class NetworkInterface(object): """A network interface of your local host""" def __init__(self, dnetdict=None): self.name = None self.ip = None self.mac = None self.pcap_name = None self.win_name = None self.uuid = None self.dnetdict = dnetdict if dnetdict is not None: self.update(dnetdict) def update(self, dnetdict): """Update info about network interface according to given dnet dictionary""" self.name = dnetdict["name"] # Other attributes are optional try: self.ip = socket.inet_ntoa(dnetdict["addr"].ip) except (KeyError, AttributeError, NameError): pass try: self.mac = dnetdict["link_addr"] except KeyError: pass self._update_pcapdata() def _update_pcapdata(self): """Supplement more info from pypcap and the Windows registry""" # XXX: We try eth0 - eth29 by bruteforce and match by IP address, # because only the IP is available in both pypcap and dnet. # This may not work with unorthodox network configurations and is # slow because we have to walk through the Windows registry. for n in range(30): guess = "eth%s" % n win_name = pcapdnet.pcap.ex_name(guess) if win_name.endswith("}"): try: uuid = win_name[win_name.index("{"):win_name.index("}")+1] keyname = r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces\%s" % uuid try: key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, keyname) except WindowsError: log_loading.debug("Couldn't open 'HKEY_LOCAL_MACHINE\\%s' (for guessed pcap iface name '%s')." % (keyname, guess)) continue try: fixed_ip = _winreg.QueryValueEx(key, "IPAddress")[0][0].encode("utf-8") except (WindowsError, UnicodeDecodeError, IndexError): fixed_ip = None try: dhcp_ip = _winreg.QueryValueEx(key, "DhcpIPAddress")[0].encode("utf-8") except (WindowsError, UnicodeDecodeError, IndexError): dhcp_ip = None # "0.0.0.0" or None means the value is not set (at least not correctly). 
# If both fixed_ip and dhcp_ip are set, fixed_ip takes precedence if fixed_ip is not None and fixed_ip != "0.0.0.0": ip = fixed_ip elif dhcp_ip is not None and dhcp_ip != "0.0.0.0": ip = dhcp_ip else: continue except IOError: continue else: if ip == self.ip: self.pcap_name = guess self.win_name = win_name self.uuid = uuid break else: raise PcapNameNotFoundError def __repr__(self): return "<%s: %s %s %s pcap_name=%s win_name=%s>" % (self.__class__.__name__, self.name, self.ip, self.mac, self.pcap_name, self.win_name) from UserDict import IterableUserDict class NetworkInterfaceDict(IterableUserDict): """Store information about network interfaces and convert between names""" def load_from_dnet(self): """Populate interface table via dnet""" for i in pcapdnet.dnet.intf(): try: # XXX: Only Ethernet for the moment: localhost is not supported by dnet and pcap # We only take interfaces that have an IP address, because the IP # is used for the mapping between dnet and pcap interface names # and this significantly improves Scapy's startup performance if i["name"].startswith("eth") and "addr" in i: self.data[i["name"]] = NetworkInterface(i) except (KeyError, PcapNameNotFoundError): pass if len(self.data) == 0: log_loading.warning("No match between your pcap and dnet network interfaces found. " "You probably won't be able to send packets. 
" "Deactivating unneeded interfaces and restarting Scapy might help.") def pcap_name(self, devname): """Return pypcap device name for given libdnet/Scapy device name This mapping is necessary because pypcap numbers the devices differently.""" try: pcap_name = self.data[devname].pcap_name except KeyError: raise ValueError("Unknown network interface %r" % devname) else: return pcap_name def devname(self, pcap_name): """Return libdnet/Scapy device name for given pypcap device name This mapping is necessary because pypcap numbers the devices differently.""" for devname, iface in self.items(): if iface.pcap_name == pcap_name: return iface.name raise ValueError("Unknown pypcap network interface %r" % pcap_name) def show(self, resolve_mac=True): """P
rint list of available network interfaces in human readable form""" print "%s %s %s" % ("IFACE".ljust(5), "IP".ljust(15), "MAC
") for iface_name in sorted(self.data.keys()): dev = self.data[iface_name] mac = str(dev.mac) if resolve_mac: mac = conf.manufdb._resolve_MAC(mac)
bruecksen/isimip
isi_mip/core/templatetags/footer.py
Python
mit
833
0.0012
from django import template from django.template.loader import render_to_string from isi_mip.core.models import FooterLinks register = template.Library() @register.simple_tag(takes_context=True) def footer(context, **kwargs): request = conte
xt['request'] settings = FooterLinks.for_site(request.site) page = context.get('page') links = [] for link in settings.footer_links.all(): n
ame = link.name target = link.target.specific if page and target == page: active = True else: active = False if target.url: links.append({'url': target.url + (link.anchor or ''), 'text': name, 'active': active}) context = { 'links': links } template = 'widgets/footer.html' return render_to_string(template, context=context)
lzw120/django
django/contrib/admin/util.py
Python
bsd-3-clause
15,005
0.0012
import datetime import decimal from django.db import models from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.deletion import Collector from django.db.models.related import RelatedObject from django.forms.forms import pretty_name from django.utils import formats from django.utils.html import escape from django.utils.safestring import mark_safe from django.utils.text import capfirst from django.utils import timezone from django.utils.encoding import force_unicode, smart_unicode, smart_str from django.utils.translation import ungettext from django.core.urlresolvers import reverse def lookup_needs_distinct(opts, lookup_path): """ Returns True if 'distinct()' should be used to query the given lookup path. """ field_name = lookup_path.split('__', 1)[0] field = opts.get_field_by_name(field_name)[0] if ((hasattr(field, 'rel') and isinstance(field.rel, models.ManyToManyRel)) or (isinstance(field, models.related.RelatedObject) and not field.field.unique)): return True return False def prepare_lookup_value(key, value): """ Returns a lookup value prepared to be used in queryset filtering. """ # if key ends with __in, split parameter into separate values if key.endswith('__in'): value = value.split(',') # if key ends with __isnull, special case '' and false if key.endswith('__isnull'): if value.lower() in ('', 'false'): value = False else: value = True return value def quote(s): """ Ensure that primary key values do not confuse the admin URLs by escaping any '/', '_' and ':' characters. Similar to urllib.quote, except that the quoting is slightly different so that it doesn't get automatically unquoted by the Web browser. """ if not isinstance(s, basestring): return s res = list(s) for i in range(len(res)): c = res[i] if c in """:/_#?;@&=+$,"<>%\\""": res[i] = '_%02X' % ord(c) return ''.join(res) def unquote(s): """ Undo the effects of quote(). Based heavily on urllib.unquote(). 
""" mychr = chr myatoi = int list = s.split('_') res = [list[0]] myappend = res.append del list[0] for item in list: if item[1:2]: try: myappend(mychr(myatoi(item[:2], 16)) + item[2:]) except ValueError: myappend('_' + item) else: myappend('_' + item) return "".join(res) def flatten_fieldsets(fieldsets): """Returns a list of field names from an admin fieldsets structure.""" field_names = [] for name, opts in fieldsets: for field in opts['fields']: # type checking feels dirty, but it seems like the best way here if type(field) == tuple: field_names.extend(field) else: field_names.append(field) return field_names def get_deleted_objects(objs, opts, user, admin_site, using): """ Find all objects related to ``objs`` that should also be deleted. ``objs`` must be a homogenous iterable of objects (e.g. a QuerySet). Returns a nested list of strings suitable for display in the template with the ``unordered_list`` filter. """ collector = NestedObjects(using=using) collector.collect(objs) perms_needed = set() def format_callback(obj): has_admin = obj.__class__ in admin_site._registry opts = obj._meta if has_admin: admin_url = reverse('%s:%s_%s_change' % (admin_site.name, opts.app_label, opts.object_name.lower()), None, (quote(obj._get_pk_val()),)) p = '%s.%s' % (opts.app_label, opts.get_delete_permission()) if not user.has_perm(p): perms_needed.add(opts.verbose_name) # Display a link to the admin page. return mark_safe(u'%s: <a href="%s">%s</a>' % (escape(capfirst(opts.verbose_name)), admin_url, escape(obj))) else: # Don't display link to edit, because it either has no # admin or is edited inline. return u'%s: %s' % (capfirst(opts.verbose_name), force_unicode(obj)) to_delete = collector.nested(format_callback) protected = [format_callback(obj) for obj in collector.pro
tected] return to_delete, perms_needed, protected class NestedObjects(Collector): def __init__(self, *args, **kwargs): super(NestedObjects, self).__init__(*args, **kwargs
) self.edges = {} # {from_instance: [to_instances]} self.protected = set() def add_edge(self, source, target): self.edges.setdefault(source, []).append(target) def collect(self, objs, source_attr=None, **kwargs): for obj in objs: if source_attr: self.add_edge(getattr(obj, source_attr), obj) else: self.add_edge(None, obj) try: return super(NestedObjects, self).collect(objs, source_attr=source_attr, **kwargs) except models.ProtectedError as e: self.protected.update(e.protected_objects) def related_objects(self, related, objs): qs = super(NestedObjects, self).related_objects(related, objs) return qs.select_related(related.field.name) def _nested(self, obj, seen, format_callback): if obj in seen: return [] seen.add(obj) children = [] for child in self.edges.get(obj, ()): children.extend(self._nested(child, seen, format_callback)) if format_callback: ret = [format_callback(obj)] else: ret = [obj] if children: ret.append(children) return ret def nested(self, format_callback=None): """ Return the graph as a nested list. """ seen = set() roots = [] for root in self.edges.get(None, ()): roots.extend(self._nested(root, seen, format_callback)) return roots def model_format_dict(obj): """ Return a `dict` with keys 'verbose_name' and 'verbose_name_plural', typically for use with string formatting. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. """ if isinstance(obj, (models.Model, models.base.ModelBase)): opts = obj._meta elif isinstance(obj, models.query.QuerySet): opts = obj.model._meta else: opts = obj return { 'verbose_name': force_unicode(opts.verbose_name), 'verbose_name_plural': force_unicode(opts.verbose_name_plural) } def model_ngettext(obj, n=None): """ Return the appropriate `verbose_name` or `verbose_name_plural` value for `obj` depending on the count `n`. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. If `obj` is a `QuerySet` instance, `n` is optional and the length of the `QuerySet` is used. 
""" if isinstance(obj, models.query.QuerySet): if n is None: n = obj.count() obj = obj.model d = model_format_dict(obj) singular, plural = d["verbose_name"], d["verbose_name_plural"] return ungettext(singular, plural, n or 0) def lookup_field(name, obj, model_admin=None): opts = obj._meta try: f = opts.get_field(name) except models.FieldDoesNotExist: # For non-field values, the value is either a method, property or # returned via a callable. if callable(name): attr = name value = attr(obj) elif (model_admin is not None and hasattr(model_admin, name) and not name == '__str__' and not name == '__unicode__'): attr = getattr(model_admin, name) value = attr(obj) else: attr = getattr(obj, name) if c
thibault/UrbanJungle
site/urbanjungle/controllers/frontend.py
Python
gpl-3.0
2,808
0.006766
import os import Image from flask import Module, request, current_app, render_template, jsonify, send_file, abort from werkzeug import secure_filename from urbanjungle.models import db from urbanjungle.models.report import Report from sqlalchemy.ext.serializer import dumps frontend = Module(__name__) def allowed_file(filename): return '.' in filename and filename.rsplit('.', 1)[1].lower() in current_app.config['ALLOWED_EXTENSIONS'] @frontend.route('/report/<latitude>,<longitude>', methods=['GET', 'PUT']) def upload(latitude, longitude): '''Handle file upload''' if request.method == 'PUT': file = request.files['file'] if file and allowed_file(file.filename): r = Report(latitude, longitude) db.session.add(r) db.session.commit() filename = '%s.jpg' % r.id file.save(os.path.join(current_app.config['UPLOAD_FOLDER'], filename)) return '' else: abort(403) else: return ''' <!doctype html> <title>Upload new File</title> <h1>Upload new File</h1> <form action="" method="put" enctype=multipart/form-data> <p><input type=file name=file> <input type=submit value=Upload> </form> ''' @frontend.route('/report/thumbnail/<report_id>.jpg') def generate_thumbnail(report_id): ''' Generate thumbnail for given image. This uri should be passed through flask only if the thumb file does not exists. Otherwise, it should be served as a static file. ''' image_path = os.path.join(current_app.config['UPLOAD_FOLDER'], report_id + '.jpg') thumb_path = os.path.join(current_app.config['THUMBS_FOLDER'], report_id + '.jpg') if '..' in imag
e_path or not os.path.exists(image_path): abort(404) if not os.path.exists(thumb_path): image = Image.open(image_path) image.thumbnail((current_app.config['THUMB_WIDTH'], current
_app.config['THUMB_HEIGHT']), \ Image.ANTIALIAS) image.save(thumb_path) return send_file(thumb_path, mimetype="image/jpeg") @frontend.route('/map') def map(): '''Render the main map page''' return render_template('map.html') @frontend.route('/map/markers/<ne_lat>,<ne_lng>,<sw_lat>,<sw_lng>.json') def get_markers(ne_lat, ne_lng, sw_lat, sw_lng): ''' Return markers related to the given frame. Send them in a json format ''' markers = Report.query \ .filter(Report.latitude < ne_lat) \ .filter(Report.latitude > sw_lat) \ .filter(Report.longitude < ne_lng) \ .filter(Report.longitude > sw_lng) \ .all() json_markers = { 'markers' : [ marker.__json__() for marker in markers ] } return jsonify(json_markers)
adaptivdesign/odooku-compat
odooku/cli/commands/info.py
Python
apache-2.0
365
0
import click __all__ = [ 'info' ] INFO_MESSAGE = """ Odooku -------------------------------------- availa
ble modules: {num_modules} """ @click.command() @click.pass_context def info(ctx): logger = ( ctx.obj['logger'] ) from odoo.modules import get_modules print INFO_MESSAGE.format( num_modules=len(get_module
s()) )
genie9/pulp
pulp_simulator.py
Python
gpl-3.0
13,319
0.015166
import sys import numpy import scipy import json import itertools import random import os from sys import stderr, exit, argv from scipy.sparse.linalg import spsolve from sklearn.metrics.pairwise import euclidean_distances from nltk.stem import SnowballStemmer def load_data_sparse(prefix) : return scipy.sparse.csr_matrix((numpy.load(prefix + '.data.npy'), numpy.load(prefix + '.indices.npy'), numpy.load(prefix + '.indptr.npy')), shape=tuple(numpy.load(prefix + '.shape.npy'))) def load_data() : return load_data_sparse('linrel') def load_features_json(fname) : with open(fname) as f : return json.load(f) def load_features() : return load_features_json('linrel_features.json') def load_topics() : return load_features_json('linrel_topics.json') def get_machine_learning_articles() : return [ int(k) for k,v in load_topics().iteritems() if 'stat.ML' in
v ] def order_keys_by_value(d) : return [ i[0] for i in sorted(d.items(), key=lambda x : x[1], reverse=True) ] def okapi_bm25(query, n, data, features) : stemmer = SnowballStemmer('english') query_terms = [ stemmer.stem(term) for term in query.lower().split() ] tmp = {} for qt in query_terms : if qt not in features :
continue findex = features[qt] for aindex in numpy.nonzero(data[:, findex])[0] : akey = aindex.item() if akey not in tmp : tmp[akey] = 1.0 tmp[akey] *= data[aindex,findex] return order_keys_by_value(tmp)[:n] def linrel(articles, feedback, n, data, features, mew=1.0, exploration_rate=0.1) : assert len(articles) == len(feedback), "articles and feedback are not the same length" X = data num_articles = X.shape[0] num_features = X.shape[1] X_t = X[ numpy.array(articles) ] X_tt = X_t.transpose() I = mew * scipy.sparse.identity(num_features, format='dia') W = spsolve((X_tt * X_t) + I, X_tt) A = X * W Y_t = numpy.matrix(feedback).transpose() tmpA = numpy.array(A.todense()) normL2 = numpy.matrix(numpy.sqrt(numpy.sum(tmpA * tmpA, axis=1))).transpose() # W * Y_t is the keyword weights K = W * Y_t mean = A * Y_t variance = (exploration_rate / 2.0) * normL2 I_t = mean + variance linrel_ordered = numpy.argsort(I_t.transpose()[0]).tolist()[0] top_n = [] for i in linrel_ordered[::-1] : if i not in articles : top_n.append(i) if len(top_n) == n : break return top_n, \ mean[ numpy.array(top_n) ].transpose().tolist()[0], \ variance[ numpy.array(top_n) ].transpose().tolist()[0] def average_distance_to_target(articles, target, distances) : return numpy.min(distances[ numpy.array(articles) ]) def main() : if len(argv) != 4 : print >> stderr, "Usage: %s <article index|random> <output dir> <exploration rate>" % argv[0] exit(1) # parse input try : experiment_target = int(argv[1]) if argv[1] != 'random' else None except ValueError : print >> stderr, "Error, %s is not an integer!" % argv[1] exit(1) results_dir = argv[2] if not os.path.isdir(results_dir) : print >> stderr, "Error, %s is not a directory/does not exist!" % results_dir exit(1) try : test_explore_rate = float(argv[3]) except ValueError : print >> stderr, "Error, %s is not a float!" 
% argv[3] exit(1) # constants num_shown = 10 num_iterations = 10 num_selections = range(num_shown + 1) #test_explore_rate = 0.1 experiment_query = "machine learning" # load the data data = load_data() num_articles = data.shape[0] num_features = data.shape[1] print "loaded %d articles x %d features" % (num_articles, num_features) features = load_features() print "loaded %d features" % len(features) machine_learning_articles = get_machine_learning_articles() num_ml_articles = len(machine_learning_articles) print "loaded %d stat.ML articles" % num_ml_articles # make sure the data is consistent assert len(features) == num_features, \ "the number of features differed in the matrix vs the feature list" # make sure the input is correct assert experiment_target is None or experiment_target in machine_learning_articles, \ "article %d is not a machine learning article!" % experiment_target # pick a random target document if needed if not experiment_target : experiment_target = machine_learning_articles[random.randint(0, num_ml_articles-1)] print "random selection of target article %d" % experiment_target # test if this has been done before out_filename = results_filename(results_dir, experiment_target) if os.path.exists(out_filename) : print "%s exists, exiting..." 
% out_filename exit(0) # precalculate all the distances between all documents and the target print "calculating distances to article %d" % experiment_target experiment_distances = euclidean_distances(data, data[experiment_target, :]) # run an initial query using tfidf print "running okapi bm25 with query '%s'" % experiment_query experiment_articles = okapi_bm25(experiment_query, num_shown, data, features) experiment_feedback = [] experiment_means = [] experiment_variances = [] # run for X iterations for iteration in range(num_iterations) : # count = 0 # print >> stderr, "iter %d - %d" % (iteration, count), # # best_feedback = None # best_average_distance = sys.float_info.max # best_version = -1 # user can pick 0 -> 10 articles # for i in num_selections : # # go through all possible combinations of feedback # # to select what the user does # for selections in itertools.combinations(range(num_shown), i) : # feedback = [ 1.0 if i in selections else 0.0 for i in range(num_shown) ] # # # run linrel without exploration using generated feedback # articles,means,variances = linrel(experiment_articles, # experiment_feedback + feedback, # num_shown, # data, # features, # exploration_rate=0.0) # # # test if these documents are better than the 'current best feedback' # # based on average (?) 
distance to target # average_distance = average_distance_to_target(articles, # experiment_target, # experiment_distances) # # if average_distance < best_average_distance : # best_version = count # best_feedback = feedback # best_average_distance = average_distance # # count += 1 # print >> stderr, "\riter %d - %d (best = %d, distance = %f)" % (iteration, count, best_version, best_average_distance), remaining_articles = range(num_shown) selected_articles = [] # BASE AVERAGE SHOULD BE WITH NO SELECTIONS articles,means,variances = linrel(experiment_articles, experiment_feedback + ([0.0] * num_shown), num_shown, data, features, exploration_rate=0.0) current_average_distance = average_distance_to_target(articles, experiment_target, e
kelvinguu/lang2program
strongsup/predicate.py
Python
apache-2.0
1,835
0
"""Predicate: output token.""" from gtd.utils import ComparableMi
xin class Predicate(ComparableMixin): """Represents a step in the logical form (i.e., an output token).""" __slots__ = ['_name', '_original_string', '_types'] def __init__(self, name, original_string=None, types=None): """Create Predicate. Args:
name (unicode) original_string (unicode) types (tuple[unicode]) """ self._name = name self._original_string = original_string self._types = types or tuple() def __eq__(self, other): return (isinstance(other, Predicate) and self._name == other._name) def __hash__(self): return hash(self._name) @property def _cmpkey(self): return self._name def __str__(self): return self._name __repr__ = __str__ @property def name(self): """Name of the predicate. Should be unique among the predicates in the same context. Returns: unicode """ return self._name @property def original_string(self): """Original string of the predicate. Can be None. Returns: unicode or None """ return self._original_string @property def types(self): """A collection of types. Returns: tuple[unicode] """ return self._types @property def delexicalized_name(self): """A placeholder used in a delexicalized utterance. Can be None if the predicate should not be used for delexicalization. A subclass can customize this method to return different placeholders for different predicate types. Returns: unicode or None """ return 'PRED'
hryamzik/ansible
lib/ansible/modules/windows/win_defrag.py
Python
gpl-3.0
2,682
0.002237
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: 2017, Dag Wieers (@dagwieers) <d
ag@wieers.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'suppo
rted_by': 'community'} DOCUMENTATION = r''' --- module: win_defrag version_added: '2.4' short_description: Consolidate fragmented files on local volumes description: - Locates and consolidates fragmented files on local volumes to improve system performance. - 'More information regarding C(win_defrag) is available from: U(https://technet.microsoft.com/en-us/library/cc731650(v=ws.11).aspx)' options: include_volumes: description: - A list of drive letters or mount point paths of the volumes to be defragmented. - If this parameter is omitted, all volumes (not excluded) will be fragmented. type: list exclude_volumes: description: - A list of drive letters or mount point paths to exclude from defragmentation. type: list freespace_consolidation: description: - Perform free space consolidation on the specified volumes. priority: description: - Run the operation at low or normal priority. choices: [ low, normal ] default: low parallel: description: - Run the operation on each volume in parallel in the background. type: bool default: 'no' requirements: - defrag.exe author: - Dag Wieers (@dagwieers) ''' EXAMPLES = r''' - name: Defragment all local volumes (in parallel) win_defrag: parallel: yes - name: 'Defragment all local volumes, except C: and D:' win_defrag: exclude_volumes: [ C, D ] - name: 'Defragment volume D: with normal priority' win_defrag: include_volumes: D priority: normal - name: Consolidate free space (useful when reducing volumes) win_defrag: freespace_consolidation: yes ''' RETURN = r''' cmd: description: The complete command line used by the module returned: always type: string sample: defrag.exe /C /V rc: description: The return code for the command returned: always type: int sample: 0 stdout: description: The standard output from the command returned: always type: string sample: Success. 
stderr: description: The error output from the command returned: always type: string sample: msg: description: Possible error message on failure returned: failed type: string sample: Command 'defrag.exe' not found in $env:PATH. changed: description: Whether or not any changes were made. returned: always type: bool sample: True '''
Johnzero/erp
openerp/addons/fetchmail/fetchmail.py
Python
agpl-3.0
12,859
0.006766
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging import time from imaplib import IMAP4 from imaplib import IMAP4_SSL from poplib import POP3 from poplib import POP3_SSL try: import cStringIO as StringIO except ImportError: import Str
ingIO import zipfile import base64 import ad
dons import netsvc from osv import osv, fields import tools from tools.translate import _ logger = logging.getLogger('fetchmail') class fetchmail_server(osv.osv): """Incoming POP/IMAP mail server account""" _name = 'fetchmail.server' _description = "POP/IMAP Server" _order = 'priority' _columns = { 'name':fields.char('Name', size=256, required=True, readonly=False), 'active':fields.boolean('Active', required=False), 'state':fields.selection([ ('draft', 'Not Confirmed'), ('done', 'Confirmed'), ], 'State', select=True, readonly=True), 'server' : fields.char('Server Name', size=256, readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}), 'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}), 'type':fields.selection([ ('pop', 'POP Server'), ('imap', 'IMAP Server'), ('local', 'Local Server'), ], 'Server Type', select=True, required=True, readonly=False), 'is_ssl':fields.boolean('SSL/TLS', help="Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)"), 'attach':fields.boolean('Keep Attachments', help="Whether attachments should be downloaded. " "If not enabled, incoming emails will be stripped of any attachments before being processed"), 'original':fields.boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference" "and attached to each processed message. 
This will usually double the size of your message database."), 'date': fields.datetime('Last Fetch Date', readonly=True), 'user' : fields.char('Username', size=256, readonly=True, states={'draft':[('readonly', False)]}), 'password' : fields.char('Password', size=1024, readonly=True, states={'draft':[('readonly', False)]}), 'action_id':fields.many2one('ir.actions.server', 'Server Action', help="Optional custom server action to trigger for each incoming mail, " "on the record that was created or updated by this mail"), 'object_id': fields.many2one('ir.model', "Create a New Record", required=True, help="Process each incoming mail as part of a conversation " "corresponding to this document type. This will create " "new documents for new conversations, or attach follow-up " "emails to the existing conversations (documents)."), 'priority': fields.integer('Server Priority', readonly=True, states={'draft':[('readonly', False)]}, help="Defines the order of processing, " "lower values mean higher priority"), 'message_ids': fields.one2many('mail.message', 'fetchmail_server_id', 'Messages', readonly=True), 'configuration' : fields.text('Configuration'), 'script' : fields.char('Script', readonly=True, size=64), } _defaults = { 'state': "draft", 'type': "pop", 'active': True, 'priority': 5, 'attach': True, 'script': '/mail/static/scripts/openerp_mailgate.py', } def onchange_server_type(self, cr, uid, ids, server_type=False, ssl=False, object_id=False): port = 0 values = {} if server_type == 'pop': port = ssl and 995 or 110 elif server_type == 'imap': port = ssl and 993 or 143 else: values['server'] = '' values['port'] = port conf = { 'dbname' : cr.dbname, 'uid' : uid, 'model' : 'MODELNAME', } if object_id: m = self.pool.get('ir.model') r = m.read(cr,uid,[object_id],['model']) conf['model']=r[0]['model'] values['configuration'] = """Use the below script with the following command line options with your Mail Transport Agent (MTA) openerp_mailgate.py -u %(uid)d -p PASSWORD -o 
%(model)s -d %(dbname)s --host=HOSTNAME --port=PORT """ % conf return {'value':values} def set_draft(self, cr, uid, ids, context=None): self.write(cr, uid, ids , {'state':'draft'}) return True def connect(self, cr, uid, server_id, context=None): if isinstance(server_id, (list,tuple)): server_id = server_id[0] server = self.browse(cr, uid, server_id, context) if server.type == 'imap': if server.is_ssl: connection = IMAP4_SSL(server.server, int(server.port)) else: connection = IMAP4(server.server, int(server.port)) connection.login(server.user, server.password) elif server.type == 'pop': if server.is_ssl: connection = POP3_SSL(server.server, int(server.port)) else: connection = POP3(server.server, int(server.port)) #TODO: use this to remove only unread messages #connection.user("recent:"+server.user) connection.user(server.user) connection.pass_(server.password) return connection def button_confirm_login(self, cr, uid, ids, context=None): if context is None: context = {} for server in self.browse(cr, uid, ids, context=context): try: connection = server.connect() server.write({'state':'done'}) except Exception, e: logger.exception("Failed to connect to %s server %s", server.type, server.name) raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e)) finally: try: if connection: if server.type == 'imap': connection.close() elif server.type == 'pop': connection.quit() except Exception: # ignored, just a consequence of the previous exception pass return True def _fetch_mails(self, cr, uid, ids=False, context=None): if not ids: ids = self.search(cr, uid, [('state','=','done')]) return self.fetch_mail(cr, uid, ids, context=context) def fetch_mail(self, cr, uid, ids, context=None): """WARNING: meant for cron usage only - will co
andrewyoung1991/abjad
abjad/tools/systemtools/Configuration.py
Python
gpl-3.0
6,475
0.001853
# -*- encoding: utf-8 -*-
from __future__ import print_function
import abc
import configobj
import os
import time
import validate
from abjad.tools.abctools.AbjadObject import AbjadObject


class Configuration(AbjadObject):
    r'''A configuration object.

    Abstract base class: subclasses supply the configuration directory,
    file name, option definitions and initial comment.  On instantiation
    the configuration file is loaded (or created), validated against the
    option spec, rewritten to disk when its contents changed, and cached
    as a plain dict in ``_settings``.
    '''

    ### CLASS VARIABLES ###

    # single slot: the dict of validated settings built in __init__()
    __slots__ = (
        '_settings',
        )

    ### INITIALIZER ###

    def __init__(self):
        # local import avoids a circular dependency between systemtools
        # and this base class
        from abjad.tools import systemtools
        # verify configuration directory
        if not os.path.exists(self.configuration_directory):
            os.makedirs(self.configuration_directory)
        # attempt to load config from disk, and validate
        # a config object will be created if none is found on disk
        config = configobj.ConfigObj(
            self.configuration_file_path,
            configspec=self._config_specification
            )
        # validate; copy=True also fills in defaults for missing keys
        validator = validate.Validator()
        validation = config.validate(validator, copy=True)
        # replace failing key:value pairs with default values
        # (ConfigObj returns True on full success, otherwise a per-key map)
        if validation is not True:
            for key, valid in validation.items():
                if not valid:
                    default = config.default_values[key]
                    message = 'Warning: config key {!r} failed validation,'
                    message += ' setting to default: {!r}.'
                    message = message.format(key, default)
                    print(message)
                    config[key] = default
        # setup output formatting
        config.write_empty_values = True
        config.comments.update(self._option_comments)
        config.initial_comment = self._initial_comment
        # write to disk if doesn't exist
        if not os.path.exists(self.configuration_file_path):
            if not os.path.exists(self.configuration_directory):
                os.makedirs(self.configuration_directory)
            config.write()
        # write to disk if different from current; the timestamped
        # "configuration file created on" line is filtered from both sides
        # so the comparison ignores it and the file is not rewritten on
        # every run
        else:
            # prevent ConfigObj from automatically writing
            config.filename = None
            with open(self.configuration_file_path, 'r') as f:
                old_config_lines = f.read()
            old_config_lines = old_config_lines.splitlines()
            old_config_lines = [line for line in old_config_lines
                if 'configuration file created on' not in line]
            old_config_lines = '\n'.join(old_config_lines)
            new_config_lines = config.write(None)
            new_config_lines = [line for line in new_config_lines
                if 'configuration file created on' not in line]
            new_config_lines = '\n'.join(new_config_lines)
            lines_are_equal = systemtools.TestManager.compare(
                old_config_lines,
                new_config_lines,
                )
            if not lines_are_equal:
                with open(self.configuration_file_path, 'w') as file_pointer:
                    config.write(file_pointer)
        # turn the ConfigObj instance into a standard dict,
        # and replace its empty string values with Nones,
        # caching the result on this AbjadConfiguration instance.
        self._settings = dict(config)
        for key, value in self._settings.items():
            if value == '' or value == 'None':
                self._settings[key] = None

    ### SPECIAL METHODS ###

    def __delitem__(self, i):
        r'''Deletes item `i` from configuration.

        Returns none.
        '''
        del(self._settings[i])

    def __getitem__(self, i):
        r'''Gets item `i` from configuration.

        Returns the value cached for key `i`.

        Raises KeyError when `i` is not a known setting.
        '''
        return self._settings[i]

    def __iter__(self):
        r'''Iterates configuration settings.

        Yields setting keys.

        Returns generator.
        '''
        for key in self._settings:
            yield key

    def __len__(self):
        r'''Gets the number of settings in configuration.

        Returns nonnegative integer.
        '''
        return len(self._settings)

    def __setitem__(self, i, arg):
        r'''Sets configuration item `i` to `arg`.

        Changes only the in-memory cache; the file on disk is not
        rewritten.

        Returns none.
        '''
        self._settings[i] = arg

    ### PRIVATE METHODS ###

    @abc.abstractmethod
    def _get_option_definitions(self):
        # subclasses return a dict mapping option name to a dict with
        # 'comment' and 'spec' entries
        raise NotImplementedError

    ### PRIVATE PROPERTIES ###

    @property
    def _config_specification(self):
        # list of "key = spec" lines, sorted by key, fed to ConfigObj
        # as the configspec
        specs = self._option_specification
        return ['{} = {}'.format(key, value)
            for key, value in sorted(specs.items())]

    @property
    def _current_time(self):
        # human-readable timestamp, e.g. "01 January 2015 12:34:56"
        return time.strftime("%d %B %Y %H:%M:%S")

    @abc.abstractproperty
    def _initial_comment(self):
        raise NotImplementedError

    @property
    def _option_comments(self):
        # option name -> comment text, extracted from the definitions
        options = self._get_option_definitions()
        comments = [(key, options[key]['comment']) for key in options]
        return dict(comments)

    @property
    def _option_specification(self):
        # option name -> validation spec, extracted from the definitions
        options = self._get_option_definitions()
        specs = [(key, options[key]['spec']) for key in options]
        return dict(specs)

    ### PUBLIC PROPERTIES ###

    @abc.abstractproperty
    def configuration_directory(self):
        r'''Gets configuration directory.

        Returns string.
        '''
        raise NotImplementedError

    @abc.abstractproperty
    def configuration_file_name(self):
        r'''Gets configuration file name.

        Returns string.
        '''
        raise NotImplementedError

    @property
    def configuration_file_path(self):
        r'''Gets configuration file path.

        Joins the configuration directory and file name.

        Returns string.
        '''
        return os.path.join(
            self.configuration_directory,
            self.configuration_file_name,
            )

    @property
    def home_directory(self):
        r'''Gets home directory.

        Checks HOME, then HOMEPATH, then APPDATA (Windows fallbacks).

        Returns string.
        '''
        path = os.environ.get('HOME') or \
            os.environ.get('HOMEPATH') or \
            os.environ.get('APPDATA')
        return os.path.abspath(path)
wangxiangyu/horizon
openstack_dashboard/dashboards/project/firewalls/forms.py
Python
apache-2.0
16,187
0
# Copyright 2013, Big Switch Networks, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import logging from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import forms from horizon import messages from horizon.utils import validators from openstack_dashboard import api port_validator = validators.validate_port_or_colon_separated_port_range LOG = logging.getLogger(__name__) class UpdateRule(forms.SelfHandlingForm): name = forms.CharField(max_length=80, label=_("Name"), required=False) description = forms.CharField( required=False, max_length=80, label=_("Description")) protocol = forms.ChoiceField( label=_("Protocol"), required=False, choices=[('TCP', _('TCP')), ('UDP', _('UDP')), ('ICMP', _('ICMP')), ('ANY', _('ANY'))], help_text=_('Protocol for the firewall rule')) action = forms.ChoiceField( label=_("Action"), required=False, choices=[('ALLOW', _('ALLOW')), ('DENY', _('DENY'))], help_text=_('Action for the firewall rule')) source_ip_address = forms.IPField( label=_("Source IP Address/Subnet"), version=forms.IPv4 | forms.IPv6, required=False, mask=True, help_text=_('Source IP address or subnet')) destination_ip_address = forms.IPField( label=_('Destination IP Address/Subnet'), version=forms.IPv4 | forms.IPv6, required=False, mask=True, help_text=_('Destination IP address or subnet')) source_port = forms.CharField( max_length=80, label=_("Source Port/Port Range"), 
required=False, validators=[port_validator], help_text=_('Source port (integer in [1, 65535] or range in a:b)')) destination_port = forms.CharField( max_length=80, label=_("Destination Port/Port Range"), required=False, validators=[port_validator], help_text=_('Destination port (integer in [1, 65535] or range' ' in a:b)')) shared = forms.BooleanField(label=_("Shared"), required=False) enabled = forms.BooleanField(label=_("Enabled"), required=False) failure_url = 'horizon:project:firewalls:index' def handle(self, request, context): rule_id = self.initial['rule_id'] name_or_id = context.get('name') or rule_id if context['protocol'] == 'ANY': context['protocol'] = None for f in ['source_ip_address', 'destination_ip_address', 'source_port', 'destination_port']: if not context[f]: context[f] = None try: rule = api.fwaas.rule_update(request, rule_id, **context) msg = _('Rule %s was successfully updated.') % name_or_id LOG.debug(msg) messages.success(request, msg) return rule except Exception as e: msg = (_('Failed to update rule %(name)s: %(reason)s') % {'name': name_or_id, 'reason': e}) LOG.error(msg) redirect = reverse(self.failure_url) exceptions.handle(request, msg, redirect=redirect) class UpdatePolicy(forms.SelfHandlingForm): name = forms.CharField(max_length=80, label=_("Name"), required=False) description = forms.CharField(required=False, max_length=80, label=_("Description")) shared = forms.BooleanField(label=_("Shared"), required=False) audited = forms.BooleanField(label=_("Audited"), required=False) failure_url = 'horizon:project:firewalls:index' def handle(self, request, context): policy_id = self.initial['policy_id'] name_or_id = context.get('name') or policy_id try: policy = api.fwaas.policy_update(request, policy_id, **context) msg = _('Policy %s was successfully updated.') % name_or_id LOG.debug(msg) messages.success(request, msg) return policy except Exception as e: msg = _('Failed to update policy %(name)s: %(reason)s') % {
'name': name_or_id, 'reason': e} LOG.error(msg) redirect = reverse(self.failure_url) exceptions.handle(request, msg, redirect=redirect) class UpdateFirewall(forms.SelfHandlingForm): name = forms.CharField(max_length=80, label=_("Name"), required=False) description = forms.CharField(max_length=80,
label=_("Description"), required=False) firewall_policy_id = forms.ChoiceField(label=_("Policy")) admin_state_up = forms.ChoiceField(choices=[(True, _('UP')), (False, _('DOWN'))], label=_("Admin State")) failure_url = 'horizon:project:firewalls:index' def __init__(self, request, *args, **kwargs): super(UpdateFirewall, self).__init__(request, *args, **kwargs) try: tenant_id = self.request.user.tenant_id policies = api.fwaas.policy_list_for_tenant(request, tenant_id) policies = sorted(policies, key=lambda policy: policy.name) except Exception: exceptions.handle(request, _('Unable to retrieve policy list.')) policies = [] policy_id = kwargs['initial']['firewall_policy_id'] policy_name = [p.name for p in policies if p.id == policy_id][0] firewall_policy_id_choices = [(policy_id, policy_name)] for p in policies: if p.id != policy_id: firewall_policy_id_choices.append((p.id, p.name_or_id)) self.fields['firewall_policy_id'].choices = firewall_policy_id_choices def handle(self, request, context): firewall_id = self.initial['firewall_id'] name_or_id = context.get('name') or firewall_id context['admin_state_up'] = (context['admin_state_up'] == 'True') try: firewall = api.fwaas.firewall_update(request, firewall_id, **context) msg = _('Firewall %s was successfully updated.') % name_or_id LOG.debug(msg) messages.success(request, msg) return firewall except Exception as e: msg = _('Failed to update firewall %(name)s: %(reason)s') % { 'name': name_or_id, 'reason': e} LOG.error(msg) redirect = reverse(self.failure_url) exceptions.handle(request, msg, redirect=redirect) class InsertRuleToPolicy(forms.SelfHandlingForm): firewall_rule_id = forms.ChoiceField(label=_("Insert Rule")) insert_before = forms.ChoiceField(label=_("Before"), required=False) insert_after = forms.ChoiceField(label=_("After"), required=False) failure_url = 'horizon:project:firewalls:index' def __init__(self, request, *args, **kwargs): super(InsertRuleToPolicy, self).__init__(request, *args, **kwargs) try: 
tenant_id = self.request.user.tenant_id all_rules = api.fwaas.rule_list_for_tenant(request, tenant_id) all_rules = sorted(all_rules, key=lambda rule: rule.name_or_id) available_rules = [r for r in all_rules if not r.firewall_policy_id] current_rules = [] for r in kwargs['initial']['
PersianWikipedia/pywikibot-core
pywikibot/families/vikidia_family.py
Python
mit
661
0
# -*- coding: utf-8 -*-
"""Family module for Vikidia."""
#
# (C) Pywikibot team, 2010-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals

from pywikibot import family


class Family(family.SubdomainFamily):

    """Family class for Vikidia."""

    name = 'vikidia'
    domain = 'vikidia.org'

    # Language subdomains with real content.
    codes = [
        'ca', 'de', 'el', 'en', 'es', 'eu',
        'fr', 'hy', 'it', 'ru', 'scn',
    ]

    # Sites we want to edit but not count as real languages
    test_codes = ['central', 'test']

    def protocol(self, code):
        """Return https as the protocol for this family."""
        return 'https'
j-towns/pymanopt
pymanopt/tools/autodiff/_tensorflow.py
Python
bsd-3-clause
2,913
0
"""
Module containing functions to differentiate functions using tensorflow.
"""
try:
    import tensorflow as tf
    # NOTE(review): _hessian_vector_product is a private TF API — may break
    # across TensorFlow releases; confirm against the pinned TF version.
    from tensorflow.python.ops.gradients import _hessian_vector_product
except ImportError:
    # tensorflow is optional; is_available() reports whether it loaded
    tf = None

from ._backend import Backend, assert_backend_available


class TensorflowBackend(Backend):
    """Autodiff backend that evaluates TF1 graph tensors in a session.

    Objectives are `tf.Tensor`s defined in terms of `tf.Variable`
    arguments; compiled functions feed concrete values for those
    variables and run the graph in a shared `tf.Session`.
    """

    def __init__(self):
        # one session reused by every compiled function/gradient/hessian
        if tf is not None:
            self._session = tf.Session()

    def __str__(self):
        return "tensorflow"

    def is_available(self):
        """Return True when tensorflow was importable."""
        return tf is not None

    @assert_backend_available
    def is_compatible(self, objective, argument):
        """Return True if `objective` is a tf.Tensor with usable arguments.

        Raises ValueError when `argument` is neither a tf.Variable nor a
        sequence of tf.Variables.
        """
        if isinstance(objective, tf.Tensor):
            if (argument is None or
                not isinstance(argument, tf.Variable) and
                not all([isinstance(arg, tf.Variable)
                         for arg in argument])):
                raise ValueError(
                    "Tensorflow backend requires an argument (or sequence of "
                    "arguments) with respect to which compilation is to be "
                    "carried out")
            return True
        return False

    @assert_backend_available
    def compile_function(self, objective, argument):
        """Return a callable evaluating `objective` at a concrete point.

        For a single argument the callable takes one array; for a list of
        arguments it takes a matching sequence of arrays.
        """
        if not isinstance(argument, list):
            def func(x):
                feed_dict = {argument: x}
                return self._session.run(objective, feed_dict)
        else:
            def func(x):
                # pair each placeholder variable with its concrete value
                feed_dict = {i: d for i, d in zip(argument, x)}
                return self._session.run(objective, feed_dict)
        return func

    @assert_backend_available
    def compute_gradient(self, objective, argument):
        """
        Compute the gradient of 'objective' and return as a function.
        """
        # gradient graph is built once, outside the returned closure
        tfgrad = tf.gradients(objective, argument)
        if not isinstance(argument, list):
            def grad(x):
                feed_dict = {argument: x}
                # tf.gradients always returns a list; unwrap for the
                # single-argument case
                return self._session.run(tfgrad[0], feed_dict)
        else:
            def grad(x):
                feed_dict = {i: d for i, d in zip(argument, x)}
                return self._session.run(tfgrad, feed_dict)
        return grad

    @assert_backend_available
    def compute_hessian(self, objective, argument):
        """Return a callable computing the Hessian-vector product.

        The returned `hess(x, a)` evaluates H(x) @ a, where the direction
        `a` is fed through auxiliary zero-initialized variables of the
        same shape as `argument`.
        """
        if not isinstance(argument, list):
            # auxiliary variable holding the direction vector
            argA = tf.Variable(tf.zeros(tf.shape(argument)))
            tfhess = _hessian_vector_product(objective, [argument], [argA])
            def hess(x, a):
                feed_dict = {argument: x, argA: a}
                return self._session.run(tfhess[0], feed_dict)
        else:
            argA = [tf.Variable(tf.zeros(tf.shape(arg))) for arg in argument]
            tfhess = _hessian_vector_product(objective, argument, argA)
            def hess(x, a):
                # feed points and directions together; x and a are
                # sequences aligned with argument and argA respectively
                feed_dict = {i: d for i, d in zip(argument+argA, x+a)}
                return self._session.run(tfhess, feed_dict)
        return hess
conklinbd/MovementAnalysis
TemplateInstall/PortalDeploy/StageApp.py
Python
apache-2.0
726
0.009642
"""
Stage the web apps for the Movement Analysis template on an ArcGIS portal.

@author: ArcGIS for Intelligence
@contact: defensesolutions@esri.com
@company: Esri
@version: 1.0
@description: Used to stage the apps for Movement Analysis
@requirements: Python 2.7.x, ArcGIS 10.3.1
@copyright: Esri, 2015
"""
import arcresthelper
from arcresthelper import portalautomation

# destination for the publish log
log_file='./logs/DamageAssessment.log'
# app-staging job definitions consumed by publishfromconfig()
configFiles= ['./configs/StageApp.json']
# portal credentials shared by all jobs
globalLoginInfo = './configs/GlobalLoginInfo.json'
# timestamp format used in log entries
dateTimeFormat = '%Y-%m-%d %H:%M'

# connect to the portal and run every staging config in sequence
pa = portalautomation.portalautomation(globalLoginInfo)
pa.setLog(log_file=log_file)
pa.publishfromconfig(configFiles=configFiles,
                     combinedApp=None,
                     dateTimeFormat=dateTimeFormat)
# release the portal connection explicitly
del pa
cortext/crawtextV2
~/venvs/crawler/lib/python2.7/site-packages/lxml/html/soupparser.py
Python
mit
4,360
0.002982
__doc__ = """External interface to the BeautifulSoup HTML parser.
"""

__all__ = ["fromstring", "parse", "convert_tree"]

from lxml import etree, html
from BeautifulSoup import \
     BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString


def fromstring(data, beautifulsoup=None, makeelement=None, **bsargs):
    """Parse a string of HTML data into an Element tree using the
    BeautifulSoup parser.

    Returns the root ``<html>`` Element of the tree.

    You can pass a different BeautifulSoup parser through the
    `beautifulsoup` keyword, and a different Element factory function
    through the `makeelement` keyword.  By default, the standard
    ``BeautifulSoup`` class and the default factory of `lxml.html` are
    used.
    """
    return _parse(data, beautifulsoup, makeelement, **bsargs)


def parse(file, beautifulsoup=None, makeelement=None, **bsargs):
    """Parse a file into an ElementTree using the BeautifulSoup parser.

    You can pass a different BeautifulSoup parser through the
    `beautifulsoup` keyword, and a different Element factory function
    through the `makeelement` keyword.  By default, the standard
    ``BeautifulSoup`` class and the default factory of `lxml.html` are
    used.
    """
    # accept either a file-like object or a path
    if not hasattr(file, 'read'):
        file = open(file)
    root = _parse(file, beautifulsoup, makeelement, **bsargs)
    return etree.ElementTree(root)


def convert_tree(beautiful_soup_tree, makeelement=None):
    """Convert a BeautifulSoup tree to a list of Element trees.

    Returns a list instead of a single root Element to support
    HTML-like soup with more than one root element.

    You can pass a different Element factory through the `makeelement`
    keyword.
    """
    if makeelement is None:
        makeelement = html.html_parser.makeelement
    root = _convert_tree(beautiful_soup_tree, makeelement)
    # detach the converted children so each can stand as its own tree
    children = root.getchildren()
    for child in children:
        root.remove(child)
    return children

# helpers

def _parse(source, beautifulsoup, makeelement, **bsargs):
    # Core conversion shared by fromstring() and parse():
    # run BeautifulSoup, then rebuild its tree as lxml Elements.
    if beautifulsoup is None:
        beautifulsoup = BeautifulSoup
    if makeelement is None:
        makeelement = html.html_parser.makeelement
    if 'convertEntities' not in bsargs:
        # have BeautifulSoup resolve HTML entities unless caller overrides
        bsargs['convertEntities'] = 'html'
    tree = beautifulsoup(source, **bsargs)
    root = _convert_tree(tree, makeelement)
    # from ET: wrap the document in a html root element, if necessary
    if len(root) == 1 and root[0].tag == "html":
        return root[0]
    root.tag = "html"
    return root


def _convert_tree(beautiful_soup_tree, makeelement):
    # Build the lxml element for the soup node, then recurse into children.
    root = makeelement(beautiful_soup_tree.name,
                       attrib=dict(beautiful_soup_tree.attrs))
    _convert_children(root, beautiful_soup_tree, makeelement)
    return root


def _convert_children(parent, beautiful_soup_tree, makeelement):
    # Translate each BeautifulSoup child node into the lxml model.
    # et_child tracks the most recent element sibling so that text
    # becomes .text of the parent or .tail of that sibling.
    SubElement = etree.SubElement
    et_child = None
    for child in beautiful_soup_tree:
        if isinstance(child, Tag):
            # attribute values may still contain entities; unescape them
            et_child = SubElement(parent, child.name, attrib=dict(
                [(k, unescape(v)) for (k, v) in child.attrs]))
            _convert_children(et_child, child, makeelement)
        elif type(child) is NavigableString:
            # exact type check: Comment etc. subclass NavigableString
            _append_text(parent, et_child, unescape(child))
        else:
            if isinstance(child, Comment):
                parent.append(etree.Comment(child))
            elif isinstance(child, ProcessingInstruction):
                # soup stores a PI as "target data"; split once on space
                parent.append(etree.ProcessingInstruction(
                    *child.split(' ', 1)))
            else: # CData
                _append_text(parent, et_child, unescape(child))


def _append_text(parent, element, text):
    # Attach text per the lxml model: before any element sibling it is
    # parent.text, afterwards it is the previous sibling's tail.
    if element is None:
        parent.text = (parent.text or '') + text
    else:
        element.tail = (element.tail or '') + text


# copied from ET's ElementSoup

try:
    from html.entities import name2codepoint # Python 3
except ImportError:
    from htmlentitydefs import name2codepoint

import re
handle_entities = re.compile("&(\w+);").sub


def unescape(string):
    """Replace named character references in `string` with characters.

    Unknown entity names are left untouched.
    """
    if not string:
        return ''
    # work around oddities in BeautifulSoup's entity handling
    def unescape_entity(m):
        try:
            # NOTE(review): unichr is Python 2 only; this module predates
            # Python 3 support.
            return unichr(name2codepoint[m.group(1)])
        except KeyError:
            return m.group(0) # use as is
    return handle_entities(unescape_entity, string)
ijzer/cwbot-ndy
kol/data/Patterns.py
Python
bsd-3-clause
28,912
0.014596
""" This module holds all of the regular expression patterns that pykol uses. It makes sense to store them all in the same place since many patterns are used by multiple requests. The 'patterns' data object is a dictionary mapping patternId to pattern. If pattern is a tuple, then the first element of the tuple should be the pattern while the second element is a flag to pass to re.compile (like re.DOTALL). """ import re patterns = { # General patterns. "whitespace" : r'([\t ]+)', "results" : r'<b>Results:<\/b><\/td><\/tr><tr><td[^<>]*><center><table><tr><td>(.*?)</td></tr></table></center></td></tr>', "htmlComment" : r'<!--.*?-->', "htmlTag" : r'<[^>]*?>', # Login-related patterns. "accountPwd" : r'var pwdhash = "([0-9a-f]+)";', "accountId" : r'var playerid = ([0-9]+);', "accountName" : r'<a [^<>]*href="charsheet\.php">(?:<b>)?([^<>]+)<', "badPassword" : r'<b>Login failed\. Bad password\.<\/b>', "loginChallenge" : r'name="?challenge"?\s+value="?([0-9a-f]+)"?', "loginURL" : r'^(.*)login\.php\?loginid=([0-9a-f]+)', "mainFrameset" : r'<frameset id="?rootset"?', "tooManyLoginsFailuresFromThisIP" : r'Too many login failures from this IP', "waitOneMinuteLoginError" : r'Please wait a minute', "waitTwoMinutesLoginError" : r"you'll need to wait a couple of minutes before you can log in again\.", "waitFiveMinutesLoginError" : r"Please wait five minutes and try again\.", "waitFifteenMinutesLoginError" : r'Please wait fifteen minutes and try again\.', # Item-related patterns. "menuItem" : r'<input type=radio name=whichitem value="?(-?[0-9]+)"?></td><td><img .*? onclick=\'descitem\("?([^"]+)"?\);\'>', "acquireSingleItem" : r'<td[^>]*><img src="[^"]*" alt="[^"]*" title="[^"]*"[^>]*descitem\(([0-9]+)\)[^>]*><\/td><td[^>]*>You acquire an item', "acquireMultipleItems" : r'<td[^>]*><img src="[^"]*" alt="[^"]*" title="[^"]*"[^>]*descitem\(([0-9]+)\)[^>]*><\/td><td[^>]*>You acquire <b>([0-9,]*) ', "gainMeat" : r'<td><img src="[^"]*meat\.gif"[^>]*><\/td><td[^>]*>You gain ([0-9,]*?) 
Meat\.<\/td>', "loseMeat" : r'You lose ([0-9,]*?) Meat', "isCocktailcraftingIngredient" : (r'<br>\(Cocktailcrafting ingredient\)<br>'), "isCookingIngredient" : r'<br>\(Cooking ingredient\)<br>', "isJewelrymakingComponent" : r'<br>\(Jewelrymaking component\)<br>', "isMeatsmithingComponent" : r'<br>\(Meatsmithing component\)<br>', "inventorySingleItem" : r'<img [^>]*descitem\(([0-9]+)[^>]*></td><td[^>]*><b[^>]*>([^<>]+)</b>&nbsp;<span><\/span>',
"inventoryMultipleItems" : r'<img [^>]*descitem\(([0-9]+)[^>]*></td><td[^>]*><b[^>]*>([^<>]+)</b>&nbsp;<span>\(([0-9]+)\)<\/span>', "itemAutosell" : r'<br>Selling Price: <b>(\d*) Meat\.<\/b>', "itemImage" : r'<img src="http:\/\/images\.kingdomofloathing\.com\/itemimages\/(.*?)"',
"itemName" : r'<b>(.+?)<\/b>', "itemType" : r'<br>Type: <b>([^<]*)<.*\/b><br>', "tooFull" : r"You're too full to eat that\.", "tooDrunk" : r"You're way too drunk already\.", "notBooze" : r"That's not booze\.", "notFood" : r"That's not something you can eat\.", "notEquip" : r"That's not something you can equip\. And stop screwing with the URLs\.", "notEnoughToUse" : r"<table><tr><td>You don't have that many of that item.</td></tr></table>", "notMultiUse" : r"<table><tr><td>That item isn't usable in quantity.</td></tr></table>", # Message-related patterns. "brickMessage" : r"http:\/\/images\.kingdomofloathing\.com\/adventureimages\/(brokewin|bigbrick)\.gif", "candyHeartMessage" : r"http:\/\/images\.kingdomofloathing\.com\/otherimages\/heart\/hearttop\.gif", "coffeeMessage" : r"http:\/\/images\.kingdomofloathing\.com\/otherimages\/heart\/cuptop\.gif", "fullMessage" : ('<tr><td[^>]*><input type=checkbox name="sel([0-9]+)".*?<b>[^<]*<\/b> <a href="showplayer\.php\?who=([0-9]+)">([^<]*)<\/a>.*?<b>Date:<\/b>([^<]*?)</b>.*?<blockquote>(.*?)<\/blockquote>', re.DOTALL), "userInHardcoreRonin" : r'<center><table><tr><td>That player cannot receive Meat or items from other players right now\.', "userIgnoringUs" : r"<center><table><tr><td>This message could not be sent, because you are on that player's ignore list\.<\/td><\/tr><\/table><\/center>", "notEnoughItemsToSend" : r"<center><table><tr><td>You don't have enough of one of the items you're trying to send\.<\/td><\/tr><\/table><\/center>", "messageSent" : r"<td><center>Message sent\.<\/center><\/td>", "kmailNotSentUserTrendy" : r"<center><table><tr><td>That player would never use something as old and outmoded as", "weAreIgnoringUser" : r"<td>This message could not be sent, because that player is on your ignore list\.<\/td>", # Error patterns. 
"cantPulverizeItem" : r"<td>That's not something you can pulverize\.<\/td>", "notEnoughItems" : r"(?:<td>You haven't got that many\.<\/td>)|(?:You don't have the item you're trying to use\.)|(?:You don't have the item you're trying to equip\.)", # Chat patterns. "currentChatChannel" : r'<font color="?#?\w+"?>Currently in channel: ([^<>]+)<', "chatLastSeen" : r"lastseen:([0-9]+)", "chatChannel" : r'^<font color="?#?\w+"?>\[([^<>]+)\]<\/font> ', "chatMessage" : r'<b><a target="?mainpane"? href="showplayer\.php\?who=(-?[0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/font>(?:<\/b>|<\/a>|:)* (.*)$', "chatEmote" : r'<b><i><a target="?mainpane"? href="showplayer\.php\?who=([0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/b><\/font><\/a> (.*)<\/i>$', "privateChat" : r'<a target="?mainpane"? href="showplayer\.php\?who=([0-9]+)"><font color="?blue"?><b>([^)]+) \(private\):<\/b><\/font><\/a> <font color="?blue"?>(.*)</font>$', "chatNewKmailNotification" : r'<a target="?mainpane"? href="messages\.php"><font color="?green"?>New message received from <a target="?mainpane"? href=\'showplayer\.php\?who=([0-9]+)\'><font color="?green"?>([^<>]+)<\/font><\/a>\.<\/font><\/a>$', "chatLink" : r'<a target="?_blank"? href="([^"]+)"><font color="?blue"?>\[link\]<\/font><\/a> ', "chatWhoResponse" : r'<table><tr><td class=tiny><center><b>Players in (?:this channel|channel \w+):', "chatWhoPerson" : r'<a (?:class="([^"]+)" )?target="?mainpane"? 
href="showplayer\.php\?who=([0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/font><\/a>', "chatLinkedPlayer" : r"<a style='color: #?\w+' href='showplayer\.php\?who=([0-9]+)' target=mainpane>([^<]+)<\/a>", "newChatChannel" : r"<font color=[^>]+>You are now talking in channel: ([^\,]+?)\.<p><p>(.*?)</font>", "chatListenResponse" : r"<font color=[^>]+>Currently listening to channels:(.*?<b>.*?</b>.*?)</font>", "chatListenCurrent" : r"<br>&nbsp;&nbsp;<b>(.*?)</b>", "chatListenOthers" : r"&nbsp;&nbsp;([^<>]*?)<br>", "chatStartListen" : r'<font color=[^>]+>Now listening to channel: ([^>]+)</font>', "chatStopListen" : r'<font color=[^>]+>No longer listening to channel: ([^>]+)</font>', "chatMultiLineStart" : r'<b><a target="?mainpane"? href="showplayer\.php\?who=(-?[0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/font><\/b><\/a>:$', "chatMultiLineEmote" : r'<b><i><a target="?mainpane"? href="showplayer\.php\?who=(-?[0-9]+)"><font color="?#?\w+"?>([^<>]+)<\/b><\/font><\/a>$', "outgoingPrivate" : r'<font color="?blue"?><b>private to <a class=nounder target="?mainpane"? href="?showplayer.php\?who=([0-9]+)"?><font color="?blue"?>(.*?)</font></a></b>:(.*?)</font></br>', "chatPlayerLoggedOn" : r'<font color=green><a target=mainpane href=\'showplayer\.php\?who=([0-9]+)\'><font color=green><b>([^<>]+)<\/b><\/font><\/a> logged on\.<\/font>$', "chatPlayerLoggedOff" : r'<font color=green><a target=mainpane href=\'showplayer\.php\?who=([0-9]+)\'><font color=green><b>([^<>]+)<\/b><\/font><\/a> logged off\.<\/font>$', "chatTalkieFrequency" : r'<font color=green>The frequency is (.*?), Mr. Rather\.<\/font>', # Clan dungeon patterns. "dungeonActivity" : r'(?:^|<br>|<br><b>|<b>)([^<>]+) \(#([0-9,]+)\) ([^<>]+) \(([0-9,]+) turns?\)', "dungeonLootDistribution" : r'(?:<blockquote>|<br>)([^<>]+) \(#([0-9,]+)\) distributed <b>([^<>]+)</b> to ([^<>]+) \(#([0-9,]+)\)<br>', "dungeonUndis
indirectlylit/kolibri
kolibri/core/content/test/test_content_app.py
Python
mit
72,718
0.001595
""" To run this test, type this in command line <kolibri manage test -- kolibri.core.content> """ import datetime import unittest import uuid import mock import requests from django.conf import settings from django.core.cache import cache from django.core.urlresolvers import reverse from django.test import TestCase from django.utils import timezone from le_utils.constants import content_kinds from rest_framework import status from rest_framework.test import APITestCase from kolibri.core.auth.models import Facility from kolibri.core.auth.models import FacilityUser from kolibri.core.auth.test.helpers import provision_device from kolibri.core.content import models as content from kolibri.core.content.test.test_channel_upgrade import ChannelBuilder from kolibri.core.device.models import DevicePermissions from kolibri.core.device.models import DeviceSettings from kolibri.core.logger.models import ContentSessionLog from kolibri.core.logger.models import ContentSummaryLog DUMMY_PASSWORD = "password" class ContentNodeTestBase(object): """ Basecase for content metadata methods """ def test_get_prerequisites_for(self): """ test the directional characteristic of prerequisite relationship """ c1 = content.ContentNode.objects.get(title="c1") root = content.ContentNode.objects.get(title="root") # if root is the prerequisite of c1 expected_output = content.ContentNode.objects.filter(title__in=["root"]) actual_output = content.ContentNode.objects.filter(prerequisite_for=c1) self.assertEqual(set(expected_output), set(actual_output)) # then c1 should not be the prerequisite of root unexpected_output = content.ContentNode.objects.filter(title__in=["c1"]) actual_output = content.ContentNode.objects.filter(prerequisite_for=root) self.assertNotEqual(set(actual_output), set(unexpected_output)) def test_get_has_prerequisites(self): """ test the directional characteristic of prerequisite relationship """ c1 = content.ContentNode.objects.get(title="c1") root = 
content.ContentNode.objects.get(title="root") # if root is the prerequisite of c1 expected_output = content.ContentNode.objects.filter(title__in=["c1"]) actual_output = content.ContentNode.objects.filter(has_prerequisite=root) self.assertEqual(set(expected_output), set(actual_output)) # then c1 should not be the prerequisite of root unexpected_output = content.ContentNode.objects.filter(title__in=["root"]) actual_output = content.ContentNode.objects.filter(has_prerequisite=c1) self.assertNotEqual(set(actual_output), set(unexpected_output)) def test_get_all_related(self): """ test the nondirectional characteristic of related relationship """ c1 = content.ContentNode.objects.get(title="c1") c2 = content.ContentNode.objects.get(title="c2") # if c1 is related to c2 expected_output = content.ContentNode.objects.filter(title__in=["c2"]) actual_output = content.ContentNode.objects.filter(related=c1) self.assertEqual(set(expected_output), set(actual_output)) # then c2 should be related to c1 expected_output = content.ContentNode.objects.filter(title__in=["c1"]) actual_output = content.ContentNode.objects.filter(related=c2) self.assertEqual(set(expected_output), set(actual_output)) def test_descendants_of_kind(self): p = content.ContentNode.objects.get(title="root") expected_output = content.ContentNode.objects.filter(title__in=["c1"]) actual_output = p.get_descendants(include_self=False).filter( kind=content_kinds.VIDEO ) self.assertEqual(set(expected_output), set(actual_output)) def test_get_top_level_topics(self): p = content.ContentNode.objects.get(title="root") expected_output = content.ContentNode.objects.filter( parent=p, kind=content_kinds.TOPIC ) actual_output = ( content.ContentNode.objects.get(title="root") .get_children() .filter(kind=content_kinds.TOPIC) ) self.assertEqual(set(expected_output), set(actual_output)) def test_tag_str(self): # test for ContentTag __str__ p = content.ContentTag.objects.get(tag_name="tag_2") self.assertEqual(str(p), "tag_2") def 
test_lang_str(self): # test for Language __str__ p = content.Language.objects.get(lang_code="en") self.assertEqual(str(p), "English-Test") def test_channelmetadata_str(self): # test for ChannelMetadata __str__ p = content.ChannelMetadata.objects.get(name="testing") self.assertEqual(str(p), "testing") def tes
t_tags(self): root_tag_count = content.ContentNode.objects.get(title="root").tags.count() self.assertEqual(root_tag_count, 3) c1_tag_count = content.Con
tentNode.objects.get(title="c1").tags.count() self.assertEqual(c1_tag_count, 1) c2_tag_count = content.ContentNode.objects.get(title="c2").tags.count() self.assertEqual(c2_tag_count, 1) c2c1_tag_count = content.ContentNode.objects.get(title="c2c1").tags.count() self.assertEqual(c2c1_tag_count, 0) def test_local_files(self): self.assertTrue( content.LocalFile.objects.filter( id="9f9438fe6b0d42dd8e913d7d04cfb2b2" ).exists() ) self.assertTrue( content.LocalFile.objects.filter( id="725257a0570044acbd59f8cf6a68b2be" ).exists() ) self.assertTrue( content.LocalFile.objects.filter( id="e00699f859624e0f875ac6fe1e13d648" ).exists() ) self.assertTrue( content.LocalFile.objects.filter( id="4c30dc7619f74f97ae2ccd4fffd09bf2" ).exists() ) self.assertTrue( content.LocalFile.objects.filter( id="8ad3fffedf144cba9492e16daec1e39a" ).exists() ) def test_delete_tree(self): channel = content.ChannelMetadata.objects.first() channel_id = channel.id channel.delete_content_tree_and_files() self.assertFalse( content.ContentNode.objects.filter(channel_id=channel_id).exists() ) self.assertFalse(content.File.objects.all().exists()) class ContentNodeQuerysetTestCase(TestCase): fixtures = ["content_test.json"] the_channel_id = "6199dde695db4ee4ab392222d5af1e5c" @classmethod def setUpTestData(cls): provision_device() cls.facility = Facility.objects.create(name="facility") cls.admin = FacilityUser.objects.create(username="admin", facility=cls.facility) cls.admin.set_password(DUMMY_PASSWORD) cls.admin.save() cls.facility.add_admin(cls.admin) def test_filter_uuid(self): content_ids = content.ContentNode.objects.values_list("id", flat=True) self.assertEqual( content.ContentNode.objects.filter_by_uuids(content_ids).count(), len(content_ids), ) def test_filter_uuid_bad_uuid(self): content_ids = list(content.ContentNode.objects.values_list("id", flat=True)) content_ids[0] = '7d1bOR"1"="1"d08e29c36115f1af3da99' self.assertEqual( content.ContentNode.objects.filter_by_uuids(content_ids).count(), 0 ) 
kind_activity_map = { content_kinds.EXERCISE: "practice", content_kinds.VIDEO: "watch", content_kinds.AUDIO: "listen", content_kinds.DOCUMENT: "read", content_kinds.HTML5: "explore", } def infer_learning_activity(kind): activity = kind_activity_map.get(kind) if activity: return [activity] return [] class ContentNodeAPITestCase(APITestCase): """ Testcase for content API methods """ fixtures = ["content_test.json"] the_channel_id = "6199dde695db4ee4ab392222d5af1e5c" @classmethod def setUpTestData(cl
bancek/egradebook
src/apps/infosys/admin.py
Python
gpl-3.0
6,165
0.005515
from django.contrib import admin
from django.utils.text import truncate_words
from django.core import urlresolvers
from django.utils.html import escape

from infosys.models import *


def uni_tr_10(field_name):
    """Build an admin list column showing ``field_name`` truncated to 10 words."""
    def func(obj):
        return truncate_words(unicode(getattr(obj, field_name)), 10)
    func.short_description = field_name
    func.admin_order_field = field_name
    return func


def uni_fk_tr_10(field_name, order_field=None):
    """Build an admin list column rendering a change-page link for a FK target.

    ``field_name`` may use ``__`` to follow nested relations.  ``order_field``
    overrides the ordering field name; pass ``False`` to disable ordering.
    """
    fnparts = field_name.split('__')

    def func(obj):
        # Walk the relation chain to the final related object.
        f = getattr(obj, fnparts[0])
        for part in fnparts[1:]:
            f = getattr(f, part)
        url_name = 'admin:%s_%s_change' % (f._meta.app_label, f._meta.module_name)
        url = urlresolvers.reverse(url_name, args=(f.pk,))
        name = escape(truncate_words(unicode(f), 10))
        return u'<a href="%s">%s</a>' % (url, name)
    func.allow_tags = True
    func.short_description = fnparts[-1]
    if order_field is not False:
        func.admin_order_field = order_field or field_name
    return func


class UporabnikNameColumnsMixin(object):
    """Shared ``ime``/``priimek`` columns read from the related auth user.

    Extracted here because the identical method pair was previously
    duplicated verbatim in ProfesorAdmin, StarsAdmin and DijakAdmin.
    """
    def ime(self, obj):
        return obj.uporabnik.first_name
    ime.admin_order_field = 'uporabnik__first_name'

    def priimek(self, obj):
        return obj.uporabnik.last_name
    priimek.admin_order_field = 'uporabnik__last_name'


class NaslovAdmin(admin.ModelAdmin):
    search_fields = ['ulica', 'hisna_stevilka', 'posta', 'kraj']
    list_display = ['id', 'ulica', 'hisna_stevilka', 'posta', 'kraj']


class SolskoLetoAdmin(admin.ModelAdmin):
    search_fields = []
    list_display = ['id', 'zacetno_leto', 'koncno_leto', 'aktivno']
    raw_id_fields = []


class ProfesorAdmin(UporabnikNameColumnsMixin, admin.ModelAdmin):
    search_fields = ['uporabnik__username', 'uporabnik__first_name', 'uporabnik__last_name']
    list_display = ['id', uni_fk_tr_10('uporabnik', 'uporabnik__username'), 'ime', 'priimek']
    raw_id_fields = ['uporabnik']


class SmerAdmin(admin.ModelAdmin):
    search_fields = ['smer']
    list_display = ['id', 'smer']


class PredmetAdmin(admin.ModelAdmin):
    search_fields = ['predmet', 'ime']
    list_display = ['id', 'ime', 'predmet']


class StarsAdmin(UporabnikNameColumnsMixin, admin.ModelAdmin):
    search_fields = ['uporabnik__username', 'uporabnik__first_name', 'uporabnik__last_name']
    list_display = ['id', uni_fk_tr_10('uporabnik', 'uporabnik__username'), 'ime',
                    'priimek', uni_fk_tr_10('prebivalisce')]
    raw_id_fields = ['uporabnik', 'prebivalisce']


class DijakAdmin(UporabnikNameColumnsMixin, admin.ModelAdmin):
    search_fields = ['uporabnik__username', 'uporabnik__first_name', 'uporabnik__last_name', 'emso']
    list_display = ['id', uni_fk_tr_10('uporabnik', 'uporabnik__username'), 'ime',
                    'priimek', 'emso']
    raw_id_fields = ['uporabnik', 'stalno_prebivalisce', 'zacasno_prebivalisce', 'oce', 'mati']
    list_filter = ['v_dijaskem_domu']


class RazredAdmin(admin.ModelAdmin):
    search_fields = ['ime']
    list_display = ['id', 'ime', uni_fk_tr_10('solsko_leto'), uni_fk_tr_10('smer'),
                    uni_fk_tr_10('razrednik')]
    raw_id_fields = ['razrednik']
    filter_horizontal = ['dijaki']


class PoucujeAdmin(admin.ModelAdmin):
    search_fields = []
    list_display = ['id', uni_fk_tr_10('profesor'), uni_fk_tr_10('razred'), uni_fk_tr_10('predmet')]
    raw_id_fields = ['profesor', 'razred', 'predmet']


class OcenjevalnoObdobjeAdmin(admin.ModelAdmin):
    search_fields = ['ime']
    list_display = ['id', 'ime', uni_fk_tr_10('solsko_leto'), 'zacetek', 'konec']


class DogodekAdmin(admin.ModelAdmin):
    search_fields = ['ime']
    list_display = ['id', uni_tr_10('ime'), uni_tr_10('datum'),
                    uni_fk_tr_10('poucuje__predmet', 'poucuje__predmet__ime'),
                    uni_fk_tr_10('poucuje__profesor', False), 'ocenjevalno_obdobje']
    raw_id_fields = ['poucuje']


class OcenaAdmin(admin.ModelAdmin):
    search_fields = ['ocena', 'opomba', 'dijak__uporabnik__first_name',
                     'dijak__uporabnik__last_name', 'dijak__uporabnik__username']
    list_display = ['id', uni_fk_tr_10('dijak', False), uni_fk_tr_10('poucuje__profesor', False),
                    uni_fk_tr_10('poucuje__razred', False), 'ocena', 'datum_pridobitve',
                    uni_fk_tr_10('ocenjevalno_obdobje'), uni_fk_tr_10('dogodek')]
    raw_id_fields = ['dijak', 'poucuje', 'dogodek']


class ZakljucenaOcenaAdmin(admin.ModelAdmin):
    search_fields = ['ocena', 'dijak__uporabnik__first_name',
                     'dijak__uporabnik__last_name', 'dijak__uporabnik__username']
    list_display = ['id', uni_fk_tr_10('dijak'), uni_fk_tr_10('poucuje__profesor', False),
                    uni_fk_tr_10('poucuje__razred', False), 'ocena', 'datum_pridobitve']
    raw_id_fields = ['dijak', 'poucuje']


admin.site.register(Naslov, NaslovAdmin)
admin.site.register(SolskoLeto, SolskoLetoAdmin)
admin.site.register(Profesor, ProfesorAdmin)
admin.site.register(Smer, SmerAdmin)
admin.site.register(Predmet, PredmetAdmin)
admin.site.register(Stars, StarsAdmin)
admin.site.register(Dijak, DijakAdmin)
admin.site.register(Razred, RazredAdmin)
admin.site.register(Poucuje, PoucujeAdmin)
admin.site.register(OcenjevalnoObdobje, OcenjevalnoObdobjeAdmin)
admin.site.register(Dogodek, DogodekAdmin)
admin.site.register(Ocena, OcenaAdmin)
admin.site.register(ZakljucenaOcena, ZakljucenaOcenaAdmin)
RachellCalhoun/craftsite
accounts/urls.py
Python
gpl-3.0
1,059
0.00661
from django.conf.urls import include, url
from . import views

# NOTE(review): several entries use the deprecated dotted-string view form
# ('django.contrib.auth.views.login'); modern Django requires view callables.
urlpatterns = [
    url(r'^register/$', views.register, name="register"),
    # url('^logout', views.logout_view,name="logout"),
    # url('^login', views.logout_view,name="login"),
    # Password-change flow rendered with the profile app's own templates.
    url(r'^password_change/$', 'django.contrib.auth.views.password_change',
        {'template_name': 'profiles/change-password.html'}),
    # NOTE(review): this pattern is unanchored (no trailing '/$'), so it
    # matches any path starting with 'password_change/done' — confirm whether
    # that is intentional before tightening.
    url(r'^password_change/done', 'django.contrib.auth.views.password_change_done',
        {'template_name': 'profiles/password_change_done.html'}),
    url(r'^login/$', 'django.contrib.auth.views.login'),
    url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}),
    # Fallback to the stock auth URLs (password reset, etc.).
    url(r'^', include('django.contrib.auth.urls')),
    url(r'^myprofile/$', views.myprofile, name='myprofile'),
    url(r'^users/$', views.user_profilelist, name='user_profilelist'),
    url(r'^editprofile/$', views.profile_edit, name='profile_edit'),
    # Numeric-id profile pages; keep last so named routes above win.
    url(r'^(?P<id>\d+)$', views.user_profiles, name='user_profiles'),
]
douban/code
vilya/views/m.py
Python
bsd-3-clause
1,035
0
# -*- coding: utf-8 -*-

from __future__ import absolute_import

from vilya.libs.template import st
from vilya.models.feed import get_user_inbox, get_public_feed

# Maximum number of feed actions fetched per request.
MAX_ACT_COUNT = 100

# Quixote export list: names reachable as URLs under this module.
_q_exports = ['actions', 'public_timeline']


def _q_index(request):
    # NOTE: st(..., **locals()) passes *every* local name to the template,
    # so local variable names throughout this module are part of the
    # template contract — do not rename them casually.
    return st('/m/feed.html', **locals())


def public_timeline(request):
    # Flag read by feed.html (via **locals()) to render the public variant.
    is_public = True
    return st('/m/feed.html', **locals())


def actions(request):
    # Render the action list: public feed, the user's inbox, or empty.
    since_id = request.get_form_var('since_id', '')
    is_public = request.get_form_var('is_public', '')
    user = request.user
    all_actions = []
    if is_public == 'true':
        all_actions = get_public_feed().get_actions(0, MAX_ACT_COUNT)
    elif user:
        all_actions = get_user_inbox(user.username).get_actions(
            0, MAX_ACT_COUNT)
    if since_id:
        # Keep only the actions that appear before the one whose uid matches
        # since_id, i.e. the actions newer than the client's last-seen one.
        actions = []
        for action in all_actions:
            if action.get('uid') == since_id:
                break
            actions.append(action)
    else:
        actions = all_actions
    # `actions` (and the other locals) are exposed to the template here.
    return st('/m/actions.html', **locals())
chepazzo/trigger
trigger/contrib/docommand/__init__.py
Python
bsd-3-clause
12,051
0.001079
#!/usr/bin/env python # -*- coding: utf-8 -*- """ trigger.contrib.docommand ~~~~~~~~~~~~~~~~~~~~~~~~~ This package provides facilities for running commands on devices using the CLI. """ __author__ = 'Jathan McCollum, Mike Biancianello' __maintainer__ = 'Jathan McCollum' __email__ = 'jathan@gmail.com' __copyright__ = 'Copyright 2012-2013, AOL Inc.; 2013 Salesforce.com' __version__ = '3.2.1' # Imports import os import re import socket from twisted.python import log from trigger.conf import settings from trigger.cmds import Commando from xml.etree.cElementTree import ElementTree, Element, SubElement import xml.etree.cElementTree as ET # Exports __all__ = ['DoCommandBase', 'CommandRunner', 'ConfigLoader', 'xml_print', 'core'] from . import core from core import * __all__.extend(core.__all__) # Classes class DoCommandBase(Commando): """ Base class for docommand action classes. """ description = 'Insert description here.' def errback(self, failure, device): failure = super(DoCommandBase, self).errback(failure, device) print '%s - Error: %s' % (device, failure.value) return failure def from_base(self, results, device, commands=None): """Call store_results without calling map_results""" log.msg('Received %r from %s' % (results, device)) self.store_results(device, results) # TODO: Right now if you are loading commands from files, this will ultimately # fail with a ReactorNotRestartable error because the core.main() function is # calling each action class separately. We need to account for this. See # https://gist.github.com/jathanism/4543974 for a possible solution. class CommandRunner(DoCommandBase): """ Run commands on network devices. Usage:: n = CommandRunner(devices=['dev1', dev2'], files=['file1', 'file2']) n.run() This will execute all commands inside of each file ('file1','file2') onto all listed devices ('dev1, 'dev2'). :param devices: List of device names. Each hostname must have a match in NetDevices. 
:param files: List of files named after the FQDN of each device. """ description = 'Run commands on network devices.' def __init__(self, files=None, commands=None, debug=False, timeout=30, **kwargs): """ :param files: List of fully-qualified paths to command files :param commands: List of commands to execute :param debug: Whether to display debug information :param timeout: Timeout in seconds """ if files is None: files = [] if commands is None: commands = [] self.commands = commands self.data = {} self.files = files self.debug = debug self.__loadCmdsFromFiles() if 'kwargs' in locals(): kwargs['timeout'] = timeout else: kwargs = dict(timeout=timeout) super(CommandRunner, self).__init__(**kwargs) def __loadCmdsFromFiles(self, skip_comments=True): """ Reads in file contents and adds to self.commands list. This is done to prevent having to read the list of cmds multiple times. """ for fname in self.files: with open(fname, 'r') as fr: lines = fr.readlines() if skip_comments: lines = [line for line in lines if not line.startswith('#')] for cmd in lines: cmd = cmd.strip() self.commands.append(cmd) def store_results(self, device, results): """Define how we're storing results.""" devname = device.nodeName if self.verbose: print 'Parsing commands for %s' % devname if self.debug: msg = "-->store_results(device=%r, results=%r)" % (devname, results) print msg log.msg(msg) outs = [] for i, o
ut in enumerate(results): cmd = self.commands[i] d = {'cmd': cmd, 'out': out, 'dev': dev
ice} outs.append(d) self.data[devname] = outs return True def __children_with_namespace(self, ns): return lambda elt, tag: elt.findall('./' + ns + tag) def from_juniper(self, data, device, commands=None): # If we've set foce_cli, use from_base() instead if self.force_cli: return self.from_base(data, device, commands) devname = device.nodeName ns = '{http://xml.juniper.net/xnm/1.1/xnm}' if self.verbose: print 'parsing JunOS commands for %s' % devname if self.debug: print '-->from_juniper(data=%s, device=%r)' % (data, devname) cmds = self.commands outs = [] for i, xml in enumerate(data): cmd = cmds[i] outarr = xml_print(xml, iterations=10) out = '\n'.join(outarr) d = {'cmd': cmd, 'out': out, 'dev': device} outs.append(d) if self.debug: print '\ndata["%s"]:' % i ET.dump(xml) self.data[devname] = outs return True class ConfigLoader(Commando): """ Load configuration changes on network devices. Usage:: n = ConfigLoader(devices=['dev1', dev2'], files=['file1', 'file2']) n.run() This will load all listed config files ('file1','file2') onto all listed devices ('dev1, 'dev2'). :param files: List of files named after the FQDN of each device. + Files *must* exist in a local TFTP directory for non-Juniper devices. + Files *must* be accessible by device via TFTP for non-Juniper devices. """ description = 'Load configuration changes on network devices.' # These are the only officially supported vendors at this time vendors = ['a10', 'arista', 'brocade', 'cisco', 'foundry', 'dell', 'juniper'] # TODO: The config commands should be moved into NetDevice object # (.configure_commands). The save commands are already managed like that, # but we don't yet have a way to account for Juniper CLI commit command (it # assumes JunoScript). We need to not be hard-coding these types of things # all over the code-base. 
known_commands = { 'config':{ 'a10': 'configure terminal', 'arista': 'configure terminal', 'brocade': 'configure terminal', 'cisco': 'configure terminal', 'dell': 'configure', 'foundry': 'configure terminal', 'juniper': 'configure', }, 'save_config':{ 'a10': 'write memory', 'arista': 'write memory', 'brocade': 'write memory', 'cisco': 'write memory', 'dell': 'copy running-config startup-config', 'foundry': 'write memory', 'juniper': 'commit and-quit', } } def __init__(self, files=None, commands=None, debug=False, **kwargs): """ :param files: List of filenames named after the FQDN of each device. :param commands: List of commands to execute :param debug: Whether to display debug information """ if files is None: files = [] if commands is None: commands = [] self.data = {} self.commands = commands self.files = files self.debug = debug super(ConfigLoader, self).__init__(**kwargs) def to_juniper(self, device=None, commands=None, extra=None): """ Configure a Juniper device using JunoScript. :returns: list """ if self.verbose: print "generating JunOS commands" files = self.files cmds = [Element('lock-configuration')] for fname in files: # fname is required to contain the full path lc = Element('load-configuration', action='replace', format='text') body = SubElement(lc, 'configuration-text') if self.debug: print "fname: " +
hzlf/openbroadcast
website/apps/abcast/admin/baseadmin.py
Python
gpl-3.0
1,860
0.017204
from django.contrib import admin
from abcast.models import *
from django.contrib.auth.models import User
from genericadmin.admin import GenericAdminModelAdmin, GenericTabularInline


class MembersInline(admin.TabularInline):
    # Rows of the Station <-> member m2m through model.
    model = Station.members.through


class ChannelsInline(admin.TabularInline):
    model = Channel
    readonly_fields = ('teaser', 'type', 'stream_server', )
    exclude = ('description', 'stream_url', 'teaser',)


class StationAdmin(admin.ModelAdmin):
    list_display = ('name', 'type', 'website',)
    # uuid/slug are generated, never hand-edited.
    readonly_fields = ('uuid', 'slug', )
    inlines = [ChannelsInline, MembersInline, ]


class ChannelAdmin(admin.ModelAdmin):
    list_display = ('name', 'station', 'type', 'stream_url', 'mount', )
    list_filter = ('station', 'type',)
    readonly_fields = ('uuid', 'slug', )


class JingleInline(admin.TabularInline):
    # Hide bookkeeping fields when editing jingles inside a set.
    exclude = ['description', 'slug', 'processed', 'conversion_status']
    model = Jingle


class JingleAdmin(admin.ModelAdmin):
    list_display = ('name', 'duration', 'set', 'type' )
    list_filter = ('type',)
    readonly_fields = ('uuid', 'slug', 'folder')


class JingleSetAdmin(admin.ModelAdmin):
    #list_display = ('name', 'duration', 'set', 'type' )
    #list_filter = ('type',)
    readonly_fields = ('uuid', 'slug', )
    inlines = [JingleInline, ]


class StreamServerAdmin(admin.ModelAdmin):
    list_display = ('name', 'host', 'type' )
    list_filter = ('type',)
    readonly_fields = ('uuid',)


admin.site.register(Station, StationAdmin)
admin.site.register(Channel, ChannelAdmin)
admin.site.register(Jingle, JingleAdmin)
admin.site.register(JingleSet, JingleSetAdmin)
admin.site.register(StreamServer, StreamServerAdmin)
# No custom admin options needed for these two.
admin.site.register(StreamFormat)
admin.site.register(Role)
WhiskeyMedia/ella
ella/positions/admin.py
Python
bsd-3-clause
1,381
0.003621
from django.contrib import admin
from django.utils.translation import ugettext, ugettext_lazy as _

from ella.positions.models import Position
from ella.utils import timezone


class PositionOptions(admin.ModelAdmin):
    def show_title(self, obj):
        """Admin column: the target's title and content type, or a placeholder."""
        if not obj.target:
            return '-- %s --' % ugettext('empty position')
        return u'%s [%s]' % (obj.target.title, ugettext(obj.target_ct.name),)
    show_title.short_description = _('Title')

    def is_filled(self, obj):
        """Admin boolean column: does this position have a target assigned?"""
        # bool() keeps the True/False return the verbose if/else produced.
        return bool(obj.target)
    is_filled.short_description = _('Filled')
    is_filled.boolean = True

    def is_active(self, obj):
        """Admin boolean column: position enabled and inside its time window.

        A missing bound (active_from/active_till is None/falsy) counts as open.
        """
        if obj.disabled:
            return False
        now = timezone.now()
        active_from = not obj.active_from or obj.active_from <= now
        active_till = not obj.active_till or obj.active_till > now
        return active_from and active_till
    is_active.short_description = _('Active')
    is_active.boolean = True

    list_display = ('name', 'category', 'box_type', 'is_active', 'is_filled',
                    'show_title', 'disabled',)
    list_filter = ('category', 'name', 'disabled', 'active_from', 'active_till',)
    search_fields = ('box_type', 'text',)
    # suggest_fields = {'category': ('tree_path', 'title', 'slug',),}


admin.site.register(Position, PositionOptions)
leeopop/2015-CS570-Project
prepare_lda.py
Python
mit
897
0.031215
from loader import *
from extract_keywords import split_line
from collections import defaultdict


def main():
    """Write LDA input lines built from paper titles and keyword fields.

    Each output line is 'keyword_id count keyword_id count ...' for one
    paper, with ids sorted ascending; papers containing no recognised
    keywords produce no line.
    """
    keyword_data = load_single_file('keyword_table.csv')
    paper_data = load_single_file('Paper.csv')
    with open('lda_input.txt', 'w', encoding='utf-8') as write_file:
        # Iterate values directly instead of indexing by key.
        for paper in paper_data.values():
            # Tokenise both the title and the keyword field.
            word_list = split_line(paper['title']) + split_line(paper['keyword'])
            counter = defaultdict(int)
            for word in word_list:
                # Plain dict membership (was `word in keyword_data.keys()`).
                if word in keyword_data:
                    counter[keyword_data[word]['unique']] += 1
            if not counter:
                # No recognised keywords -> skip, matching the old
                # "empty line" check.
                continue
            # join() instead of quadratic `line += ...` concatenation.
            line = ' '.join(
                '{} {}'.format(key, counter[key]) for key in sorted(counter)
            )
            write_file.write(line + '\n')


if __name__ == '__main__':
    main()
vineodd/PIMSim
GEM5Simulation/gem5/src/python/m5/ticks.py
Python
gpl-3.0
3,221
0.003415
# Copyright (c) 2007 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Nathan Bin
kert from __future__ import print_function import sys from m5.util import warn # fix the global frequency def fixGlobalFrequency(): import _m5.core _m5.core.fixClockFrequency() def setGlobalFrequency(ticksPerSecond): from m5.util import convert import _m5.core if isinstance(ticksPerSecond, (int, long)): tps = ticksPerSecond elif isinstance(ticksPerSecond, float): tps = ticksPerSecond elif isinstance(ticksPerSecond, str): tps = round(convert.anyToF
requency(ticksPerSecond)) else: raise TypeError, \ "wrong type '%s' for ticksPerSecond" % type(ticksPerSecond) _m5.core.setClockFrequency(int(tps)) # how big does a rounding error need to be before we warn about it? frequency_tolerance = 0.001 # 0.1% def fromSeconds(value): import _m5.core if not isinstance(value, float): raise TypeError, "can't convert '%s' to type tick" % type(value) # once someone needs to convert to seconds, the global frequency # had better be fixed if not _m5.core.clockFrequencyFixed(): raise AttributeError, \ "In order to do conversions, the global frequency must be fixed" if value == 0: return 0 # convert the value from time to ticks value *= _m5.core.getClockFrequency() int_value = int(round(value)) err = (value - int_value) / value if err > frequency_tolerance: warn("rounding error > tolerance\n %f rounded to %d", value, int_value) return int_value __all__ = [ 'setGlobalFrequency', 'fixGlobalFrequency', 'fromSeconds', 'frequency_tolerance' ]
jjiang-mtu/virtual-breast-project
dynamic_SWE/write_FEBio_format_quadratic/vmtkfebiowytet10.py
Python
gpl-2.0
1,844
0.016269
#!/usr/bin/env python

## Program:   VMTK
## Module:    $RCSfile: vmtkfebiowytet10.py,v $
## Language:  Python
## Date:      $Date: 2016/08/19 09:49:59 $
## Version:   $Revision: 1.6 $

## Copyright (c) Jingfeng Jiang, Yu Wang. All rights reserved.
## See LICENCE file for details.

## This software is distributed WITHOUT ANY WARRANTY; without even
## the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
## PURPOSE. See the above copyright notices for more information.

import vtk
import vtkvmtk
import sys
import pypes

# Pype lookup: maps the module-level script name to the class below.
vmtkfebiowytet10 = 'vmtkfebiowrite'


class vmtkfebiowrite(pypes.pypeScript):
    """Pype script that writes a tet10 mesh to FEBio format ('output.feb')."""

    def __init__(self):
        pypes.pypeScript.__init__(self)

        self.Surface = None

        self.SetScriptName('vmtkfebiowrite')
        self.SetScriptDoc('interpolates the point data of a reference surface onto the input surface based on minimum distance criterion')
        self.SetInputMembers([
            ['Surface','i','vtkUnstructuredGrid',1,'','the mesh surface','vmtkmeshreader']
            ])
        self.SetOutputMembers([])
        # ['dSurface','o','vtkUnstructuredGrid',1,'','the output surface','vmtkmeshwriter']

    def Execute(self):
        """Write self.Surface to 'output.feb' using the FEBio tet10 writer."""
        # Identity comparison (`is None`) instead of `== None`, which can be
        # hijacked by operator overloading.
        if self.Surface is None:
            self.PrintError('Error: No Surface.')

        self.PrintLog('Computing projection.')
        surfaceProjection = vtkvmtk.vtkvmtkFEBioWritertet10()
        surfaceProjection.SetInput(self.Surface)
        # surfaceProjection.SetBoundaryDataArrayName('CellEntityIds')
        surfaceProjection.SetVolumeDataArrayName('VolId')  # type ID: Volume VolId
        surfaceProjection.SetFileName('output.feb')
        surfaceProjection.Write()
        surfaceProjection.Update()


if __name__=='__main__':
    main = pypes.pypeMain()
    main.Arguments = sys.argv
    main.Execute()
jeremiah-c-leary/vhdl-style-guide
vsg/rules/if_statement/rule_031.py
Python
gpl-3.0
988
0.001012
from vsg.rules import previous_line from vsg import token lTokens = [] lTokens.append(token.if_statement.if_keyword) class rule_031(previous_line): ''' This rule checks for blank lines or comments above the **if** keyword. In the case of nested **if** statements, the rule will be enfoced on the first **if**
. |configuring_previous_line_rules_link| The default style is :code:`no_code`. **Violation** .. code-block:: vhdl C <= '1'; if (A = '1') then B <= '0'; end if; -- This is a comment if (A = '1') then B <= '0'; end if; **Fix** .. code-block:: vhdl
C <= '1'; if (A = '1') then B <= '0'; end if; -- This is a comment if (A = '1') then B <= '0'; end if; ''' def __init__(self): previous_line.__init__(self, 'if', '031', lTokens) self.lHierarchyLimits = [0] self.style = 'no_code'
mattcaldwell/autopylot
autopylot/django/__init__.py
Python
mit
324
0.003086
# from http://stackoverflow.com/questions/4581789/how-do-i-get-us
er-ip-address-in-django def get_client_ip(request): x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') if x_forwarded_for: ip = x_forwarded_for.split(',')[0].stri
p() else: ip = request.META.get('REMOTE_ADDR') return ip
MrReN/django-oscar
oscar/forms/widgets.py
Python
bsd-3-clause
5,740
0
from django.core.files.uploadedfile import InMemoryUploadedFile import re import six from django import forms from django.forms.util import flatatt from django.forms.widgets import FileInput from django.template import Context from django.template.loader import render_to_string from django.utils.encoding import force_text from django.utils.safestring import mark_safe try: from django.utils.html import format_html except ImportError: # Django 1.4 compatibility from oscar.core.compat import format_html class ImageInput(FileInput): """ Widget providing a input element for file uploads based on the Django ``FileInput`` element. It hides the actual browser-specific input element and shows the available image for images that have
been previously uploaded. Selecting the image will open the file dialog and allow for selecting a new or replacing image file. """ template_name = 'partials/image_input_widget.html' attrs = {'accept': 'image/*'} def render(self, name, value, attrs=None): """ Render the ``input`` field based on the defined ``template_name``. The image URL is take from *value* and is provided to the template as ``image_url`` context vari
able relative to ``MEDIA_URL``. Further attributes for the ``input`` element are provide in ``input_attrs`` and contain parameters specified in *attrs* and *name*. If *value* contains no valid image URL an empty string will be provided in the context. """ final_attrs = self.build_attrs(attrs, type=self.input_type, name=name) if not value or isinstance(value, InMemoryUploadedFile): # can't display images that aren't stored image_url = '' else: image_url = final_attrs['value'] = force_text( self._format_value(value)) return render_to_string(self.template_name, Context({ 'input_attrs': flatatt(final_attrs), 'image_url': image_url, 'image_id': "%s-image" % final_attrs['id'], })) class WYSIWYGTextArea(forms.Textarea): def __init__(self, *args, **kwargs): kwargs.setdefault('attrs', {}) kwargs['attrs'].setdefault('class', '') kwargs['attrs']['class'] += ' wysiwyg' super(WYSIWYGTextArea, self).__init__(*args, **kwargs) def datetime_format_to_js_date_format(format): """ Convert a Python datetime format to a date format suitable for use with JS date pickers """ converted = format replacements = { '%Y': 'yy', '%m': 'mm', '%d': 'dd', '%H:%M': '', } for search, replace in six.iteritems(replacements): converted = converted.replace(search, replace) return converted.strip() def datetime_format_to_js_time_format(format): """ Convert a Python datetime format to a time format suitable for use with JS date pickers """ converted = format replacements = { '%Y': '', '%m': '', '%d': '', '%H': 'HH', '%M': 'mm', } for search, replace in six.iteritems(replacements): converted = converted.replace(search, replace) converted = re.sub('[-/][^%]', '', converted) return converted.strip() def add_js_formats(widget): """ Set data attributes for date and time format on a widget """ attrs = { 'data-dateFormat': datetime_format_to_js_date_format( widget.format), 'data-timeFormat': datetime_format_to_js_time_format( widget.format) } widget.attrs.update(attrs) class DatePickerInput(forms.DateInput): """ 
DatePicker input that uses the jQuery UI datepicker. Data attributes are used to pass the date format to the JS """ def __init__(self, *args, **kwargs): super(DatePickerInput, self).__init__(*args, **kwargs) add_js_formats(self) class DateTimePickerInput(forms.DateTimeInput): # Build a widget which uses the locale datetime format but without seconds. # We also use data attributes to pass these formats to the JS datepicker. def __init__(self, *args, **kwargs): include_seconds = kwargs.pop('include_seconds', False) super(DateTimePickerInput, self).__init__(*args, **kwargs) if not include_seconds: self.format = re.sub(':?%S', '', self.format) add_js_formats(self) class AdvancedSelect(forms.Select): """ Customised Select widget that allows a list of disabled values to be passed to the constructor. Django's default Select widget doesn't allow this so we have to override the render_option method and add a section that checks for whether the widget is disabled. """ def __init__(self, attrs=None, choices=(), disabled_values=()): self.disabled_values = set(force_text(v) for v in disabled_values) super(AdvancedSelect, self).__init__(attrs, choices) def render_option(self, selected_choices, option_value, option_label): option_value = force_text(option_value) if option_value in self.disabled_values: selected_html = mark_safe(' disabled="disabled"') elif option_value in selected_choices: selected_html = mark_safe(' selected="selected"') if not self.allow_multiple_selected: # Only allow for a single selection. selected_choices.remove(option_value) else: selected_html = '' return format_html(u'<option value="{0}"{1}>{2}</option>', option_value, selected_html, force_text(option_label))
ryepdx/account_payment_cim_authdotnet
wizard/__init__.py
Python
agpl-3.0
1,222
0
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>) # Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ###########################################################################
### impor
t create_payment_profile import make_transaction import edit_payment_profile import delete_payment_profile # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
eduNEXT/edunext-platform
common/djangoapps/xblock_django/admin.py
Python
agpl-3.0
2,485
0.004427
""" Django admin dashboard configuration. """ from config_models.admin import ConfigurationModelAdmin, KeyedConfigurationModelAdmin from django.contrib import admin from django.utils.translation import ugettext_lazy as _ from common.djangoapps.xblock_django.models import XBlockConfiguration, XBlockStudioConfiguration, XBlockStudioConfigurationFlag # lint-amnesty, pylint: disable=line-too-long class XBlockConfigurationAdmin(KeyedConfigurationModelAdmin): """ Admin for XBlockConfiguration. """ fieldsets = ( ('XBlock Name', { 'fields': ('name',) }), ('Enable/Disable XBlock', { 'description': _('To disable the XBlock and prevent rendering in the LMS, leave "Enabled" deselected; ' 'for clarity, update
XBlockStudioConfiguration support state accordingly.'), 'fields': ('enabled',) }), ('Deprecate XBlock', { 'description': _("Only XBlocks listed in a course's Advanced Module List can be flagged as deprecated. " "Remember to update XBlockStudioConfiguration support state accordingly, as deprecated " "doe
s not impact whether or not new XBlock instances can be created in Studio."), 'fields': ('deprecated',) }), ) class XBlockStudioConfigurationAdmin(KeyedConfigurationModelAdmin): """ Admin for XBlockStudioConfiguration. """ fieldsets = ( ('', { 'fields': ('name', 'template') }), ('Enable Studio Authoring', { 'description': _( 'XBlock/template combinations that are disabled cannot be edited in Studio, regardless of support ' 'level. Remember to also check if all instances of the XBlock are disabled in XBlockConfiguration.' ), 'fields': ('enabled',) }), ('Support Level', { 'description': _( "Enabled XBlock/template combinations with full or provisional support can always be created " "in Studio. Unsupported XBlock/template combinations require course author opt-in." ), 'fields': ('support_level',) }), ) admin.site.register(XBlockConfiguration, XBlockConfigurationAdmin) admin.site.register(XBlockStudioConfiguration, XBlockStudioConfigurationAdmin) admin.site.register(XBlockStudioConfigurationFlag, ConfigurationModelAdmin)
foosel/OctoPrint
src/octoprint/plugins/tracking/__init__.py
Python
agpl-3.0
15,412
0.026022
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function, unicode_literals __license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html' __copyright__ = "Copyright (C) 2018 The OctoPrint Project - Released under terms of the AGPLv3 License" import octoprint.plugin from flask_babel import gettext import requests import hashlib import logging try: # noinspection PyCompatibility from urllib.parse import urlencode except ImportError: from urllib import urlencode # noinspection PyCompatibility import concurrent.futures from octoprint.util import RepeatedTimer, monotonic_time from octoprint.util.version import get_octoprint_version_string from octoprint.events import Events TRACKING_URL = "https://tracking.octoprint.org/track/{id}/{event}/" # noinspection PyMissingConstructor class TrackingPlugin(octoprint.plugin.SettingsPlugin, octoprint.plugin.EnvironmentDetectionPlugin, octoprint.plugin.StartupPlugin, octoprint.plugin.ShutdownPlugin, octoprint.plugin.TemplatePlugin, octoprint.plugin.AssetPlugin, octoprint.plugin.WizardPlugin, octoprint.plugin.EventHandlerPlugin): def __init__(self): self._environment = None self._throttle_state = None self._helpers_get_throttle_state = None self._printer_connection_parameters = None self._url = None self._ping_worker = None self._pong_worker = None self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) self._record_next_firmware_info = False self._startup_time = monotonic_time() def initialize(self): self._init_id() ##~~ SettingsPlugin def get_settings_defaults(self): return dict(enabled=None, unique_id=None, server=TRACKING_URL, ping=15*60, pong=24*60*60, events=dict(pong=True, startup=True, printjob=True, commerror=True, plugin=True, update=True, printer=True, printer_safety_check=True, throttled=True, slicing=True)) def get_settings_restricted_paths(self): return dict(admin=[["enabled"], ["unique_id"], ["events"]], never=[["server"], ["ping"]]) def 
on_settings_save(self, data): enabled = self._settings.get(["enabled"]) octoprint.plugin.SettingsPlugin.on_settings_save(self, data) if enabled is None and self._settings.get(["enabled"]): # tracking was just enabled, let's start up tracking self._start_tracking() ##~~ EnvironmentDetectionPlugin def on_environment_detected(self, environment, *args, **kwargs): self._environment = environment ##~~ StartupPlugin def on_after_startup(self): self._start_tracking() ##~~ ShutdownPlugin def on_shutdown(self): if not self._settings.get_boolean(["enabled"]): return self._track_shutdown() ##~~ EventHandlerPlugin # noinspection PyUnresolvedReferences def on_event(self, event, payload): if not self._settings.get_boolean(["enabled"]): return if event in (Events.PRINT_STARTED, Events.PRINT_DONE, Events.PRINT_FAILED, Events.PRINT_CANCELLED): self._track_printjob_event(event, payload) elif event in (Events.ERROR,): self._track_commerror_event(event, payload) elif event in (Events.CONNECTED,): self._printer_connection_parameters = dict(port=payload["port"], baudrate=payload["baudrate"]) self._record_next_firmware_info = True elif event in (Events.FIRMWARE_DATA,) and self._record_next_firmware_info: self._record_next_firmware_info = False self._track_printer_event(event, payload) elif event in (Events.SLICING_STARTED,): self._track_slicing_event(event, payload) elif hasattr(Events, "PLUGIN_PLUGINMANAGER_INSTALL_PLUGIN") and \ event in (Events.PLUGIN_PLUGINMANAGER_INSTALL_PLUGIN, Events.PLUGIN_PLUGINMANAGER_UNINSTALL_PLUGIN, Events.PLUGIN_PLUGINMANAGER_ENABLE_PLUGIN, Events.PLUGIN_PLUGINMANAGER_DISABLE_PLUGIN): self._track_plugin_event(event, payload) elif hasattr(Events, "PLUGIN_SOFTWAREUPDATE_UPDATE_SUCCEEDED") and \ event in (Events.PLUGIN_SOFTWAREUPDATE_UPDATE_SUCCEEDED, Events.PLUGIN_SOFTWAREUPDATE_UPDATE_FAILED): self._track_update_event(event, payload) elif hasattr(Events, "PLUGIN_PI_SUPPORT_THROTTLE_STATE") and event in (Events.PLUGIN_PI_SUPPORT_THROTTLE_STATE,): 
self._throttle_state = payload self._track_throttle_event(event, payload) elif hasattr(Events, "PLUGIN_PRINTER_SAFETY_CHECK_WARNING") and event in (Events.PLUGIN_PRINTER_SAFETY_CHECK_WARNING,): self._track_printer_safety_event(event, payload) ##~~ TemplatePlugin def get_template_configs(self): return [ dict(type="settings", name=gettext("Anonymous Usage Tracking"), template="tracking_settings.jinja2", custom_bindings=False), dict(type="wizard", name=gettext("Anonymous Usage Tracking"), template="tracking_wizard.jinja2", custom_bindings=True, mandatory=True) ] ##~~ AssetPlugin def get_assets(self): return dict(js=["js/usage.js"]) ##~~ WizardPlugin def is_wizard_required(self): return self._settings.get(["enabled"]) is None ##~~ helpers def _init_id(self): if not self._settings.get(["unique_id"]): import uuid self._settings.set(["unique_id"], str(uuid.uuid4())) self._settings.save() def _start_tracking(self): if not self._settings.get_boolean(["enabled"]): return if self._ping_worker is None: ping_interval = self._settings.get_int(["ping"]) if ping_interval: self._ping_worker = RepeatedTimer(ping_interval, self._track_ping, run_first=True) self._ping_worker.start() if self._pong_worker is None: pong_interval = self._settings.get(["pong"]) if pong_interval: self._pong_worker = RepeatedTimer(pong_interval, self._track_pong, run_first=True) self._pong_worker.start() if self._helpers_get_throttle_state is None: # cautiously look for the get_throttled helper from pi_support pi_helper = self._plugin_manager.get_helpers("pi_support", "get_throttled") if pi_helper and 'get_throttled' in pi_helper: self._helpers_get_throttle_state = pi_helper['get_throttled'] # now that we have everything set up, phone home. 
self._track_startup() def _track_ping(self): if not self._settings.get_boolean(["enabled"]): return uptime = int(monotonic_time() - self._startup_time) self._track("ping", octoprint_uptime=uptime) def _track_pong(self): if not self._settings.get_boolean(["events", "pong"]): return plugins =
self._plugin_manager.enabled_plugins plugins_thirdparty = [plugin for plugin in plugins.values() if not plugin.bundled] payload = dict(plugins=",".join(map(lambda x: "{}:{}".format(x.key.lower(),
x.version.lower() if x.version else "?"), plugins_thirdparty))) self._track("pong", body=True, **payload) def _track_startup(self): if not self._settings.get_boolean(["events", "startup"]): return payload = dict(version=get_octoprint_version_string(), os=self._environment["os"]["id"], python=self._environment["python"]["version"], pip=self._environment["python"]["pip"], cores=self._environment["hardware"]["cores"], freq=self._environment["hardware"]["freq"], ram=self._environment["hardware"]["ram"]) if "plugins" in self._environment and "pi_support" in self._environment["plugins"]: payload["pi_model"] = self._environment["plugins"]["pi_support"]["model"] if "octopi_version" in self._environment["plugins"]["pi_support"]: payload["octopi_version"] = self._environment["plugins"]["pi_support"]["octopi_version"] self._track("startup", **payload) def _track_shutdown(self): if not self._settings.get_boolean(["enabled"]): re
ctrlaltdel/neutrinator
vendor/openstack/load_balancer/v2/flavor.py
Python
gpl-3.0
1,423
0
# Copyright 2019 Rackspace, US Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance wit
h the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WIT
HOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack import resource class Flavor(resource.Resource): resource_key = 'flavor' resources_key = 'flavors' base_path = '/lbaas/flavors' # capabilities allow_create = True allow_fetch = True allow_commit = True allow_delete = True allow_list = True _query_mapping = resource.QueryParameters( 'id', 'name', 'description', 'flavor_profile_id', is_enabled='enabled' ) # Properties #: The ID of the flavor. id = resource.Body('id') #: The name of the flavor. name = resource.Body('name') #: The flavor description. description = resource.Body('description') #: The associated flavor profile ID flavor_profile_id = resource.Body('flavor_profile_id') #: Whether the flavor is enabled for use or not. is_enabled = resource.Body('enabled')
uclouvain/OSIS-Louvain
education_group/ddd/validators/_copy_check_mini_training_end_date.py
Python
agpl-3.0
1,855
0.002157
# ############################################################################ # OSIS stands for Open Student Information System. It's an application # designed to manage the core business of higher education institutions, # such as universities, faculties, institutes and professional schools. # The core business involves the administration of
students, teachers, # courses, programs and so on. # # Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be) # # This program is free software: you can redistribute it and/or modify # it under the ter
ms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # A copy of this license - GNU General Public License - is available # at the root of the source code of this program. If not, # see http://www.gnu.org/licenses/. # ############################################################################ from base.ddd.utils import business_validator from education_group.ddd.business_types import * from education_group.ddd.domain.exception import CannotCopyMiniTrainingDueToEndDate class CheckMiniTrainingEndDateValidator(business_validator.BusinessValidator): def __init__(self, mini_training: 'MiniTraining'): super().__init__() self.mini_training = mini_training def validate(self, *args, **kwargs): if self.mini_training.end_year and self.mini_training.year >= self.mini_training.end_year: raise CannotCopyMiniTrainingDueToEndDate(mini_training=self.mini_training)
mmaelicke/scikit-gstat
docs/conf.py
Python
mit
7,252
0.000552
# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/stable/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath('.')) def get_version(): B = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(B, '..', 'VERSION'), 'r') as f: return f.read().strip() # -- Project information ----------------------------------------------------- project = 'SciKit GStat' copyright = '2021, Mirko Mälicke' author = 'Mirko Mälicke' # The short X.Y version # version = '0.3.2' # The full version, including alpha/beta/rc tags release = get_version() # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.napoleon', 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.githubpages', 'IPython.sphinxext.ipython_console_highlighting', 'IPython.sphinxext.ipython_directive', 'sphinx_gallery.gen_gallery', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = ['.rst', '.ipynb'] # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '**.ipynb_checkpoints'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for
HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list
of builtin themes. # html_theme = 'pydata_sphinx_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = { 'github_url': 'https://github.com/mmaelicke/scikit-gstat', } html_context = { 'github_user': 'mmaelicke', 'github_repo': 'scikit-gstat', 'github_version': 'master', 'doc_path': 'docs' } html_short_title = 'SciKit-GStat' """ html_sidebars = { '**': [ 'about.html', 'navigation.html', 'relations.html', 'searchbox.html', 'donate.html' ] } """ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'SciKitGStatdoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ (master_doc, 'SciKitGStat.tex', 'SciKit GStat Documentation', 'Mirko Mälicke', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'scikitgstat', 'SciKit GStat Documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'SciKitGStat', 'SciKit GStat Documentation', author, 'SciKitGStat', 'One line description of project.', 'Miscellaneous'), ] # -- Extension configuration ------------------------------------------------- # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. # intersphinx_mapping = {'https://docs.python.org/': None} # -- Options for todo extension ---------------------------------------------- # If true, `todo` and `todoList` produce output, else they produce nothing. 
todo_include_todos = True # -- Intersphinx mapping ----------------------------------------------------- intersphinx_mapping = { 'python': ('https://docs.python.org/3.6', None), 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), 'numpy': ('https://docs.scipy.org/doc/numpy', None), 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None), 'gstools': ('https://geostat-framework.readthedocs.io/projects/gstools/en/latest/', None), 'sklearn': ('http://scikit-learn.org/stable', None), } from plotly.io._sg_scraper import plotly_sg_scraper image_scrapers = ('matplotlib', plotly_sg_scraper,) import plotly.io as pio pio.renderers.default = 'sphinx_gallery' import sphinx_gallery sphinx_gallery_conf = { 'examples_dirs': './tutorials', 'gallery_dirs': 'auto_examples', 'backreferences_dir': 'gen_modules/backreferences', 'doc_module': ('skgstat', 'skgstat'), 'image_scrapers': image_scrapers, 'filename_pattern': '/tutorial', }
adebray/enumerating_abelian_groups
finite_abelian_groups.py
Python
mit
5,033
0.020481
#!/usr/bin/env python3.5 # Arun Debray, 24 Dec. 2015 # Given a group order, classifies finite abelian groups of that order. # ./finite_abelian_groups.py [-tpi] number # -t formats the output in TeX (as opposed to in the terminal) # -p chooses the primary components decomposition (default) # -i chooses the invariant factors decomposition import argparse import collections import functools import itertools import math # Handles command-line arguments. See usage, above. def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser(description='Classifies finite abelian groups of a given order.') parser.add_argument('-t', action='store_true', help='formats output in LaTeX') parser.add_argument('-p', action='store_true', help='chooses primary components decomposition (default)') parser.add_argument('-i', action='store_true', help='chooses invariant factors decomposition') parser.add_argument('order', metavar='n', type=int, help='group order') return parser.parse_args() # Determines the prime factors of a number. No, this isn't the best algorithm, but # it's good enough. Returns them as a Counter object (a dict of prime -> power), in which # all values will be strictly positive. def prime_factors(n: int) -> collections.Counter: for i in range(2, 1 + math.ceil(math.sqrt(n))): # By doing this in order, we guarantee that this only happens when i is prime (2 comes before # any other even number, etc.) if n % i == 0: if n == i: return collections.Counter({n: 1}) else: to_return = prime_factors(n // i) to_return[i] += 1 return to_return # if n is prime return collections.Counter({n: 1}) # A helper function for the partitions function, below. Returns the partitions of n using integers # less than or equal to m. However, within this program it makes more sense to do this multiplicatively: # n is represented as p^n, and we are partitioning as p^n = p^{n_1} x p^{n_2} x ... x p^{n_k}. 
# Wrapped in a cache, making the memoization of the recursion automatic.
@functools.lru_cache(maxsize=None)
def _part_lt(p: int, n: int, m: int) -> list:
    """Return the partitions of n into parts <= m, written multiplicatively.

    Each partition a_1 <= a_2 <= ... <= a_k of n is returned as the list
    [p**a_1, ..., p**a_k] (ascending prime powers).
    """
    if n == 0:
        return [[]]
    if n == 1:
        return [[p]]
    if m > n:
        # Parts larger than n can never occur.
        return _part_lt(p, n, n)
    # Recurse: the largest part can be any j in {1, ..., m}; the rest is a
    # partition of n - j into parts <= j, which keeps each list ascending.
    to_return = []
    for j in range(1, m + 1):
        to_return += [part + [p**j] for part in _part_lt(p, n - j, j)]
    return to_return


def partitions(p: int, n: int) -> list:
    """Return all ways of writing p^n = p^{a_1} x ... x p^{a_k}.

    Each a_i is a positive integer and the a_i are ascending.  This is the
    algorithmic core of both decompositions.
    """
    return _part_lt(p, n, n)


def flatten(xs: list) -> list:
    """Flatten one level of nesting: [[1, 2, 3], [4, 5]] -> [1, 2, 3, 4, 5]."""
    return [item for sublist in xs for item in sublist]


def primary_factor_decomp(factors: collections.Counter) -> list:
    """List every abelian group of the given order in primary-component form.

    factors maps each prime to its exponent in the group order; every group
    is returned as a list of prime-power cyclic orders.
    """
    decomps_at_primes = [partitions(p, factors[p]) for p in factors]
    return [flatten(choice) for choice in itertools.product(*decomps_at_primes)]


def invariant_factor_decomp(factors: collections.Counter) -> list:
    """List every abelian group of the given order in invariant-factor form.

    Each group is returned as an ascending list [d_1, ..., d_k] with
    d_1 | d_2 | ... | d_k.

    BUG FIX: the previous implementation used itertools.zip_longest(...,
    fillvalue=1) on the ascending partitions, which pads the *large* end of
    the shorter partitions with 1s.  The resulting products described the
    right groups but were not invariant factors (the divisibility chain
    failed, e.g. 2^1·2^2 with 3^1 produced [6, 4] instead of [2, 12]).
    Left-padding with 1s aligns the largest prime powers with each other.
    """
    decomps_at_primes = [partitions(p, factors[p]) for p in factors]
    groups = []
    for choice in itertools.product(*decomps_at_primes):
        # Each partition is ascending; pad at the front so the largest prime
        # powers share the last column.  default=0 guards an empty choice.
        width = max((len(partition) for partition in choice), default=0)
        padded = [[1] * (width - len(partition)) + partition
                  for partition in choice]
        groups.append([functools.reduce(lambda x, y: x * y, column)
                       for column in zip(*padded)])
    return groups


def format_plurals(n: int) -> str:
    """Return 'There is one abelian group' or 'There are n abelian groups'."""
    if n == 1:
        return 'There is one abelian group'
    else:
        return 'There are %d abelian groups' % n


def output(groups: list, order: int, as_TeX: bool):
    """Format and print the classification, as a LaTeX document or as text."""
    if as_TeX:
        print('\\documentclass{amsart}')
        print('\\newcommand{\\Z}{\\mathbb Z}')
        print('\\title{Abelian Groups of Order %d}' % order)
        print('\\begin{document}')
        print('\\maketitle')
        print('%s of order %d.' % (format_plurals(len(groups)), order))
        print('\\begin{gather*}')
        print('\\\\\n'.join(['\\oplus'.join(['\\Z/%d' % component for component in group]) for group in groups]))
        print('\\end{gather*}')
        print('\\end{document}')
    else:
        print('%s of order %d.' % (format_plurals(len(groups)), order))
        for group in groups:
            print('⊕ '.join('ℤ/%d' % component for component in group))


def main():
    """Entry point: parse flags, factor the order, decompose, print."""
    arginfo = parse_args()
    factors = prime_factors(arginfo.order)
    # -p (primary components) is the default; -i selects invariant factors.
    if arginfo.i:
        groups = invariant_factor_decomp(factors)
    else:
        groups = primary_factor_decomp(factors)
    output(groups, arginfo.order, arginfo.t)


if __name__ == '__main__':
    main()
cmuphillycapstone/ckanext-dictionary
ckanext/dictionary/tests/test_plugin.py
Python
agpl-3.0
98
0.020408
"""Tests for plugin.py.""" import ckanext.dictionary.plugin as pl
ugin def t
est_plugin(): pass
akkana/scripts
dirsneeded.py
Python
gpl-2.0
4,269
0.001171
#!/usr/bin/env python3

# What directories are needed to run an app?
# Use strace to find out. Potentially useful for setting up chroots.

# Usage: dirsneeded.py cmd [arg arg ...]

''' TODO:
> o for anything where access attempt is made in chroot and fails,
>   at least by default, only consider it something to possibly be added
>   to chroot if that something exists outside the chroot
> o for all opens and the like, track if it's opened for read, or
>   write/append, or both, and furthermore also track if it's only
>   read, or only written / appended to
> o track directories accessed, and if merely accessed, or if also
>   read. Likewise, divide and conquer, do any tests fail if read
>   access is removed, likewise to then have x removed, or directory
>   removed.
'''

import subprocess
from pathlib import Path
from collections import defaultdict
import shlex
import sys


def strace_cmd(cmdargs):
    """Run cmdargs (a list containing a command and all arguments)
    under strace, and output the files and directories opened.

    Returns a dict of dicts of fileinfo dicts, keyed first by directory,
    then by filename, e.g.
    { "/tmp": { "/tmp/foo": {"filename": "/tmp/foo",
                             "mode": "O_RDONLY", ...} } }
    """
    '''Some sample strace out lines:
openat(AT_FDCWD, "/etc/ld.so.preload", O_RDONLY|O_CLOEXEC) = 3
execve("/usr/bin/ls", ["ls", "/tmp/foo"], 0x7ffe05c05678 /* 50 vars */) = 0
access("/etc/ld.so.preload", R_OK) = 0
statfs("/sys/fs/selinux", 0x7ffdf3e99420) = -1 ENOENT (No such file or directory)
stat("/tmp/moncycle", {st_mode=S_IFREG|0664, st_size=183, ...}) = 0
lstat("/tmp/moncycle", {st_mode=S_IFREG|0664, st_size=183, ...}) = 0
    '''
    # -e trace=file limits strace to file-related syscalls; strace writes
    # its trace to stderr, which is all we need to parse.
    cmdargs = ["strace", "-e", "trace=file"] + cmdargs
    straceout = subprocess.run(cmdargs,
                               # stdout=subprocess.DEVNULL,
                               stderr=subprocess.PIPE).stderr.decode()
    dirlist = defaultdict(dict)
    for line in straceout.split('\n'):
        # For now, only parse the openat lines.
        if line.startswith("openat("):
            # An strace open line looks like:
            # openat(AT_FDCWD, "/lib/x86_64-linux-gnu/libc.so.6", O_RDONLY|O_CLOEXEC) = 3
            # openat(AT_FDCWD, "/tmp/foo", O_RDONLY|O_CLOEXEC) = -1 ENOENT (No such file or directory)
            # Use shlex since the filename is quoted and may include spaces.
            parts = shlex.split(line.strip())
            # but they all end with commas, so strip those.
            for i, p in enumerate(parts):
                if p.endswith(','):
                    parts[i] = p[:-1]
            fileinfo = {}
            # NOTE(review): "syscall" keeps the raw first token, which still
            # carries the open paren and first argument, e.g.
            # 'openat(AT_FDCWD' — confirm whether callers rely on that.
            fileinfo["filename"] = parts[1]
            fileinfo["syscall"] = parts[0]
            fileinfo["mode"] = parts[2]
            if fileinfo["mode"][-1] == ')':
                # Remove the close-paren
                fileinfo["mode"] = fileinfo["mode"][:-1]
            # Seems like I've seen additional args after the mode,
            # but now that I look, I can't find any examples.

            # Skip forward to the = to get the return value.
            # Really only need to search from parts[3] on,
            # but that messes up i.
            for i, part in enumerate(parts):
                if part == "=":
                    fileinfo["retval"] = ' '.join(parts[i+1:])
                    break
        else:
            # Not an openat line: nothing to record for this iteration.
            fileinfo = None

        if fileinfo:
            # We have a filename. Find the file's directory.
            # resolve() canonicalizes symlinks relative to *this* process's
            # filesystem view.
            path = Path(fileinfo["filename"]).resolve()
            if path.is_dir():
                dirname = str(path)
                fileinfo["type"] = "d"
            else:
                dirname = str(path.parent)
            dirlist[dirname][fileinfo["filename"]] = fileinfo

    return dirlist


def fileinfo_to_str(fileinfo):
    """One-line human-readable summary of a fileinfo dict:
    name, directory marker, open mode, and strace return value."""
    s = fileinfo["filename"]
    if "type" in fileinfo and fileinfo["type"] == "d":
        s += " (DIRECTORY)"
    if "mode" in fileinfo:
        s += " (" + fileinfo["mode"] + ")"
    if "retval" in fileinfo:
        s += " -> " + fileinfo["retval"]
    return s


if __name__ == '__main__':
    # Trace the command given on our command line and print every opened
    # file, grouped by directory.
    dirlist = strace_cmd(sys.argv[1:])
    for d in dirlist:
        print(d)
        files = sorted(dirlist[d].keys())
        for f in files:
            print("  ", fileinfo_to_str(dirlist[d][f]))
somebody1234/Charcoal
directiondictionaries.py
Python
mit
2,433
0
from direction import Direction, Pivot

# Per-step X (column) delta for each direction; paired with YMovement below.
XMovement = {
    Direction.left: -1,
    Direction.up: 0,
    Direction.right: 1,
    Direction.down: 0,
    Direction.up_left: -1,
    Direction.up_right: 1,
    Direction.down_left: -1,
    Direction.down_right: 1
}

# Per-step Y (row) delta; negative Y is up.
YMovement = {
    Direction.left: 0,
    Direction.up: -1,
    Direction.right: 0,
    Direction.down: 1,
    Direction.up_left: -1,
    Direction.up_right: -1,
    Direction.down_left: 1,
    Direction.down_right: 1
}

# Direction taken when a "newline" is emitted while drawing in the keyed
# direction.  Cardinals rotate 90° clockwise; diagonals follow their own
# cycle — presumably Charcoal-specific layout rules; confirm against callers.
NewlineDirection = {
    Direction.left: Direction.up,
    Direction.up: Direction.right,
    Direction.right: Direction.down,
    Direction.down: Direction.left,
    Direction.up_left: Direction.up_right,
    Direction.up_right: Direction.down_right,
    Direction.down_left: Direction.up_left,
    Direction.down_right: Direction.down_left
}

# Successor used when cycling through directions (cardinal -> diagonal ->
# cardinal ...).
NextDirection = {
    Direction.left: Direction.up_left,
    Direction.up: Direction.up_right,
    Direction.right: Direction.down_right,
    Direction.down: Direction.down_left,
    Direction.up_left: Direction.up,
    Direction.up_right: Direction.right,
    Direction.down_left: Direction.left,
    Direction.down_right: Direction.down
}

# ASCII-art glyph drawn for a line travelling in each direction.
DirectionCharacters = {
    Direction.left: "-",
    Direction.up: "|",
    Direction.right: "-",
    Direction.down: "|",
    Direction.up_left: "\\",
    Direction.up_right: "/",
    Direction.down_left: "/",
    Direction.down_right: "\\"
}

# 45° turn tables: PivotLookup[pivot][current_direction] -> new direction.
# Pivot.left is counter-clockwise, Pivot.right is clockwise.
PivotLookup = {
    Pivot.left: {
        Direction.left: Direction.down_left,
        Direction.up: Direction.up_left,
        Direction.right: Direction.up_right,
        Direction.down: Direction.down_right,
        Direction.up_left: Direction.left,
        Direction.up_right: Direction.up,
        Direction.down_left: Direction.down,
        Direction.down_right: Direction.right
    },
    Pivot.right: {
        Direction.left: Direction.up_left,
        Direction.up: Direction.up_right,
        Direction.right: Direction.down_right,
        Direction.down: Direction.down_left,
        Direction.up_left: Direction.up,
        Direction.up_right: Direction.right,
        Direction.down_left: Direction.left,
        Direction.down_right: Direction.down
    }
}

# Maps (sign(dx), sign(dy)) -> direction, indexed [x_sign][y_sign].
# NOTE(review): the (0, 0) entry maps to Direction.right — presumably the
# "no movement" default; confirm against callers before relying on it.
DirectionFromXYSigns = {
    -1: {-1: Direction.up_left, 0: Direction.left, 1: Direction.down_left},
    0: {-1: Direction.up, 0: Direction.right, 1: Direction.down},
    1: {-1: Direction.up_right, 0: Direction.right, 1: Direction.down_right}
}
andrewstephens75/gensite
gensite/siteconfig.py
Python
mit
1,317
0.012908
import os.path
import json


class Tag:
    """A single allowed content tag: its key, display title, and icon name."""

    def __init__(self, tag, title, icon):
        self.tag = tag
        self.title = title
        self.icon = icon


class SiteConfig:
    """Site-wide configuration loaded from ``<site_dir>/config.js`` (JSON).

    Raises:
        FileNotFoundError: if the config file does not exist.
        KeyError: if a required key is missing from the JSON.
    """

    def __init__(self, site_dir):
        self.site_dir = site_dir
        config_file_name = os.path.join(self.site_dir, "config.js")
        if not os.path.exists(config_file_name):
            # BUG FIX: previously raised `CommandError`, a name never defined
            # or imported here, with the undefined variable `site_config_file`
            # in the message — so a missing config surfaced as a confusing
            # NameError.  FileNotFoundError is the standard exception for
            # this condition and keeps the original message text.
            raise FileNotFoundError(
                "No site config file exists : " + config_file_name)
        with open(config_file_name, "r", encoding="utf-8") as f:
            site_config = json.load(f)

        # Required settings — missing keys raise KeyError deliberately so a
        # broken config fails loudly at startup.
        self.source_dir = site_config["source_dir"]
        self.destination_dir = site_config["destination_dir"]
        self.template = site_config["template"]
        self.blog_name = site_config["blog_name"]
        self.blog_description = site_config["blog_description"]
        self.blog_author = site_config["blog_author"]
        self.root_url = site_config["root_url"]
        self.relative_index = site_config["relative_index"]
        self.navigation_menu = site_config["navigation_menu"]
        self.twitter_handle = site_config["twitter_handle"]

        # Index the allowed tags by their tag key for O(1) membership tests.
        self.allowed_tags = {}
        for t in site_config["allowed_tags"]:
            self.allowed_tags[t["tag"]] = Tag(t["tag"], t["title"], t["icon"])

    def is_tag_allowed(self, tag):
        """Return True if ``tag`` is declared in the site's allowed tags."""
        return tag in self.allowed_tags
iemejia/coursera-dl
coursera/test/test_api.py
Python
lgpl-3.0
4,808
0.003536
""" Test APIs. """ import json import pytest from mock import patch from coursera import api from coursera.test.utils import slurp_fixture @pytest.fixture def course(): course = api.CourseraOnDemand(session=None, course_id='0') return course @patch('coursera.api.get_page_json') def test_ondemand_programming_supplement_no_instructions(get_page_json, course): no_instructions = slurp_fixture('json/supplement-programming-no-instructions.json') get_page_json.return_value = json.loads(no_instructions) output = course.extract_links_from_programming('0') assert {} == output @patch('coursera.api.get_page_json') def test_ondemand_programming_supplement_empty_instructions(get_page_json, course): empty_instructions = slurp_fixture('json/supplement-programming-empty-instructions.json') get_page_json.return_value = json.loads(empty_instructions) output = course.extract_links_from_programming('0') # Make sure that SOME html content has been extracted, but remove # it immeditely because it's a hassle to properly prepare test input # for it. FIXME later. assert 'html' in output del output['html'] assert {} == output @patch('coursera.api.get_page_json') def test_ondemand_programming_supplement_one_asset(get_page_json, course): one_asset_tag = slurp_fixture('json/supplement-programming-one-asset.json') one_asset_url = slurp_fixture('json/asset-urls-one.json') asset_json = json.loads(one_asset_url) get_page_json.side_effect = [json.loads(one_asset_tag), json.loads(one_asset_url)] expected_output = {'pdf': [(asset_json['elements'][0]['url'], 'statement-pca')]} output = course.extract_links_from_programming('0') # Make sure that SOME html content has been extracted, but remove # it immeditely because it's a hassle to properly prepare test input # for it. FIXME later. 
assert 'html' in output del output['html'] assert expected_output == output @patch('coursera.api.get_page_json') def test_ondemand_programming_supplement_three_assets(get_page_json, course): three_assets_tag = slurp_fixture('json/supplement-programming-three-assets.json') three_assets_url = slurp_fixture('json/asset-urls-three.json') get_page_json.side_effect = [json.loads(three_assets_tag), json.loads(three_assets_url)] expected_output = json.loads(slurp_fixture('json/supplement-three-assets-output.json')) output = course.extract_links_from_programming('0') output = json.loads(json.dumps(output)) # Make sure that SOME html content has been extracted, but remove # it immeditely because it's a hassle to properly prepare test input # for it. FIXME later. assert 'html' in output del output['html'] assert expected_output == output @patch('coursera.api.get_page_json') def test_extract_links_from_lecture_assets_typename_asset(get_page_json, course): open_course_assets_reply = slurp_fixture('json/supplement-open-course-assets-reply.json') api_assets_v1_reply = slurp_fixture('json/supplement-api-assets-v1-reply.json') get_page_json.side_effect = [json.loads(open_course_assets_reply), json.loads(api_assets_v1_re
ply)] expected_output = json.loads(slurp_fixture('json/supplement-extract-links-from-lectures-output.json')) assets = ['giAxucdaEeWJTQ5WTi8YJQ'] output = course._extract_links_from_lecture_assets(assets) output = json.loads(json.dumps(output)) assert expected_output == output @patch('coursera.api.get_page_json') def test_extract_links_from_lecture_assets_typname_url_and_asset(get_page_json, course): """ This test makes sure that _extract_links_from_lect
ure_assets grabs url links both from typename == 'asset' and == 'url'. """ get_page_json.side_effect = [ json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-1.json')), json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-2.json')), json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-3.json')), json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-4.json')), json.loads(slurp_fixture('json/supplement-open-course-assets-typename-url-reply-5.json')), ] expected_output = json.loads(slurp_fixture('json/supplement-extract-links-from-lectures-url-asset-output.json')) assets = ['Yry0spSKEeW8oA5fR3afVQ', 'kMQyUZSLEeWj-hLVp2Pm8w', 'xkAloZmJEeWjYA4jOOgP8Q'] output = course._extract_links_from_lecture_assets(assets) output = json.loads(json.dumps(output)) assert expected_output == output
Azure/azure-batch-apps-python
batchapps/test/unittest_pool.py
Python
mit
7,440
0.007124
#------------------------------------------------------------------------- # The Azure Batch Apps Python Client # # Copyright (c) Microsoft Corporation. All rights reserved. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the ""Software""), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
#
#--------------------------------------------------------------------------
"""Unit tests for Pool and PoolSpecifier"""

import sys

# unittest2 provides backported features on old Pythons; fall back to stdlib.
try:
    import unittest2 as unittest
except ImportError:
    import unittest

# mock moved into the stdlib in Python 3.3.
try:
    from unittest import mock
except ImportError:
    import mock

from batchapps.pool import (
    Pool,
    PoolSpecifier)

from batchapps.api import (
    BatchAppsApi,
    Response)

from batchapps.exceptions import RestCallException

# pylint: disable=W0212

class TestPool(unittest.TestCase):
    """Unit tests for Pool"""

    def test_pool_create(self):
        """Test Pool object"""

        api = mock.create_autospec(BatchAppsApi)
        pool = Pool(api)
        # Defaults when no keyword data is supplied.
        self.assertIsNone(pool.id)
        self.assertIsNone(pool.created)
        self.assertEqual(pool.target_size, 0)

        # Keyword data mirrors the REST payload; note targetDedicated is a
        # string in the payload and coerced to int by Pool.
        pool_spec = {
            'id': 'abc',
            'creationTime': '',
            'targetDedicated': '5',
            'state': 'active',
            'communication': True
            }

        pool = Pool(api, **pool_spec)
        self.assertEqual(pool.id, 'abc')
        self.assertEqual(pool.created, '')
        self.assertEqual(pool.target_size, 5)
        self.assertEqual(pool.communication, True)

    def test_pool_delete(self):
        """Test delete"""

        api = mock.create_autospec(BatchAppsApi)
        api.delete_pool.return_value = mock.create_autospec(Response)
        api.delete_pool.return_value.success = True

        pool = Pool(api)
        pool.delete()
        api.delete_pool.assert_called_with(None)

        # A failed REST call must surface as RestCallException.
        api.delete_pool.return_value.success = False
        api.delete_pool.return_value.result = RestCallException(None,
                                                                "Test",
                                                                None)
        with self.assertRaises(RestCallException):
            pool.delete()

    @mock.patch.object(Pool, 'update')
    def test_pool_resize(self, mock_update):
        """Test resize"""

        api = mock.create_autospec(BatchAppsApi)
        api.resize_pool.return_value = mock.create_autospec(Response)
        api.resize_pool.return_value.success = True

        pool = Pool(api)
        pool.resize(5)
        api.resize_pool.assert_called_with(None, 5)
        # A successful resize refreshes the pool state.
        mock_update.assert_called_with()

        # Non-integer sizes are rejected before any REST call.
        with self.assertRaises(ValueError):
            pool.resize("test")

        api.resize_pool.return_value.success = False
        api.resize_pool.return_value.result = RestCallException(None,
                                                                "Test",
                                                                None)
        mock_update.called = False

        with self.assertRaises(RestCallException):
            pool.resize(1)
        # update() must NOT run when the resize call failed.
        self.assertFalse(mock_update.called)

    def test_pool_update(self):
        """Test update"""

        api = mock.create_autospec(BatchAppsApi)
        pool = Pool(api)

        api.get_pool.return_value = mock.create_autospec(Response)
        api.get_pool.return_value.success = True
        api.get_pool.return_value.result = {
            'targetDedicated':'5',
            'currentDedicated':'4',
            'state':'active',
            'allocationState':'test',
            }

        # Fresh pool: everything at defaults before the update.
        self.assertEqual(pool.target_size, 0)
        self.assertEqual(pool.current_size, 0)
        self.assertEqual(pool.state, None)
        self.assertEqual(pool.allocation_state, None)
        self.assertEqual(pool.resize_error, '')

        pool.update()
        api.get_pool.assert_called_with(pool_id=None)
        # Values taken from the REST payload (string sizes coerced to int).
        self.assertEqual(pool.target_size, 5)
        self.assertEqual(pool.current_size, 4)
        self.assertEqual(pool.state, 'active')
        self.assertEqual(pool.allocation_state, 'test')
        self.assertEqual(pool.resize_error, '')

        api.get_pool.return_value.success = False
        api.get_pool.return_value.result = RestCallException(None,
                                                             "Test",
                                                             None)
        with self.assertRaises(RestCallException):
            pool.update()

class TestPoolSpecifier(unittest.TestCase):
    """Unit tests for PoolSpecifier"""

    def test_poolspecifier_create(self):
        """Test PoolSpecifier object"""

        api = mock.create_autospec(BatchAppsApi)

        pool = PoolSpecifier(api)
        self.assertEqual(pool.target_size, 0)
        self.assertEqual(pool.max_tasks, 1)
        self.assertEqual(pool.communication, False)
        self.assertEqual(pool.certificates, [])

        pool = PoolSpecifier(api, target_size=5, max_tasks=2,
                             communication=True)
        self.assertEqual(pool.target_size, 5)
        self.assertEqual(pool.max_tasks, 2)
        self.assertEqual(pool.communication, True)
        self.assertEqual(pool.certificates, [])

    def test_poolspecifier_start(self):
        """Test start"""

        api = mock.create_autospec(BatchAppsApi)
        api.add_pool.return_value.success = True
        api.add_pool.return_value.result = {
            'poolId':'abc', 'link':{'href':'test.com'}}

        pool = PoolSpecifier(api)
        new_pool = pool.start()
        # start() condenses the REST payload into {'id', 'link'}.
        self.assertEqual(new_pool, {'id':'abc', 'link':'test.com'})
        api.add_pool.assert_called_with(0, 1, False, [])

        api.add_pool.return_value.success = False
        api.add_pool.return_value.result = RestCallException(None,
                                                             "Test",
                                                             None)
        with self.assertRaises(RestCallException):
            pool.start()

    def test_poolspecifier_add_cert(self):
        """Test add_cert: default store values and the 10-certificate cap."""
        api = mock.create_autospec(BatchAppsApi)
        pool = PoolSpecifier(api)

        pool.add_cert("test_thumb")
        self.assertEqual(pool.certificates, [{
            'thumbprint':'test_thumb',
            'thumbprintAlgorithm':'SHA1',
            'storeLocation':'CurrentUser',
            'storeName':'My'}])

        # Explicit store_location is honored; store_name=None is stringified.
        pool.add_cert("test_thumb", store_location="test", store_name=None)
        self.assertEqual(pool.certificates, [{
            'thumbprint':'test_thumb',
            'thumbprintAlgorithm':'SHA1',
            'storeLocation':'CurrentUser',
            'storeName':'My'},{
            'thumbprint':'test_thumb',
            'thumbprintAlgorithm':'SHA1',
            'storeLocation':'test',
            'storeName':'None'}])

        # With 10 certificates already present, add_cert is a no-op.
        pool.id = None
        pool.certificates = [0,1,2,3,4,5,6,7,8,9]
        pool.add_cert("new_cert")
        self.assertEqual(pool.certificates, [0,1,2,3,4,5,6,7,8,9])

if __name__ == '__main__':
    unittest.main()
titilambert/home-assistant
homeassistant/components/integration/sensor.py
Python
apache-2.0
6,827
0.000586
"""Numeric integration of data coming from a source sensor over time.""" from decimal import Decimal, DecimalException import logging import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN, TIME_DAYS, TIME_HOURS, TIME_MINUTES, TIME_SECONDS, ) from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.restore_state import RestoreEntity # mypy: allow-untyped-defs, no-check-untyped-defs _LOGGER = logging.getLogger(__name__) ATTR_SOURCE_ID = "source" CONF_SOURCE_SENSOR = "source" CONF_ROUND_DIGITS = "round" CONF_UNIT_PREFIX = "unit_prefix" CONF_UNIT_TIME = "unit_time" CONF_UNIT_OF_MEASUREMENT = "unit" CONF_METHOD = "method" TRAPEZOIDAL_METHOD = "trapezoidal" LEFT_METHOD = "left" RIGHT_METHOD = "right" INTEGRATION_METHOD = [TRAPEZOIDAL_METHOD, LEFT_METHOD, RIGHT_METHOD] # SI Metric prefixes UNIT_PREFIXES = {None: 1, "k": 10 ** 3, "M": 10 ** 6, "G": 10 ** 9, "T": 10 ** 12} # SI Time prefixes UNIT_TIME = { TIME_SECONDS: 1, TIME_MINUTES: 60, TIME_HOURS: 60 * 60, TIME_DAYS: 24 * 60 * 60, } ICON = "mdi:chart-histogram" DEFAULT_ROUND = 3 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_SOURCE_SENSOR): cv.entity_id, vol.Optional(CONF_ROUND_DIGITS, default=DEFAULT_ROUND): vol.Coerce(int),
vol.Optional(CONF_UNIT_PREFIX, default=None): vol.In(UNIT_PREFIXES), vol.Optional(CONF_UNIT_TIME, d
efault=TIME_HOURS): vol.In(UNIT_TIME), vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, vol.Optional(CONF_METHOD, default=TRAPEZOIDAL_METHOD): vol.In( INTEGRATION_METHOD ), } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the integration sensor.""" integral = IntegrationSensor( config[CONF_SOURCE_SENSOR], config.get(CONF_NAME), config[CONF_ROUND_DIGITS], config[CONF_UNIT_PREFIX], config[CONF_UNIT_TIME], config.get(CONF_UNIT_OF_MEASUREMENT), config[CONF_METHOD], ) async_add_entities([integral]) class IntegrationSensor(RestoreEntity): """Representation of an integration sensor.""" def __init__( self, source_entity, name, round_digits, unit_prefix, unit_time, unit_of_measurement, integration_method, ): """Initialize the integration sensor.""" self._sensor_source_id = source_entity self._round_digits = round_digits self._state = 0 self._method = integration_method self._name = name if name is not None else f"{source_entity} integral" if unit_of_measurement is None: self._unit_template = ( f"{'' if unit_prefix is None else unit_prefix}{{}}{unit_time}" ) # we postpone the definition of unit_of_measurement to later self._unit_of_measurement = None else: self._unit_of_measurement = unit_of_measurement self._unit_prefix = UNIT_PREFIXES[unit_prefix] self._unit_time = UNIT_TIME[unit_time] async def async_added_to_hass(self): """Handle entity which will be added.""" await super().async_added_to_hass() state = await self.async_get_last_state() if state: try: self._state = Decimal(state.state) except ValueError as err: _LOGGER.warning("Could not restore last state: %s", err) @callback def calc_integration(event): """Handle the sensor state changes.""" old_state = event.data.get("old_state") new_state = event.data.get("new_state") if ( old_state is None or old_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE] or new_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE] ): return if self._unit_of_measurement is None: unit = 
new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) self._unit_of_measurement = self._unit_template.format( "" if unit is None else unit ) try: # integration as the Riemann integral of previous measures. area = 0 elapsed_time = ( new_state.last_updated - old_state.last_updated ).total_seconds() if self._method == TRAPEZOIDAL_METHOD: area = ( (Decimal(new_state.state) + Decimal(old_state.state)) * Decimal(elapsed_time) / 2 ) elif self._method == LEFT_METHOD: area = Decimal(old_state.state) * Decimal(elapsed_time) elif self._method == RIGHT_METHOD: area = Decimal(new_state.state) * Decimal(elapsed_time) integral = area / (self._unit_prefix * self._unit_time) assert isinstance(integral, Decimal) except ValueError as err: _LOGGER.warning("While calculating integration: %s", err) except DecimalException as err: _LOGGER.warning( "Invalid state (%s > %s): %s", old_state.state, new_state.state, err ) except AssertionError as err: _LOGGER.error("Could not calculate integral: %s", err) else: self._state += integral self.async_write_ha_state() async_track_state_change_event( self.hass, [self._sensor_source_id], calc_integration ) @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return round(self._state, self._round_digits) @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement @property def should_poll(self): """No polling needed.""" return False @property def device_state_attributes(self): """Return the state attributes of the sensor.""" state_attr = {ATTR_SOURCE_ID: self._sensor_source_id} return state_attr @property def icon(self): """Return the icon to use in the frontend.""" return ICON
endlessm/chromium-browser
third_party/llvm/lldb/test/API/lang/cpp/trivial_abi/TestTrivialABI.py
Python
bsd-3-clause
2,923
0.004105
""" Test that we work properly with classes with the trivial_abi attribute """ import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class TestTrivialABI(TestBase): mydir = TestBase.compute_mydir(__file__) NO_DEBUG_INFO_TESTCASE = True @skipUnlessSupportedTypeAttribute("trivial_abi") @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr37995") @expectedFailureAll(archs=["aarch64"], oslist=["linux"], bugnumber="llvm.org/pr44161") def test_call_trivial(self): """Test that we can print a variable & call a function with a trivial ABI class.""" self.build() self.main_source_file = lldb.SBFileSpec("main.cpp") self.expr_test(True) @skipUnlessSupportedTypeAttribute("trivial_abi") # fixed for SysV-x86_64 ABI, but not Windows-x86_64 @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr36870") @expectedFailureAll(archs=["aarch64"], oslist=["linux"], bugnumber="llvm.org/pr44161") @expectedFailureAll(archs=["arm64", "arm64e"], bugnumber="<rdar://problem/57844240>") def test_call_nontrivial(self): """Test that we can print a variable & call a function on the same class w/o the trivial ABI marker.""" self.build() self.main_source_file = lldb.SBFileSpec("main.cpp") self.expr_test(False) def check_value(self, test_var, ivar_value): self.assertTrue(test_var.GetError().Success(), "Invalid valobj: %s"%(test_var.GetError().GetCString())) ivar = test_var.GetChildMemberWithName("ivar") self.assertTrue(test_var.GetError().Success(), "Failed to fetch ivar") self.assertEqual(ivar_value, ivar.GetValueAsSigned(), "Got the right value for ivar") def check_frame(self, thread): frame = thread.frames[0] inVal_var = frame.FindVariable("inVal") self.check_value(inVal_var, 10) options = lldb.SBExpressionOptions() inVal_expr = frame.EvaluateExpression("inVal", options) self.check_value(inVal_expr, 10) thread.Step
Out() outVal_ret = thread.GetStopReturnValue() self.check_value(outVal_ret, 30) def expr_test(self, trivial): (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(self, "Set a breakpo
int here", self.main_source_file) # Stop in a function that takes a trivial value, and try both frame var & expr to get its value: if trivial: self.check_frame(thread) return # Now continue to the same thing without the trivial_abi and see if we get that right: threads = lldbutil.continue_to_breakpoint(process, bkpt) self.assertEqual(len(threads), 1, "Hit my breakpoint the second time.") self.check_frame(threads[0])
ericchill/gnofract4d
fract4d/test_stdlib.py
Python
bsd-3-clause
14,992
0.014141
#!/usr/bin/env python # tests the standard library of math functions import unittest import math import cmath import string import commands import types import testbase import absyn import codegen import fractparser import fractlexer import fsymbol import translate class Test(testbase.TestBase): def setUp(self): self.fakeNode = absyn.Empty(0) self.codegen = codegen.T(fsymbol.T()) self.parser = fractparser.parser def compileAndRun(self,c_code): cFileName = self.codegen.writeToTempFile(c_code,".cpp") oFileName = self.codegen.writeToTempFile("") #print c_code cmd = "g++ -Wall %s -o %s -Ic -lm" % (cFileName, oFileName) #print cmd (status,output) = commands.getstatusoutput(cmd) self.assertEqual(status,0,"C error:\n%s\nProgram:\n%s\n" % \ ( output,c_code)) #print "status: %s\noutput:\n%s" % (status, output) cmd = oFileName (status,output) = commands.getstatusoutput(cmd) self.assertEqual(status,0, "Runtime error:\n" + output) print "status: %s\noutput:\n%s" % (status, output) return output def makeC(self,user_preamble="", user_postamble=""): # construct a C stub for testing preamble = ''' #include <stdio.h> #include <math.h> #include "cmap.cpp" typedef enum { INT = 0, FLOAT = 1, GRADIENT = 2 } e_paramtype; struct s_param { e_paramtype t; int intval; double doubleval; void *gradient; }; typedef struct { struct s_param *p; void *arena; } pf_fake; int main(){ struct s_param params[20]; int i = 0; ListColorMap *pMap = new ListColorMap(); pMap->init(2); pMap->set(0,0.0,255,0,0,0); pMap->set(1,1.0,0,255,0,0); for(i = 0; i < 20; ++i) { params[i].t = FLOAT; params[i].intval = 773; params[i].doubleval = 0.0; params[i].gradient = pMap; }; pf_fake t__f; t__f.p = params; pf_fake *t__pfo = &t__f; double pixel_re = 0.0, pixel_im = 0.0; double t__h_zwpixel_re = 0.0, t__h_zwpixel_im = 0.0; double t__h_color_re = 0.0; double t__h_color_i = 0.0; double t__h_color_j = 0.0; double t__h_color_k = 0.0; double inputs[] = { 0, 0, 0, 1, 1, 0, 1, 1, 3, 2, 1,-0.0, 0,-1, -3,2, -2,-2, -1,0 }; for(int 
i__ = 0; i < sizeof(inputs)/sizeof(double); i__ += 2) { ''' codegen_symbols = self.codegen.output_symbols(self.codegen,{}) decls = string.join(map(lambda x: x.format(), codegen_symbols),"\n") str_output = string.join(map(lambda x : x.format(), self.codegen.out),"\n") postamble = "}\nreturn 0;}\n" return string.join([preamble,decls,"\n", user_preamble,str_output,"\n", user_postamble,postamble],"") def inspect_bool(self,name): return "printf(\"%s = %%d\\n\", f%s);" % (name,name) def inspect_float(self,name): return "printf(\"%s = %%g\\n\", f%s);" % (name,name) def inspect_int(self,name): return "printf(\"%s = %%d\\n\", f%s);" % (name,name) def inspect_complex(self,name,prefix="f"): return "printf(\"%s = (%%g,%%g)\\n\", %s%s_re, %s%s_im);" % \ (name,prefix,name,prefix,name) def inspect_hyper(self,name,prefix="f"): return ("printf(\"%s = (%%g,%%g,%%g,%%g)\\n\"," + "%s%s_re, %s%s_i, %s%s_j, %s%s_k);") % \ (name,prefix,name,prefix,name,prefix,name,prefix,name) def inspect_color(self,name,prefix="f"): return self.inspect_hyper(name, prefix) def inspect_colors(self,namelist): return "".join(map(lambda x : self.inspect_color(x), namelist)) def predict(self,f,arg1=0,arg2=1): # compare our compiler results to Python stdlib try: x = "%.6g" % f(arg1) except ZeroDivisionError: x = "inf" try: y = "%.6g" % f(arg2) except ZeroDivisionError: y = "inf" return "(%s,%s)" % (x,y) def cpredict(self,f,arg=(1+0j)): try: z = f(arg) return "(%.6g,%.6g)" % (z.real,z.imag) except OverflowError: return "(inf,inf)" except ZeroDivisionError: return "(nan,nan)" def make_test(self,myfunc,pyfunc,val,n): codefrag = "ct_%s%d = %s((%d,%d))" % (myfunc, n, myfunc, val.real, val.imag) lookat = "ct_%s%d" % (myfunc, n)
result = self.cpredict(pyfunc,val) return [ codefrag, lookat, result] def manufacture_tests(self,myfunc,pyfunc): vals = [ 0+0j, 0+1j, 1+0j, 1+1j, 3+2j, 1-0j, 0-1j, -3+2j, -2-2j, -1+0j ]
return map(lambda (x,y) : self.make_test(myfunc,pyfunc,x,y), \ zip(vals,range(1,len(vals)))) def cotantests(self): def mycotan(z): return cmath.cos(z)/cmath.sin(z) tests = self.manufacture_tests("cotan",mycotan) # CONSIDER: comes out as -0,1.31304 in python, but +0 in C++ and gf4d # think Python's probably in error, but not 100% sure tests[6][2] = "(0,1.31304)" return tests def logtests(self): tests = self.manufacture_tests("log",cmath.log) tests[0][2] = "(-inf,0)" # log(0+0j) is overflow in python return tests def asintests(self): tests = self.manufacture_tests("asin",cmath.asin) # asin(x+0j) = (?,-0) in python, which is wrong tests[0][2] = "(0,0)" tests[2][2] = tests[5][2] = "(1.5708,0)" return tests def acostests(self): # work around buggy python acos tests = self.manufacture_tests("acos",cmath.acos) tests[0][2] = "(1.5708,0)" tests[2][2] = tests[5][2] = "(0,0)" return tests def atantests(self): tests = self.manufacture_tests("atan",cmath.atan) tests[1][2] = "(nan,nan)" tests[6][2] = "(nan,-inf)" # not really sure who's right on this return tests def atanhtests(self): tests = self.manufacture_tests("atanh",cmath.atanh) tests[2][2] = tests[5][2] = "(inf,0)" # Python overflows the whole number return tests def test_stdlib(self): '''This is the slowest test, due to how much compilation it does. 
Calls standard functions with a variety of values, checking that they produce the right answers''' # additions to python math stdlib def myfcotan(x): return math.cos(x)/math.sin(x) def myfcotanh(x): return math.cosh(x)/math.sinh(x) def mycotanh(z): return cmath.cosh(z)/cmath.sinh(z) def myasinh(z): return cmath.log(z + cmath.sqrt(z*z+1)) def myacosh(z): return cmath.log(z + cmath.sqrt(z-1) * cmath.sqrt(z+1)) def myctrunc(z): return complex(int(z.real),int(z.imag)) def mycfloor(z): return complex(math.floor(z.real),math.floor(z.imag)) def mycround(z): return complex(int(z.real+0.5),int(z.imag+0.5)) def mycceil(z): x = complex(math.ceil(z.real),math.ceil(z.imag)) return x def mycosxx(z): cosz = cmath.cos(z) return complex(cosz.real, -cosz.imag) def myczero(z): return complex(0,0) tests = [] # # code to run, var to inspect, result # [ "fm = (3.0 % 2.0, 3.1 % 1.5)","fm","(1,0.1)"], # [ "cj = conj(y)", "cj", "(1,-2)"], # [ "fl = flip(y)", "fl", "(2,1)"], # [ "ri = (imag(y),real(y))","ri", "(2,1)"], # [ "m = |y|","m","(5,0)"], # [
cuijiaxing/nlp
rewriter/rules/rewrite_rule/generator.py
Python
gpl-2.0
4,155
0.004091
import nltk import json import sys sys.path.append("../../") import parser from entity import Word class ModelRewriter: rewriteRules = None rewriteRuleFileName = "model.txt" @staticmethod def loadModel(): inputFile = open("model.txt") modelJsonString = inputFile.read() inputFile.close() modelMap = json.loads(modelJsonString) ModelRewriter.rewriteRules = modelMap return modelMap def __init__(self): if ModelRewriter.rewriteRules is None: ModelRewriter.rewriteRules = ModelRewriter.loadModel() #this is the only method the user need to invoke @staticmethod def generateQuestions(input
Sentence): print i
nputSentence sentencePOS = ModelRewriter.getPOSList(inputSentence) nearestModels = ModelRewriter.getNearestModel(sentencePOS) questions = [] for model in nearestModels: tempQuestionList = ModelRewriter.generateQuestionFromModel(model, inputSentence) questions += tempQuestionList return questions @staticmethod def parseSentence(sentence): questionWordMap = {} text = nltk.word_tokenize(sentence) posTag = nltk.pos_tag(text) for i in xrange(len(text)): word = Word() word.index = i word.pos = posTag[i][1] questionWordMap[text[i]] = word questionWordMap["WHOLE-SENTENCE-LIST"] = text return questionWordMap @staticmethod def getNearestModel(sentencePOSList): ''' return the nearest model ''' nearestModelList = [] modelList = ModelRewriter.rewriteRules["template"] for model in modelList: posList = model["pos"] if ModelRewriter.comparePOSList(sentencePOSList, posList): nearestModelList.append(model) return nearestModelList @staticmethod def comparePOSList(templateModelPOSList, newModelPOSList): if len(templateModelPOSList) != len(newModelPOSList): return False else: print templateModelPOSList print newModelPOSList for i in xrange(len(templateModelPOSList)): tempTemplate = unicode(templateModelPOSList[i]) tempNew = unicode(newModelPOSList[i]) if tempTemplate != tempNew: return False return True @staticmethod def getPOSList(sentence): tokenList = nltk.word_tokenize(sentence) posList = nltk.pos_tag(tokenList) resultList = [] for temp in posList: resultList.append(temp[1]) return resultList @staticmethod def generateQuestionFromModel(model, inputSentence): sentenceToken = nltk.word_tokenize(inputSentence) questions = [] if model.has_key("Easy"): questionList = model["Easy"] for questionMap in questionList: question = ModelRewriter.generateSingleQuestion(questionMap, sentenceToken) if question is not None: questions.append(question) #merge two lists elif model.has_key["Medium"]: pass elif model.has_key["Hard"]: pass elif model.has_key["Ghost"]: pass return questions @staticmethod 
def generateSingleQuestion(modelMap, sentenceToken): question = modelMap["question"] indexList = modelMap["index"] questionToken = nltk.word_tokenize(question.strip()) questionString = "" indexList = indexList.strip().split() for i in xrange(len(indexList)): if indexList[i] == "-": questionString += questionToken[i] else: questionString += sentenceToken[int(indexList[i].strip())] questionString += " " return questionString.strip() if __name__ == "__main__": print ModelRewriter.getPOSList("He received two yellow cards as Chelsea lost at White Hart Lane for the first time since 1987.")
UManPychron/pychron
pychron/hardware/gauges/mks/__init__.py
Python
apache-2.0
788
0.001269
# =============================================================================== # Copyright 2011 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language gov
erning p
ermissions and # limitations under the License. # =============================================================================== ''' Gauges Package contains G{packagetree } '''
amenonsen/ansible
lib/ansible/modules/network/fortios/fortios_firewall_vip6.py
Python
gpl-3.0
46,189
0.002728
#!/usr/bin/python from __future__ import (absolute_import, division, print_function) # Copyright 2019 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: fortios_firewall_vip6 short_description: Configure virtual IP for IPv6 in Fortinet's FortiOS and FortiGate. description: - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the user to set and modify firewall feature and vip6 category. Examples include all parameters and values need to be adjusted to datasources before usage. Tested with FOS v6.0.5 version_added: "2.8" author: - Miguel Angel Munoz (@mamunozgonzalez) - Nicolas Thomas (@thomnico) notes: - Requires fortiosapi library developed by Fortinet - Run as a local_action in your playbook requirements: - fortiosapi>=0.9.8 options: host: description: - FortiOS or FortiGate IP address. type: str required: false username: description: - FortiOS or FortiGate username. type: str required: false password: description: - FortiOS or FortiGate password. type: str default: "" vdom: description: - Virtual domain, among those defined previously. A vdom is a virtual instance of the FortiGate that can be configured and used as a different unit. 
type: str default: root https: description: - Indicates if the requests towards FortiGate must use HTTPS protocol. type: bool default: true ssl_verify: description: - Ensures FortiGate certificate must be verified by a proper CA. type: bool default: true version_added: 2.9 state: description: - Indicates whether to create or remove the object. type: str required: true choices:
- present - absent version_added: 2.9 firewall_vip6: description: - C
onfigure virtual IP for IPv6. default: null type: dict suboptions: arp_reply: description: - Enable to respond to ARP requests for this virtual IP address. Enabled by default. type: str choices: - disable - enable color: description: - Color of icon on the GUI. type: int comment: description: - Comment. type: str extip: description: - IP address or address range on the external interface that you want to map to an address or address range on the destination network. type: str extport: description: - Incoming port number range that you want to map to a port number range on the destination network. type: str http_cookie_age: description: - Time in minutes that client web browsers should keep a cookie. Default is 60 seconds. 0 = no time limit. type: int http_cookie_domain: description: - Domain that HTTP cookie persistence should apply to. type: str http_cookie_domain_from_host: description: - Enable/disable use of HTTP cookie domain from host field in HTTP. type: str choices: - disable - enable http_cookie_generation: description: - Generation of HTTP cookie to be accepted. Changing invalidates all existing cookies. type: int http_cookie_path: description: - Limit HTTP cookie persistence to the specified path. type: str http_cookie_share: description: - Control sharing of cookies across virtual servers. same-ip means a cookie from one virtual server can be used by another. Disable stops cookie sharing. type: str choices: - disable - same-ip http_ip_header: description: - For HTTP multiplexing, enable to add the original client IP address in the XForwarded-For HTTP header. type: str choices: - enable - disable http_ip_header_name: description: - For HTTP multiplexing, enter a custom HTTPS header name. The original client IP address is added to this header. If empty, X-Forwarded-For is used. type: str http_multiplex: description: - Enable/disable HTTP multiplexing. 
type: str choices: - enable - disable https_cookie_secure: description: - Enable/disable verification that inserted HTTPS cookies are secure. type: str choices: - disable - enable id: description: - Custom defined ID. type: int ldb_method: description: - Method used to distribute sessions to real servers. type: str choices: - static - round-robin - weighted - least-session - least-rtt - first-alive - http-host mappedip: description: - Mapped IP address range in the format startIP-endIP. type: str mappedport: description: - Port number range on the destination network to which the external port number range is mapped. type: str max_embryonic_connections: description: - Maximum number of incomplete connections. type: int monitor: description: - Name of the health check monitor to use when polling to determine a virtual server's connectivity status. type: list suboptions: name: description: - Health monitor name. Source firewall.ldb-monitor.name. required: true type: str name: description: - Virtual ip6 name. required: true type: str outlook_web_access: description: - Enable to add the Front-End-Https header for Microsoft Outlook Web Access. type: str choices: - disable - enabl
ByrdOfAFeather/AlphaTrion
Community/migrations/0034_auto_20171121_1316.py
Python
mit
1,619
0.003706
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2017-11-21 18:16 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('Community', '0033_auto_20171112_1742'), ] operations = [ migrations.CreateModel(
name='SongSuggestions', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('suggestions', models.TextField(help_text="Please list links to songs, we can't play it with just a name")), ('community', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Community.CommunityInst')), ('user', models.ForeignKey(on_delete=django.db.models.deletio
n.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.AlterField( model_name='communityextraratings', name='overall_rating', field=models.PositiveIntegerField(choices=[(1, '1'), (2, '2'), (3, 'e'), (4, '4'), (5, '5'), (6, '6'), (7, '7'), (8, '8'), (9, '9'), (10, '10')], default=5), ), migrations.AlterField( model_name='communitygameratings', name='game_rating', field=models.PositiveIntegerField(choices=[(1, '1'), (2, '2'), (3, 'e'), (4, '4'), (5, '5'), (6, '6'), (7, '7'), (8, '8'), (9, '9'), (10, '10')], default=5), ), ]
nash-x/hws
neutron/db/vpn/vpn_validator.py
Python
apache-2.0
4,980
0.000602
# Copyright 2014 Cisco Systems, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from neutron.db import l3_db from neutron.extensions import vpnaas from neutron import manager from neutron.plugins.common import constants class VpnReferenceValidator(object): """Baseline validation routines for VPN resources.""" IP_MIN_MTU = {4: 68, 6: 1280} @property def l3_plugin(self): try: return self._l3_plugin except AttributeError:
self._l3_plugin = manager.NeutronManager.get_service_plugins().get( constants.L3_ROUTER_NAT) return self._l3_plugin @property def core_plugin(self): try: return self._core_plugin except AttributeError: self._core_plugin = manager.NeutronManager.get_plugin() return self._core_plugin def _check_dpd(self, ipsec_sitecon): """Ensure that DPD timeout
is greater than DPD interval.""" if ipsec_sitecon['dpd_timeout'] <= ipsec_sitecon['dpd_interval']: raise vpnaas.IPsecSiteConnectionDpdIntervalValueError( attr='dpd_timeout') def _check_mtu(self, context, mtu, ip_version): if mtu < VpnReferenceValidator.IP_MIN_MTU[ip_version]: raise vpnaas.IPsecSiteConnectionMtuError(mtu=mtu, version=ip_version) def assign_sensible_ipsec_sitecon_defaults(self, ipsec_sitecon, prev_conn=None): """Provide defaults for optional items, if missing. Flatten the nested DPD information, and set default values for any missing information. For connection updates, the previous values will be used as defaults for any missing items. """ if not prev_conn: prev_conn = {'dpd_action': 'hold', 'dpd_interval': 30, 'dpd_timeout': 120} dpd = ipsec_sitecon.get('dpd', {}) ipsec_sitecon['dpd_action'] = dpd.get('action', prev_conn['dpd_action']) ipsec_sitecon['dpd_interval'] = dpd.get('interval', prev_conn['dpd_interval']) ipsec_sitecon['dpd_timeout'] = dpd.get('timeout', prev_conn['dpd_timeout']) def validate_ipsec_site_connection(self, context, ipsec_sitecon, ip_version): """Reference implementation of validation for IPSec connection.""" self._check_dpd(ipsec_sitecon) mtu = ipsec_sitecon.get('mtu') if mtu: self._check_mtu(context, mtu, ip_version) def _check_router(self, context, router_id): router = self.l3_plugin.get_router(context, router_id) if not router.get(l3_db.EXTERNAL_GW_INFO): raise vpnaas.RouterIsNotExternal(router_id=router_id) def _check_subnet_id(self, context, router_id, subnet_id): ports = self.core_plugin.get_ports( context, filters={ 'fixed_ips': {'subnet_id': [subnet_id]}, 'device_id': [router_id]}) if not ports: raise vpnaas.SubnetIsNotConnectedToRouter( subnet_id=subnet_id, router_id=router_id) def validate_vpnservice(self, context, vpnservice): self._check_router(context, vpnservice['router_id']) self._check_subnet_id(context, vpnservice['router_id'], vpnservice['subnet_id']) def validate_vpnservice_ngfw(self, context, vpnservice): 
try: if vpnservice.has_key('description'): description = json.loads(vpnservice["description"]) else: return except ValueError: raise vpnaas.DescriptionInvalid(description=vpnservice["description"]) else: tenant_router_id = description.get("tenant_router_id", None) if not tenant_router_id: raise vpnaas.TenantRouterIdMustBeSet() subnets = description.get("subnets", []) if not subnets: raise vpnaas.SubnetMustBeSet() for subnet in subnets: self._check_subnet_id(context, tenant_router_id, subnet)
sinotradition/meridian
meridian/acupoints/laogong21.py
Python
apache-2.0
241
0.034043
#
!/usr/bin/python #coding=utf-8 ''' @author: sheng @license: ''' SPELL=u'láogōng' CN=u'劳宫' NAME=u'laogong21' CHANNEL='pericardium' CHANNEL_FULLNAME='PericardiumChannelofHand-Jueyin' SEQ='PC8' if __name__ == '__main__
': pass
vetscience/Tools
Utils/base.py
Python
bsd-3-clause
7,040
0.014773
#!/usr/bin/env python ''' Oct 10, 2017: Pasi Korhonen, The University of Melbourne Simplifies system calls, logs and pipe interaction. ''' import sys, os, time #, ConfigParser import shlex, subprocess, errno from threading import Timer ############################################################################### class Base: ''' ''' ########################################################################### def __init__(self, logHandle = subprocess.PIPE): ''' ''' self.fname = None self.handle = None self.log = logHandle ########################################################################### def ropen(self, fname): ''' Allow one to read data either from pipe or file ''' self.handle = None self.fname = fname if fname == '-': self.handle = sys.stdin.readlines() else: self.handle = open(fname, 'r') return self.handle ########################################################################### def rclose(self): ''' Allows one to close the file if reading from pipe is allowed ''' if self.fname != '-': self.handle.close() ########################################################################### def createDir(self, mydir): '''Creates a directory for the assembly if one does not exist yet. 
''' try: os.makedirs(mydir) except OSError as e: if e.errno != errno.EEXIST: raise ########################################################################### def isNumber(self, myStr): ''' ''' retVal = True try: float(myStr) except ValueError: retVal = False return retVal ########################################################################### def logTime(self, myStr = ""): ''' ''' if myStr != "": myStr = myStr + ':' rt = time.localtime() self.log.write("\n------------------------------------------------------------\n") self.log.write("%s %d,%d,%d %d:%d:%d\n" %(myStr, rt.tm_year, rt.tm_mon, rt.tm_mday, rt.tm_hour, rt.tm_min, rt.tm_sec)) self.log.write("------------------------------------------------------------\n\n") ########################################################################### def setLogHandle(self, handle): ''' Log handle should be always set because a full buffer can cease processing ''' self.log = handle ########################################################################### def closeLogHandle(self): ''' Log handle should be always set because a full buffer can cease processing ''' self.log.close() ########################################################################### def logger(self, myStr):
''' Writes a message to the log file ''' self.log.write("## %s\n" %myStr) ########################################################################### def shell(self, myStr, doPrint = True, myStdout = False, ignoreFailure = False, log = True): '''Runs given command in a shell and waits for the command to finish. ''' if log == True: self.log.write("# %s\n" %m
yStr) if doPrint == True: print("# " + myStr, file=sys.stderr) # is printed as comment line which is easy to remove if myStdout == True: p = subprocess.Popen(myStr, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) else: p = subprocess.Popen(myStr, stdout=self.log, stderr=subprocess.STDOUT, shell=True) retVal = p.wait() if retVal != 0 and ignoreFailure == False: if log == True: self.logger("# FAILED (%d): %s" %(retVal, myStr)) print("# FAILED (%d): %s" %(retVal, myStr), file = sys.stderr) sys.exit(retVal) return p ########################################################################### def _killProc(self, proc, timeout): ''' ''' timeout["value"] = True proc.kill() ########################################################################### def run(self, cmd, timeoutSec = None, doPrint = True, myStdout = True, ignoreFailure = False, log = True): ''' Runs given command in a subprocess and wait for the command to finish. Retries 3 times if timeout is given. ''' retryCnt = 0 while retryCnt < 3: if log == True: self.log.write("# %s\n" %cmd) if doPrint == True: print("# " + cmd, file = sys.stderr) # is printed as comment line which is easy to remove if myStdout == True: proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE) else: proc = subprocess.Popen(shlex.split(cmd), stdout=self.log, stderr=subprocess.PIPE) if timeoutSec != None: timeout = {"value": False} timer = Timer(timeoutSec, self._killProc, [proc, timeout]) timer.start() stdout, stderr = proc.communicate() if timeoutSec != None: timer.cancel() if (proc.returncode > 1 or proc.returncode < 0) and ignoreFailure == False: retryCnt += 1 if retryCnt >= 3: # Tries three times self.logger("## FAILED(%d): %s. Three failures. Exiting ..." %(proc.returncode, cmd)) print("## FAILED(%d): %s. Three failures. Exiting ..." %(proc.returncode, cmd), file = sys.stderr) sys.exit(proc.returncode) if log == True: self.logger("## FAILED(%d): %s. Retrying ..." 
%(proc.returncode, cmd)) print("## FAILED(%d): %s. Retrying ..." %(proc.returncode, cmd), file = sys.stderr) time.sleep(120) # Wait 2 minutes before the next try else: break return proc ''' ########################################################################### def readSection(self, config, section, sep=None): #''Reads a section from config parser and returns it a list of item rows #'' mylist = [] try: lines = config.options(section) lines = sorted(lines) for line in lines: items = config.get(section, line).split() if sep != None: items = config.get(section, line).split(sep) try: if items[0][0] != '#': # Comment line mylist.append(items) except IndexError: pass except ConfigParser.NoSectionError: print("# WARNING: Base::readSection: section '%s' not found ..." %section) return mylist '''
AwesomeTurtle/personfinder
app/add_note.py
Python
apache-2.0
7,586
0.000791
#!/usr/bin/python2.7 # Copyright 2015 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from google.appengine.api import datastore_errors from model import * from photo import create_photo, PhotoError from utils import * from detect_spam import SpamDetector import extend import reveal import subscribe from django.utils.translation import ugettext as _ from urlparse imp
ort urlparse # TODO(jessien): Clean up duplicate code here and in create.py. # https://github.com/
google/personfinder/issues/157 # how many days left before we warn about imminent expiration. # Make this at least 1. EXPIRY_WARNING_THRESHOLD = 7 class Handler(BaseHandler): def get(self): # Check the request parameters. if not self.params.id: return self.error(404, _('No person id was specified.')) try: person = Person.get(self.repo, self.params.id) # TODO(ichikawa) Consider removing this "except" clause. # I don't think ValueError is thrown here. except ValueError: return self.error(404, _("This person's entry does not exist or has been deleted.")) if not person: return self.error(404, _("This person's entry does not exist or has been deleted.")) standalone = self.request.get('standalone') # Render the page. enable_notes_url = self.get_url('/enable_notes', id=self.params.id) self.render('add_note.html', person=person, standalone=standalone, enable_notes_url=enable_notes_url) def post(self): """Post a note in person's record view page""" if not self.params.text: return self.error( 200, _('Message is required. Please go back and try again.')) if not self.params.author_name: return self.error( 200, _('Your name is required in the "About you" section. ' 'Please go back and try again.')) if (self.params.status == 'is_note_author' and not self.params.author_made_contact): return self.error( 200, _('Please check that you have been in contact with ' 'the person after the disaster, or change the ' '"Status of this person" field.')) if (self.params.status == 'believed_dead' and not self.config.allow_believed_dead_via_ui): return self.error( 200, _('Not authorized to post notes with the status ' '"believed_dead".')) person = Person.get(self.repo, self.params.id) if person.notes_disabled: return self.error( 200, _('The author has disabled status updates ' 'on this record.')) # If a photo was uploaded, create and store a new Photo entry and get # the URL where it's served; otherwise, use the note_photo_url provided. 
photo, photo_url = (None, self.params.note_photo_url) if self.params.note_photo is not None: try: photo, photo_url = create_photo(self.params.note_photo, self) except PhotoError, e: return self.error(400, e.message) photo.put() spam_detector = SpamDetector(self.config.bad_words) spam_score = spam_detector.estimate_spam_score(self.params.text) if (spam_score > 0): note = NoteWithBadWords.create_original( self.repo, entry_date=get_utcnow(), person_record_id=self.params.id, author_name=self.params.author_name, author_email=self.params.author_email, author_phone=self.params.author_phone, source_date=get_utcnow(), author_made_contact=bool(self.params.author_made_contact), status=self.params.status, email_of_found_person=self.params.email_of_found_person, phone_of_found_person=self.params.phone_of_found_person, last_known_location=self.params.last_known_location, text=self.params.text, photo=photo, photo_url=photo_url, spam_score=spam_score, confirmed=False) # Write the new NoteWithBadWords to the datastore note.put_new() # When the note is detected as spam, we do not update person record # or log action. We ask the note author for confirmation first. return self.redirect('/post_flagged_note', id=note.get_record_id(), author_email=note.author_email, repo=self.repo) else: note = Note.create_original( self.repo, entry_date=get_utcnow(), person_record_id=self.params.id, author_name=self.params.author_name, author_email=self.params.author_email, author_phone=self.params.author_phone, source_date=get_utcnow(), author_made_contact=bool(self.params.author_made_contact), status=self.params.status, email_of_found_person=self.params.email_of_found_person, phone_of_found_person=self.params.phone_of_found_person, last_known_location=self.params.last_known_location, text=self.params.text, photo=photo, photo_url=photo_url) # Write the new regular Note to the datastore note.put_new() # Specially log 'believed_dead'. 
if note.status == 'believed_dead': UserActionLog.put_new( 'mark_dead', note, person.primary_full_name, self.request.remote_addr) # Specially log a switch to an alive status. if (note.status in ['believed_alive', 'is_note_author'] and person.latest_status not in ['believed_alive', 'is_note_author']): UserActionLog.put_new('mark_alive', note, person.primary_full_name) # Update the Person based on the Note. if person: person.update_from_note(note) # Send notification to all people # who subscribed to updates on this person subscribe.send_notifications(self, person, [note]) # write the updated person record to datastore db.put(person) # If user wants to subscribe to updates, redirect to the subscribe page if self.params.subscribe: return self.redirect('/subscribe', id=person.record_id, subscribe_email=self.params.author_email, context='add_note') # Redirect to view page so the browser's back button works properly. self.redirect('/view', id=self.params.id, query=self.params.query)
20c/vaping
src/vaping/plugins/vodka.py
Python
apache-2.0
3,075
0.000325
import copy

import confu.schema

import vaping
import vaping.config
import vaping.io
from vaping.plugins import PluginConfigSchema

# vodka is an optional dependency; it is only actually used once the plugin
# runs, so a failed import is tolerated here.
try:
    import vodka
    import vodka.data
except ImportError:
    pass

# graphsrv is optional as well -- `None` is used as the "not installed"
# sentinel so callers can simply truth-test it.
try:
    import graphsrv
    import graphsrv.group
except ImportError:
    graphsrv = None


def probe_to_graphsrv(probe):
    """
    Build graphsrv data group(s) for *probe* from the probe's configuration.
    """

    cfg = probe.config

    # Manual group setup via the `group` config key ("source.group").
    if "group" in cfg:
        source, group = cfg["group"].split(".")
        field = cfg.get("group_field", "host")
        value = cfg[field]
        graphsrv.group.add(source, group, {value: {field: value}}, **cfg)
        return

    # Otherwise derive one graphsrv group per configured host list
    # (fping-style probes).
    for group_name, group_cfg in list(probe.groups.items()):
        if "hosts" not in group_cfg:
            continue

        targets = {}
        for host in group_cfg.get("hosts"):
            if isinstance(host, dict):
                targets[host["host"]] = host
            else:
                targets[host] = {"host": host}

        graphsrv.group.add(probe.name, group_name, targets, **group_cfg)


class VodkaSchema(PluginConfigSchema):
    """
    Config schema for the vodka emit plugin.
    """

    data = confu.schema.List(item=vaping.config.MixedDict())
    apps = confu.schema.Dict(item=vaping.config.MixedDict())
    plugins = confu.schema.List(item=vaping.config.MixedDict())


@vaping.plugin.register("vodka")
class VodkaPlugin(vaping.plugins.EmitBase):
    """
    Emit plugin that forwards vaping messages into vodka data handlers.
    """

    # Starting vodka automatically while vaping is spinning up all the
    # plugins causes inconsistent behaviour in daemon mode, so this plugin
    # lazy-starts on the first emitted message instead.
    #
    # TODO: might need to revisit later
    lazy_start = True

    # Define config schema
    ConfigSchema = VodkaSchema

    def init(self):
        self._is_started = False

    def start(self):
        if self._is_started:
            return

        # Hand vodka a deep copy of this plugin's config with the
        # vaping-specific `name` / `type` keys stripped out.
        vodka_config = copy.deepcopy(self.config)
        vodka_config.pop("name", None)
        vodka_config.pop("type", None)

        self._is_started = True
        vodka.run(vodka_config, self.vaping.config)

        if graphsrv:
            # graphsrv is installed -- generate target configurations for it
            # from each probe's config.
            for node in self.vaping.config.get("probes", []):
                probe_to_graphsrv(vaping.plugin.get_probe(node, self.vaping))

    def emit(self, message):
        if not self._is_started:
            self.start()
        vodka.data.handle(
            message.get("type"), message, data_id=message.get("source"), caller=self
        )
mattesno1/Sick-Beard
lib/requests/packages/urllib3/util.py
Python
gpl-3.0
11,326
0.000618
# urllib3/util.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php


from base64 import b64encode
from collections import namedtuple
from socket import error as SocketError
from hashlib import md5, sha1
from binascii import hexlify, unhexlify

try:
    from select import poll, POLLIN
except ImportError:  # `poll` doesn't exist on OSX and other platforms
    poll = False
    try:
        from select import select
    except ImportError:  # `select` doesn't exist on AppEngine.
        select = False

try:  # Test for SSL features
    SSLContext = None
    HAS_SNI = False

    import ssl
    from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
    from ssl import SSLContext  # Modern SSL?
    from ssl import HAS_SNI  # Has SNI?
except ImportError:
    pass

from .packages import six
from .exceptions import LocationParseError, SSLError


class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path',
                             'query', 'fragment'])):
    """
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`.
    """
    # Bugfix: this was previously spelled ``slots = ()``, which only created a
    # useless class attribute named "slots" and left every instance with a
    # per-instance __dict__. ``__slots__ = ()`` is the correct spelling and is
    # what actually keeps namedtuple-subclass instances lightweight.
    __slots__ = ()

    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                query=None, fragment=None):
        # All fields default to None so partial URLs can be represented.
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

    @property
    def hostname(self):
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host

    @property
    def request_uri(self):
        """Absolute path including the query string."""
        uri = self.path or '/'

        if self.query is not None:
            uri += '?' + self.query

        return uri


def split_first(s, delims):
    """
    Given a string and an iterable of delimiters, split on the first found
    delimiter. Return two split parts and the matched delimiter.

    If not found, then the first part is the full input string.

    Example: ::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    min_idx = None
    min_delim = None
    for d in delims:
        idx = s.find(d)
        if idx < 0:
            continue

        if min_idx is None or idx < min_idx:
            min_idx = idx
            min_delim = d

    if min_idx is None or min_idx < 0:
        return s, '', None

    return s[:min_idx], s[min_idx+1:], min_delim


def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example: ::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)

    :raises LocationParseError: if the port is present but not numeric.
    """

    # While this code has overlap with stdlib's urlparse, it is much
    # simplified for our needs and less annoying.
    # Additionally, this implementation does silly things to be optimal
    # on CPython.

    scheme = None
    auth = None
    host = None
    port = None
    path = None
    fragment = None
    query = None

    # Scheme
    if '://' in url:
        scheme, url = url.split('://', 1)

    # Find the earliest Authority Terminator
    # (http://tools.ietf.org/html/rfc3986#section-3.2)
    url, path_, delim = split_first(url, ['/', '?', '#'])

    if delim:
        # Reassemble the path
        path = delim + path_

    # Auth
    if '@' in url:
        auth, url = url.split('@', 1)

    # IPv6
    if url and url[0] == '[':
        host, url = url[1:].split(']', 1)

    # Port
    if ':' in url:
        _host, port = url.split(':', 1)

        if not host:
            host = _host

        if not port.isdigit():
            raise LocationParseError("Failed to parse: %s" % url)

        port = int(port)
    elif not host and url:
        host = url

    if not path:
        return Url(scheme, auth, host, port, path, query, fragment)

    # Fragment
    if '#' in path:
        path, fragment = path.split('#', 1)

    # Query
    if '?' in path:
        path, query = path.split('?', 1)

    return Url(scheme, auth, host, port, path, query, fragment)


def get_host(url):
    """
    Deprecated. Use :func:`.parse_url` instead.
    """
    p = parse_url(url)
    return p.scheme or 'http', p.hostname, p.port


def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None):
    """
    Shortcuts for generating request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        Can be a boolean, list, or string.
        ``True`` translates to 'gzip,deflate'.
        List will get joined by comma.
        String will be used as provided.

    :param user_agent:
        String representing the user-agent you want, such as
        "python-urllib3/0.6"

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    Example: ::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}
    if accept_encoding:
        if isinstance(accept_encoding, str):
            pass  # caller supplied the exact header value
        elif isinstance(accept_encoding, list):
            accept_encoding = ','.join(accept_encoding)
        else:
            accept_encoding = 'gzip,deflate'
        headers['accept-encoding'] = accept_encoding

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if basic_auth:
        headers['authorization'] = 'Basic ' + \
            b64encode(six.b(basic_auth)).decode('utf-8')

    return headers


def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if not sock:  # Platform-specific: AppEngine
        return False

    if not poll:
        if not select:  # Platform-specific: AppEngine
            return False

        try:
            return select([sock], [], [], 0.0)[0]
        except SocketError:
            return True

    # This version is better on platforms that support it.
    p = poll()
    p.register(sock, POLLIN)
    for (fno, ev) in p.poll(0.0):
        if fno == sock.fileno():
            # Either data is buffered (bad), or the connection is dropped.
            return True
    # Falls through to an implicit (falsy) None when no matching event fired.


def resolve_cert_reqs(candidate):
    """
    Resolves the argument to a numeric constant, which can be passed to
    the wrap_socket function/method from the ssl module.
    Defaults to :data:`ssl.CERT_NONE`.
    If given a string it is assumed to be the name of the constant in the
    :mod:`ssl` module or its abbrevation.
    (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.
    If it's neither `None` nor a string we assume it is already the numeric
    constant which can directly be passed to wrap_socket.
    """
    if candidate is None:
        return CERT_NONE

    if isinstance(candidate, str):
        res = getattr(ssl, candidate, None)
        if res is None:
            res = getattr(ssl, 'CERT_' + candidate)
        return res

    return candidate


def resolve_ssl_version(candidate):
    """
    like resolve_cert_reqs
    """
    if candidate is None:
        return PROTOCOL_SSLv23
    # NOTE(review): this chunk appears to end mid-function here; upstream
    # urllib3 also resolves string names to ssl.PROTOCOL_* constants --
    # confirm against the full file before relying on other input types.
kawamon/hue
desktop/core/ext-py/cx_Oracle-6.4.1/samples/tutorial/solutions/bind_sdo.py
Python
apache-2.0
2,694
0.003712
#------------------------------------------------------------------------------
# bind_sdo.py (Section 4.4)
#------------------------------------------------------------------------------

#------------------------------------------------------------------------------
# Copyright 2017, 2018, Oracle and/or its affiliates. All rights reserved.
#------------------------------------------------------------------------------

from __future__ import print_function

import cx_Oracle
import db_config

con = cx_Oracle.connect(db_config.user, db_config.pw, db_config.dsn)
cur = con.cursor()

# Create table: drop any previous copy (ignoring ORA-00942 "table or view
# does not exist") and recreate it with an SDO_GEOMETRY column.
cur.execute("""begin
    execute immediate 'drop table testgeometry';
exception
when others then
    if sqlcode <> -942 then
        raise;
    end if;
end;""")
cur.execute("""create table testgeometry (
    id number(9) not null,
    geometry MDSYS.SDO_GEOMETRY not null)""")

# Create and populate Oracle objects: look up the SDO types, then assemble
# a polygon geometry value attribute by attribute.
typeObj = con.gettype("MDSYS.SDO_GEOMETRY")
elementInfoTypeObj = con.gettype("MDSYS.SDO_ELEM_INFO_ARRAY")
ordinateTypeObj = con.gettype("MDSYS.SDO_ORDINATE_ARRAY")

obj = typeObj.newobject()
obj.SDO_GTYPE = 2003
obj.SDO_ELEM_INFO = elementInfoTypeObj.newobject()
obj.SDO_ELEM_INFO.extend([1, 1003, 3])
obj.SDO_ORDINATES = ordinateTypeObj.newobject()
obj.SDO_ORDINATES.extend([1, 1, 5, 7])

pointTypeObj = con.gettype("MDSYS.SDO_POINT_TYPE")
obj.SDO_POINT = pointTypeObj.newobject()
obj.SDO_POINT.X = 1
obj.SDO_POINT.Y = 2
obj.SDO_POINT.Z = 3

print("Created object", obj)

# Add a new row
print("Adding row to table...")
cur.execute("insert into testgeometry values (1, :objbv)", objbv=obj)
print("Row added!")


def dumpobject(obj, prefix="  "):
    """Recursively print the contents of an Oracle object value.

    Collections are rendered inside [ ] brackets, attribute-bearing objects
    inside { } braces; *prefix* grows by two spaces per nesting level.
    """
    if not obj.type.iscollection:
        print(prefix, "{")
        for attr in obj.type.attributes:
            item = getattr(obj, attr.name)
            if isinstance(item, cx_Oracle.Object):
                print(prefix + "  " + attr.name + " :")
                dumpobject(item, prefix + "  ")
            else:
                print(prefix + "  " + attr.name + " :", repr(item))
        print(prefix, "}")
        return

    print(prefix, "[")
    for item in obj.aslist():
        if isinstance(item, cx_Oracle.Object):
            dumpobject(item, prefix + "  ")
        else:
            print(prefix + "  ", repr(item))
    print(prefix, "]")


# Query the row
print("Querying row just inserted...")
cur.execute("select id, geometry from testgeometry")
for (id, obj) in cur:
    print("Id: ", id)
    dumpobject(obj)
tensorflow/tensorflow
tensorflow/python/debug/lib/common.py
Python
apache-2.0
2,967
0.004382
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common values and methods for TensorFlow Debugger."""

import collections
import json

GRPC_URL_PREFIX = "grpc://"

# A key for a Session.run() call.
RunKey = collections.namedtuple("RunKey", ["feed_names", "fetch_names"])


def get_graph_element_name(elem):
  """Obtain the name or string representation of a graph element.

  If the graph element has the attribute "name", return name. Otherwise, return
  a __str__ representation of the graph element. Certain graph elements, such as
  `SparseTensor`s, do not have the attribute "name".

  Args:
    elem: The graph element in question.

  Returns:
    If the attribute 'name' is available, return the name. Otherwise, return
    str(fetch).
  """
  if hasattr(elem, "name"):
    return elem.name
  return str(elem)


def get_flattened_names(feeds_or_fetches):
  """Get a flattened list of the names in run() call feeds or fetches.

  Args:
    feeds_or_fetches: Feeds or fetches of the `Session.run()` call. It maybe
      a Tensor, an Operation or a Variable. It may also be nested lists, tuples
      or dicts. See doc of `Session.run()` for more details.

  Returns:
    (list of str) A flattened list of fetch names from `feeds_or_fetches`.
  """
  if isinstance(feeds_or_fetches, dict):
    # Dicts contribute the flattened names of their values, in insertion
    # order (keys themselves are not included).
    names = []
    for value in feeds_or_fetches.values():
      names.extend(get_flattened_names(value))
    return names

  if isinstance(feeds_or_fetches, (list, tuple)):
    names = []
    for element in feeds_or_fetches:
      names.extend(get_flattened_names(element))
    return names

  # Bottom-out condition of the recursion: this ought to be a Tensor, an
  # Operation or a Variable, for which the name attribute should be available.
  return [get_graph_element_name(feeds_or_fetches)]


def get_run_key(feed_dict, fetches):
  """Summarize the names of feeds and fetches as a RunKey JSON string.

  Args:
    feed_dict: The feed_dict given to the `Session.run()` call.
    fetches: The fetches from the `Session.run()` call.

  Returns:
    A JSON Array consisting of two items. They first items is a flattened
    Array of the names of the feeds. The second item is a flattened Array of
    the names of the fetches.
  """
  run_key = RunKey(get_flattened_names(feed_dict),
                   get_flattened_names(fetches))
  return json.dumps(run_key)
pnprog/goreviewpartner
gnugo_analysis.py
Python
gpl-3.0
13,115
0.060465
# -*- coding: utf-8 -*- from __future__ import unicode_literals from gtp import gtp from Tkinter import * from toolbox import * from toolbox import _ def get_full_sequence_threaded(worker,current_color,deepness): sequence=get_full_sequence(worker,current_color,deepness) threading.current_thread().sequence=sequence def get_full_sequence(worker,current_color,deepness): try: sequence="" undos=0 for d in range(deepness): if current_color.lower()=="b": answer=worker.play_black() current_color="w" else: answer=worker.play_white() current_color="b" sequence+=answer+" " if answer=='RESIGN': break if answer=='PASS': undos+=1 break undos+=1 es=worker.get_gnugo_estimate_score() for u in range(undos): worker.undo() return [sequence.strip(),es] except Exception, e: return e class GnuGoAnalysis(): def run_analysis(self,current_move): one_move=go_to_move(self.move_zero,current_move) player_color=guess_color_to_play(self.move_zero,current_move) gnugo=self.gnugo log() log("==============") log("move",str(current_move)) final_score=gnugo.get_gnugo_estimate_score() #linelog(final_score) es=final_score.split()[0] if es[0]=="B": lbs="B%+d"%(-1*float(final_score.split()[3][:-1])) ubs="B%+d"%(-1*float(final_score.split()[5][:-1])) else: ubs="W%+d"%(float(final_score.split()[3][:-1])) lbs="W%+d"%(float(final_score.split()[5][:-1])) node_set(one_move,"ES",es) node_set(one_move,"UBS",ubs) node_set(one_move,"LBS",lbs) if player_color in ('w',"W"): log("gnugo plays white") top_moves=gnugo.gnugo_top_moves_white() answer=gnugo.play_white() else: log("gnugo plays black") top_moves=gnugo.gnugo_top_moves_black() answer=gnugo.play_black() log("====","Gnugo answer:",answer) node_set(one_move,"CBM",answer) log("==== Gnugo top moves") for one_top_move in top_moves: log("\t",one_top_move) log() top_moves=top_moves[:min(self.nb_variations,self.maxvariations)] if (answer not in ["PASS","RESIGN"]): gnugo.undo() while len(top_moves)>0: all_threads=[] for worker in self.workers: worker.need_undo=False if 
len(top_moves)>0: one_top_move=top_moves.pop(0) if player_color in ('w',"W"): worker.place_white(one_top_move) one_thread=threading.Thread(target=get_full_sequence_threaded,args=(worker,'b',self.deepness)) else: worker.place_black(one_top_move) one_thread=threading.Thread(target=get_full_sequence_threaded,args=(worker,'w',self.deepness)) worker.need_undo=True one_thread.one_top_move=one_top_move one_thread.start() all_threads.append(one_thread) for one_thread in all_threads: one_thread.join() for worker in self.workers: if worker.need_undo: worker.undo() for one_thread in all_threads: if type(one_thread.sequence)!=type(["list"]): raise GRPException(_("GnuGo thread failed:")+"\n"+str(one_thread.sequence)) one_sequence=one_thread.one_top_move+" "+one_thread.sequence[0] es=one_thread.sequence[1] one_sequence=one_sequence.strip() log(">>>>>>",one_sequence) previous_move=one_move.parent current_color=player_color first_move=True for one_deep_move in one_sequence.split(' '): if one_deep_move not in ['RESIGN','PASS']: i,j=gtp2ij(one_deep_move) new_child=previous_move.new_child() node_set(new_child,current_color,(i,j)) if first_move: first_move=False node_set(new_child,"ES",es) previous_move=new_child if current_
color in ('w','W'): current_color='b' else: current_color='w' else: gnugo.undo() #one_move.add_comment_text(additional_comments) log("Creating the influence map") black_influence=gnugo.get_gnugo_initial_influence_black() black_t
erritories_points=[] black_influence_points=[] white_influence=gnugo.get_gnugo_initial_influence_white() white_territories_points=[] white_influence_points=[] for i in range(self.size): for j in range(self.size): if black_influence[i][j]==-3: black_territories_points.append([i,j]) if white_influence[i][j]==3: white_territories_points.append([i,j]) if black_influence[i][j]==-2: black_influence_points.append([i,j]) if white_influence[i][j]==2: white_influence_points.append([i,j]) if black_influence_points!=[]: node_set(one_move,"IBM",black_influence_points) #IBM: influence black map if black_territories_points!=[]: node_set(one_move,"TBM",black_territories_points) #TBM: territories black map if white_influence_points!=[]: node_set(one_move,"IWM",white_influence_points) #IWM: influence white map if white_territories_points!=[]: node_set(one_move,"TWM",white_territories_points) #TWM: territories white map return answer #returning the best move, necessary for live analysis def play(self,gtp_color,gtp_move):#GnuGo needs to redifine this method to apply it to all its workers if gtp_color=='w': self.bot.place_white(gtp_move) for worker in self.workers: worker.place_white(gtp_move) else: self.bot.place_black(gtp_move) for worker in self.workers: worker.place_black(gtp_move) def undo(self): self.bot.undo() for worker in self.workers: worker.undo() def terminate_bot(self): log("killing gnugo") self.gnugo.close() log("killing gnugo workers") for w in self.workers: w.close() def initialize_bot(self): self.nb_variations=4 try: self.nb_variations=int(self.profile["variations"]) except: pass #grp_config.set("GnuGo", "variations",self.nb_variations)""" self.deepness=4 try: self.deepness=int(self.profile["deepness"]) except: pass #grp_config.set("GnuGo", "deepness",self.deepness)""" gnugo=gnugo_starting_procedure(self.g,self.profile) self.nb_workers=self.nb_variations log("Starting all GnuGo workers") self.workers=[] for w in range(self.nb_workers): log("\t Starting worker",w+1) 
gnugo_worker=gnugo_starting_procedure(self.g,self.profile) self.workers.append(gnugo_worker) log("All workers ready") self.gnugo=gnugo self.time_per_move=0 return gnugo def gnugo_starting_procedure(sgf_g,profile,silentfail=False): return bot_starting_procedure("GnuGo","GNU Go",GnuGo_gtp,sgf_g,profile,silentfail) class RunAnalysis(GnuGoAnalysis,RunAnalysisBase): def __init__(self,parent,filename,move_range,intervals,variation,komi,profile="slow",existing_variations="remove_everything"): RunAnalysisBase.__init__(self,parent,filename,move_range,intervals,variation,komi,profile,existing_variations) class LiveAnalysis(GnuGoAnalysis,LiveAnalysisBase): def __init__(self,g,filename,profile="slow"): LiveAnalysisBase.__init__(self,g,filename,profile) class GnuGo_gtp(gtp): def get_gnugo_initial_influence_black(self): self.write("initial_influence black influence_regions") one_line=self.readline() one_line=one_line.split("= ")[1].strip().replace(" "," ") lines=[one_line] for i in range(self.size-1): one_line=self.readline().strip().replace(" "," ") lines.append(one_line) influence=[] for i in range(self.size): influence=[[int(s) for s in lines[i].split(" ")]]+influence return influence def get_gnugo_initial_influence_white(self): self.write("initial_influence white influence_regions") one_line=self.readline() one_line=one_line.split("= ")[1].strip().replace(" "," ") lines=[one_line] for i in range(self.size-1): one_line=self.readline().strip().replace(" "," ") lines.append(one_line) influence=[] for i in range(self.size): influence=[[int(s) for s in lines[i].split(" ")]]+influence return influence def quick_evaluation(self,color): return variation_data_formating["ES"]%self.get_gnugo_estimate_score() def get_gnugo_estimate_score(self): self.write("estimate_score") answer=self.readline().strip() try: return answer[2:] except: raise GRPException("GRPException in get_gnugo_estimate_score()") def gnugo_to
st-tu-dresden/inloop
tests/testrunner/tests.py
Python
gpl-3.0
6,974
0.002156
import signal import subprocess import sys from pathlib import Path from unittest import TestCase, skipIf from django.test import tag from inloop.testrunner.runner import DockerTestRunner, collect_files BASE_DIR = Path(__file__).resolve().parent DATA_DIR = str(BASE_DIR.joinpath("data")) class CollectorTest(TestCase): def test_subdirs_and_large_files_are_not_collected(self): contents, ignored_names = collect_files(DATA_DIR, filesize_limit=300) self.assertEqual(contents.keys(), {"empty1.txt", "README.md"}) self.assertEqual(ignored_names, {"larger_than_300_bytes.txt"}) def test_subdirs_are_not_collected(self): contents, ignored_names = collect_files(DATA_DIR, filesize_limit=1000) self.assertEqual(contents.keys(), {"empty1.txt", "README.md", "larger_than_300_bytes.txt"}) self.assertFalse(ignored_names) def test_collected_contents_are_correct(self): contents, _ = collect_files(DATA_DIR, filesize_limit=300) self.assertEqual(contents["empty1.txt"], "") self.assertEqual(contents["README.md"], "This is a test harness for collect_files().\n") @tag("slow", "needs-docker") class DockerTestRunnerIntegrationTest(TestCase): """ Each of the the following tests uses a *real* docker container, there is no monkey patching (aka mocking) involved. The Docker image required for the tests uses a simple trick to allow submitting arbitrary test commands to the container using the task_name parameter of DockerTestRunner.check_task(). This makes it really easy to simulate the behaviors of a real tester image. 
""" OPTIONS = { "image": "inloop-integration-test", "timeout": 1.5, } def setUp(self): self.runner = DockerTestRunner(self.OPTIONS) def test_selftest(self): """Test if our test image works.""" rc = subprocess.call(["docker", "run", "--rm", self.OPTIONS["image"], "exit 42"]) self.assertEqual(42, rc) def test_outputs(self): """Test if we receive stdout, stderr and exit code.""" result = self.runner.check_task("echo -n OUT; echo -n ERR >&2; exit 42", DATA_DIR) self.assertEqual(result.rc, 42) self.assertEqual(result.stdout, "OUT") self.assertEqual(result.stderr, "ERR") self.assertGreaterEqual(result.duration, 0.0) @skipIf(sys.platform == "darwin", reason="Docker Desktop issues") def test_kill_on_timeout(self): """Test if the container gets killed after the timeout.""" result = self.runner.check_task("sleep 10", DATA_DIR) self.assertEqual(result.rc, signal.SIGKILL) self.assertGreaterEqual(result.duration, 0.0) self.assertLess(result.duration, 10.0) @skipIf(sys.platform == "darwin", reason="Docker Desktop issues") def test_output_on_timeout(self): """Test if we receive output even if a timeout happens.""" result = self.runner.check_task("echo -n OUT; echo -n ERR >&2; sleep 10", DATA_DIR) self.assertEqual(result.rc, signal.SIGKILL) self.assertEqual(result.stdout, "OUT") self.assertEqual(result.stderr, "ERR") def test_inbound_mountpoint(self): """Test if the input mount point works correctly.""" result = self.runner.check_task("cat /checker/input/README.md", DATA_DIR) self.assertEqual("This is a test harness for collect_files().\n", result.stdout) self.assertEqual(result.rc, 0) def test_scratch_area(self): """Test that we can write the scratch area.""" result = self.runner.check_task("touch /checker/scratch/test_file", DATA_DIR) self.assertEqual(result.rc, 0) def test_inbound_mountpoint_ro(self): """Test if the input is mounted read-only.""" result = self.runner.check_task("touch /checker/input/test_file", DATA_DIR) self.assertNotEqual(result.rc, 0) def 
test_storage_exists(self): """Test if the storage directory exists.""" result = self.ru
nner.check_task("test -d /checker/output/storage", DATA_DIR) self.ass
ertEqual(result.rc, 0) def test_output_filedict(self): """Test if we can create a file which appears in the files dictionary.""" result = self.runner.check_task("echo -n FOO >/checker/output/storage/bar", DATA_DIR) self.assertEqual(result.rc, 0) self.assertEqual("FOO", result.files["bar"]) def test_container_unprivileged(self): """Test if we execute commands as unprivileged user.""" result = self.runner.check_task("id -un", DATA_DIR) self.assertEqual(result.rc, 0) self.assertEqual(result.stdout.strip(), "nobody") def test_maximum_file_size(self): """Test limits of the scratch file system.""" result = self.runner.check_task( "dd if=/dev/zero of=/checker/scratch/largefile bs=1M count=100", DATA_DIR ) self.assertNotEqual(result.rc, 0) def test_scratch_mount_options(self): """Verify if the tmpfs is mounted correctly.""" result = self.runner.check_task("mount | grep 'tmpfs on /checker/scratch'", DATA_DIR) # the default size=32m is expanded to kilobytes self.assertIn("size=32768k", result.stdout) class DockerTestRunnerTest(TestCase): def setUp(self): self.runner = DockerTestRunner( { "image": "image-not-used", "output_limit": 10, } ) def test_constructor_requires_configkey(self): with self.assertRaises(ValueError): DockerTestRunner({}) # TEST 1: good utf-8 sequence def test_clean_stream_with_short_valid_utf8(self): sample_stream = "abcöüä".encode() cleaned = self.runner.clean_stream(sample_stream) self.assertEqual(cleaned, "abcöüä") # TEST 2: bogus utf-8 sequence def test_clean_stream_with_short_invalid_utf8(self): sample_stream = "abcöüä".encode() # cut off the right half of the utf8 char at the end ('ä'), making it invalid cleaned = self.runner.clean_stream(sample_stream[:-1]) self.assertEqual(len(cleaned), 6) self.assertIn("abcöü", cleaned) # TEST 3: good utf-8 sequence, too long def test_clean_stream_with_too_long_valid_utf8(self): sample_stream = ("a" * 11).encode() cleaned = self.runner.clean_stream(sample_stream) self.assertNotIn("a" * 11, cleaned) 
self.assertIn("a" * 10, cleaned) self.assertIn("output truncated", cleaned) # TEST 4: too long utf-8 sequence, utf-8 composite at cut position def test_clean_stream_with_utf8_composite_at_cut_position(self): sample_stream = "".join(["a", "ä" * 5]).encode() cleaned = self.runner.clean_stream(sample_stream) self.assertNotIn("ä" * 5, cleaned) self.assertIn("aääää", cleaned) self.assertIn("output truncated", cleaned)
mahak/nova
nova/tests/functional/libvirt/base.py
Python
apache-2.0
14,789
0.000135
# Copyright (C) 2018 Red Hat, Inc # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import io import fixtures import mock from nova import conf from nova.tests import fixtures as nova_fixtures from nova.tests.fixtures import libvirt as fakelibvirt from nova.tests.functional import integrated_helpers CONF = conf.CONF class ServersTestBase(integrated_helpers._IntegratedTestBase): """A libvirt-specific variant of the integrated test base.""" ADDITIONAL_FILTERS = [] def setUp(self): self.flags(instances_path=self.useFixture(fixtures.TempDir()).path) self.computes = {} self.compute_rp_uuids = {} super(ServersTestBase, self).setUp() self.useFixture(nova_fixtures.LibvirtImageBackendFixture()) self.useFixture(nova_fixtures.LibvirtFixture()) self.useFixture(nova_fixtures.OSBrickFixture()) self.useFixture(fixtures.MockPatch( 'nova.virt.libvirt.LibvirtDriver._create_image', return_value=(False, False))) self.useFixture(fixtures.MockPatch( 'nova.virt.libvirt.LibvirtDriver._get_local_gb_info', return_value={'total': 128, 'used': 44, 'free': 84})) self.useFixture(fixtures.MockPatch( 'nova.virt.libvirt.driver.libvirt_utils.is_valid_hostname', return_value=True)) self.useFixture(fixtures.MockPatch( 'nova.virt.libvirt.driver.libvirt_utils.file_open', side_effect=lambda *a, **k: io.BytesIO(b''))) self.useFixture(fixtures.MockPatch( 'nova.privsep.utils.supports_direct_io', return_value=True)) self.useFixture(fixtures.MockPatch( 
'nova.virt.libvirt.host.Host.get_online_cpus', return_value=set(range(16)))) # Mock the 'get_connection' function, as we're going to need to provide # custom capabilities for each test _p = mock.patch('nova.virt.libvirt.host.Host.get_connection') self.mock_conn = _p.start() self.addCleanup(_p.stop) def _setup_compute_service(self): # NOTE(stephenfin): We don't start the compute service here as we wish # to configure the host capabilities first. We instead start the # service in the test self.flags(compute_driver='libvirt.LibvirtDriver') def _setup_scheduler_service(self): enabled_filters = CONF.filter_scheduler.enabled_filters enabled_filters += self.ADDITIONAL_FILTERS self.flags(enabled_filters=enabled_filters, group='filter_scheduler') return self.start_service('scheduler') def _get_connection( self, host_info=None, pci_info=None, mdev_info=None, vdpa_info=None, libvirt_version=None, qemu_version=None, hostname=None, ): if not host_info: host_info = fakelibvirt.HostInfo( cpu_nodes=2, cpu_sockets=1, cpu_cores=2, cpu_threads=2) # sanity check self.assertGreater(16, host_info.cpus, "Host.get_online_cpus is only accounting for 16 CPUs but you're " "requesting %d; change the mock or your test" % host_info.cpus) libvirt_version = libvirt_version or fakelibvirt.FAKE_LIBVIRT_VERSION qemu_version = qemu_version or fakelibvirt.FAKE_QEMU_VERSION fake_connection = fakelibvirt.Connection( 'qemu:///system', version=libvirt_version, hv_version=qemu_version, host_info=host_info, pci_info=pci_info, mdev_info=mdev_info, vdpa_info=vdpa_info, hostname=hostname) return fake_connection def start_compute( self, hostname='compute1', host_info=None, pci_info=None, mdev_info=None, vdpa_info=None, libvirt_version=None, qemu_version=None, ): """Start a compute service. The started service will be saved in self.computes, keyed by hostname. :param hostname: A hostname. :param host_info: A fakelibvirt.HostInfo object for the host. 
Defaults to a HostInfo with 2 NUMA nodes, 2 cores per node, 2 threads per core, and 16GB of RAM. :returns: The hostname of the created service, which can be used to lookup the created service and UUID of the assocaited resource provider. """ def _start_compute(hostname, host_info): fake_connection = self._get_connection( host_info, pci_info, mdev_info, vdpa_info, libvirt_version, qemu_version, hostname, ) # This is fun. Firstly we need to do a global'ish mock so we can # actually start the service. with mock.patch('nova.virt.libvirt.host.Host.get_connection', return_value=fake_connection): compute = self.start_service('compute', host=hostname) # Once that's done, we need to tweak the compute "service" to # make sure it returns unique objects. We do this inside the # mock context to avoid a small window between the end of the # context and the tweaking where get_connection would revert to # being an autospec mock. compute.driver._host.get_connection = lambda: fake_connection return compute # ensure we haven't already registered services with these hostnames self.assertNotIn(hostname, self.computes) s
elf.assertNotIn(hostname, self.compute_rp_uuids) self.computes[hostname] = _start_compute(hostname, host_info) self.compute_rp_uuids[hostname] = self.placement.get( '/resource_providers?name=%s' % hostname).body[ 'resource_providers'][0]['uuid'] return hostname class LibvirtMigrationMixin(object): """A simple mixin to facilliate successful libvirt live migrations Requires that the te
st class set self.server for the specific test instnace and self.{src,dest} to indicate the direction of the migration. For any scenarios more complex than this they should override _migrate_stub with their own implementation. """ def setUp(self): super().setUp() self.useFixture(fixtures.MonkeyPatch( 'nova.tests.fixtures.libvirt.Domain.migrateToURI3', self._migrate_stub)) self.migrate_stub_ran = False def _migrate_stub(self, domain, destination, params, flags): self.dest.driver._host.get_connection().createXML( params['destination_xml'], 'fake-createXML-doesnt-care-about-flags') conn = self.src.driver._host.get_connection() dom = conn.lookupByUUIDString(self.server['id']) dom.complete_job() self.migrate_stub_ran = True class LibvirtNeutronFixture(nova_fixtures.NeutronFixture): """A custom variant of the stock neutron fixture with more networks. There are three networks available: two l2 networks (one flat and one VLAN) and one l3 network (VXLAN). """ network_1 = { 'id': '3cb9bc59-5699-4588-a4b1-b87f96708bc6', 'status': 'ACTIVE', 'subnets': [], 'name': 'physical-network-foo', 'admin_state_up': True, 'tenant_id': nova_fixtures.NeutronFixture.tenant_id, 'provider:physical_network': 'foo', 'provider:network_type': 'flat', 'provider:segmentation_id': None, } network_2 = network_1.copy() network_2.update({ 'id': 'a252b8cd-2d99-4e82-9a97-ec1217c496f5', 'name': 'physical-network-bar', 'provider:physi
cedadev/ndg_oauth
ndg_oauth_server/ndg/oauth/server/lib/authenticate/password_authenticator.py
Python
bsd-3-clause
2,193
0.006384
"""OAuth 2.0 WSGI server middleware providing MyProxy certificates as access tokens """ __author__ = "W van Engen" __date__ = "01/11/12" __copyright__ = "(C) 2011 F
OM / Nikhef" __license__ = "BSD
- see LICENSE file in top-level directory" __contact__ = "wvengen+oauth2@nikhef.nl" __revision__ = "$Id$" from base64 import b64decode from ndg.oauth.server.lib.authenticate.authenticator_interface import AuthenticatorInterface from ndg.oauth.server.lib.oauth.oauth_exception import OauthException class PasswordAuthenticator(AuthenticatorInterface): """ Authenticator implementation that checks for a client/resource id+secret combination, either in the HTTP Authorization header, or in the request parameters, according to the OAuth 2 RFC, section 2.3.1 @todo implement protection against brute force attacks (MUST) """ def __init__(self, typ, register): super(PasswordAuthenticator, self).__init__(typ) self._register = register def authenticate(self, request): """ Checks for id/secret pair in Authorization header, or else POSTed request parameters. @type request: webob.Request @param request: HTTP request object @rtype: str @return: id of authenticated client/resource Raise OauthException if authentication fails. """ cid = secret = None if 'Authorization' in request.headers and request.headers['Authorization'].startswith('Basic'): cid, secret = b64decode(request.headers['Authorization'][6:]).split(':',1) elif 'client_id' in request.POST and 'client_secret' in request.POST: cid = request.POST['client_id'] secret = request.POST['client_secret'] if not cid or not secret: raise OauthException('invalid_%s' % self.typ, 'No %s password authentication supplied' % self.typ) for authorization in self._register.register.itervalues(): if authorization.id == cid and authorization.secret == secret: return authorization.id raise OauthException('invalid_%s' % self.typ, '%s access denied: %s' % (cid, self.typ))
meyt/sqlalchemy-dict
setup.py
Python
mit
1,268
0
import sys from setuptools import setup, find_packages package_name = "sqlalchemy_dict" py_version = sys.version_info[:2] def read_version(module_name): f
rom re import match, S from os.path import join, dirname f = open(join(dirname(__file__), module_name, "__init__.py")) return match(r".*__version__ = (\"|')(.*?)('|\")", f.read(), S).group(2) dependencies = ["sqlalchemy"] if py_version < (3, 5): dependencies.append("typing") setup( name=package_name, version=read_version(package_name), author="Mahdi Ghane.g", description=( "sqlal
chemy extension for interacting models with python dictionary." ), long_description=open("README.rst").read(), url="https://github.com/meyt/sqlalchemy-dict", packages=find_packages(), install_requires=dependencies, license="MIT License", classifiers=[ "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries", ], )
smartczm/python-learn
Old-day01-10/s13-day1/system.py
Python
gpl-2.0
1,313
0.012573
#!/usr/bin/env python3.5 # -*- coding: utf-8 -*- # Author: ChenLiang # 执行系统命令 import os os.system("ls -al") os.mkdir("pwd") read = os.popen("df -hT").read() # 查看系统路径 import sys print(sys.path) # 命令行下tab补全命令 # For MAC: import sys import readline import rlcom
pleter if sys.platform == 'darwin' and sys.version_info[0] == 2: readline.parse_and_bind("tab: complete") # linux
and python3 on mac else: readline.parse_and_bind("bind ^I rl_complete") # 说明:上面的代码如果在Mac上不好用,可以尝试下面的代码 # https://docs.python.org/2/library/rlcompleter.html try: import readline except ImportError: print("Module readline not available.") else: import rlcompleter readline.parse_and_bind("tab: complete") # For Linux: #!/usr/bin/env python # python startup file import sys import readline import rlcompleter import atexit import os # tab completion readline.parse_and_bind('tab: complete') # history file histfile = os.path.join(os.environ['HOME'], '.pythonhistory') try: readline.read_history_file(histfile) except IOError: pass atexit.register(readline.write_history_file, histfile) del os, histfile, readline, rlcompleter # 需要注意: 自己定义的模块都放到/usr/lib/python2.7/site-packages/
etherex/pyepm
test/helpers.py
Python
mit
931
0.003222
from distutils import spawn import mock import pytest import requests from pyepm import config as c config = c.get_default_config() has_solc = spawn.find_executable("solc") solc = pytest.mark.skipif(not has_solc, reason="solc compiler not found") COW_ADDRESS = '0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826' def is_hex(s): try: int(s, 16)
return True except ValueError: return False def mock_json_response(status_code=200, error=None, result=None): m = mock.MagicMock(spec=requests.Response) m.status_code = status_code base_json_response = {u'jsonrpc': u'2.0', u'id': u'c7c427a5-b6e9
-4dbf-b218-a6f9d4f09246'} json_response = dict(base_json_response) if result: json_response[u'result'] = result elif error: json_response[u'error'] = error if status_code >= 400: m.reason = 'Error Reason' m.json.return_value = json_response return m
danielhers/tupa
tests/test_features.py
Python
gpl-3.0
5,494
0.003276
"""Testing code for the tupa.features package, unit-testing only.""" import os from collections import OrderedDict import pytest from ucca import textutil from tupa.action import Actions from tupa.features.dense_features import DenseFeatureExtractor from tupa.features.sparse_features import SparseFeatureExtractor from tupa.model import Model from tupa.oracle import Oracle from tupa.states.state import State from .conftest import passage_files, load_passage, basename SPARSE = "sparse" DENSE = "dense" VOCAB = os.path.join("test_files", "vocab", "en_core_web_lg.csv") WORD_VECTORS = os.path.join("test_files", "vocab", "wiki.en.vec") OMITTED = "d" class FeatureExtractorCreator: def __init__(self, name, indexed=False, annotated=False, vocab=None, wordvectors=None, omit=None): self.name = name self.indexed = indexed self.annotated = annotated self.vocab = vocab self.id = vocab == "-" self.wordvectors = wordvectors self.omit = omit def __str__(self): return "-".join([self.name] + [attr for attr in ("indexed", "annotated", "vocab", "id", "wordvectors", "omit") if getattr(self, attr)]) def __call__(self, config): config.args.vocab = self.vocab config.args.word_ve
ctors = self.wordvectors config.args.omit_features = self.omit return SparseFeatureExtractor(omit_features=self.omit) if self.name == SPARSE else DenseFeatureExtractor( OrderedDict((p.name, p.create_from_config()) for p in Model(None, c
onfig=config).param_defs()), indexed=self.indexed, node_dropout=0, omit_features=self.omit) def feature_extractors(*args, **kwargs): return [FeatureExtractorCreator(SPARSE, *args, **kwargs), FeatureExtractorCreator(DENSE, *args, **kwargs), FeatureExtractorCreator(DENSE, *args, indexed=True, **kwargs)] def extract_features(feature_extractor, state, features): values = feature_extractor.extract_features(state) if feature_extractor.params: for key, vs in values.items(): assert len(vs) == feature_extractor.params[key].num, key features.append(values) def _test_features(config, feature_extractor_creator, filename, write_features): feature_extractor = feature_extractor_creator(config) passage = load_passage(filename, annotate=feature_extractor_creator.annotated) textutil.annotate(passage, as_array=True, as_extra=False, vocab=config.vocab()) config.set_format(passage.extra.get("format") or "ucca") oracle = Oracle(passage) state = State(passage) actions = Actions() for key, param in feature_extractor.params.items(): if not param.numeric: param.dropout = 0 feature_extractor.init_param(key) features = [feature_extractor.init_features(state)] while True: extract_features(feature_extractor, state, features) action = min(oracle.get_actions(state, actions).values(), key=str) state.transition(action) if state.need_label: extract_features(feature_extractor, state, features) label, _ = oracle.get_label(state, action) state.label_node(label) if state.finished: break features = ["%s %s\n" % i for f in features if f for i in (sorted(f.items()) + [("", "")])] compare_file = os.path.join("test_files", "features", "-".join((basename(filename), str(feature_extractor_creator))) + ".txt") if write_features: with open(compare_file, "w", encoding="utf-8") as f: f.writelines(features) with open(compare_file, encoding="utf-8") as f: assert f.readlines() == features, compare_file @pytest.mark.parametrize("feature_extractor_creator", [f for v in (None, "-", VOCAB) for w in (None, WORD_VECTORS) 
for o in (None, OMITTED) for f in feature_extractors(vocab=v, wordvectors=w, omit=o)], ids=str) @pytest.mark.parametrize("filename", passage_files(), ids=basename) def test_features(config, feature_extractor_creator, filename, write_features): _test_features(config, feature_extractor_creator, filename, write_features) @pytest.mark.parametrize("feature_extractor_creator", [f for v in ("-", VOCAB) for w in (None, WORD_VECTORS) for o in (None, OMITTED) for f in feature_extractors(annotated=True, vocab=v, wordvectors=w, omit=o)], ids=str) @pytest.mark.parametrize("filename", passage_files("conllu"), ids=basename) def test_features_conllu(config, feature_extractor_creator, filename, write_features): _test_features(config, feature_extractor_creator, filename, write_features) @pytest.mark.parametrize("feature_extractor_creator", [f for o in (None, OMITTED) for f in feature_extractors(omit=o)[:-1]], ids=str) def test_feature_templates(config, feature_extractor_creator, write_features): config.set_format("amr") feature_extractor = feature_extractor_creator(config) features = ["%s\n" % i for i in feature_extractor.all_features()] compare_file = os.path.join("test_files", "features", "templates-%s.txt" % str(feature_extractor_creator)) if write_features: with open(compare_file, "w") as f: f.writelines(features) with open(compare_file) as f: assert f.readlines() == features, compare_file
padraic-padraic/MPHYSG001_CW1
example.py
Python
gpl-2.0
216
0.00463
import greengrap
h if __name__ == '__main__': from matplotlib i
mport pyplot as plt mygraph = greengraph.Greengraph('New York','Chicago') data = mygraph.green_between(20) plt.plot(data) plt.show()
mikeckennedy/python-data-driven-nov9
playground/classdict.py
Python
gpl-3.0
121
0.008264
class Person: def __init
__(self): self.name = 'jeff' self.age = 10 p = Per
son() print(p.__dict__)
stevenvanrossem/son-emu
src/emuvim/api/openstack/openstack_dummies/neutron_dummy_api.py
Python
apache-2.0
44,105
0.002993
""" Copyright (c) 2017 SONATA-NFV and Paderborn University ALL RIGHTS RESERVED. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Neither the name of the SONATA-NFV, Paderborn University nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. This work has been performed in the framework of the SONATA project, funded by the European Commission under Grant number 671517 through the Horizon 2020 and 5G-PPP programmes. The authors would like to acknowledge the contributions of their colleagues of the SONATA partner consortium (www.sonata-nfv.eu). 
""" from flask_restful import Resource from flask import request, Response from emuvim.api.openstack.openstack_dummies.base_openstack_dummy import BaseOpenstackDummy from emuvim.api.openstack.helper import get_host from datetime import datetime import neutron_sfc_dummy_api as SFC import logging import json import uuid import copy LOG = logging.getLogger("api.openstack.neutron") class NeutronDummyApi(BaseOpenstackDummy): def __init__(self, ip, port, compute): super(NeutronDummyApi, self).__init__(ip, port) self.compute = compute self.api.add_resource(NeutronListAPIVersions, "/") self.api.add_resource(Shutdown, "/shutdown") self.api.add_resource(NeutronShowAPIv2Details, "/v2.0") self.api.add_resource(NeutronListNetworks, "/v2.0/networks.json", "/v2.0/networks", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronShowNetwork, "/v2.0/networks/<network_id>.json", "/v2.0/networks/<network_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronCreateNetwork, "/v2.0/networks.json", "/v2.0/networks", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronUpdateNetwork, "/v2.0/networks/<network_id>.json", "/v2.0/networks/<network_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronDeleteNetwork, "/v2.0/networks/<network_id>.json", "/v2.0/networks/<network_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronListSubnets, "/v2.0/subnets.json", "/v2.0/subnets", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronShowSubnet, "/v2.0/subnets/<subnet_id>.json", "/v2.0/subnets/<subnet_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronCreateSubnet, "/v2.0/subnets.json", "/v2.0/subnets", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronUpdateSubnet, "/v2.0/subnets/<subnet_id>.json", "/v2.0/subnets/<subnet_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronDeleteSubnet, "/v2.0/subnets/<subnet_id>.json", "/v2.0/subnets/<subnet_id>", 
resource_class_kwargs={'api': self}) self.api.add_resource(NeutronListPorts, "/v2.0/ports.json", "/v2.0/ports", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronShowPort, "/v2.0/ports/<port_id>.json", "/v2.0/ports/<port_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronCreatePort, "/v2.0/ports.json", "/v2.0/ports", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronUpdatePort, "/v2.0/ports/<port_id>.json", "/v2.0/ports/<port_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronDeletePort, "/v2.0/ports/<port_id>.json", "/v2.0/ports/<port_id>", resource_class_kwargs={'api': self}) self.api.add_resource(NeutronAddFloatingIp, "/v2.0/floatingips.json", "/v2.0/floatingips", resource_class_kwargs={'api': self}) # Service Function Chaining (SFC) API self.api.add_resource(SFC.PortPairsCreate, "/v2.0/sfc/port_pairs.json", "/v2.0/sfc/port_pairs", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairsUpdate, "/v2.0/sfc/port_pairs/<pair_id>.json", "/v2.0/sfc/port_pairs/<pair_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairsDelete, "/v2.0/sfc/port_pairs/<pair_id>.json", "/v2.0/sfc/port_pairs/<pair_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairsList, "/v2.0/sfc/port_pairs.json", "/v2.0/sfc/port_pairs", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairsShow, "/v2.0/sfc/port_pairs/<pair_id>.json", "/v2.0/sfc/port_pairs/<pair_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairGroupCreate, "/v2.0/sfc/port_pair_groups.json", "/v2.0/sfc/port_p
air_
groups", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairGroupUpdate, "/v2.0/sfc/port_pair_groups/<group_id>.json", "/v2.0/sfc/port_pair_groups/<group_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairGroupDelete, "/v2.0/sfc/port_pair_groups/<group_id>.json", "/v2.0/sfc/port_pair_groups/<group_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairGroupList, "/v2.0/sfc/port_pair_groups.json", "/v2.0/sfc/port_pair_groups", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortPairGroupShow, "/v2.0/sfc/port_pair_groups/<group_id>.json", "/v2.0/sfc/port_pair_groups/<group_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.FlowClassifierCreate, "/v2.0/sfc/flow_classifiers.json", "/v2.0/sfc/flow_classifiers", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.FlowClassifierUpdate, "/v2.0/sfc/flow_classifiers/<flow_classifier_id>.json", "/v2.0/sfc/flow_classifiers/<flow_classifier_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.FlowClassifierDelete, "/v2.0/sfc/flow_classifiers/<flow_classifier_id>.json", "/v2.0/sfc/flow_classifiers/<flow_classifier_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.FlowClassifierList, "/v2.0/sfc/flow_classifiers.json", "/v2.0/sfc/flow_classifiers", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.FlowClassifierShow, "/v2.0/sfc/flow_classifiers/<flow_classifier_id>.json", "/v2.0/sfc/flow_classifiers/<flow_classifier_id>", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortChainCreate, "/v2.0/sfc/port_chains.json", "/v2.0/sfc/port_chains", resource_class_kwargs={'api': self}) self.api.add_resource(SFC.PortChainUpdate, "/v2.0/sfc/port_chains/<chain_id>.json", "/v2.0/sfc/port_chains/<chain_id>",
desihub/fiberassign
old/py/mock.py
Python
bsd-3-clause
3,161
0.008858
''' Functions for working with DESI mocks and fiberassignment TODO (maybe): This contains hardcoded hacks, especially wrt priorities and interpretation of object types ''' from __future__ import print_function, division import sys, os import numpy as np from astropy.table import Table, Column from fiberassign import io from desitarget import desi_mask import desitarget import desispec.brick def rdzipn2targets(infile): """Read rdzipn infile and return target and truth tables """ ra, dec, z, itype, priority, numobs = io.read_rdzipn(infile) n = len(ra) #- Martin's itype is 1 to n, while Bob's fiberassign is 0 to n-1 it
ype -= 1 assert np.min(itype >= 0) #- rdzipn has float32 ra, dec, but it should be float64 ra = ra.astype
('float64') % 360 #- enforce 0 <= ra < 360 dec = dec.astype('float64') #- Hardcoded in rdzipn format # 0 : 'QSO', #- QSO-LyA # 1 : 'QSO', #- QSO-Tracer # 2 : 'LRG', #- LRG # 3 : 'ELG', #- ELG # 4 : 'STAR', #- QSO-Fake # 5 : 'UNKNOWN', #- LRG-Fake # 6 : 'STAR', #- StdStar # 7 : 'SKY', #- Sky qso_lya = (itype==0) qso_tracer = (itype==1) qso_fake = (itype==4) qso = qso_lya | qso_tracer | qso_fake lrg_real = (itype==2) lrg_fake = (itype==5) lrg = lrg_real | lrg_fake elg = (itype==3) std = (itype==6) sky = (itype==7) if not np.any(std): print("WARNING: no standard stars found") if not np.any(sky): print("WARNING: no sky locations found") if not np.any(~(std | sky)): print("WARNING: no science targets found") #- Create a DESI_TARGET mask desi_target = np.zeros(n, dtype='i8') desi_target[qso] |= desi_mask.QSO desi_target[elg] |= desi_mask.ELG desi_target[lrg] |= desi_mask.LRG desi_target[sky] |= desi_mask.SKY desi_target[std] |= desi_mask.STD_FSTAR bgs_target = np.zeros(n, dtype='i8') #- TODO mws_target = np.zeros(n, dtype='i8') #- TODO #- True type truetype = np.zeros(n, dtype='S10') assert np.all(truetype == '') truetype[qso_lya | qso_tracer] = 'QSO' truetype[qso_fake] = 'STAR' truetype[elg] = 'GALAXY' truetype[lrg_real] = 'GALAXY' truetype[lrg_fake] = 'UNKNOWN' truetype[std] = 'STAR' truetype[sky] = 'SKY' assert np.all(truetype != '') #- Misc other targetid = np.random.randint(2**62, size=n) ### brickname = np.zeros(n, dtype='S8') brickname = desispec.brick.brickname(ra, dec) subpriority = np.random.uniform(0, 1, size=n) targets = Table() targets['TARGETID'] = targetid targets['BRICKNAME'] = brickname targets['RA'] = ra targets['DEC'] = dec targets['DESI_TARGET'] = desi_target targets['BGS_TARGET'] = bgs_target targets['MWS_TARGET'] = mws_target targets['SUBPRIORITY'] = subpriority truth = Table() truth['TARGETID'] = targetid truth['BRICKNAME'] = brickname truth['RA'] = ra truth['DEC'] = dec truth['TRUEZ'] = z truth['TRUETYPE'] = truetype truth['CATEGORY'] = itype 
return targets, truth
megarcia/WxCD
setup.py
Python
gpl-3.0
18,007
0
""" Python script "setup.py" by Matthew Garcia, PhD student Dept. of Forest and Wildlife Ecology University of Wisconsin - Madison matt.e.garcia@gmail.com Copyrig
ht (C) 2015-2016 by Matthew Garcia Licensed Gnu GPL v3; see 'LICENSE_GnuGPLv3.txt' for complete terms Send questions, bug reports, any related requests
to matt.e.garcia@gmail.com See also 'README.md', 'DISCLAIMER.txt', 'CITATION.txt', 'ACKNOWLEDGEMENTS.txt' Treat others as you would be treated. Pay it forward. Valar dohaeris. PURPOSE: Verifies sample data, scripts, modules, documents, auxiliary files. Verifies availability of python dependencies used by various scripts. Uncompresses certain large example data files Builds directory structure for script output products. DEPENDENCIES: all software package source dependencies are polled here USAGE: '$ python setup.py' """ import os import sys import glob def message(char_string): """ prints a string to the terminal and flushes the buffer """ print char_string sys.stdout.flush() return txt_files = ['ACKNOWLEDGEMENTS.txt', 'CITATION.txt', 'DISCLAIMER.txt', 'LICENSE_GnuGPLv3.txt'] md_files = ['README.md'] main_dirs = ['data', 'docs', 'htcondor', 'source', 'tools'] # scripts = ['process_NCEI_00.py', 'process_NCEI_01.py', 'process_NCEI_02a.py', 'process_NCEI_02b.py', 'process_NCEI_03_chill_d.py', 'process_NCEI_03_chill_dd.py', 'process_NCEI_03_grow_dd.py', 'process_NCEI_03_grow_dd_base0.py', 'process_NCEI_03_prcp_03d.py', 'process_NCEI_03_prcp_07d.py', 'process_NCEI_03_prcp_120d.py', 'process_NCEI_03_prcp_15d.py', 'process_NCEI_03_prcp_180d.py', 'process_NCEI_03_prcp_30d.py', 'process_NCEI_03_prcp_365d.py', 'process_NCEI_03_prcp_60d.py', 'process_NCEI_03_prcp_90d.py', 'process_NCEI_03_prcp_90d_nd0.py', 'process_NCEI_03_prcp_90d_nd10.py', 'process_NCEI_03_prcp_90d_nd25.py', 'process_NCEI_03_preprocess.py', 'process_NCEI_03_tavg_03d.py', 'process_NCEI_03_tavg_07d.py', 'process_NCEI_03_tavg_15d.py', 'process_NCEI_03_tavg_30d.py', 'process_NCEI_03_tavg_60d.py', 'process_NCEI_03_tavg_90d.py', 'process_NCEI_03_tavg_frz.py', 'process_NCEI_03_tmax_03d.py', 'process_NCEI_03_tmax_07d.py', 'process_NCEI_03_tmax_15d.py', 'process_NCEI_03_tmax_30d.py', 'process_NCEI_03_tmax_60d.py', 'process_NCEI_03_tmax_90d.py', 'process_NCEI_03_tmax_frz.py', 'process_NCEI_03_tmin_03d.py', 
'process_NCEI_03_tmin_07d.py', 'process_NCEI_03_tmin_15d.py', 'process_NCEI_03_tmin_30d.py', 'process_NCEI_03_tmin_60d.py', 'process_NCEI_03_tmin_90d.py', 'process_NCEI_03_tmin_frz.py', 'process_NCEI_03_vpd_03d.py', 'process_NCEI_03_vpd_07d.py', 'process_NCEI_03_vpd_15d.py', 'process_NCEI_03_vpd_30d.py', 'process_NCEI_03_vpd_60d.py', 'process_NCEI_03_vpd_90d.py', 'process_NCEI_04a.py', 'process_NCEI_04b.py', 'process_NCEI_05.py', 'process_NCEI_06.py', 'process_NCEI_07.py', 'process_NCEI_08.py', 'process_NCEI_09.py', 'process_NCEI_10.py', 'process_NCEI_11.py', 'process_NCEI_12.py', 'process_NCEI_13.py', 'process_NCEI_14.py', 'process_NCEI_15.py'] # modules = ['Date_Convert.py', 'Interpolation.py', 'Plots.py', 'process_NCEI_03_aux.py', 'Read_Header_Files.py', 'Stats.py', 'Teleconnections.py', 'UTM_Geo_Convert.py'] # htcondor = ['process_NCEI_00.sh', 'process_NCEI_00.sub', 'process_NCEI_01.sh', 'process_NCEI_01.sub', 'process_NCEI_02a.sh', 'process_NCEI_02a.sub', 'process_NCEI_02b.sh', 'process_NCEI_02b.sub', 'process_NCEI_02b_dag.sub', 'process_NCEI_03_chill_d.sh', 'process_NCEI_03_chill_dd.sh', 'process_NCEI_03_dag_gen.py', 'process_NCEI_03_generic.sub', 'process_NCEI_03_grow_dd.sh', 'process_NCEI_03_grow_dd_base0.sh', 'process_NCEI_03_prcp_03d.sh', 'process_NCEI_03_prcp_07d.sh', 'process_NCEI_03_prcp_120d.sh', 'process_NCEI_03_prcp_15d.sh', 'process_NCEI_03_prcp_180d.sh', 'process_NCEI_03_prcp_30d.sh', 'process_NCEI_03_prcp_365d.sh', 'process_NCEI_03_prcp_60d.sh', 'process_NCEI_03_prcp_90d.sh', 'process_NCEI_03_prcp_90d_nd0.sh', 'process_NCEI_03_prcp_90d_nd10.sh', 'process_NCEI_03_prcp_90d_nd25.sh', 'process_NCEI_03_preprocess.sh', 'process_NCEI_03_tavg_03d.sh', 'process_NCEI_03_tavg_07d.sh', 'process_NCEI_03_tavg_15d.sh', 'process_NCEI_03_tavg_30d.sh', 'process_NCEI_03_tavg_60d.sh', 'process_NCEI_03_tavg_90d.sh', 'process_NCEI_03_tavg_frz.sh', 'process_NCEI_03_tmax_03d.sh', 'process_NCEI_03_tmax_07d.sh', 'process_NCEI_03_tmax_15d.sh', 
'process_NCEI_03_tmax_30d.sh', 'process_NCEI_03_tmax_60d.sh', 'process_NCEI_03_tmax_90d.sh', 'process_NCEI_03_tmax_frz.sh', 'process_NCEI_03_tmin_03d.sh', 'process_NCEI_03_tmin_07d.sh', 'process_NCEI_03_tmin_15d.sh', 'process_NCEI_03_tmin_30d.sh', 'process_NCEI_03_tmin_60d.sh', 'process_NCEI_03_tmin_90d.sh', 'process_NCEI_03_tmin_frz.sh', 'process_NCEI_03_vpd_03d.sh', 'process_NCEI_03_vpd_07d.sh', 'process_NCEI_03_vpd_15d.sh', 'process_NCEI_03_vpd_30d.sh', 'process_NCEI_03_vpd_60d.sh', 'process_NCEI_03_vpd_90d.sh', 'process_NCEI_04a.sh', 'process_NCEI_04a.sub', 'process_NCEI_04b.sh', 'process_NCEI_04b.sub', 'process_NCEI_05.sh', 'process_NCEI_05.sub', 'process_NCEI_06.sh', 'process_NCEI_06.sub', 'process_NCEI_07.sh', 'process_NCEI_07.sub', 'process_NCEI_08.sh', 'process_NCEI_08.sub', 'process_NCEI_09.sh', 'process_NCEI_09.sub'] # dependencies = ['os', 'sys', 'datetime', 'glob', 'numpy', 'pandas', 'h5py', 'matplotlib', 'matplotlib.pyplot', 'gdal', 'osgeo.osr', 'scipy.interpolate', 'scipy.ndimage', 'scipy.stats', 'mpl_toolkits', 'mpl_toolkits.basemap', 'pickle'] # gz_data_files = ['EPA_L4_Ecoregions_WLS_UTM15N.bil.gz', 'NCEI_WLS_19830101-20151031.csv.gz', 'NLCD_2011_WLS_UTM15N.bil.gz'] # data_files = ['EPA_L4_Ecoregions_WLS_polygonIDs.txt', 'EPA_L4_Ecoregions_WLS_UTM15N.bil', 'EPA_L4_Ecoregions_WLS_UTM15N.hdr', 'NCEI_WLS_19830101-20151031.csv', 'NCEP_CPC_AO_indices.csv', 'NCEP_CPC_ENSO_indices.csv', 'NCEP_CPC_NAO_indices.csv', 'NCEP_CPC_PNA_indices.csv', 'NLCD_2011_WLS_UTM15N.bil', 'NLCD_2011_WLS_UTM15N.hdr', 'NOAA_ESRL_AMO_indices.csv', 'NOAA_ESRL_PDO_indices.csv', 'NSIDC_MIFL_Superior_Ice.csv', 'Query_locations_dates_sample.csv'] # doc_files = ['How_to_get_NCEI_GHCND_data.txt', 'NCEI_GHCND_documentation.pdf'] # tools = ['query_NCEI_grids.py', 'orientation_maps.py'] # add_dirs = ['analyses', 'grids', 'images'] # analyses_dirs = ['annual_maps', 'cluster_maps', 'ecoregion_maps', 'figures', 'summary_maps'] # os.system('rm .DS_Store') os.system('rm */.DS_Store') 
os.system('rm ._*') os.system('rm */._*') # message('checking for auxiliary files that should accompany this software') txts_present = glob.glob('*.txt') mds_present = glob.glob('*.md') absent = 0 for txt in txt_files: if txt in txts_present: message('- found auxiliary file \'%s\' as expected' % txt) else: message('- auxiliary file \'%s\' is absent' % txt) absent += 1 for md in md_files: if md in mds_present: message('- found auxiliary file \'%s\' as expected' % md) else: message('- auxiliary file \'%s\' is absent' % md) absent += 1 if absent > 0: message('- you don\'t need them to run things, but you do need them to \ understand things') message('- you should probably download this package ag
ptroja/spark2014
testsuite/gnatprove/tests/NB19-026__flow_formal_vectors/test.py
Python
gpl-3.0
73
0
# Testsuite driver for the NB19-026__flow_formal_vectors GNATprove test.
# do_flow comes from the shared test_support helpers (star-import is the
# testsuite convention); per its name it runs flow analysis -- here limited
# to the single unit indefinite_bounded.adb via the "-u" switch.
from test_support import *

do_flow(opt=["-u", "indefinite_bounded.adb"])
StegSchreck/RatS
tests/unit/criticker/test_criticker_ratings_inserter.py
Python
agpl-3.0
9,321
0.001609
import os from unittest import TestCase from unittest.mock import patch from bs4 import BeautifulSoup from RatS.criticker.criticker_ratings_inserter import CritickerRatingsInserter TESTDATA_PATH = os.path.abspath( os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, "assets") ) class CritickerRatingsInserterTest(TestCase): def setUp(self): if not os.path.exists(os.path.join(TESTDATA_PATH, "exports")): os.makedirs(os.path.join(TESTDATA_PATH, "exports")) self.movie = dict() self.movie["title"] = "Fight Club" self.movie["year"] = 1999 self.movie["imdb"] = dict() self.movie["imdb"]["id"] = "tt0137523" self.movie["imdb"]["url"] = "https://www.imdb.com/title/tt0137523" self.movie["imdb"]["my_rating"] = 9 self.movie["tmdb"] = dict() self.movie["tmdb"]["id"] = "550" self.movie["tmdb"]["url"] = "https://www.themoviedb.org/movie/550" with open( os.path.join(TESTDATA_PATH, "criticker", "search_result.html"), encoding="UTF-8", ) as search_results: self.search_results = search_results.read() with open( os.path.join(TESTDATA_PATH, "criticker", "search_result_tile.html"), encoding="UTF-8", ) as result_tile: self.search_result_tile_list = [result_tile.read()] with open( os.path.join(TESTDATA_PATH, "criticker", "movie_details_page.html"), encoding="UTF-8", ) as movie_details_page: self.movie_details_page = movie_details_page.read() @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") @patch("RatS.utils.browser_handler.Firefox") def test_init(self, browser_mock, base_init_mock): CritickerRatingsInserter(None) self.assertTrue(base_init_mock.called) @patch("RatS.base.base_ratings_inserter.RatingsInserter._print_progress_bar") @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._is_requested_movie" ) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._get_search_results" ) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") 
@patch("RatS.utils.browser_handler.Firefox") def test_insert( self, browser_mock, base_init_mock, site_mock, overview_page_mock, # pylint: disable=too-many-arguments eq_check_mock, progress_print_mock, ): overview_page_mock.return_value = self.search_result_tile_list eq_check_mock.return_value = True site_mock.browser = browser_mock inserter = CritickerRatingsInserter(None) inserter.args = False inserter.site = site_mock inserter.site.site_name = "Criticker" inserter.failed_movies = [] inserter.insert([self.movie], "IMDB") self.assertTrue(base_init_mock.called) self.assertTrue(progress_print_mock.called) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") @patch("RatS.utils.browser_handler.Firefox") def test_external_link_compare_imdb_success( self, browser_mock, base_init_mock, site_mock ): site_mock.browser = browser_mock inserter = CritickerRatingsInserter(None) inserter.site = site_mock inserter.site.site_name = "Criticker" inserter.failed_movies = [] result = inserter._compare_external_links( self.movie_details_page, self.movie, "imdb.com", "imdb" ) # pylint: disable=protected-access self.assertTrue(result) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") @patch("RatS.utils.browser_handler.Firefox") def test_external_link_compare_imdb_fail( self, browser_mock, base_init_mock, site_mock ): site_mock.browser = browser_mock inserter = CritickerRatingsInserter(None) inserter.site = site_mock inserter.site.site_name = "Criticker" inserter.failed_movies = [] movie2 = dict() movie2["title"] = "Arrival" movie2["year"] = 2006 movie2["imdb"] = dict() movie2["imdb"]["id"] = "tt2543164" movie2["imdb"]["url"] = "https://www.imdb.com/title/tt2543164" movie2["imdb"]["my_rating"] = 7 result = inserter._compare_external_links( self.movie_details_page, movie2, "imdb.com", "imdb" ) # pylint: disable=protected-access 
self.assertFalse(result) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links" ) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") @patch("RatS.utils.browser_handler.Firefox") def
test_find_movie_success_by_imdb( self, browser_mock, base_init_mock, site_mock, compare_mock ): site_mock.browser = browser_mock browser_mock.page_source = self.search_results inserter = CritickerRatingsInserter(N
one) inserter.site = site_mock inserter.site.site_name = "Criticker" inserter.failed_movies = [] compare_mock.return_value = True result = inserter._find_movie(self.movie) # pylint: disable=protected-access self.assertTrue(result) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links" ) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") @patch("RatS.utils.browser_handler.Firefox") def test_find_movie_success_by_year( self, browser_mock, base_init_mock, site_mock, compare_mock ): site_mock.browser = browser_mock browser_mock.page_source = self.movie_details_page inserter = CritickerRatingsInserter(None) inserter.site = site_mock inserter.site.site_name = "Criticker" inserter.failed_movies = [] compare_mock.return_value = True movie2 = dict() movie2["title"] = "Fight Club" movie2["year"] = 1999 search_result = BeautifulSoup(self.search_result_tile_list[0], "html.parser") result = inserter._is_requested_movie( movie2, search_result ) # pylint: disable=protected-access self.assertTrue(result) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links" ) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__") @patch("RatS.utils.browser_handler.Firefox") def test_find_movie_fail_by_year( self, browser_mock, base_init_mock, site_mock, compare_mock ): site_mock.browser = browser_mock browser_mock.page_source = self.movie_details_page inserter = CritickerRatingsInserter(None) inserter.site = site_mock inserter.site.site_name = "Criticker" inserter.failed_movies = [] compare_mock.return_value = True movie2 = dict() movie2["title"] = "Fight Club" movie2["year"] = 1998 search_result = BeautifulSoup(self.search_result_tile_list[0], "html.parser") result = inserter._is_requested_movie( movie2, search_result ) # pylint: disable=protected-access 
self.assertFalse(result) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._is_requested_movie" ) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._get_search_results" ) @patch( "RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links" ) @patch("RatS.criticker.criticker_ratings_inserter.Criticker") @
silvau/Addons_Odoo
module_reload/module.py
Python
gpl-2.0
1,349
0.008154
from openerp.modules.registry import RegistryManager
from openerp.netsvc import Service
from osv import fields, osv
from osv.orm import MetaModel
from reimport import reimport


class module(osv.osv):
    """Extend ir.module.module with a button that hot-reloads a module's
    Python code without restarting the OpenERP server."""

    _inherit = "ir.module.module"

    def button_reload(self, cr, uid, ids, context=None):
        """Reload the Python code of the selected modules in place.

        For each selected module record: unregister its report-parser
        services, drop its registered model classes, then re-import its
        Python package.  Finally the database registry is rebuilt so the
        reloaded definitions take effect.
        """
        for module_record in self.browse(cr, uid, ids, context=context):
            # Remove any report parsers registered for this module.
            module_path = 'addons/' + module_record.name
            for service_name, service in Service._services.items():
                template = getattr(service, 'tmpl', '')
                # Only report services carry a string template path; match
                # it against this module's addons path to find its parsers.
                if type(template) == type(''):
                    if template.startswith(module_path):
                        Service.remove(service_name)
            # Remove any model classes registered for this module, so the
            # re-import below starts from a clean slate.
            MetaModel.module_to_models[module_record.name] = []
            # Reload all Python modules from the OpenERP module's directory.
            modulename = 'openerp.addons.' + module_record.name
            root = __import__(modulename)
            module = getattr(root.addons, module_record.name)
            reimport(module)
        # Rebuild the registry once so all reloaded modules are picked up.
        # NOTE(review): source formatting was lost; these two calls are
        # assumed to sit outside the loop (one rebuild per click) -- confirm
        # against upstream module_reload.
        RegistryManager.delete(cr.dbname)
        RegistryManager.new(cr.dbname)
        return {}

module()
Alberto-Beralix/Beralix
i386-squashfs-root/usr/share/pyshared/gi/_gobject/constants.py
Python
gpl-3.0
3,114
0.000321
# -*- Mode: Python; py-indent-offset: 4 -*-
# pygobject - Python bindings for the GObject library
# Copyright (C) 2006-2007 Johan Dahlin
#
#   gobject/constants.py: GObject type constants
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301
# USA

import sys

from . import _gobject

# GType constants, resolved at import time by name lookup in the GObject
# type system.  TYPE_INVALID is defined in gobjectmodule.c.
TYPE_NONE = _gobject.type_from_name('void')
TYPE_INTERFACE = _gobject.type_from_name('GInterface')
TYPE_CHAR = _gobject.type_from_name('gchar')
TYPE_UCHAR = _gobject.type_from_name('guchar')
TYPE_BOOLEAN = _gobject.type_from_name('gboolean')
TYPE_INT = _gobject.type_from_name('gint')
TYPE_UINT = _gobject.type_from_name('guint')
TYPE_LONG = _gobject.type_from_name('glong')
TYPE_ULONG = _gobject.type_from_name('gulong')
TYPE_INT64 = _gobject.type_from_name('gint64')
TYPE_UINT64 = _gobject.type_from_name('guint64')
TYPE_ENUM = _gobject.type_from_name('GEnum')
TYPE_FLAGS = _gobject.type_from_name('GFlags')
TYPE_FLOAT = _gobject.type_from_name('gfloat')
TYPE_DOUBLE = _gobject.type_from_name('gdouble')
TYPE_STRING = _gobject.type_from_name('gchararray')
TYPE_POINTER = _gobject.type_from_name('gpointer')
TYPE_BOXED = _gobject.type_from_name('GBoxed')
TYPE_PARAM = _gobject.type_from_name('GParam')
TYPE_OBJECT = _gobject.type_from_name('GObject')
TYPE_PYOBJECT = _gobject.type_from_name('PyObject')
# Unicode characters are carried as unsigned ints at the GType level.
TYPE_UNICHAR = TYPE_UINT

# do a little dance to maintain API compatibility
# as these were originally defined here, and are
# now defined in gobjectmodule.c
G_MINFLOAT = _gobject.G_MINFLOAT
G_MAXFLOAT = _gobject.G_MAXFLOAT
G_MINDOUBLE = _gobject.G_MINDOUBLE
G_MAXDOUBLE = _gobject.G_MAXDOUBLE
G_MINSHORT = _gobject.G_MINSHORT
G_MAXSHORT = _gobject.G_MAXSHORT
G_MAXUSHORT = _gobject.G_MAXUSHORT
G_MININT = _gobject.G_MININT
G_MAXINT = _gobject.G_MAXINT
G_MAXUINT = _gobject.G_MAXUINT
G_MINLONG = _gobject.G_MINLONG
G_MAXLONG = _gobject.G_MAXLONG
G_MAXULONG = _gobject.G_MAXULONG
G_MININT8 = _gobject.G_MININT8
G_MAXINT8 = _gobject.G_MAXINT8
G_MAXUINT8 = _gobject.G_MAXUINT8
G_MININT16 = _gobject.G_MININT16
G_MAXINT16 = _gobject.G_MAXINT16
G_MAXUINT16 = _gobject.G_MAXUINT16
G_MININT32 = _gobject.G_MININT32
G_MAXINT32 = _gobject.G_MAXINT32
G_MAXUINT32 = _gobject.G_MAXUINT32
G_MININT64 = _gobject.G_MININT64
G_MAXINT64 = _gobject.G_MAXINT64
G_MAXUINT64 = _gobject.G_MAXUINT64
G_MAXSIZE = _gobject.G_MAXSIZE
G_MAXSSIZE = _gobject.G_MAXSSIZE
G_MINOFFSET = _gobject.G_MINOFFSET
G_MAXOFFSET = _gobject.G_MAXOFFSET
Micronaet/micronaet-mx8
sale_delivery_partial_B/__init__.py
Python
agpl-3.0
1,040
0
# -*- coding: utf-8 -*- #############
################################################################## # # Copyright (C) 2001-2014 Micronaet SRL (<http://www.micronaet.it>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be us
eful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### from . import delivery_b # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
smmbllsm/aleph
aleph/views/documents_api.py
Python
mit
5,217
0.000192
import os
import logging

from werkzeug.exceptions import BadRequest, NotFound
from flask import Blueprint, redirect, send_file, request
from apikit import jsonify, Pager, get_limit, get_offset, request_data

from aleph.core import archive, url_for, db
from aleph.model import Document, Entity, Reference, Collection
from aleph.logic import update_document
from aleph.events import log_event
from aleph.views.cache import enable_cache
from aleph.search.tabular import tabular_query, execute_tabular_query
from aleph.search.util import next_params
from aleph.views.util import get_document, get_tabular, get_page

log = logging.getLogger(__name__)
blueprint = Blueprint('documents_api', __name__)


@blueprint.route('/api/1/documents', methods=['GET'])
def index():
    """List documents, restricted to collections the caller may read.

    Optional query params: repeated ``collection`` (ids to intersect with
    the caller's readable collections) and repeated ``content_hash``.
    """
    authz = request.authz
    collections = request.args.getlist('collection')
    collections = authz.collections_intersect(authz.READ, collections)
    q = Document.all()
    q = q.filter(Document.collection_id.in_(collections))
    hashes = request.args.getlist('content_hash')
    if len(hashes):
        q = q.filter(Document.content_hash.in_(hashes))
    return jsonify(Pager(q))


@blueprint.route('/api/1/documents/<int:document_id>')
def view(document_id):
    """Return a single document's metadata, with data/PDF download URLs."""
    doc = get_document(document_id)
    enable_cache()
    data = doc.to_dict()
    log_event(request, document_id=doc.id)
    # Prefer a direct archive URL; fall back to the streaming endpoints
    # below when the archive backend cannot generate one.
    data['data_url'] = archive.generate_url(doc.meta)
    if data['data_url'] is None:
        data['data_url'] = url_for('documents_api.file',
                                   document_id=document_id)
    if doc.meta.is_pdf:
        data['pdf_url'] = data['data_url']
    else:
        try:
            data['pdf_url'] = archive.generate_url(doc.meta.pdf)
        except Exception as ex:
            # Best effort: a missing PDF derivative is logged, not fatal.
            log.info('Could not generate PDF url: %r', ex)
    if data.get('pdf_url') is None:
        data['pdf_url'] = url_for('documents_api.pdf',
                                  document_id=document_id)
    return jsonify(data)


@blueprint.route('/api/1/documents/<int:document_id>',
                 methods=['POST', 'PUT'])
def update(document_id):
    """Update a document's metadata (requires write access) and re-index."""
    document = get_document(document_id, action=request.authz.WRITE)
    data = request_data()
    document.update(data)
    db.session.commit()
    log_event(request, document_id=document.id)
    update_document(document)
    return view(document_id)


@blueprint.route('/api/1/documents/<int:document_id>/references')
def references(document_id):
    """List regex-derived entity references for a document, heaviest first.

    Only active entities from collections readable by the caller are shown.
    """
    doc = get_document(document_id)
    q = db.session.query(Reference)
    q = q.filter(Reference.document_id == doc.id)
    q = q.filter(Reference.origin == 'regex')
    q = q.join(Entity)
    q = q.filter(Entity.state == Entity.STATE_ACTIVE)
    q = q.filter(Entity.collection_id.in_(request.authz.collections_read))
    q = q.order_by(Reference.weight.desc())
    return jsonify(Pager(q, document_id=document_id))


@blueprint.route('/api/1/documents/<int:document_id>/file')
def file(document_id):
    """Download the original source file of a document.

    Redirects to an archive URL when available, otherwise streams a local
    copy as an attachment.
    """
    document = get_document(document_id)
    enable_cache(server_side=True)
    log_event(request, document_id=document.id)
    url = archive.generate_url(document.meta)
    if url is not None:
        return redirect(url)
    local_path = archive.load_file(document.meta)
    if not os.path.isfile(local_path):
        raise NotFound("File does not exist.")
    fh = open(local_path, 'rb')
    return send_file(fh, as_attachment=True,
                     attachment_filename=document.meta.file_name,
                     mimetype=document.meta.mime_type)


@blueprint.route('/api/1/documents/<int:document_id>/pdf')
def pdf(document_id):
    """Download the PDF rendition of a text document.

    Redirects to an archive URL when available, otherwise streams a local
    copy.  Only text documents have a PDF rendition.
    """
    document = get_document(document_id)
    enable_cache(server_side=True)
    log_event(request, document_id=document.id)
    if document.type != Document.TYPE_TEXT:
        raise BadRequest("PDF is only available for text documents")
    pdf = document.meta.pdf
    url = archive.generate_url(pdf)
    if url is not None:
        return redirect(url)
    try:
        local_path = archive.load_file(pdf)
        fh = open(local_path, 'rb')
    except Exception as ex:
        raise NotFound("Missing PDF file: %r" % ex)
    return send_file(fh, mimetype=pdf.mime_type)


@blueprint.route('/api/1/documents/<int:document_id>/pages/<int:number>')
def page(document_id, number):
    """Return one page of a document."""
    document, page = get_page(document_id, number)
    enable_cache(server_side=True)
    return jsonify(page)


@blueprint.route('/api/1/documents/<int:document_id>/tables/<int:table_id>')
def table(document_id, table_id):
    """Return metadata for one table of a tabular document."""
    document, tabular = get_tabular(document_id, table_id)
    enable_cache(vary_user=True)
    return jsonify(tabular)


@blueprint.route('/api/1/documents/<int:document_id>/tables/<int:table_id>/rows')
def rows(document_id, table_id):
    """Return rows of a table, paginated via ``limit``/``offset``.

    Includes a ``next`` URL when further result pages are available.
    """
    document, tabular = get_tabular(document_id, table_id)
    query = tabular_query(document_id, table_id, request.args)
    query['size'] = get_limit(default=100)
    query['from'] = get_offset()
    result = execute_tabular_query(query)
    params = next_params(request.args, result)
    if params is not None:
        result['next'] = url_for('documents_api.rows',
                                 document_id=document_id,
                                 table_id=table_id, **params)
    return jsonify(result)
SevereOverfl0w/Krympa
krympa/__init__.py
Python
mit
597
0.001675
from pyramid.config import Configurator
from pyramid.renderers import JSONP


def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    config = Configurator(settings=settings)
    # JSONP renderer: the wrapper function name is taken from ?callback=...
    config.add_renderer('jsonp', JSONP(param_name='callback'))
    config.include('pyramid_mako')
    config.include('pyramid_redis')
    config.add_static_view('static', 'static', cache_max_age=3600)
    # Registration order matters: the catch-all short-link route goes last
    # so it cannot shadow '/' or '/api'.
    for route_name, pattern in (
        ('home', '/'),
        ('api', '/api'),
        ('redirect', '/{shortened}'),
    ):
        config.add_route(route_name, pattern)
    config.scan()
    return config.make_wsgi_app()