repo_name
stringlengths
5
100
path
stringlengths
4
231
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
6
947k
score
float64
0
0.34
prefix
stringlengths
0
8.16k
middle
stringlengths
3
512
suffix
stringlengths
0
8.17k
diogocs1/comps
web/openerp/addons/test_impex/models.py
Python
apache-2.0
5,891
0.003225
# -*- coding: utf-8 -*- from openerp.osv import orm, fields def selection_fn(obj, cr, uid, context=None): return list(enumerate(["Corge", "Grault", "Wheee", "Moog"])) def function_fn(model, cr, uid, ids, field_name, arg, context): return dict((id, 3) for id in ids) def function_fn_write(model, cr, uid, id, field_name, field_value, fnct_inv_arg, context): """ just so CreatorCase.export can be used """ pass models = [ ('boolean', fields.boolean()), ('integer', fields.integer()), ('float', fields.float()), ('decimal', fields.float(digits=(16, 3))), ('string.bounded', fields.char('unknown', size=16)), ('string.required', fields.char('unknown', size=None, required=True)), ('string', fields.char('unknown', size=None)), ('date', fields.date()), ('datetime', fields.datetime()), ('text', fields.text()), ('selection', fields.selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])), # here use size=-1 to store the values as integers instead of strings ('selection.function', fields.selection(selection_fn, size=-1)), # just relate to an integer ('many2one', fields.many2one('export.integer')), ('one2many', fields.one2many('export.one2many.child', 'parent_id')), ('many2many', fields.many2many('export.many2many.other')), ('function', fields.function(function_fn, fnct_inv=function_fn_write, type="integer")), # related: specialization of fields.function, should work the same way # TODO: reference ] for name, field in models: class NewModel(orm.Model): _name = 'export.%s' % name _columns = { 'const': fields.integer(), 'value': field, } _defaults = { 'const': 4, } def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)] def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): if isinstance(name, basestring) and name.split(':')[0] == self._name: ids = self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) return self.name_get(cr, user, ids, context=context) else: return [] class One2ManyChild(orm.Model): _name = 'export.one2many.child' # FIXME: orm.py:1161, fix to name_get on m2o field _rec_name = 'value' _columns = { 'parent_id': fields.many2one('export.one2many'), 'str': fields.char('unknown', size=None), 'value': fields.integer(), } def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)]
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): if isinstance(name, basestring) and name.split(':')[0] == self._name: ids = self.search(cr, user, [['value', operator, int(name.split(':'
)[1])]]) return self.name_get(cr, user, ids, context=context) else: return [] class One2ManyMultiple(orm.Model): _name = 'export.one2many.multiple' _columns = { 'parent_id': fields.many2one('export.one2many.recursive'), 'const': fields.integer(), 'child1': fields.one2many('export.one2many.child.1', 'parent_id'), 'child2': fields.one2many('export.one2many.child.2', 'parent_id'), } _defaults = { 'const': 36, } class One2ManyChildMultiple(orm.Model): _name = 'export.one2many.multiple.child' # FIXME: orm.py:1161, fix to name_get on m2o field _rec_name = 'value' _columns = { 'parent_id': fields.many2one('export.one2many.multiple'), 'str': fields.char('unknown', size=None), 'value': fields.integer(), } def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)] class One2ManyChild1(orm.Model): _name = 'export.one2many.child.1' _inherit = 'export.one2many.multiple.child' class One2ManyChild2(orm.Model): _name = 'export.one2many.child.2' _inherit = 'export.one2many.multiple.child' class Many2ManyChild(orm.Model): _name = 'export.many2many.other' # FIXME: orm.py:1161, fix to name_get on m2o field _rec_name = 'value' _columns = { 'str': fields.char('unknown', size=None), 'value': fields.integer(), } def name_get(self, cr, uid, ids, context=None): return [(record.id, "%s:%s" % (self._name, record.value)) for record in self.browse(cr, uid, ids, context=context)] def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100): if isinstance(name, basestring) and name.split(':')[0] == self._name: ids = self.search(cr, user, [['value', operator, int(name.split(':')[1])]]) return self.name_get(cr, user, ids, context=context) else: return [] class SelectionWithDefault(orm.Model): _name = 'export.selection.withdefault' _columns = { 'const': fields.integer(), 'value': fields.selection([(1, "Foo"), (2, "Bar")]), } _defaults = { 'const': 4, 'value': 2, } class RecO2M(orm.Model): _name = 'export.one2many.recursive' _columns = { 'value': fields.integer(), 'child': fields.one2many('export.one2many.multiple', 'parent_id'), } class OnlyOne(orm.Model): _name = 'export.unique' _columns = { 'value': fields.integer(), } _sql_constraints = [ ('value_unique', 'unique (value)', "The value must be unique"), ]
valmynd/MediaFetcher
src/plugins/youtube_dl/youtube_dl/extractor/mitele.py
Python
gpl-3.0
3,700
0.028116
# coding: utf-8 from __future__ import unicode_literals from .common
import InfoExtractor from ..util
s import ( int_or_none, smuggle_url, parse_duration, ) class MiTeleIE(InfoExtractor): IE_DESC = 'mitele.es' _VALID_URL = r'https?://(?:www\.)?mitele\.es/(?:[^/]+/)+(?P<id>[^/]+)/player' _TESTS = [{ 'url': 'http://www.mitele.es/programas-tv/diario-de/57b0dfb9c715da65618b4afa/player', 'info_dict': { 'id': 'FhYW1iNTE6J6H7NkQRIEzfne6t2quqPg', 'ext': 'mp4', 'title': 'Tor, la web invisible', 'description': 'md5:3b6fce7eaa41b2d97358726378d9369f', 'series': 'Diario de', 'season': 'La redacción', 'season_number': 14, 'season_id': 'diario_de_t14_11981', 'episode': 'Programa 144', 'episode_number': 3, 'thumbnail': r're:(?i)^https?://.*\.jpg$', 'duration': 2913, }, 'add_ie': ['Ooyala'], }, { # no explicit title 'url': 'http://www.mitele.es/programas-tv/cuarto-milenio/57b0de3dc915da14058b4876/player', 'info_dict': { 'id': 'oyNG1iNTE6TAPP-JmCjbwfwJqqMMX3Vq', 'ext': 'mp4', 'title': 'Cuarto Milenio Temporada 6 Programa 226', 'description': 'md5:5ff132013f0cd968ffbf1f5f3538a65f', 'series': 'Cuarto Milenio', 'season': 'Temporada 6', 'season_number': 6, 'season_id': 'cuarto_milenio_t06_12715', 'episode': 'Programa 226', 'episode_number': 24, 'thumbnail': r're:(?i)^https?://.*\.jpg$', 'duration': 7313, }, 'params': { 'skip_download': True, }, 'add_ie': ['Ooyala'], }, { 'url': 'http://www.mitele.es/series-online/la-que-se-avecina/57aac5c1c915da951a8b45ed/player', 'only_matching': True, }] def _real_extract(self, url): video_id = self._match_id(url) paths = self._download_json( 'https://www.mitele.es/amd/agp/web/metadata/general_configuration', video_id, 'Downloading paths JSON') ooyala_s = paths['general_configuration']['api_configuration']['ooyala_search'] base_url = ooyala_s.get('base_url', 'cdn-search-mediaset.carbyne.ps.ooyala.com') full_path = ooyala_s.get('full_path', '/search/v1/full/providers/') source = self._download_json( '%s://%s%s%s/docs/%s' % ( ooyala_s.get('protocol', 'https'), base_url, full_path, ooyala_s.get('provider_id', '104951'), video_id), video_id, 'Downloading data JSON', query={ 'include_titles': 'Series,Season', 'product_name': ooyala_s.get('product_name', 'test'), 'format': 'full', })['hits']['hits'][0]['_source'] embedCode = source['offers'][0]['embed_codes'][0] titles = source['localizable_titles'][0] title = titles.get('title_medium') or titles['title_long'] description = titles.get('summary_long') or titles.get('summary_medium') def get(key1, key2): value1 = source.get(key1) if not value1 or not isinstance(value1, list): return if not isinstance(value1[0], dict): return return value1[0].get(key2) series = get('localizable_titles_series', 'title_medium') season = get('localizable_titles_season', 'title_medium') season_number = int_or_none(source.get('season_number')) season_id = source.get('season_id') episode = titles.get('title_sort_name') episode_number = int_or_none(source.get('episode_number')) duration = parse_duration(get('videos', 'duration')) return { '_type': 'url_transparent', # for some reason only HLS is supported 'url': smuggle_url('ooyala:' + embedCode, {'supportedformats': 'm3u8,dash'}), 'id': video_id, 'title': title, 'description': description, 'series': series, 'season': season, 'season_number': season_number, 'season_id': season_id, 'episode': episode, 'episode_number': episode_number, 'duration': duration, 'thumbnail': get('images', 'url'), }
mozilla/mozilla-ignite
apps/challenges/tests/test_views.py
Python
bsd-3-clause
32,366
0.001329
# Note: not using cStringIO here because then we can't set the "filename" from StringIO import StringIO from copy import copy from datetime import datetime, timedelta from django.contrib.auth.models import User, AnonymousUser from django.contrib.messages import SUCCESS from django.core.urlresolvers import reverse from django.db.models import Max from django.http import Http404 from django.test.utils import ContextList from django.test import signals from django.utils.functional import curry from mock import Mock, patch, MagicMock from nose.tools import assert_equal, with_setup, eq_, ok_ from test_utils import TestCase, RequestFactory from commons.middleware import LocaleURLMiddleware from challenges import views from challenges.models import (Challenge, Submission, Phase, Category, ExternalLink, SubmissionParent, SubmissionVersion, SubmissionHelp) from challenges.tests.fixtures import (challenge_setup, challenge_teardown, create_users, create_submissions, BLANK_EXTERNALS) from challenges.tests.fixtures.ignite_fixtures import (setup_ignite_challenge, teardown_ignite_challenge, setup_ideation_phase, create_submission, create_user) from ignite.tests.decorators import ignite_skip, ignite_only from projects.models import Project # Apply this decorator to a test to turn off the middleware that goes around # inserting 'en_US' redirects into all the URLs suppress_locale_middleware = patch.object(LocaleURLMiddleware, 'process_request', lambda *args: None) development_mock = MagicMock development_mock.has_started = False def _build_request(path=None): request = Mock() request.path = path request._messages = [] # Stop messaging code trying to iterate a Mock return request @ignite_skip @with_setup(challenge_setup, challenge_teardown) def test_show_challenge(): """Test the view to show an individual challenge.""" request = _build_request('/my-project/my-challenge/') response = views.show(request, 'my-project', 'my-challenge') assert_equal(response.status_code, 200) class MessageTestCase(TestCase): """Test case class to check messaging.""" def assertSuccessMessage(self, response): """Assert that there is a success message in the given response.""" eq_(len(response.context['messages']), 1) eq_(list(response.context['messages'])[0].level, SUCCESS) class ChallengeEntryTest(TestCase): # Need to inherit from this base class to get Jinja2 template hijacking def setUp(self): challenge_setup() def tearDown(self): challenge_teardown() @ignite_skip @suppress_locale_middleware def test_no_entries(self): """Test that challenges display ok without any entries.""" response = self.client.get(Challenge.objects.get().get_absolute_url()) assert_equal(response.status_code, 200) # Make sure the entries are present and in reverse creation order assert_equal(len(response.context['entries'].object_list), 0) @ignite_skip @suppress_locale_middleware def test_challenge_entries(self): """Test that challenge entries come through to the challenge view.""" submission_titles = create_submissions(3) response = self.client.get(Challenge.objects.get().get_entries_url()) assert_equal(response.status_code, 200) # Make sure the entries are present and in reverse creation order assert_equal([s.title for s in response.context['entries'].object_list], list(reversed(submission_titles))) @suppress_locale_middleware def test_entries_view(self): """Test the dedicated entries view. This is currently a thin proxy onto the challenge view, hence this test being practically identical to the one above. """ submission_titles = create_submissions(4) phase = Phase.objects.get() response = self.client.get(phase.get_absolute_url()) assert_equal(response.status_code, 200) # Make sure the entries are present and in reverse creation order assert_equal([s.title for s in response.context['entries'].object_list], list(reversed(submission_titles))) @suppress_locale_middleware def test_hidden_entries(self): """Test that draft entries are not visible on the entries page.""" create_submissions(3) submissions = Submission.objects.all() hidden_submission = submissions[0] hidden_submission.is_draft = True hidden_submission.save() phase = Phase.objects.get() response = self.client.get(phase.get_absolute_url()) # Check the draft submission is hidden assert_equal(set(response.context['entries'].object_list),
set(submissio
ns[1:])) @ignite_only def test_winning_entries(self): """Test the winning entries view.""" create_submissions(5) winners = Submission.objects.all()[1:3] for entry in winners: entry.is_winner = True entry.save() response = self.client.get(reverse('entries_winning')) eq_(set(e.title for e in response.context['ideation_winners']), set(e.title for e in winners)) assert_equal(len(response.context['development_winners']), 0) def _build_links(initial_count, *forms): prefix = 'externals' form_data = {} form_data.update({'%s-TOTAL_FORMS' % prefix: str(len(forms)), '%s-INITIAL_FORMS' % prefix: str(initial_count), '%s-MAX_NUM_FORMS' % prefix: ''}) for i, form in enumerate(forms): for key, value in form.iteritems(): form_data['%s-%s-%s' % (prefix, i, key)] = value return form_data def _form_from_link(link_object): return dict((k, getattr(link_object, k)) for k in ['id', 'name', 'url']) class CreateEntryTest(TestCase): """Tests related to posting a new entry.""" def setUp(self): challenge_setup() self.category_id = Category.objects.get().id self.project_slug, self.challenge_slug = (Project.objects.get().slug, Challenge.objects.get().slug) self.entry_form_path = '/en-US/%s/challenges/%s/entries/add/' % \ (self.project_slug, self.challenge_slug) create_users() def tearDown(self): challenge_teardown() @ignite_skip def test_anonymous_form(self): """Check we can't display the entry form without logging in.""" response = self.client.get(self.entry_form_path) # Check it's some form of redirect assert response.status_code in xrange(300, 400) @ignite_skip def test_anonymous_post(self): """Check we can't post an entry without logging in.""" form_data = {'title': 'Submission', 'brief_description': 'A submission', 'description': 'A submission of shining wonderment.', 'created_by': User.objects.get(username='alex').id, 'category': self.category_id} response = self.client.post(self.entry_form_path, data=form_data) assert response.status_code in xrange(300, 400) assert_equal(Submission.objects.count(), 0) @ignite_skip def test_display_form(self): """Test the new entry form.""" self.client.login(username='alex', password='alex') response = self.client.get(self.entry_form_path) assert_equal(response.status_code, 200) # Check nothing gets created assert_equal(Submission.objects.count(), 0) @i
liaorubei/depot_tools
tests/rietveld_test.py
Python
bsd-3-clause
15,103
0.006158
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for rietveld.py.""" import logging import os import ssl import sys import time import traceback import unittest sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from testing_support.patches_data import GIT, RAW from testing_support import auto_stub import patch import rietveld def _api(files): """Mock a rietveld api request.""" return rietveld.json.dumps({'files': files}) def _file( status, is_binary=False, num_chunks=1, chunk_id=789, property_changes=''): """Mock a file in a rietveld api request.""" return { 'status': status, 'is_binary': is_binary, 'num_chunks': num_chunks, 'id': chunk_id, 'property_changes': property_changes, } class BaseFixture(unittest.TestCase): # Override. TESTED_CLASS = Exception def setUp(self): super(BaseFixture, self).setUp() # Access to a protected member XX of a client class # pylint: disable=W0212 self.rietveld = self.TESTED_CLASS('url', None, 'email') self.rietveld._send = self._rietveld_send self.requests = [] def tearDown(self): self.assertEqual([], self.requests) super(BaseFixture, self).tearDown() def _rietveld_send(self, url, *args, **kwargs): self.assertTrue(self.requests, url) request = self.requests.pop(0) self.assertEqual(2, len(request)) self.assertEqual(url, request[0]) return request[1] def _check_patch(self, p, filename, diff, source_filename=None, is_binary=False, is_delete=False, is_git_diff=False, is_new=False, patchlevel=0, svn_properties=None): svn_properties = svn_properties or [] self.assertEqual(p.filename, filename) self.assertEqual(p.source_filename, source_filename) self.assertEqual(p.is_binary, is_binary) self.assertEqual(p.is_delete, is_delete) if hasattr(p, 'is_git_diff'): self.assertEqual(p.is_git_diff, is_git_diff) self.assertEqual(p.is_new, is_new) if hasattr(p, 'patchlevel'): self.assertEqual(p.patchlevel, patchlevel) if diff: self.assertEqual(p.get(True), diff) if hasattr(p, 'svn_properties'): self.assertEqual(p.svn_properties, svn_properties) class RietveldTest(BaseFixture): TESTED_CLASS = rietveld.Rietveld def test_get_patch_empty(self): self.requests = [('/api/123/456', '{}')] patches = self.rietveld.get_patch(123, 456) self.assertTrue(isinstance(patches, patch.PatchSet)) self.assertEqual([], patches.patches) def test_get_patch_no_status(self): self.requests = [ ( '/api/123/456', _api( { 'tools/clang_check/README.chromium': { 'status': None, 'id': 789, }})), ('/download/issue123_456_789.diff', RAW.DELETE), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], 'tools/clang_check/README.chromium', RAW.DELETE, is_delete=True) def test_get_patch_2_files(self): self.requests = [ ('/api/123/456', _api({'foo': _file('A'), 'file_a': _file('M', chunk_id=790)})), ('/download/issue123_456_789.diff', RAW.NEW), ('/download/issue123_456_790.diff', RAW.NEW_NOT_NULL), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(2, len(patches.patches)) self._check_patch( patches.patches[0], 'file_a', RAW.NEW_NOT_NULL, is_new=True) self._check_patch(patches.patches[1], 'foo', RAW.NEW, is_new=True) def test_get_patch_add(self): self.requests = [ ('/api/123/456', _api({'foo': _file('A')})), ('/download/issue123_456_789.diff', RAW.NEW), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch(patches.patches[0], 'foo', RAW.NEW, is_new=True) def test_invalid_status(self): self.requests = [ ('/api/123/45
6', _api({'file_a': _file('B')})), ] try: self.rietveld.get_patch(123, 456) self.fail() except patch.UnsupportedPatchFormat, e: self.assertEqual('file_a', e.filename) def test_add_plus_merge(self): # svn:mergeinfo is dropped. properties = ( '\nAdded: svn:mergeinfo\n' ' Merged /branches/funky/file_b:r69-2775\n')
self.requests = [ ('/api/123/456', _api({'pp': _file('A+', property_changes=properties)})), ('/download/issue123_456_789.diff', GIT.COPY), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], 'pp', GIT.COPY, is_git_diff=True, is_new=True, patchlevel=1, source_filename='PRESUBMIT.py') def test_add_plus_eol_style(self): properties = '\nAdded: svn:eol-style\n + LF\n' self.requests = [ ('/api/123/456', _api({'pp': _file('A+', property_changes=properties)})), ('/download/issue123_456_789.diff', GIT.COPY), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], 'pp', GIT.COPY, is_git_diff=True, is_new=True, patchlevel=1, source_filename='PRESUBMIT.py', svn_properties=[('svn:eol-style', 'LF')]) def test_add_empty(self): self.requests = [ ('/api/123/456', _api({'__init__.py': _file('A ', num_chunks=0)})), ('/download/issue123_456_789.diff', RAW.CRAP_ONLY), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], '__init__.py', RAW.CRAP_ONLY, is_new=True) def test_delete(self): name = 'tools/clang_check/README.chromium' self.requests = [ ('/api/123/456', _api({name: _file('D')})), ('/download/issue123_456_789.diff', RAW.DELETE), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch(patches.patches[0], name, RAW.DELETE, is_delete=True) def test_delete_empty(self): name = 'tests/__init__.py' self.requests = [ ('/api/123/456', _api({name: _file('D')})), ('/download/issue123_456_789.diff', GIT.DELETE_EMPTY), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], name, GIT.DELETE_EMPTY, is_delete=True, is_git_diff=True, patchlevel=1) def test_m_plus(self): properties = '\nAdded: svn:eol-style\n + LF\n' self.requests = [ ('/api/123/456', _api({'chrome/file.cc': _file('M+', property_changes=properties)})), ('/download/issue123_456_789.diff', RAW.PATCH), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], 'chrome/file.cc', RAW.PATCH, svn_properties=[('svn:eol-style', 'LF')]) def test_m_plus_unknown_prop(self): properties = '\nAdded: svn:foobar\n + stuff\n' self.requests = [ ('/api/123/456', _api({'file_a': _file('M+', property_changes=properties)})), ] try: self.rietveld.get_patch(123, 456) self.fail() except patch.UnsupportedPatchFormat, e: self.assertEqual('file_a', e.filename) def test_get_patch_moved(self): self.requests = [ ('/api/123/456', _api({'file_b': _file('A+')})), ('/download/issue123_456_789.diff', RAW.MINIMAL_RENAME), ] patches = self.rietveld.get_patch(123, 456) self.assertEqual(1, len(patches.patches)) self._check_patch( patches.patches[0], 'file_b', RAW.MINIMAL_RENAME, source_filename='fil
maxive/erp
addons/l10n_es/__manifest__.py
Python
agpl-3.0
1,890
0.003178
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. # List of contributors: # Jordi Esteve <jesteve@zikzakmedia.com> # Dpto. Consultoría Grupo Opentia <consultoria@opentia.es> # Pedro M. Baeza <pedro.baeza@tecnativa.com> # Carlos Liébana <carlos.liebana@factorlibre.com> # Hugo Santos <hugo.santos@factorlibre.com> # Albert Cabedo <albert@gafic.com> # Olivier Colson <oco@odoo.com> # Roberto Lizana <robertolizana@trey.es> { "name" : "Spain - Accounting (PGCE 2008)", "version" : "4.0", "author" : "Spanish Localization Team", 'website' : 'https://launchpad.net/openerp-spain', 'category': 'Localization', "description": """ Spanish charts of accounts (PGCE 2008). ====================================
==== * Defines the following chart of account templates: * Spanish general chart of accounts 2008 * Spanish general chart of accounts 2008 for small and medium companies * Spanish general chart of acc
ounts 2008 for associations * Defines templates for sale and purchase VAT * Defines tax templates * Defines fiscal positions for spanish fiscal legislation * Defines tax reports mod 111, 115 and 303 """, "depends" : [ "account", "base_iban", "base_vat", ], "data" : [ 'data/account_group.xml', 'data/account_chart_template_data.xml', 'data/account_account_template_common_data.xml', 'data/account_account_template_pymes_data.xml', 'data/account_account_template_assoc_data.xml', 'data/account_account_template_full_data.xml', 'data/account_chart_template_account_account_link.xml', 'data/account_data.xml', 'data/account_tax_data.xml', 'data/account_fiscal_position_template_data.xml', 'data/account_chart_template_configure_data.xml', ], }
mne-tools/mne-tools.github.io
0.19/_downloads/0162af27293b0c7e7c35ef85531280ea/plot_55_setting_eeg_reference.py
Python
bsd-3-clause
10,338
0
# -*- coding: utf-8 -*- """ .. _tut-set-eeg-ref: Setting the EEG reference ========================= This tutorial describes how to set or change the EEG reference in MNE-Python. .. contents:: Page contents :local: :depth: 2 As usual we'll start by importing the modules we need, loading some :ref:`example data <sample-dataset>`, and cropping it to save memory. Since this tutorial deals specifically with EEG, we'll also restrict the dataset to just a few EEG channels so the plots are easier to see: """ import os import mne sample_data_folder = mne.datasets.sample.data_path() sample_data_raw_file = os.path.join(sample_data_folder, 'MEG', 'sample', 'sample_audvis_raw.fif') raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False) raw.crop(tmax=60).load_data() raw.pick(['EEG 0{:02}'.format(n) for n in range(41, 60)]) ############################################################################### # Background # ^^^^^^^^^^ # # EEG measures a voltage (difference in electric potential) between each # electrode and a reference electrode. This means that whatever signal is # present at the reference electrode is effectively subtracted from all the # measurement electrodes. Therefore, an ideal reference signal is one that # captures *none* of the brain-specific fluctuations in electric potential, # while capturing *all* of the environmental noise/interference that is being # picked up by the measurement electrodes. # # In practice, this means that the reference electrode is often placed in a # location on the subject's body and close to their head (so that any # environmental interference affects the reference and measurement electrodes # similarly) but as far away from the neural sources as possible (so that the # reference signal doesn't pick up brain-based fluctuations). Typical reference # locations are the subject's earlobe, nose, mastoid process, or collarbone. # Each of these has advantages and disadvantages regarding how much brain # signal it picks up (e.g., the mastoids pick up a fair amount compared to the # others), and regarding the environmental noise it picks up (e.g., earlobe # electrodes may shift easily, and have signals more similar to electrodes on # the same side of the head). # # Even in cases where no electrode is specifically designated as the reference, # EEG recording hardware will still treat one of the scalp electrodes as the # reference, and the recording software may or may not display it to you (it # might appear as a completely flat channel, or the software might subtract out # the average of all signals before displaying, making it *look like* there is # no reference). # # # Setting or changing the reference channel # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # # If you want to recompute your data with a different reference than was used # when the raw data were recorded and/or saved, MNE-Python provides the # :meth:`~mne.io.Raw.set_eeg_reference` method on :class:`~mne.io.Raw` objects # as well as the :func:`mne.add_reference_channels` function. To use an # existing channel as the new reference, use the # :meth:`~mne.io.Raw.set_eeg_reference` method; you can also designate multiple # existing electrodes as reference channels, as is sometimes done with mastoid # references: # code lines below are commented out because the sample data doesn't have # earlobe or mastoid channels, so this is just for demonstration purposes: # use a single channel reference (left earlobe) # raw.set_eeg_reference(ref_channels=['A1']) # use average of mastoid channels as reference # raw.set_eeg_reference(ref_channels=['M1', 'M2']) #######################################################
######################## # If a scalp electrode was used as reference but was not saved alongside the # raw data (reference channels often aren't), you may wish to add it back to # the dataset before re-referencing. For example, if your EEG system recorded # with channel ``Fp1`` as the reference but did not include ``Fp1`` in the data # file, using :meth:`~mne.io.Raw.set_eeg_reference
` to set (say) ``Cz`` as the # new reference will then subtract out the signal at ``Cz`` *without restoring # the signal at* ``Fp1``. In this situation, you can add back ``Fp1`` as a flat # channel prior to re-referencing using :func:`~mne.add_reference_channels`. # (Since our example data doesn't use the `10-20 electrode naming system`_, the # example below adds ``EEG 999`` as the missing reference, then sets the # reference to ``EEG 050``.) Here's how the data looks in its original state: raw.plot() ############################################################################### # By default, :func:`~mne.add_reference_channels` returns a copy, so we can go # back to our original ``raw`` object later. If you wanted to alter the # existing :class:`~mne.io.Raw` object in-place you could specify # ``copy=False``. # add new reference channel (all zero) raw_new_ref = mne.add_reference_channels(raw, ref_channels=['EEG 999']) raw_new_ref.plot() ############################################################################### # .. KEEP THESE BLOCKS SEPARATE SO FIGURES ARE BIG ENOUGH TO READ # set reference to `EEG 050` raw_new_ref.set_eeg_reference(ref_channels=['EEG 050']) raw_new_ref.plot() ############################################################################### # Notice that the new reference (``EEG 050``) is now flat, while the original # reference channel that we added back to the data (``EEG 999``) has a non-zero # signal. Notice also that ``EEG 053`` (which is marked as "bad" in # ``raw.info['bads']``) is not affected by the re-referencing. # # # Setting average reference # ^^^^^^^^^^^^^^^^^^^^^^^^^ # # To set a "virtual reference" that is the average of all channels, you can use # :meth:`~mne.io.Raw.set_eeg_reference` with ``ref_channels='average'``. Just # as above, this will not affect any channels marked as "bad", nor will it # include bad channels when computing the average. However, it does modify the # :class:`~mne.io.Raw` object in-place, so we'll make a copy first so we can # still go back to the unmodified :class:`~mne.io.Raw` object later: # sphinx_gallery_thumbnail_number = 4 # use the average of all channels as reference raw_avg_ref = raw.copy().set_eeg_reference(ref_channels='average') raw_avg_ref.plot() ############################################################################### # Creating the average reference as a projector # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # # If using an average reference, it is possible to create the reference as a # :term:`projector` rather than subtracting the reference from the data # immediately by specifying ``projection=True``: raw.set_eeg_reference('average', projection=True) print(raw.info['projs']) ############################################################################### # Creating the average reference as a projector has a few advantages: # # 1. It is possible to turn projectors on or off when plotting, so it is easy # to visualize the effect that the average reference has on the data. # # 2. If additional channels are marked as "bad" or if a subset of channels are # later selected, the projector will be re-computed to take these changes # into account (thus guaranteeing that the signal is zero-mean). # # 3. If there are other unapplied projectors affecting the EEG channels (such # as SSP projectors for removing heartbeat or blink artifacts), EEG # re-referencing cannot be performed until those projectors are either # applied or removed; adding the EEG reference as a projector is not subject # to that constraint. (The reason this wasn't a problem when we applied the # non-projector average reference to ``raw_avg_ref`` above is that the # empty-room projectors included in the sample data :file:`.fif` file were # only computed for the magnetometers.) for title, proj in zip(['Original', 'Average'], [False, True]): fig = raw.plot(proj=proj, n_channels=len(raw)) # make room for title fig.subplots_adjust(top=0.9) fig.suptitle('{} reference'.format(title), size='xx-large', weight='bold')
nicolaevladescu/hootch
workers/Reddit.py
Python
mit
6,671
0.008095
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Reddit worker """ import praw import sys import time import yaml from datetime import datetime from pytz import UTC from pymongo import MongoClient, IndexModel, ASCENDING, DESCENDING, TEXT from pymongo.errors import PyMongoError __version__ = '0.0.1-alpha.1' class RedditWorker(object): def __init__(self, config = 'reddit.yml'): if isinstance(config, dict): self._config = config else: with open(config, 'r') as config_file: self._config = yaml.safe_load(config_file) config_file.close() self._client = praw.Reddit(client_id=self.get_config().get('client_id'), client_secret=self.get_config().get('client_secret'), user_agent='{}/{}'.format(__name__, __version__)) if not self._client.read_only: raise RuntimeError('This code is experimental, please connect to Reddit in read only mode') self._mongo = MongoClient( self.get_config('mongodb').get('host'), self.get_config('mongodb').get('port')) self._db = self._mongo[self.get_config('mongodb').get('db')] def crawl_data(self, **kwargs): items = [] sort = kwargs.get('sort', self.get_config().get('sort')) sort_limit = kwargs.get('limit', self.get_config().get('sort_limit')) subreddits = kwargs.get('subreddits', self._config.get('reddit', []).get('subreddits')) if isinstance(subreddits, str): subreddits = [subreddits] if isinstance(subreddits, (list, tuple)): for subreddit in subreddits: if not hasattr(self._client.subreddit(subreddit), self.get_config().get('sort')): raise RuntimeError('Config error: reddit.sort is invalid') reddit_sort = getattr(self._client.subreddit(subreddit), sort) for submission in reddit_sort(limit=sort_limit): print('Worker {}: Processing subreddit {}.'.format(__name__, submission.id)) items.append({'subreddit_id': submission.id, 'created_utc': int(submission.created_utc), 'parsed_at_utc': int(datetime.now(tz=UTC).strftime('%s')), 'permalink': submission.permalink, 'url': submission.url, 'author': str(submission.author), 'title': submission.title, 'search': submission.title, 'subreddit': subreddit}) # Discard bottom level comments submission.comments.replace_more(limit=0) for top_level_comment in submission.comments.list(): print('Worker {0}: Processing comment {2!s} of subreddit {1!s}.'.format( __name__, submission.id, top_level_comment.id)) items.append({'comment_id': top_level_comment.id, 'created_utc': int(top_level_comment.created_utc), 'parsed_at_utc': int(datetime.now(tz=UTC).strftime('%s')), 'body': top_level_comment.body, 'search': top_level_comment.body, 'subreddit': subreddit, 'permalink': top_level_comment.permalink(), 'author': str(top_level_comment.author), 'parent': submission.id}) else: raise TypeError('config.reddit.subreddits must be a list, tuple or string, found: {!s}'.format( type(subreddits))) return items def get_config(self, section='reddit'): return self._config.get(section) def save_data(self, coll=None, data=None): collection = self._db[coll or self.get_config('mongodb').get('collection')] return collection.insert_many(data) def get_data(self, coll=None, query=None, proj=None): collection = self._db[coll or self.get_config('mongodb').get('collection')] return [i for i in collection.find(query, proj)] def delete_data(self, coll=None, query=None): collection = self._db[coll or self.get_config('mongodb').get('collection')] return collection.delete_many(query) def indexes_created(self, coll=None): collection = self._db[coll or self.get_config('mongodb').get('collection')] if (collection.index_information().get('created_utc_1') and collection.index_information().get('search_text_created_utc_1')): return True return False def create_indexes(self, coll=None): collection = self._db[coll or self.get_config('mongodb').get('collection')] time_index = IndexModel([('created_utc', ASCENDING)], background=True) compound_index = IndexModel([('search', TEXT), ('created_utc', ASCENDING)], background=True) collection.create_indexes([time_index, compound_index]) def main(): tick = 0 reddit = RedditWorker() while True: tick += 1 print('Worker {}: Starting tick {}.'.format(__name__, tick)) data = reddit.crawl_data() if not isinstance(data, (list, tuple)): print('Worker {}: get_data() returned unknown data type: {}.'.format(__name__, type(data))) sys.exit(1) if len(data) == 0: print('Worker {}: get_data() returned 0 results.'.format(__name__)) sys.exit(1) try: results = reddit.save_data(d
ata=data) except (TypeError, PyMongoError) as exception: print('Worker {}
: Could not save documents because: {}.'.format(__name__, exception)) sys.exit(1) else: print('Worker {}: Saved {!s} documents.'.format(__name__, len(results.inserted_ids))) if not reddit.indexes_created(): try: reddit.create_indexes() except (NameError, TypeError, PyMongoError) as exception: print('Worker {}: Could not create indexes because: {}.'.format(__name__, exception)) else: print('Worker {}: Collection indexes are being created in the background.'.format(__name__)) time.sleep(reddit.get_config('worker').get('interval')) if __name__ == '__main__': main()
taikoa/taikoa
taikoa.py
Python
agpl-3.0
2,419
0.002894
import re import time from flask import Flask, render_template, request, flash, redirect from flaskext.babel import Babel from flask.ext.mail import Mail, Message from flask.ext.cache import Cache from flask.ext.assets import Environment from raven.contrib.flask import Sentry import feedparser app = Flask(__name__) app.config.from_pyfile('settings.cfg') babel = Babel(app) cache = Cache(app) mail = Mail(app) assets = Environment(app) sentry = Sentry(app) @babel.localeselector def get_locale(): return request.accept_languages.best_match(['es', 'fr', 'en']) @cache.cached(timeout=50) @app.route("/") def index(): return render_template('index.html', active='ho
me') @cache.cached(t
imeout=50) @app.route("/projects") def projects(): return render_template('projects.html', active='project') @cache.cached(timeout=50) @app.route("/about/me") def about_me(): return render_template('about-me.html', active='about') @cache.cached(timeout=50) @app.route("/contact") def contact(): return render_template('contact.html', active='contact') @cache.cached(timeout=50) @app.route("/lab") def lab(): feed = feedparser.parse('http://javaguirre.net/rss/') items = feed['items'] for item in items: item['published_parsed'] = time.strftime("%d %B %Y", item['published_parsed']) return render_template('lab.html', active='lab', items=items) @app.route("/contact_form", methods=['POST']) def contact_form(): email_re = re.compile( r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"' # quoted-string r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE) company = request.form['company'] subject = request.form['subject'] content = request.form['message'] client_email = request.form['email'] if not email_re.match(client_email): flash('Form error, please fix the error in the email') return render_template('contact.html') msg = Message('-'.join([company, subject]), sender=client_email, recipients=[app.config['EMAIL']]) msg.body = content mail.send(msg) flash('Message sent correctly, Thank you.') return redirect('/') if __name__ == "__main__": app.run(host='0.0.0.0')
hms-dbmi/clodius
scripts/get_hitile.py
Python
mit
1,168
0
#!/usr/bin/python from __future__ import print_function import clodius.hdf_tiles as hdft import h5py import argparse def main(): parser = argparse.ArgumentParser( description=""" python get_hitile.py filename z x """ ) parser.add_argument("filename") parser.add_argument("z", type=int) parser.add_argument("x", type=int) # parser.add_argument('argument', nargs=1) # parser.add_argument('-o', '--options', default='yo', # help="Some option", type='str') # parser.add_argument('-u', '--useless', action='store_true', # help='Another useless option') args = parser.parse_args() with h5py.File(args.filename, "r") as f: tileset_info = hdft.get_tileset_info(f) max_width = tileset_info["max_width"] max_pos = tileset_info["max_pos"] tile_size = tileset_info["tile_size"] print("max_width", max_width) print("max_pos", max_pos) last_index = int(tile_size * (max_pos / max_width)) print("last_index:", last_index) tile_data = hdft.get_data
(f, args.z, args.x) print("tile:", tile_data
) if __name__ == "__main__": main()
nmercier/linux-cross-gcc
win32/bin/Lib/lib2to3/fixes/fix_intern.py
Python
bsd-3-clause
1,451
0
# Copyright 2006 Georg Brandl. # Licensed to PSF under a Contributor Agreement. """Fixer for intern(). intern(s) -> sys.intern(s)""" # Local imports from .. import pytree from .. import fixer_base from ..fixer_util import Name, Attr, touc
h_import class FixIntern(fixer_base.BaseFix): BM_compatible = True order = "pre" PATTERN = """ power< 'intern' trailer< lpar='(' ( not(arglist | argument<any '=' any>) obj=any | obj=arglist<(not argument<any '=' any>) any ','> ) rpar=')' > after=any* > """ def transform(self, node, results): syms = self.syms
obj = results["obj"].clone() if obj.type == syms.arglist: newarglist = obj.clone() else: newarglist = pytree.Node(syms.arglist, [obj.clone()]) after = results["after"] if after: after = [n.clone() for n in after] new = pytree.Node(syms.power, Attr(Name(u"sys"), Name(u"intern")) + [pytree.Node(syms.trailer, [results["lpar"].clone(), newarglist, results["rpar"].clone()])] + after) new.prefix = node.prefix touch_import(None, u'sys', node) return new
spacelis/hrnn4sim
hrnn4sim/data_augmentation.py
Python
mit
3,901
0.001282
#!/usr/bin/env python # -*- coding: utf-8 -*- """ File: data_augmentation.py Author: Wen Li Email: spacelis@gmail.com Github: http://github.com/spacelis Description: Augmenting data with some sythetic negative examples. """ # pylint: disable=invalid-name from __future__ import print_function import sys import re import random import pandas as pd import click ## Data augmentation ### Some unility functions for data augmentations def rand_delta(m): ''' Generate a random numbers by applying a random delta''' x = int(m.group(0)) y = random.randint(1, x + 20) if x == y: return str(x + 1) return str(y) def change_num(addr): ''' Change the add by applying a random delta to the numbers''' return re.sub('[0-9]+', rand_delta, addr) def get_neg_examples(df): ''' Generate negative examples ''' addrPoolA = list(frozenset(df['addra'])) sampleA = random.sample(addrPoolA, len(addrPoolA))[:len(addrPoolA)//2 * 2] exA = sampleA[0:len(sampleA):2], sampleA[1:len(sampleA):2] addrPoolB = list(frozenset(df['addrb'])) sampleB = random.sample(addrPoolB, len(addrPoolB))[:len(addrPoolB)//2 * 2] exB = sampleB[0:len(sampleB):2], sampleB[1:len(sampleB):2] exC = [], [] for addr in sampleA: cn_addr = change_num(addr) if cn_addr != addr: exC[0].append(addr) exC[1].append(change_num(addr)) exD = [], [] for addr in
sampleB: cn_addr = change_num(addr) if cn_addr != addr: exD[0].append(addr) exD[1].append(change_num(addr)) return p
d.DataFrame({'addra': exA[0] + exB[0] + exC[0] + exD[0], 'addrb': exA[1] + exB[1] + exC[1] + exD[1]}) def get_pos_examples(df): ''' Make some more positive examples by cloning addresses ''' addrPoolA = list(frozenset(df['addra'])) addrPoolB = list(frozenset(df['addrb'])) return pd.DataFrame({'addra': list(df['addra']) + addrPoolA + addrPoolB, 'addrb': list(df['addrb']) + addrPoolA + addrPoolB}) def data_augmentation(df): ''' Data augmentation via constructing negative examples :param df: A pandas dataframe having columns of (addra, addrb, matched) ''' neg = get_neg_examples(df) pos = get_pos_examples(df) pos.loc[:, 'matched'] = 1 neg.loc[:, 'matched'] = 0 return pd.concat([pos, neg]).rename(columns={ 'addra': 'seqa', 'addrb': 'seqb' }) @click.command() @click.argument('src', type=click.Path(exists=True)) @click.argument('dst', type=click.Path(exists=False)) def console(src, dst): ''' This tool is for creating a augmented data set for training models for address matchings. The expected input is a CSV file of positive examples with the headers (addra, addrb). The output will be a CSV file of table filled with augmented data with the headers (seqa, seqb, matched). Augmentation includes number changing, identity matching. ''' if src.endswith('csv'): raw_data = pd.read_csv(src) elif src.endswith('.feather'): raw_data = pd.read_feather(src) else: print('Error: Input file format not supported', file=sys.stderr) sys.exit(-1) print("uniqA={}".format(raw_data['addra'].nunique())) print("uniqB={}".format(raw_data['addrb'].nunique())) print("pairCnt={}".format(len(raw_data))) examples = data_augmentation(raw_data).sample(frac=1) # Randomized rows print(examples.head()) if src.endswith('csv'): examples.to_csv(dst, index=False) elif src.endswith('.feather'): examples.reset_index()[['seqa', 'seqb', 'matched']].to_feather(dst) else: print('Error: Output file format not supported', file=sys.stderr) sys.exit(-1) if __name__ == "__main__": console() # pylint: disable=no-value-for-parameter
apache/bloodhound
installer/setup.py
Python
apache-2.0
1,462
0
#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from setuptools import setup DESC = """Installer for Apache Bloodhound Adds the bloodhound_setup cli command. """ versions = [ (0, 8, 0), (0, 9, 0), ] latest = '.'.join(str(x) for x in versions[-1]) setup( name="bloodhound_in
staller", version=latest, description=DESC.split('\n', 1)[0], author="Apache Bloodhound", license="Apache License v2", url="https://bloodhound.apache.org/"
, requires=['trac', 'BloodhoundMultiProduct'], packages=['bhsetup'], entry_points=""" [console_scripts] bloodhound_setup = bhsetup.bloodhound_setup:run """, long_description=DESC, )
rbdavid/RMSD_analyses
PCA_RMSD_One_Ref/system_rmsd_plotting.py
Python
gpl-3.0
1,985
0.040302
#!/Library/Frameworks/Python.framework/Versions/2.7/bin/python # ---------------------------------------- # USAGE: # ---------------------------------------- # PREAMBLE: import numpy as np import sys import os import matplotlib.pyplot as plt from matplotlib.ticker import NullFormatter from mpl_toolkits.mplot3d import Axes3D from sel_list import * from plotting_functions import * # ---------------------------------------- # VARIABLE DECLARATION file1 = sys.argv[1] frame = [] frame.append(['Apo',650000,'steelblue','.']) frame.append(['ATP',650000,'cadetblue','.']) frame.append(['ssRNA',650000,'turquoise','.']) frame.append(['ssRNA+ATP',650000,'forestgreen','.']) frame.append(['ssRNA+ADP+Pi',650000,'limegreen','.']) frame.append(['ssRNA+ADP',650000,'orangered','.']) frame.append(['ssRNA+Pi',650000,'crimson','.']) #frame.append([,,,]) nSys = len(frame) nSel = len(sel) legend_list = [] for i in range(nSys): legend_list.append(frame[i][0]) flush = sys.stdout.flush # ---------------------------------------- # FUNCTIONS:: def ffprint(string): print '%s' %(string) flush() # ---------------------------------------- # MAIN: data1 = np
.loadtxt(file1) for i in range(nSel): events, edges, patches = plt.hist([ data1[0:650000,i], data1[650000:1300000,i], data1[1300000:1950000,i], data1[1950000:2600000,i], data1[2600000:3250000,i], data1[3250000:3900000,i], data1[3900000:4550000,i]], bins=100, histtype='bar', color=
[frame[0][2],frame[1][2],frame[2][2],frame[3][2],frame[4][2],frame[5][2],frame[6][2]],stacked=True) plt.grid(b=True, which='major', axis='both', color='#808080', linestyle='--') plt.xlabel('RMSD data for %s' %(sel[i][0])) plt.ylabel('Frequency') plt.xlim((min(data1[:,i]),max(data1[:,i]))) leg = plt.legend(legend_list,bbox_to_anchor=(-0.05, 1.03, 1.1, .100),fontsize='10',loc=3,ncol=4,mode="expand",borderaxespad=0.,markerscale=100,numpoints=1) plt.savefig('%02d.hist1d.png' %(i),dpi=200) plt.close()
ramcn/demo3
venv/lib/python3.4/site-packages/oauth2_provider/__init__.py
Python
mit
113
0
__version_
_ = '0.8.1' __author__ = "Massimiliano Pippi & Federico
Frenguelli" VERSION = __version__ # synonym
hpe-storage/horizon-hpe-storage-ui
horizon_hpe_storage/storage_panel/overview/tables.py
Python
apache-2.0
850
0
# (c) Copyright [2015] Hewlett Packard Enterprise Development LP # # Licens
ed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or im
plied. # See the License for the specific language governing permissions and # limitations under the License. from horizon import tables class OverviewTable(tables.DataTable): def get_object_id(self, obj): return None class Meta(object): name = "overview_panel" # hidden_title = False
thispc/download-manager
module/plugins/accounts/OneFichierCom.py
Python
gpl-3.0
2,134
0.000937
# -*- coding: utf-8 -*- import re import time import pycurl from module.network.HTTPRequest import BadHeader from ..internal.Account import Account class OneFichierCom(Account): __name__ = "OneFichierCom" __type__ = "account" __version__ = "0.23" __status__ = "testing" __description__ = """1fichier.com account plugin""" __license__ = "GPLv3" __authors__ = [("Elrick69", "elrick69[AT]roc
ketmail[DOT]com"), ("Walter Purcaro", "vuolter@gmail.com")] VALID_UNTIL_PATTERN = r'Your Premium offer subscription is valid until <span style="fon
t-weight:bold">(\d+\-\d+\-\d+)' def grab_info(self, user, password, data): validuntil = None trafficleft = -1 premium = None html = self.load("https://1fichier.com/console/abo.pl") m = re.search(self.VALID_UNTIL_PATTERN, html) if m is not None: expiredate = m.group(1) self.log_debug("Expire date: " + expiredate) try: validuntil = time.mktime(time.strptime(expiredate, "%Y-%m-%d")) except Exception, e: self.log_error(e, trace=True) else: premium = True return {'validuntil': validuntil, 'trafficleft': trafficleft, 'premium': premium or False} def signin(self, user, password, data): self.req.http.c.setopt( pycurl.REFERER, "https://1fichier.com/login.pl?lg=en") try: html = self.load("https://1fichier.com/login.pl?lg=en", post={'mail': user, 'pass': password, 'It': "on", 'purge': "off", 'valider': "Send"}) if any(_x in html for _x in ('>Invalid username or Password', '>Invalid email address', '>Invalid password')): self.fail_login() except BadHeader, e: if e.code == 403: self.fail_login() else: raise
fmarani/spam
tests/spamhaus_tests.py
Python
lgpl-3.0
1,040
0.002885
import unittest import sys from spam.spamhaus import * class MockSpamHausChecker(SpamHausChecker): def set_spam(self, is_spam): """docstring for setSpam""" self.is_spa
m = is_spam def _resolve(self, domain): """docstring for __resolve""" if self.is_spam: return "2.3.4.5" else: return "1.2.3.4" def _query_spamhaus(self, zone): """docstring
for __query_spamhaus""" if zone.startswith("5.4.3.2"): return "127.0.0.2" return None class TestSpamHausChecker(unittest.TestCase): def setUp(self): self.checker = MockSpamHausChecker() def test_spammer(self): self.checker.set_spam(True) result = self.checker.check_url("http://doevil.com/") self.assertEqual(result, MockSpamHausChecker.IS_SPAM) def test_innocent(self): self.checker.set_spam(False) result = self.checker.check_url("http://dogood.com/") self.assertEqual(result, MockSpamHausChecker.IS_NOT_SPAM)
pawlactb/DiffusionModels
LanguageShift/LanguageAgent.py
Python
gpl-3.0
3,360
0.00506
import numpy as np from mesa import Agent class LanguageAgent(Agent): def __init__(self, model, name, unique_id, initial_prob_v): """ A LanguageAgent represents a particular place during a language shift simulation. :param model: the model that the agent is in :param unique_id: Location number of the agent :param initial_prob_v: a li
st of probabilities of speaking partic
ular languages """ super().__init__(unique_id, model) self.name = name self.probability = np.array(initial_prob_v) self.next_probability = np.array(self.probability, copy=True) self.p_probability = np.array(initial_prob_v) self.p_next_probability = np.array(self.p_probability, copy=True) self.diffusion = self.model.diffusion self.get_population() def get_population(self): ''' Updates the population of the LanguageAgent Returns: None ''' self.population = self.model.agent_pop[self.unique_id][self.model.schedule.time] def calculate_contribution(self, other): ''' Args: other: Another agent for which you want to find the impact from. Returns: None ''' # this if statement turns the ret_val into 0 if the other agent is to far away # if self.model.grid.get_distance(self, other) > np.sqrt(2): # ret_val = 0 # print('zero ret_val!!!!' + str(self.unique_id) + ' ' + str(other.unique_id)) # else: ret_val = ((other.population * other.probability) / (4 * np.pi * self.diffusion)) * np.exp( -np.square(self.model.grid.get_distance(self, other))) / (4 * self.diffusion * self.model.timestep) return ret_val def prochazaka_contrib(self, other): ''' Args: other: Another agent for which you want to find the impact from. Returns: None ''' if self.model.grid.get_distance(self, other) > np.sqrt(2): ret_val = 0 # print('zero ret_val!!!!' + str(self.unique_id) + ' ' + str(other.unique_id)) else: ret_val = ((other.population * other.p_probability) / (4 * np.pi * self.diffusion)) * np.exp( -np.square(self.model.grid.get_distance(self, other))) / (4 * self.diffusion * self.model.timestep) return ret_val def step(self): ''' Prepare for the next timestep Returns: None ''' f = np.zeros(len(self.probability)) p = np.zeros(len(self.probability)) self.get_population() for neighbor in self.model.grid.get_neighbors_by_agent(self)[1:self.model.grid.neighborhood_size + 1]: f += self.calculate_contribution(neighbor) p += self.prochazaka_contrib(neighbor) self.next_probability = ((self.population * self.probability) + f) / (np.sum(f) + self.population) self.p_next_probability = ((self.population * self.p_probability) + p) / (np.sum(p) + self.population) def advance(self): ''' Advance to the next timestep Returns: None ''' self.probability, self.next_probability = self.next_probability, self.probability self.p_probability, self.p_next_probability = self.p_next_probability, self.p_probability
labase/activnce
main/question/database.py
Python
gpl-2.0
1,434
0.007714
# -*- coding: utf-8 -*- """ ################################################ Plataforma ActivUFRJ ################################################ :Author: *Núcleo de Computação Eletrônica (NCE/UFRJ)* :Contact: carlo@nce.ufrj.br :Date: $Date: 2009-2010
$ :Status: This is a "work in progress" :Revision: $Revision: 0.01 $ :Home: `LABASE `__ :Copyright: ©2009, `GPL """ from couchdb.design import ViewDefinition import core.database ################################################ # CouchDB Permanent Views ################################################ # Retorna lista de questões usadas num quiz, com todas as informações adicionais # # Uso: database.QUESTION.v
iew('question/by_quiz',startkey=[],endkey=[, {},{}]) question_by_quiz = ViewDefinition('question', 'by_quiz', \ ''' function(doc) { if (doc.type=="quiz") { emit ([doc._id, 0], null); for (q in doc.questions) emit([doc._id, 1],{"_id": doc.questions[q]} ); } } ''') ViewDefinition.sync_many(core.database.ACTIVDB, [ question_by_quiz \ ])
bschutze/ALTO-framework-sim
Views/ping_.py
Python
mit
611
0.022913
#!/usr/bin/python #Master-Thesis dot parsing framework (PING MODULE) #Date: 14.01.2014 #Author: Bruno-Johannes Schuetze #uses python 2.7.6 #uses the djikstra algorithm implemented by David Eppstein #Module does calculatio
ns to behave similar to ping, uses delay label defined in the dot file from libraries.dijkstra import * def getSingleValue(src, dst, edgeCostHash): return edgeCostHash[(src*100000)+dst] def getPathTotal(start, end, edgeCostHash, networkDict): #get shortest path between start and end shortPathList = shortestPath(networkDict, sta
rt, end) print "WE PINGING SHAWTY", shortPathList
textsaurabh/code_base
src/leetcode/script/remove_element_inplace.py
Python
mit
815
0.006135
#! /usr/local/bin/python -u # Given an array and a value, remove all instances of that value in place and return the new length. # The order of elements can be changed. It doesn't matter what you leave beyond the new length. class Solution: # @param A a list of integers # @param elem an integer, value need to be removed # @return an integer def removeElement(self, A, elem): if not A: return len(A) curr_idx = 0 total_array_len = len(A) while (curr_idx <= total_array_len - 1): if A[curr_idx] == elem: del A[curr_idx] tot
al_arr
ay_len -= 1 else: curr_idx += 1 return total_array_len if __name__ == '__main__': main([1], 1)
pyupio/octohook
hook/hook.py
Python
mit
3,954
0.000759
# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals import os import imp import hmac import hashlib import six from flask import Flask, abort, request DEBUG = os.environ.get("DEBUG", False) == 'True' HOST = os.environ.get("HOST", '0.0.0.0') ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) REPO_DIR = os.path.join(ROOT_DIR, "repos") GITHUB_EVENTS = [ "commit_comment", "create", "delete", "deployment", "deployment_status", "fork", "gollum", "issue_comment", "issues", "member", "membership", "page_build", "public", "pull_request_review_comment", "pull_request", "push", "repository", "release", "status", "team_add", "watch", "ping", # sent by github to check if the endpoint is available ] app = Flask(__name__) def hook(repo): """Processes an incoming webhook, see GITHUB_EVENTS for possible events. """ event, signature = ( request.headers.get('X-Github-Event', False), request.headers.get('X-Hub-Signature', False) ) # If we are not running on DEBUG, the X-Hub-Signature header has to be set. # Raising a 404 is not the right http return code, but we don't # want to give someone that is attacking this endpoint a clue # that we are serving this repo alltogether if he doesn't # know our secret key if not DEBUG: if not signature: abort(404) # Check that the payload is signed by the secret key. Again, # if this is not the case, abort with a 404 if not is_signed(payload=request.get_data(as_text=True), signature=signature, secret=repo.SECRET): abort(404) # make sure the event is set if event not in GITHUB_EVENTS: abort(400) data = request.get_json() # call the always function and the event function (when implemented) for function in ["always", event]: if hasattr(repo, function): getattr(repo, function)(data) return "ok" def is_signed(payload, signature, secret): """ https://developer.github.com/webhooks/securing/#validating-payloads-from-github """ if six.PY3: # pragma: no cover payload = payload.encode("utf-8") secret = secret.encode("utf-8") digest = "sha1=" + hmac.new( secret, msg=payload, digestmod=hashlib.sha1 ).hexdigest() return digest == signature def import_repo_by_name(name): module_name = ".".join(["repos", name]) full_path = os.pa
th.join(REPO_DIR, name + ".py") module = imp.load_source(module_name, full_path) env_var = "{name}_SECRET".format(name=name.upper()) if env_var not in os.environ: if DEB
UG: print("WARNING: You need to set the environment variable {env_var}" " when not in DEBUG mode.".format( env_var=env_var )) else: raise AssertionError( "You need to set {env_var}".format( env_var=env_var) ) else: setattr(module, "SECRET", os.environ.get(env_var)) return module def build_routes(): for _, _, filenames in os.walk(REPO_DIR): for filename in filenames: if filename.endswith(".py"): name, _, _ = filename.partition(".py") app.add_url_rule( rule="/{}/".format(name), endpoint=name, view_func=hook, methods=["POST"], defaults={"repo": import_repo_by_name(name)} ) if __name__ == "__main__": # pragma: no cover if DEBUG: print("WARNING: running in DEBUG mode. Incoming webhooks will not be checked for a " "valid signature.") build_routes() app.run(host=HOST, debug=DEBUG)
MalloyPower/parsing-python
front-end/testsuite-python-lib/Python-2.3/Lib/plat-mac/Carbon/Appearance.py
Python
mit
26,525
0.001018
# Generated from 'Appearance.h' def FOUR_CHAR_CODE(x): return x kAppearanceEventClass = FOUR_CHAR_CODE('appr') kAEAppearanceChanged = FOUR_CHAR_CODE('thme') kAESystemFontChanged = FOUR_CHAR_CODE('sysf') kAESmallSystemFontChanged = FOUR_CHAR_CODE('ssfn') kAEViewsFontChanged = FOUR_CHAR_CODE('vfnt') kThemeDataFileType = FOUR_CHAR_CODE('thme') kThemePlatinumFileType = FOUR_CHAR_CODE('pltn') kThemeCustomThemesFileType = FOUR_CHAR_CODE('scen') kThemeSoundTrackFileType = FOUR_CHAR_CODE('tsnd') kThemeBrushDialogBackgroundActive = 1 kThemeBrushDialogBackgroundInactive = 2 kThemeBrushAlertBackgroundActive = 3 kThemeBrushAlertBackgroundInactive = 4 kThemeBrushModelessDialogBackgroundActive = 5 kThemeBrushModelessDialogBackgroundInactive = 6 kThemeBrushUtilityWindowBackgroundActive = 7 kThemeBrushUtilityWindowBackgroundInactive = 8 kThemeBrushListViewSortColumnBackground = 9 kThemeBrushListViewBackground = 10 kThemeBrushIconLabelBackground = 11 kThemeBrushListViewSeparator = 12 kThemeBrushChasingArrows = 13 kThemeBrushDragHilite = 14 kThemeBrushDocumentWindowBackground = 15 kThemeBrushFinderWindowBackground = 16 kThemeBrushScrollBarDelimiterActive = 17 kThemeBrushScrollBarDelimiterInactive = 18 kThemeBrushFocusHighlight = 19 kThemeBrushPopupArrowActive = 20 kThemeBrushPopupArrowPressed = 21 kThemeBrushPopupArrowInactive = 22 kThemeBrushAppleGuideCoachmark = 23 kThemeBrushIconLabelBackgroundSelected = 24 kThemeBrushStaticAreaFill = 25 kThemeBrushActiveAreaFill = 26 kThemeBrushButtonFrameActive = 27 kThemeBrushButtonFrameInactive = 28 kThemeBrushButtonFaceActive = 29 kThemeBrushButtonFaceInactive = 30 kThemeBrushButtonFacePressed = 31 kThemeBrushButtonActiveDarkShadow = 32 kThemeBrushButtonActiveDarkHighlight = 33 kThemeBrushButtonActiveLightShadow = 34 kThemeBrushButtonActiveLightHighlight = 35 kThemeBrushButtonInactiveDarkShadow = 36 kThemeBrushButtonInactiveDarkHighlight = 37 kThemeBrushButtonInactiveLightShadow = 38 kThemeBrushButtonInactiveLightHighlight = 39 kThemeBrushButtonPressedDarkShadow = 40 kThemeBrushButtonPressedDarkHighlight = 41 kThemeBrushButtonPressedLightShadow = 42 kThemeBrushButtonPressedLightHighlight = 43 kThemeBrushBevelActiveLight = 44 kThemeBrushBevelActiveDark = 45 kThemeBrushBevelInactiveLight = 46 kThemeBrushBevelInactiveDark = 47 kThemeBrushNotificationWindowBackground = 48 kThemeBrushMovableModalBackground = 49 kThemeBrushSheetBackground = 50 kThemeBrushDrawerBackground = 51 kThemeBrushBlack = -1 kThemeBrushWhite = -2 kThemeTextColorDialogActive = 1 kThemeTextColorDialogInactive = 2 kThemeTextColorAlertActive = 3 kThemeTextColorAlertInactive = 4 kThemeTextColorModelessDialogActive = 5 kThemeTextColorModelessDialogInactive = 6 kThemeTextColorWindowHeaderActive = 7 kThemeTextColorWindowHeaderInactive = 8 kThemeTextColorPlacardActive = 9 kThemeTextColorPlacardInactive = 10 kThemeTextColorPlacardPressed = 11 kThemeTextColorPushButtonActive = 12 kThemeTextColorPushButtonInactive = 13 kThemeTextColorPushButtonPressed = 14 kThemeTextColorBevelButtonActive = 15 kThemeTextColorBevelButtonInactive = 16 kThemeTextColorBevelButtonPressed = 17 kThemeTextColorPopupButtonActive = 18 kThemeTextColorPopupButtonInactive = 19 kThemeTextColorPopupButtonPressed = 20 kThemeTextColorIconLabel = 21 kThemeTextColorListView = 22 kThemeTextColorDocumentWindowTitleActive = 23 kThemeTextColorDocumentWindowTitleInactive = 24 kThemeTextColorMovableModalWindowTitleActive = 25 kThemeTextColorMovableModalWindowTitleInactive = 26 kThemeTextColorUtilityWindowTitleActive = 27 kThemeTextColorUtilityWindowTitleInactive = 28 kThemeTextColorPopupWindowTitleActive = 29 kThemeTextColorPopupWindowTitleInactive = 30 kThemeTextColorRootMenuActive = 31 kThemeTextColorRootMenuSelected = 32 kThemeTextColorRootMenuDisabled = 33 kThemeTextColorMenuItemActive = 34 kThemeTextColorMenuItemSelected = 35 kThemeTextColorMenuItemDisabled = 36 kThemeTextColorPopupLabelActive = 37 kThemeTextColorPopupLabelInactive = 38 kThemeTextColorTabFrontActive = 39 kThemeTextColorTabNonFrontActive = 40 kThemeTextColorTabNonFrontPressed = 41 kThemeTextColorTabFrontInactive = 42 kThemeTextColorTabNonFrontInactive = 43 kThemeTextColorIconLabelSelected = 44 kThemeTextColorBevelButtonStickyActive = 45 kThemeTextColorBevelButtonStickyInactive = 46 kThemeTextColorNotification = 47 kThemeTextColorBlack = -1 kThemeTextColorWhite = -2 kThemeStateInactive = 0 kThemeStateActive = 1 kThemeStatePressed = 2 kThemeStateRollover = 6 kThemeStateUnavailable = 7 kThemeStateUnavailableInactive = 8 kThemeStateDisabled = 0 kThemeStatePressedUp = 2 kThemeStatePressedDown = 3 kThemeArrowCursor = 0 kThemeCopyArrowCursor = 1 kThemeAliasArrowCursor = 2 kThemeContextualMenuArrowCursor = 3 kThemeIBeamCursor = 4 kThemeCrossCursor = 5 kThemePlusCursor = 6 kThemeWatchCursor = 7 kThemeClosedHandCursor = 8 kThemeOpenHandCursor = 9 kThemePointingHandCursor = 10 kThemeCountingUpHandCursor = 11 kThemeCountingDownHandCursor = 12 kThemeCountingUpAndDownHandCursor = 13 kThemeSpinningCursor = 14 kThemeResizeLeftCursor = 15 kThemeResizeRightCursor = 16 kThemeResizeLeftRightCursor = 17 kThemeMenuBarNormal = 0 kThemeMenuBarSelected = 1 kThemeMenuSquareMenuBar = (1 << 0) kThemeMenuActive = 0 kThemeMenuSelected = 1 kThemeMenuDisabled = 3 kThemeMenuTypePullDown = 0 kThemeMenuTypePopUp = 1 kThemeMenuTypeHierarchical = 2 kThemeMenuTypeInactive = 0x0100 kThemeMenuItemPlain = 0 kThemeMenuItemHierarchical = 1 kThemeMenuItemScrollUpArrow = 2 kThemeMenuItemScrollDownArrow = 3 kThemeMenuItemAtTop = 0x0100 kThemeMenuItemAtBottom = 0x0200 kThemeMenuItemHierBackground = 0x0400 kThemeMenuItemPopUpBackground = 0x0800 kThemeMenuItemHasIcon = 0x8000 kThemeBackgroundTabPane = 1 kThemeBackgroundPlacard = 2 kThemeBackgroundWindowHeader = 3 kThemeBackgroundListViewWindowHeader = 4 kThemeBackgroundSecondaryGroupBox = 5 kThemeNameTag = FOUR_CHAR_CODE('name') kThemeVariantNameTag = FOUR_CHAR_CODE('varn') kThemeHighlightColorTag = FOUR_CHAR_CODE('hcol') kThemeScrollBarArrowStyleTag = FOUR_CHAR_CODE('sbar') kThemeScrollBarThumbStyleTag = FOUR_CHAR_CODE('sbth') kThemeSoundsEnabledTag = FOUR_CHAR_CODE('snds') kThemeDblClickCollapseTag = FOUR_CHAR_CODE('coll') kThemeAppearanceFileNameTag = FOUR_CHAR_CODE('thme') kThemeSystemFontTag = FOUR_CHAR_CODE('lgsf') kThemeSmallSystemFontTag = FOUR_CHAR_CODE('smsf') kThemeViewsFontTag = FOUR_CHAR_CODE('vfnt') kThemeViewsFontSizeTag = FOUR_CHAR_CODE('vfsz') kThemeDesktopPatternNameTag = FOUR_CHAR_CODE('patn') kThemeDesktopPatternTag = FOUR_CHAR_CODE('patt') kThemeDesktopPic
tureNa
meTag = FOUR_CHAR_CODE('dpnm') kThemeDesktopPictureAliasTag = FOUR_CHAR_CODE('dpal') kThemeDesktopPictureAlignmentTag = FOUR_CHAR_CODE('dpan') kThemeHighlightColorNameTag = FOUR_CHAR_CODE('hcnm') kThemeExamplePictureIDTag = FOUR_CHAR_CODE('epic') kThemeSoundTrackNameTag = FOUR_CHAR_CODE('sndt') kThemeSoundMaskTag = FOUR_CHAR_CODE('smsk') kThemeUserDefinedTag = FOUR_CHAR_CODE('user') kThemeSmoothFontEnabledTag = FOUR_CHAR_CODE('smoo') kThemeSmoothFontMinSizeTag = FOUR_CHAR_CODE('smos') kThemeCheckBoxClassicX = 0 kThemeCheckBoxCheckMark = 1 kThemeScrollBarArrowsSingle = 0 kThemeScrollBarArrowsLowerRight = 1 kThemeScrollBarThumbNormal = 0 kThemeScrollBarThumbProportional = 1 kThemeSystemFont = 0 kThemeSmallSystemFont = 1 kThemeSmallEmphasizedSystemFont = 2 kThemeViewsFont = 3 kThemeEmphasizedSystemFont = 4 kThemeApplicationFont = 5 kThemeLabelFont = 6 kThemeMenuTitleFont = 100 kThemeMenuItemFont = 101 kThemeMenuItemMarkFont = 102 kThemeMenuItemCmdKeyFont = 103 kThemeWindowTitleFont = 104 kThemePushButtonFont = 105 kThemeUtilityWindowTitleFont = 106 kThemeAlertHeaderFont = 107 kThemeCurrentPortFont = 200 kThemeTabNonFront = 0 kThemeTabNonFrontPressed = 1 kThemeTabNonFrontInactive = 2 kThemeTabFront = 3 kThemeTabFrontInactive = 4 kThemeTabNonFrontUnavailable = 5 kThemeTabFrontUnavailable = 6 kThemeTabNorth = 0 kThemeTabSouth = 1 kThemeTabEast = 2 kThemeTabWest = 3 kThemeSmallTabHeight = 16 kThemeLargeTabHeight = 21 kThemeTabPaneOverlap = 3 kThemeSmallTabHeightMax = 19 kThemeLargeTabHeightMax = 24 kThemeMediumScrollBar = 0 kThemeSmallScrollBar = 1 kThemeMediumSlider = 2 kThemeMediumProgressBar = 3 kThemeMediumIndetermin
mwojcikowski/opendrugdiscovery
oddt/spatial.py
Python
bsd-3-clause
2,224
0.009442
"""Spatial functions included in ODDT Mainly used by other modules, but can be accessed directly. """ import numpy as np from scipy.spatial.distance import cdist as distance __all__ = ['angle', 'angle_2v', 'dihedral', 'distance'] # angle functions def angle(p1,p2,p3): """Returns an angle from a series of 3 points (point #2 is centroid).Angle is returned in degrees. Parameters ---------- p1,p2,p3 : numpy arrays, shape = [n_points, n_dimensions] Triplets of points in n-dimensional space, aligned in rows. Returns ------- angles : numpy array, shape = [n_points] Series of angles in degrees """ v1 = p1-p2 v2 = p3-p2 return angle_2v(v1,v2) def angle_2v(v1, v2): """Returns an angle from a series of 3 points (point #2 is centroid).Angle is returned in degrees. Parameters ---------- v1,v2 : numpy arrays, shape = [n_vectors, n_dimensions] Pairs of vectors in n-dimensional space, aligned in rows. Returns ------- angles : numpy array, shape = [n_vectors] Series of angles in degrees """ dot = (v1*v2).sum(axis=-1) # better than np.dot(v1, v2), multiple vectors can be applied norm = np.linalg.norm(v1, axis=-1)* np.linalg.norm(v2, axis=-1) return np.degrees(np.arccos(dot/norm)) def dihedral(p1,p2,p3,p4): """Returns an dihedral angle from a series of 4 points. Dihedral is returned in degrees. Function distingishes clockwise and antyclockwise dihedrals. Parameters ---------- p1,p2,p3,p4 : numpy array
s, shape = [n_points, n_dimensions] Quadruplets of points in n-dimensional space, aligned in rows. Returns ------- angles : numpy array, shape = [n_points] Series of angles in degrees
""" v12 = (p1-p2)/np.linalg.norm(p1-p2) v23 = (p2-p3)/np.linalg.norm(p2-p3) v34 = (p3-p4)/np.linalg.norm(p3-p4) c1 = np.cross(v12, v23) c2 = np.cross(v23, v34) out = angle_2v(c1, c2) # check clockwise and anticlockwise n1 = c1/np.linalg.norm(c1) mask = (n1*v34).sum(axis=-1) > 0 if len(mask.shape) == 0 and mask: out = -out else: out[mask] = -out[mask] return out
TwilioDevEd/api-snippets
twiml/voice/gather/gather-2/gather-2.6.x.py
Python
mit
125
0
from twilio.twim
l.voice_response import Gat
her, VoiceResponse response = VoiceResponse() response.gather() print(response)
dan-gamble/cms
cms/tests/test_pipeline.py
Python
bsd-3-clause
978
0
from django.http import HttpResponse from django.test import TestCase from ..pipeline import make_staff class Backend(object): name = None def __init__(self, name, *args, **kwargs): super(Backend, self).__init__(*args, **kwargs) self.name = name class MockSuperUser(object): is_staff = False is_superuser = False def save(self): pass class PipelineTest(TestCase): def test_make_staff(self): facebook_backend = Backend('facebook') google_plus_backend = Backend('google-plus') user = MockSuperUser() response = HttpResponse() self.assertFalse(user.is_staff) self.assertFalse(user.is_superuser) make_st
aff(facebook_backend, user
, response) self.assertFalse(user.is_staff) self.assertFalse(user.is_superuser) make_staff(google_plus_backend, user, response) self.assertTrue(user.is_staff) self.assertTrue(user.is_superuser)
acdh-oeaw/defc-app
defcdb/migrations/0007_auto_20151120_0807.py
Python
mit
2,178
0.001837
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('defcdb', '0006_site_reference'), ] operations = [ migrations.AddField( model_name='dc_country', name='lat', field=models.FloatField(null=True, blank=True), ), migrations.AddField( model_name='dc_country', name='lng', field=models.FloatField(null=True, blank=True), ), migrations.AddField( model_name='dc_province', name='lat', field=models.FloatField(null=True, blank=True), ), migrations.AddField( model_name='dc_province', name='lng', field=models.FloatField(null=True, blank=True), ), migrations.AddField( model_name='dc_region', name='lat', field=models.FloatField(null=True, blank=True), ), migrations.AddField( model_name='dc_region', name='lng', field=models.FloatField(null=True, blank=True), ), migrations.AddField( model_name='site',
name='authorityfile_id', field=models.CharField(max_length=100, help_text='Identifier provided by www.GeoNames.org', null=True, blank=True), ), migrations.AlterField( model_name='dc_country', name='authorityfile_id', field=models.CharField(max_length=100
, help_text='Identifier provided by www.GeoNames.org', null=True, blank=True), ), migrations.AlterField( model_name='dc_province', name='authorityfile_id', field=models.CharField(max_length=100, help_text='Identifier provided by www.GeoNames.org', null=True, blank=True), ), migrations.AlterField( model_name='dc_region', name='authorityfile_id', field=models.CharField(max_length=100, help_text='Identifier provided by www.GeoNames.org', null=True, blank=True), ), ]
nsi-iff/should-dsl
run_examples.py
Python
mit
630
0.009524
#!/usr/bin/env python import doctest import unittest import sys def test_suite(docs): suite = unittest.TestSuite() for doc in docs: suite.addTest(doctest.DocFileSuite(doc, optionflags=flags())) return suite def flags(): flags =
doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS if sys.version_info >= (3,): flags |= doctest.IGNORE_EXCEPTION_DETAIL return flags def run(docs): suite = test_suite(docs) runner = unittest.TextTestRunner(verbosity=2) result = runner.run(suite) sys.exit(int(bool(result.failures or result.errors))) if __name__ == '__main__': run(sys.argv)
raven47git/readthedocs.org
readthedocs/projects/templatetags/projects_tags.py
Python
mit
554
0
from django import template from proj
ects.version_handling import comparable_version register = template.Library() @register.filter def sort_version_aware(versions): """ Takes a list of versions objects and sort them caring about version schemes """ return sorted( versions, key=lambda version: comparable_
version(version.verbose_name), reverse=True) @register.filter def is_project_user(user, project): """ Return if user is a member of project.users """ return user in project.users.all()
mapado/haversine
haversine/haversine.py
Python
mit
5,961
0.002852
from math import radians, cos, sin, asin, sqrt, degrees, pi, atan2 from enum import Enum from typing import Union # mean earth radius - https://en.wikipedia.org/wiki/Earth_radius#Mean_radius _AVG_EARTH_RADIUS_KM = 6371.0088 class Unit(Enum): """ Enumeration of supported units. The full list can be checked by iterating over the class; e.g. the expression `tuple(Unit)`. """ KILOMETERS = 'km' METERS = 'm' MILES = 'mi' NAUTICAL_MILES = 'nmi' FEET = 'ft' INCHES = 'in' RADIANS = 'rad' DEGREES = 'deg' class Direction(Enum): """ Enumeration of supported directions. The full list can be checked by iterating over the class; e.g. the expression `tuple(Direction)`. Angles expressed in radians. """ NORTH = 0 NORTHEAST = pi * 0.25 EAST = pi * 0.5 SOUTHEAST = pi * 0.75 SOUTH = pi SOUTHWEST = pi * 1.25 WEST = pi * 1.5 NORTHWEST = pi * 1.75 # Unit values taken from http://www.unitconversion.org/unit_converter/length.html _CONVERSIONS = { Unit.KILOMETERS: 1.0, Unit.METERS: 1000.0, Unit.MILES: 0.621371192, Unit.NAUTICAL_MILES: 0.539956803, Unit.FEET: 3280.839895013, Unit.INCHES: 39370.078740158, Unit.RADIANS: 1/_AVG_EARTH_RADIUS_KM, Unit.DEGREES: (1/_AVG_EARTH_RADIUS_KM)*(180.0/pi) } def get_avg_earth_radius(unit): unit = Unit(unit) return _AVG_EARTH_RADIUS_KM * _CONVERSIONS[unit] def haversine(point1, point2, unit=Unit.KILOMETERS): """ Calculate the great-circle distance between two points on the Earth surface. Takes two 2-tuples, containing the latitude and longitude of each point in decimal degrees, and, optionally, a unit of length. :param point1: first point; tuple of (latitude, longitude) in decimal degrees :param point2: second point; tuple of (latitude, longitude) in decimal degrees :param unit: a member of haversine.Unit, or, equivalently, a string containing the initials of its corresponding unit of measurement (i.e. miles = mi) default 'km' (kilometers). Example: ``haversine((45.7597, 4.8422), (48.8567, 2.3508), unit=Unit.METERS)`` Precondition: ``unit`` is a supported unit (supported units are listed in the `Unit` enum) :return: the distance between the two points in the requested unit, as a float. The default returned unit is kilometers. The default unit can be changed by setting the unit parameter to a member of ``haversine.Unit`` (e.g. ``haversine.Unit.INCHES``), or, equivalently, to a string containing the corresponding abbreviation (e.g. 'in'). All available units can be found in the ``Unit`` enum. """ # unpack latitude/longitude lat1, lng1 = point1 lat2, lng2 = point2 # convert all latitudes/longitudes from decimal degrees to radians lat1 = radians(lat1) lng1 = radians(lng1) lat2 = radians(lat2) lng2 = radians(lng2) # calculate haversine lat = lat2 - lat1 lng = lng2 - lng1 d = sin(lat * 0.5) ** 2 + cos(lat1) * cos(lat2) * sin(lng * 0.5) ** 2 return 2 * get_avg_earth_radius(unit) * asin(sqrt(d)) def haversine_vector(array1, array2, unit=Unit.KILOMETERS, comb=False): ''' The exact same function as "haversine", except that this version replaces math functions with numpy functions. This may make it slightly slower for computing the haversine distance between two points, but is much faster for computing the distance between two vectors of points due to vectorization. ''' try: import numpy except ModuleNotFoundError: return 'Error, unable to import Numpy,\ consider using haversine instead of haversine_vector.' # ensure arrays are numpy ndarrays if not isinstance(array1, numpy.ndarray): array1 = numpy.array(array1) if not isinstance(array2, numpy.ndarray): array2 = numpy.array(array2) # ensure will be able to iterate over rows by adding dimension if needed if array1.ndim == 1: array1 = numpy.expand_dims(array1, 0) if array2.ndim == 1: array2 = numpy.expand_dims(array2, 0) # Asserts that both arrays have same dimensions if not in combination mode if not comb: if array1.shape != array2.shape: raise IndexError("When not in combination mode, arrays must be of same size. If mode is required, use comb=True as argument.") # unpack latitude/longitude lat1, lng1 = array1[:, 0], array1[:, 1] lat2, lng2 = array2[:, 0], array2[:, 1] # convert all latitudes/longitudes from decimal degrees to radians lat1 = numpy.radians(lat1) lng1 = numpy.radians(lng1) lat2 = numpy.radians(lat2) lng2 = numpy.radians(lng2) # If in combination mode, turn coordinates of array1 into column vectors for broadcasting if comb: lat1 = numpy.expand_dims(lat1, axis=0) lng1 = numpy.expand_dims(lng1, axis=0) lat2 = numpy.expand_dims(lat2, axis=1) lng2 = numpy.expand_dims(lng2, axis=1) # calculate haversine lat = lat2 - lat1 lng = lng2 - lng1 d = (numpy.sin(lat * 0.5) ** 2 + numpy.cos(lat1) * numpy.cos(lat2) * numpy.sin(lng * 0.5) ** 2) return 2 * get_a
vg_earth_radius(unit) * numpy.arcsin(numpy.sqrt(d)) def inverse_haversine(point, distance, direction: Union[Direction, float], unit=Unit.KILOMETERS): lat, lng = point lat, lng = map(radians, (lat, lng)) d = distance r = get_avg_earth_radius(unit) brng = direction.value if isinstance(direction, Direction) else direction return_lat = asin(sin(lat) * co
s(d / r) + cos(lat) * sin(d / r) * cos(brng)) return_lng = lng + atan2(sin(brng) * sin(d / r) * cos(lat), cos(d / r) - sin(lat) * sin(return_lat)) return_lat, return_lng = map(degrees, (return_lat, return_lng)) return return_lat, return_lng
switchkiller/Python-and-Algorithms-and-Data-Structures
src/trees/check_ancestor.py
Python
mit
1,369
0.005113
#!/usr/bin/env python __author__ = "bt3" from binary_search_tree import BST, Node def find_ancestor(path, low_item, high_item): while path: current_item = path[0] if current_item < low_item: try: path = path[2:] except: return current_item elif current_item > high_item: try: path = path[1:] except: return current_item elif low_item <= current_item <= high_item: return current_item def find_ancestor2(tree, n1, n2): if not tree: return False if n1 <= tree.item and n2 >= tree.item or (not tree.left and not tree.rig
ht) : return tree.item if tree.left and (n1 < tree.item and n2 < tree.item): return find_ancestor(tree.left, n1, n2) or tree.item if tree.right and (n1 > tree.item and n2 > tree.item): return find_ancestor(tree.right, n1, n2) or tree.item if __name__ == '__main__': bst = BST() l = [10, 5, 6, 3,
8, 2, 1, 11, 9, 4] for i in l: bst.add(i) nodes = bst.preorder_array() print 'Original: ', l print 'Preorder: ', nodes print 'Method 1: ' print 'Ancestor for 3, 11:', find_ancestor(nodes, 3, 11) print 'Method 2: ' print 'Ancestor for 3, 11: ', find_ancestor2(bst.root, 3, 11)
google-research/federated
generalization/utils/trainer_utils_test.py
Python
apache-2.0
6,847
0.007156
# Copyright 2021, Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for trainer_utils.py.""" from absl.testing import parameterized import tensorflow as tf import tensorflow_federated as tff from generalization.utils import eval_metric_distribution from generalization.utils import trainer_utils def keras_model_builder_with_zeros(): # Create a simple linear regression model, single output. # We initialize all weights to zero. model = tf.keras.Sequential([ tf.keras.layers.Dense( 1, kernel_initializer='zeros', bias_initializer='zeros', input_shape=(1,)) ]) return model def keras_model_builder_with_ones(): model = tf.keras.Sequential([ tf.keras.layers.Dense( 1, kernel_initializer='ones', bias_initializer='ones', input_shape=(1,)) ]) return model def create_dataset(): # Create data satisfying y = 2*x + 1 x = [[1.0], [2.0], [3.0]] y = [[3.0], [5.0], [7.0]] return tf.data.Dataset.from_tensor_slices((x, y)).batch(1) def create_federated_cd(): x1 = [[1.0]] y1 = [[3.0]] dataset1 = (x1, y1) x2 = [[2.0]] y2 = [[5.0]] dataset2 = (x2, y2) x3 = [[3.0]] y3 = [[7.0]] dataset3 = (x3, y3) return tff.simulation.datasets.TestClientData({ 1: dataset1, 2: dataset2, 3: dataset3 }).preprocess(lambda ds: ds.batch(1)) def get_input_spec(): return create_dataset().element_spec def metrics_builder(): return [tf.keras.metrics.MeanSquaredError()] def tff_model_builder(): return tff.learning.from_keras_model( keras_model=keras_model_builder_with_zeros(), input_spec=get_input_spec(), loss=tf.keras.losses.MeanSquaredError(), metrics=metrics_builder()) class CreateEvalFnsTest(tf.test.TestCase, parameterized.TestCase): @parameterized.named_parameters(('with_test_cd', True), ('without_test_cd', False)) def test_create_federated_eval_fns(self, use_test_cd): """Test for create_federated_eval_fns.""" (part_train_eval_fn, part_val_fn, unpart_fn, test_fn) = trainer_utils.create_federated_eval_fns( tff_model_builder=tff_model_builder, metrics_builder=metrics_builder, part_train_eval_cd=create_federated_cd(), part_val_cd=create_federated_cd(), unpart_cd=create_federated_cd(), test_cd=create_federated_cd() if use_test_cd else None, stat_fns=eval_metric_distribution.ALL_STAT_FNS, rounds_per_eval=1, part_clients_per_eval=2, unpart_clients_per_eval=2, test_clients_for_eval=3, resample_eval_clients=False, eval_clients_random_seed=1) keras_model = keras_model_builder_with_zeros() model_weights = tff.learning.ModelWeights.from_model(keras_model) server_state = tff.learning.framework.ServerState(model_weights, [], [], []) expected_keys = [ f'mean_squared_error/{s}' for s in eval_metric_distribution.ALL_STAT_FNS ] # Federated validation fn requires a positional arg round_num. if use_test_cd: self.assertIsNotNone(test_fn) eval_fns_to_test = (part_train_eval_fn, part_val_fn, unpart_fn, test_fn) else: self.assertIsNone(test_fn) eval_fns_to_test = (part_train_eval_fn, part_val_fn, unpart_fn) for eval_fn in eval_fns_to_test: metrics_dict = eval_fn(server_state, 0) self.assertEqual(list(metrics_dict.keys()), expected_keys) @parameterized.named_parameters(('case1', 3, 4), ('case2', 3, 5), ('case3', 2,
3)) def test_create_federated_eval_fns_skips_rounds(self, rounds_per_eval, round_num): """Test that create_federated_eval_fns skips the appropriate rounds.""" part_train_eval_fn, part_val_fn, unpart_fn, _ = trainer_utils.create_federated_eval_fns( tff_model_builder=tff_model_builder, metrics_builder=
metrics_builder, part_train_eval_cd=create_federated_cd(), part_val_cd=create_federated_cd(), unpart_cd=create_federated_cd(), test_cd=create_federated_cd(), stat_fns=eval_metric_distribution.ALL_STAT_FNS, rounds_per_eval=rounds_per_eval, part_clients_per_eval=2, unpart_clients_per_eval=2, test_clients_for_eval=3, resample_eval_clients=False, eval_clients_random_seed=1) keras_model = keras_model_builder_with_zeros() model_weights = tff.learning.ModelWeights.from_model(keras_model) server_state = tff.learning.framework.ServerState(model_weights, [], [], []) # Federated validation fn requires a positional arg round_num. for eval_fn in (part_train_eval_fn, part_val_fn, unpart_fn): metrics_dict = eval_fn(server_state, round_num) self.assertEmpty(metrics_dict.keys()) @parameterized.named_parameters(('with_test_cd', True), ('without_test_cd', False)) def test_create_centralized_eval_fns(self, use_test_cd): """Test for create_centralized_eval_fns.""" (part_train_eval_fn, part_val_fn, unpart_fn, test_fn) = trainer_utils.create_centralized_eval_fns( tff_model_builder=tff_model_builder, metrics_builder=metrics_builder, part_train_eval_cd=create_federated_cd(), part_val_cd=create_federated_cd(), unpart_cd=create_federated_cd(), test_cd=create_federated_cd() if use_test_cd else None, stat_fns=eval_metric_distribution.ALL_STAT_FNS, part_clients_per_eval=2, unpart_clients_per_eval=2, test_clients_for_eval=3, resample_eval_clients=False, eval_clients_random_seed=1) keras_model = keras_model_builder_with_zeros() expected_keys = [ f'mean_squared_error/{s}' for s in eval_metric_distribution.ALL_STAT_FNS ] if use_test_cd: self.assertIsNotNone(test_fn) eval_fns_to_test = (part_train_eval_fn, part_val_fn, unpart_fn, test_fn) else: self.assertIsNone(test_fn) eval_fns_to_test = (part_train_eval_fn, part_val_fn, unpart_fn) for eval_fn in eval_fns_to_test: metrics_dict = eval_fn(keras_model) self.assertEqual(list(metrics_dict.keys()), expected_keys) if __name__ == '__main__': tf.test.main()
holly/beretta
lib/beretta/parser.py
Python
mit
1,231
0.004062
from datetime import datetime from argparse import ArgumentParser import pprint import time import warnings import os, sys, io import signal import beretta import importlib __author__ = 'holly' class Parser(object): def __init__(self): self.parser = ArgumentParser(description=beretta.__doc__) self.parser.add_argument('--version', action='version', version='%(prog)s ' + beretta.__version__) self.subparsers = self.parser.add_subparsers(help='sub-command --help', dest='subparser_name') def run(self, loader=None): if loader is None: loader = importlib.import_module("beretta.loader").Loader() plugins = {} for (name, import_plugin) in loader.plugins(): plugin = import_plugin.Plugin(name) plugin_
parser = self.subparsers.add_parser(plugin.name, help=plugin.help, description=plugin.desc) for args, kwargs in
plugin.arguments(): plugin_parser.add_argument(*args, **kwargs) plugins[name] = plugin args = self.parser.parse_args() if args.subparser_name in plugins: plugins[args.subparser_name].run_plugin(args) else: self.parser.print_help()
bjorskog/majordomo
majordomo/task.py
Python
bsd-3-clause
578
0.00519
#!/usr/bin/env python # -*- coding: utf-8 -*- import abc class Task(object): """ represents work to do """ __metaclass__ = abc.ABCMeta _is_done = False def __init__(self): """ constr
uctor """ pass def run(self): self._is_done = True return self._run() def requires(self): """
dependencies """ return [] def output(self): """ target """ return [] @abc.abstractmethod def _run(self): pass @property def is_done(self): return self._is_done
pism/pism
examples/python/bed_deformation.py
Python
gpl-3.0
5,232
0.001911
#!/usr/bin/env python3 import PISM from PISM.util import convert from math import cos, pi # Simple testing program for Lingle & Clark bed deformation model. # Runs go for 150,000 years on 63.5km grid with 100a time steps and Z=2 in L&C model. # SCENARIOS: run 'python bed_deformation.py -scenario N' where N=1,2,3,4 as follows # (1) dump ice disc on initially level, non-uplifting land, use only viscous # half-space model: # include_elastic = FALSE, do_uplift = FALSE, H0 = 1000.0 # center depth b(0,0) should eventually equilibriate to near # -1000 * (910/3300) = -275.76 m # (2) dump ice disc on initially level, non-uplifting land, use both viscous # half-space model and elastic model # include_elastic = TRUE, do_uplift = FALSE, H0 = 1000.0 # (3) never loaded, initially level, uplifting land, use only viscous # half-space model (because elastic model gives no additional when no load): # include_elastic = FALSE, do_uplift = TRUE, H0 = 0.0 # (4) dump ice disc on initially level, uplifting land, use both viscous # half-space model and elastic model: # include_elastic = TRUE, do_uplift = TRUE, H0 = 1000.0; ctx = PISM.Context() config = ctx.config R0 = 1000e3 def initialize_uplift(uplift): "Initialize the uplift field." grid = uplift.grid() peak_uplift = convert(10, "mm/year", "m/second") with PISM.vec.Access(nocomm=[uplift]): for (i, j) in grid.points(): r = PISM.radius(grid, i, j) if r < 1.5 * R0: uplift[i, j] = peak_uplift * (cos(pi * (r / (1.5 * R0))) + 1.0) / 2.0 else: uplift[i, j] = 0.0 def initialize_thickness(thickness, H0): grid = thickness.grid() with PISM.vec.Access(nocomm=[thickness]): for (i, j) in grid.points(): r = PISM.radius(grid, i, j) if r < R0: thickness[i, j] = H0 else: thickness[i, j] = 0.0 def allocate(grid): H = PISM.model.createIceThicknessVec(grid) bed = PISM.model.createBedrockElevationVec(grid) uplift = PISM.IceModelVec2S() uplift.create(grid, "uplift", PISM.WITHOUT_GHOSTS) uplift.set_attrs("internal", "bed uplift", "m / second", "m / second", "", 0) sea_level = PISM.IceModelVec2S(grid, "sea_level", PISM.WITHOUT_GHOSTS) return H, bed, up
lift, sea_level def create_grid(): P = PISM.GridParameters(config) P.horizontal_size_from_options() P.horizontal_extent_from_options() P.vertical_grid_from_options(config) P.ownership_ranges_from_options(ctx.size) return PISM.IceGrid(ctx.ctx, P) def run(scenario, plot, pause, save): # set grid defaults config.set_number("grid.Mx", 193) config.set_number("grid.My", 129) config.set_number("grid.Lx", 3000e3) config.set_nu
mber("grid.Ly", 2000e3) config.set_number("grid.Mz", 2) config.set_number("grid.Lz", 1000) scenarios = {"1": (False, False, 1000.0), "2": (True, False, 1000.0), "3": (False, True, 0.0), "4": (True, True, 1000.0)} elastic, use_uplift, H0 = scenarios[scenario] print("Using scenario %s: elastic model = %s, use uplift = %s, H0 = %f m" % (scenario, elastic, use_uplift, H0)) config.set_flag("bed_deformation.lc.elastic_model", elastic) grid = create_grid() thickness, bed, uplift, sea_level = allocate(grid) # set initial geometry and uplift bed.set(0.0) thickness.set(0.0) sea_level.set(0.0) if use_uplift: initialize_uplift(uplift) time = ctx.ctx.time() time.init(ctx.ctx.log()) model = PISM.LingleClark(grid) model.bootstrap(bed, uplift, thickness, sea_level) # now add the disc load initialize_thickness(thickness, H0) dt = convert(100, "365 day", "seconds") # the time-stepping loop while time.current() < time.end(): # don't go past the end of the run dt_current = min(dt, time.end() - time.current()) model.update(thickness, sea_level, time.current(), dt_current) if plot: model.bed_elevation().view(400) model.uplift().view(400) print("t = %s years, dt = %s years" % (time.date(), time.convert_time_interval(dt_current, "years"))) time.step(dt_current) print("Reached t = %s years" % time.date()) if pause: print("Pausing for 5 seconds...") PISM.PETSc.Sys.sleep(5) if save: model.bed_elevation().dump("bed_elevation.nc") model.uplift().dump("bed_uplift.nc") if __name__ == "__main__": scenario = PISM.OptionKeyword("-scenario", "choose one of 4 scenarios", "1,2,3,4", "1") plot = PISM.OptionBool("-plot", "Plot bed elevation and uplift.") save = PISM.OptionBool("-save", "Save final states of the bed elevation and uplift.") pause = PISM.OptionBool("-pause", "Pause for 5 seconds to look at runtime 2D plots.") run(scenario.value(), plot, pause, save) def scenario1_test(): "Test if scenario 1 runs" run("1", False, False, False) def scenario3_test(): "Test if scenario 3 runs" run("3", False, False, False)
plotly/plotly.py
packages/python/plotly/plotly/validators/splom/_idssrc.py
Python
mit
388
0
import _plotly_utils.bas
eval
idators class IdssrcValidator(_plotly_utils.basevalidators.SrcValidator): def __init__(self, plotly_name="idssrc", parent_name="splom", **kwargs): super(IdssrcValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "none"), **kwargs )
leewp/TornadoPractice
application.py
Python
apache-2.0
336
0.020833
#!/usr/bin/env python # -*- coding: utf-8 -*- from url import url import tornado.web import os settings = dict( template_path = os.path.join(os.path.dirname(__file__), "templates"), static_path = os.path.join(os.path.dirn
ame(__file__), "statics") ) application = tornado.web.Application( handlers = url, *
*settings )
mcr/ietfdb
ietf/idtracker/migrations/0003_internet_draft_shepred_fk_blank_true.py
Python
bsd-3-clause
37,706
0.007771
from south.db import db from django.db import models from ietf.idtracker.models import * class Migration: def forwards(self, orm): # Changing field 'InternetDraft.shepherd' # (to signature: django.db.models.fields.related.ForeignKey(to=orm['idtracker.PersonOrOrgInfo'], null=True, blank=True)) db.alter_column('internet_drafts', 'shepherd_id', orm['idtracker.internetdraft:shepherd']) def backwards(self, orm): # Changing field 'InternetDraft.shepherd' # (to signature: django.db.models.fields.related.ForeignKey(to=orm['idtracker.PersonOrOrgInfo'])) db.alter_column('internet_drafts', 'shepherd_id', orm['idtracker.internetdraft:shepherd']) models = { 'idtracker.acronym': { 'Meta': {'db_table': "'acronym'"}, 'acronym': ('django.db.models.fields.CharField', [], {'max_length': '12'}), 'acronym_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name_key': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'idtracker.area': { 'Meta': {'db_table': "'areas'"}, 'area_acronym': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['idtracker.Acronym']", 'unique': 'True', 'primary_key': 'True'}), 'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'concluded_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'extra_email_addresses': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'last_modified_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}), 'start_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.AreaStatus']"}) }, 'idtracker.areadirector': { 'Meta': {'db_table': "'area_directors'"}, 'area': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.Area']", 'null': 'True', 'db_column': "'area_acronym_id'"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.PersonOrOrgInfo']", 'db_column': "'person_or_org_tag'"}) }, 'idtracker.areagroup': { 'Meta': {'db_table': "'area_group'"}, 'area': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'areagroup'", 'db_column': "'area_acronym_id'", 'to': "orm['idtracker.Area']"}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.IETFWG']", 'unique': 'True', 'db_column': "'group_acronym_id'"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'idtracker.areastatus': { 'Meta': {'db_table': "'area_status'"}, 'status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_column': "'status_value'"}), 'status_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'idtracker.areawgurl': { 'Meta': {'db_table': "'wg_www_pages'"}, 'description': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'area_ID'"}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50
', 'db_column': "'area_Name'"}), 'url': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'idtracker.ballotinfo': { 'Meta': {'db_table': "'ballot_info'"}, 'active': ('django.db.models.fields.BooleanField', [], {'de
fault': 'False', 'blank': 'True'}), 'an_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'an_sent_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ansent'", 'null': 'True', 'db_column': "'an_sent_by'", 'to': "orm['idtracker.IESGLogin']"}), 'an_sent_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'approval_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'ballot': ('django.db.models.fields.AutoField', [], {'primary_key': 'True', 'db_column': "'ballot_id'"}), 'ballot_issued': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'ballot_writeup': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'defer': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'defer_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'deferred'", 'null': 'True', 'db_column': "'defer_by'", 'to': "orm['idtracker.IESGLogin']"}), 'defer_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'last_call_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}) }, 'idtracker.chairshistory': { 'Meta': {'db_table': "'chairs_history'"}, 'chair_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.Role']"}), 'end_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.PersonOrOrgInfo']", 'db_column': "'person_or_org_tag'"}), 'present_chair': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'start_year': ('django.db.models.fields.IntegerField', [], {}) }, 'idtracker.documentcomment': { 'Meta': {'db_table': "'document_comments'"}, 'ballot': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), 'comment_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'created_by': ('BrokenForeignKey', ["orm['idtracker.IESGLogin']"], {'null': 'True', 'db_column': "'created_by'", 'null_values': '(0,999)'}), 'date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today', 'db_column': "'comment_date'"}), 'document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idtracker.IDInternal']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'origin_state': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments_coming_from_state'", 'null': 'True', 'db_column': "'origin_state'", 'to': "orm['idtracker.IDState']"}), 'public_flag': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'result_state': ('BrokenForeignKey', ["orm['idtracker.IDState']"], {'related_name': '"comments_leading_to_state"', 'null': 'True', 'db_column': "'result_state'", 'null_values': '(0,99)'}), 'rfc_flag': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), 'time': ('django.db.models.fields.CharField', [], {'default': "'08:36:20'", 'max_length': '20', 'db_column': "'comment_time'"}), 'version': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}) }, 'idtracker.emailaddress': { 'Meta': {'db_table': "'email_addresses'"}, 'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_column': "'email_address'"}), 'comment': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_column': "'email_comment'", 'blank': 'True'}),
MisterTea/MLPlayground
Python/gd_numpy.py
Python
apache-2.0
5,435
0.014351
import random import math import sys import numpy as np random.seed(1L) labels = [] features = [] NUM_FEATURES = 0 #Parse libsvm fp = open("datasets/a1a/a1a","r") while True: line = fp.readline() if len(line)==0: break tokens = line.split(" ") del tokens[-1] labels.append(0 if int(tokens[0])==-1 else 1) features.append({}) for x in xrange(1,len(tokens)): index,feature = tokens[x].split(":") index = int(index) NUM_FEATURES = max(NUM_FEATURES,index) features[-1][index-1] = float(int(feature)) def normalize(weights): sum = 0.0 for x in xrange(0,len(weights)): sum += math.fabs(weights[x]) if sum > 1e-6: for x in xrange(0,len(weights)): weights[x] /= sum loss_old = 0 loss_new = 0 weights = [random.gauss(0, 1.0)]*NUM_FEATURES eps = 0.005 # step size NUM_INPUTS = len(features) def logistic(x): if x>=100: return 0.99 if x<=-100: return 0.01 ret = 1 / (1 + math.exp(-x)) return min(0.99, max(0.01, ret)) def logistic_derivative_i(x, x_i_feature): y = logistic(x) return y * (1 - y) * x_i_feature def dot(v1,v2): sum = 0.0 for x in xrange(0,len(v1)): sum += v1[x]*v2[x] return sum def dotSparse(v1,v2): sum = 0.0 for index,value in v1.iteritems(): sum += value*v2[index] return sum def printArray(v): print "[" + ", ".join('%+0.2f' % item for item in v) + "]" BATCH_SIZE = NUM_INPUTS/20 count=0 gradients = np.zeros([NUM_FEATURES]) while True: loss_old = loss_new loss_new = 0 gradients[:] = 0 for x in xrange(NUM_INPUTS/20,NUM_INPUTS): #f0 = features[x][0] #f1 = features[x][1] #w0 = weights[0] #w1 = weights[1] estimate = dotSparse(features[x],weights) # Log loss of logistic fn estimate = logistic(estimate) #if estimate>0.5: estimate = 0.99 #else: estimate = 0.01 loss = -1 * ((labels[x] * math.log(estimate)) + (1-labels[x]) * math.log(1-estimate)) #Adjust for the number of samples loss /= NUM_INPUTS loss_new += loss for y in xrange(0,NUM_FEATURES): gradient = (-1 * labels[x] * (1.0 / estimate) * features[x].get(y,0.0)) + \ ((labels[x] - 1) * features[x].get(y,0.0) / (estimate - 1)) #+ (-1 * (1-labels[x]) * (1.0 / (1 - estimate)) * -1 * features[x].get(y,0.0)) gradients[y] += gradient / BATCH_SIZE ''' Better least squares gradient, takes derivative of x^2 loss = (estimate - labels[x])**2 # Least Squared loss loss_new += loss g0 = 2 * (estimate - labels[x]) * features[x][0] g1 = 2 * (estimate - labels[x]) * features[x][1] ''' ''' Old least squared gradient, uses multinomial expansion # estimate**2 - 2 *labels[x]*estimate + labels[x]**2 # estimate**2 = (f0 * w0)**2 + (f1 * w1)**2 + 2*f0*w0*f1*w1 g0 = f0*w0*f0 + f0*f1*w1 g1 = f1*w1*f1 + f0*f1*w0 # The second part of least squares g0 += -1*labels[x]*f0 g1 += -1*labels[x]*f1 g0 *= 2; g1 *= 2; ''' #g0 = 2*f0*w0*f0 - 2*labels[x]*f0 #g1 = 2*f1*w1*f1 - 2*labels[x]*f1 ''' print 'EST',estimate,'LABEL',labels[x] print f0,f1 print labels[x],estimate,w0,w1 print g0,g1 print '---' if labels[x]<0.5: sys.exit(0) ''' #gradients[0] += g0 #gradients[1] += g1 if (x+1)%BATCH_SIZE == 0: for y in xrange(0,NUM_FEATURES): if abs(weights[y])<0.01 and abs(gradients[y])>0.5: weights[y] -= gradients[y] else: weights[y] -= eps * gradients[y] gradients[:] = 0 if True: # L2 regularization L2_STRENGTH = 0.05 unscaled_l2 = dot(weights,weights
) print 'UNSCALED L2',unscaled_l2 loss_new += L2_STRENGTH * unscaled_l2 / NUM_INPUTS # Partial derivative of L2 regularization if unscaled_l2 > 1e-6: for y in xrange(1,NUM_FEATURES): weights[y] -= eps * L2_STRENGTH * weights[y] * 2 if True: # L1 regularization
l1_strength = 0.005 loss_new += l1_strength * math.fsum(weights) / NUM_INPUTS for y in xrange(1,NUM_FEATURES): if abs(weights[y]) < l1_strength: weights[y] = 0 elif weights[y]>0: weights[y] -= l1_strength else: weights[y] += l1_strength print '***',count printArray(weights) print loss_new wins=0 FP=0 FN=0 for x in xrange(0,NUM_INPUTS/20): estimate = dotSparse(features[x],weights) # Log loss of logistic fn estimate = logistic(estimate) if estimate<0.5 and labels[x]<0.5: wins+=1 elif estimate>=0.5 and labels[x]>0.5: wins+=1 elif labels[x]<0.5: FP+=1 else: FN+=1 print 'TPR',(wins*100.0)/(NUM_INPUTS/20) print 'FPR',(FP*100.0)/(NUM_INPUTS/20) print 'FNR',(FN*100.0)/(NUM_INPUTS/20) print '***' count+=1 if abs(loss_old-loss_new) < 1e-9 and count >= 10000: break normalize(weights) printArray(weights) printArray(answer_weights)
biomodels/MODEL0912503622
MODEL0912503622/model.py
Python
cc0-1.0
427
0.009368
import os path = os.path.dirname(os.path.realpat
h(__file__)) sbmlFilePath = os.path.join(path, 'MODEL0912503622.xml') with open(sbmlFilePath,'r') as f: sbmlString = f.read()
def module_exists(module_name): try: __import__(module_name) except ImportError: return False else: return True if module_exists('libsbml'): import libsbml sbml = libsbml.readSBMLFromString(sbmlString)
libretro/mgba
src/platform/python/mgba/thread.py
Python
mpl-2.0
1,875
0
# Copyright (c) 2013-2017 Jeffrey Pfau # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from ._pylib import ffi, lib # pylint: disable=no-name-in-module from .core import IRunner, ICoreOwner, Core class ThreadCoreOwner(ICoreOwner): def __init__(self, thread): self.thread = thread def claim(self): if not self.thread.isRunning(): raise ValueError lib.mCoreThreadInterrupt(self.thread._native) return self.thread._core def release(self): lib.mCoreThreadContinue(self.thread._native) class Thread(IRunner): def __init__(self, native=None): if native: self._native = native self._core = Core(native.core) self._core._was_res
et = lib.mCoreThreadHasStarted(self._native) else: self._native = ffi.new("struct mCoreThread*") def start(self, core): if lib.mCoreThreadHasStarted(self._native): raise ValueError self._core = core self._native.core = core._core lib.m
CoreThreadStart(self._native) self._core._was_reset = lib.mCoreThreadHasStarted(self._native) def end(self): if not lib.mCoreThreadHasStarted(self._native): raise ValueError lib.mCoreThreadEnd(self._native) lib.mCoreThreadJoin(self._native) def pause(self): lib.mCoreThreadPause(self._native) def unpause(self): lib.mCoreThreadUnpause(self._native) @property def running(self): return bool(lib.mCoreThreadIsActive(self._native)) @property def paused(self): return bool(lib.mCoreThreadIsPaused(self._native)) def use_core(self): return ThreadCoreOwner(self)
Parallel-in-Time/pySDC
pySDC/tutorial/step_8/B_multistep_SDC.py
Python
bsd-2-clause
6,586
0.002885
import os from pySDC.helpers.stats_helper import filter_stats, sort_stats from pySDC.helpers.visualization_tools import show_residual_across_simulation from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI from pySDC.implementations.problem_classes.HeatEquation_1D_FD import heat1d from pySDC.implementations.sweeper_classes.generic_LU import generic_LU from pySDC.implementations.transfer_classes.TransferMesh import mesh_to_mesh def main(): """ A simple test program to do compare PFASST with multi-step SDC """ # initialize level parameters level_params = dict() level_params['restol'] = 5E-10 level_params['dt'] = 0.125 # initialize sweeper parameters sweeper_params = dict() sweeper_params['collocation_class'] = CollGaussRadau_Right sweeper_params['num_nodes'] = [3] # initialize problem parameters problem_params = dict() problem_params['nu'] = 0.1 # diffusion coefficient problem_params['freq'] = 2 # frequency for the test value # initialize step parameters step
_params = dict() step_params['maxiter'] = 50 # initialize space transfer parameters space_transfer_params = dict() space_transfer_params['rorder'] = 2 space_transfer_params['iorder'] = 6 # initialize controller parameters controller_params = dict() controller_params['logger_level'] = 40 # fill d
escription dictionary for easy step instantiation description = dict() description['problem_class'] = heat1d # pass problem class description['sweeper_class'] = generic_LU # pass sweeper description['sweeper_params'] = sweeper_params # pass sweeper parameters description['level_params'] = level_params # pass level parameters description['step_params'] = step_params # pass step parameters description['space_transfer_class'] = mesh_to_mesh # pass spatial transfer class description['space_transfer_params'] = space_transfer_params # pass paramters for spatial transfer # set up parameters for PFASST run problem_params['nvars'] = [63, 31] description['problem_params'] = problem_params.copy() description_pfasst = description.copy() # set up parameters for MSSDC run problem_params['nvars'] = [63] description['problem_params'] = problem_params.copy() description_mssdc = description.copy() controller_params['mssdc_jac'] = True controller_params_jac = controller_params.copy() controller_params['mssdc_jac'] = False controller_params_gs = controller_params.copy() # set time parameters t0 = 0.0 Tend = 1.0 # set up list of parallel time-steps to run PFASST/MSSDC with num_proc = 8 # instantiate controllers controller_mssdc_jac = controller_nonMPI(num_procs=num_proc, controller_params=controller_params_jac, description=description_mssdc) controller_mssdc_gs = controller_nonMPI(num_procs=num_proc, controller_params=controller_params_gs, description=description_mssdc) controller_pfasst = controller_nonMPI(num_procs=num_proc, controller_params=controller_params, description=description_pfasst) # get initial values on finest level P = controller_mssdc_jac.MS[0].levels[0].prob uinit = P.u_exact(t0) # call main functions to get things done... uend_pfasst, stats_pfasst = controller_pfasst.run(u0=uinit, t0=t0, Tend=Tend) uend_mssdc_jac, stats_mssdc_jac = controller_mssdc_jac.run(u0=uinit, t0=t0, Tend=Tend) uend_mssdc_gs, stats_mssdc_gs = controller_mssdc_gs.run(u0=uinit, t0=t0, Tend=Tend) # compute exact solution and compare for both runs uex = P.u_exact(Tend) err_mssdc_jac = abs(uex - uend_mssdc_jac) err_mssdc_gs = abs(uex - uend_mssdc_gs) err_pfasst = abs(uex - uend_pfasst) diff_jac = abs(uend_mssdc_jac - uend_pfasst) diff_gs = abs(uend_mssdc_gs - uend_pfasst) diff_jac_gs = abs(uend_mssdc_gs - uend_mssdc_jac) f = open('step_8_B_out.txt', 'w') out = 'Error PFASST: %12.8e' % err_pfasst f.write(out + '\n') print(out) out = 'Error parallel MSSDC: %12.8e' % err_mssdc_jac f.write(out + '\n') print(out) out = 'Error serial MSSDC: %12.8e' % err_mssdc_gs f.write(out + '\n') print(out) out = 'Diff PFASST vs. parallel MSSDC: %12.8e' % diff_jac f.write(out + '\n') print(out) out = 'Diff PFASST vs. serial MSSDC: %12.8e' % diff_gs f.write(out + '\n') print(out) out = 'Diff parallel vs. serial MSSDC: %12.8e' % diff_jac_gs f.write(out + '\n') print(out) # filter statistics by type (number of iterations) filtered_stats_pfasst = filter_stats(stats_pfasst, type='niter') filtered_stats_mssdc_jac = filter_stats(stats_mssdc_jac, type='niter') filtered_stats_mssdc_gs = filter_stats(stats_mssdc_gs, type='niter') # convert filtered statistics to list of iterations count, sorted by process iter_counts_pfasst = sort_stats(filtered_stats_pfasst, sortby='time') iter_counts_mssdc_jac = sort_stats(filtered_stats_mssdc_jac, sortby='time') iter_counts_mssdc_gs = sort_stats(filtered_stats_mssdc_gs, sortby='time') # compute and print statistics for item_pfasst, item_mssdc_jac, item_mssdc_gs in \ zip(iter_counts_pfasst, iter_counts_mssdc_jac, iter_counts_mssdc_gs): out = 'Number of iterations for time %4.2f (PFASST/parMSSDC/serMSSDC): %2i / %2i / %2i' % \ (item_pfasst[0], item_pfasst[1], item_mssdc_jac[1], item_mssdc_gs[1]) f.write(out + '\n') print(out) f.close() # call helper routine to produce residual plot show_residual_across_simulation(stats_mssdc_jac, 'step_8_residuals_mssdc_jac.png') show_residual_across_simulation(stats_mssdc_gs, 'step_8_residuals_mssdc_gs.png') assert os.path.isfile('step_8_residuals_mssdc_jac.png') assert os.path.isfile('step_8_residuals_mssdc_gs.png') assert diff_jac < 3.1E-10, \ "ERROR: difference between PFASST and parallel MSSDC controller is too large, got %s" % diff_jac assert diff_gs < 3.1E-10, \ "ERROR: difference between PFASST and serial MSSDC controller is too large, got %s" % diff_gs assert diff_jac_gs < 3.1E-10, \ "ERROR: difference between parallel and serial MSSDC controller is too large, got %s" % diff_jac_gs if __name__ == "__main__": main()
LumPenPacK/NetworkExtractionFromImages
win_build/nefi2_win_amd64_msvc_2015/site-packages/numpy/lib/tests/test_twodim_base.py
Python
bsd-2-clause
17,996
0.001
"""Test functions for matrix module """ from __future__ import division, absolute_import, print_function from numpy.testing import ( TestCase, run_module_suite, assert_equal, assert_array_equal, assert_array_max_ulp, assert_array_almost_equal, assert_raises, rand, ) from numpy import ( arange, rot90, add, fliplr, flipud, zeros, ones, eye, array, diag, histogram2d, tri, mask_indices, triu_indices, triu_indices_from, tril_indices, tril_indices_from, vander, ) import numpy as np from numpy.compat import asbytes_nested def get_mat(n): data = arange(n) data = add.outer(data, data) return data class TestEye(TestCase): def test_basic(self): assert_equal(eye(4), array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]])) assert_equal(eye(4, dtype='f'), array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], 'f')) assert_equal(eye(3) == 1, eye(3, dtype=bool)) def test_diag(self): assert_equal(eye(4, k=1), array([[0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1], [0, 0, 0, 0]])) assert_equal(eye(4, k=-1), array([[0, 0, 0, 0], [1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0]])) def test_2d(self): assert_equal(eye(4, 3), array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 0, 0]])) assert_equal(eye(3, 4), array([[1, 0, 0, 0],
[0, 1, 0, 0], [0, 0, 1, 0]])) def
test_diag2d(self): assert_equal(eye(3, 4, k=2), array([[0, 0, 1, 0], [0, 0, 0, 1], [0, 0, 0, 0]])) assert_equal(eye(4, 3, k=-2), array([[0, 0, 0], [0, 0, 0], [1, 0, 0], [0, 1, 0]])) def test_eye_bounds(self): assert_equal(eye(2, 2, 1), [[0, 1], [0, 0]]) assert_equal(eye(2, 2, -1), [[0, 0], [1, 0]]) assert_equal(eye(2, 2, 2), [[0, 0], [0, 0]]) assert_equal(eye(2, 2, -2), [[0, 0], [0, 0]]) assert_equal(eye(3, 2, 2), [[0, 0], [0, 0], [0, 0]]) assert_equal(eye(3, 2, 1), [[0, 1], [0, 0], [0, 0]]) assert_equal(eye(3, 2, -1), [[0, 0], [1, 0], [0, 1]]) assert_equal(eye(3, 2, -2), [[0, 0], [0, 0], [1, 0]]) assert_equal(eye(3, 2, -3), [[0, 0], [0, 0], [0, 0]]) def test_strings(self): assert_equal(eye(2, 2, dtype='S3'), asbytes_nested([['1', ''], ['', '1']])) def test_bool(self): assert_equal(eye(2, 2, dtype=bool), [[True, False], [False, True]]) class TestDiag(TestCase): def test_vector(self): vals = (100 * arange(5)).astype('l') b = zeros((5, 5)) for k in range(5): b[k, k] = vals[k] assert_equal(diag(vals), b) b = zeros((7, 7)) c = b.copy() for k in range(5): b[k, k + 2] = vals[k] c[k + 2, k] = vals[k] assert_equal(diag(vals, k=2), b) assert_equal(diag(vals, k=-2), c) def test_matrix(self, vals=None): if vals is None: vals = (100 * get_mat(5) + 1).astype('l') b = zeros((5,)) for k in range(5): b[k] = vals[k, k] assert_equal(diag(vals), b) b = b * 0 for k in range(3): b[k] = vals[k, k + 2] assert_equal(diag(vals, 2), b[:3]) for k in range(3): b[k] = vals[k + 2, k] assert_equal(diag(vals, -2), b[:3]) def test_fortran_order(self): vals = array((100 * get_mat(5) + 1), order='F', dtype='l') self.test_matrix(vals) def test_diag_bounds(self): A = [[1, 2], [3, 4], [5, 6]] assert_equal(diag(A, k=2), []) assert_equal(diag(A, k=1), [2]) assert_equal(diag(A, k=0), [1, 4]) assert_equal(diag(A, k=-1), [3, 6]) assert_equal(diag(A, k=-2), [5]) assert_equal(diag(A, k=-3), []) def test_failure(self): self.assertRaises(ValueError, diag, [[[1]]]) class TestFliplr(TestCase): def test_basic(self): self.assertRaises(ValueError, fliplr, ones(4)) a = get_mat(4) b = a[:, ::-1] assert_equal(fliplr(a), b) a = [[0, 1, 2], [3, 4, 5]] b = [[2, 1, 0], [5, 4, 3]] assert_equal(fliplr(a), b) class TestFlipud(TestCase): def test_basic(self): a = get_mat(4) b = a[::-1, :] assert_equal(flipud(a), b) a = [[0, 1, 2], [3, 4, 5]] b = [[3, 4, 5], [0, 1, 2]] assert_equal(flipud(a), b) class TestRot90(TestCase): def test_basic(self): self.assertRaises(ValueError, rot90, ones(4)) a = [[0, 1, 2], [3, 4, 5]] b1 = [[2, 5], [1, 4], [0, 3]] b2 = [[5, 4, 3], [2, 1, 0]] b3 = [[3, 0], [4, 1], [5, 2]] b4 = [[0, 1, 2], [3, 4, 5]] for k in range(-3, 13, 4): assert_equal(rot90(a, k=k), b1) for k in range(-2, 13, 4): assert_equal(rot90(a, k=k), b2) for k in range(-1, 13, 4): assert_equal(rot90(a, k=k), b3) for k in range(0, 13, 4): assert_equal(rot90(a, k=k), b4) def test_axes(self): a = ones((50, 40, 3)) assert_equal(rot90(a).shape, (40, 50, 3)) class TestHistogram2d(TestCase): def test_simple(self): x = array( [0.41702200, 0.72032449, 1.1437481e-4, 0.302332573, 0.146755891]) y = array( [0.09233859, 0.18626021, 0.34556073, 0.39676747, 0.53881673]) xedges = np.linspace(0, 1, 10) yedges = np.linspace(0, 1, 10) H = histogram2d(x, y, (xedges, yedges))[0] answer = array( [[0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 0, 1, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0]]) assert_array_equal(H.T, answer) H = histogram2d(x, y, xedges)[0] assert_array_equal(H.T, answer) H, xedges, yedges = histogram2d(list(range(10)), list(range(10))) assert_array_equal(H, eye(10, 10)) assert_array_equal(xedges, np.linspace(0, 9, 11)) assert_array_equal(yedges, np.linspace(0, 9, 11)) def test_asym(self): x = array([1, 1, 2, 3, 4, 4, 4, 5]) y = array([1, 3, 2, 0, 1, 2, 3, 4]) H, xed, yed = histogram2d( x, y, (6, 5), range=[[0, 6], [0, 5]], normed=True) answer = array( [[0., 0, 0, 0, 0], [0, 1, 0, 1, 0], [0, 0, 1, 0, 0], [1, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 1]]) assert_array_almost_equal(H, answer/8., 3) assert_array_equal(xed, np.linspace(0, 6, 7)) assert_array_equal(yed, np.linspace(0, 5, 6)) def test_norm(self): x = array([1, 2, 3, 1, 2, 3, 1, 2, 3]) y = array([1, 1, 1, 2, 2, 2, 3, 3, 3]) H, xed, yed = histogram2d( x, y, [[1, 2, 3, 5], [1, 2, 3, 5]], normed=True) answer = array([[1, 1, .5], [1, 1, .5], [.5, .5, .25]])/9. assert_array_almost_equal(H, answer, 3) def test_all_outliers(self): r = rand(100) + 1. + 1e6 #
pyfa-org/Pyfa
gui/fitCommands/gui/localDrone/mutatedImport.py
Python
gpl-3.0
1,558
0.001926
import wx import eos.db import gui.mainFrame from gui import globalEvents as GE from gui.fitCommands.calc.drone.localAdd import CalcAddLocalDroneCommand from gui.fitCommands.helpers import InternalCommandHistory, DroneInfo from service.fit import Fit class GuiImportLocalMutatedDroneCommand(wx.Command): def __init__(self, fitID, baseItem, mutaplasmid, mutations, amount): wx.Command.__init__(self, True, 'Import Local Mutated Drone') self.internalHistory = InternalCommandHistory() self.fitID = fitID self.newDroneInfo = DroneInfo( amount=amount, amountActive=0, itemID=mutaplasmid.resultingItem.ID, baseItemID=baseItem.ID, mutaplasmidID=mutaplasmid.ID, mutations=mutations) def Do(self): cmd = CalcAddLocalDroneCommand(fitID=self.fitID, droneInfo=self.newDroneInfo, forceNewStac
k=True) success = self.internalHistory.submit(cmd) eos.db.flush() sFit = Fit.getInstance() sFit.recalc(self.fitID) sFit.fill(self.fitID) eos.db.commit() wx.PostEvent(gui.main
Frame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,))) return success def Undo(self): success = self.internalHistory.undoAll() eos.db.flush() sFit = Fit.getInstance() sFit.recalc(self.fitID) sFit.fill(self.fitID) eos.db.commit() wx.PostEvent(gui.mainFrame.MainFrame.getInstance(), GE.FitChanged(fitIDs=(self.fitID,))) return success
asherbar/json-plus-plus
jpp/parser/lex.py
Python
mit
2,482
0
import operator import ply.lex as lex from jpp.parser.operation import Operation from jpp.parser.expression import SimpleExpression res
erved = { 'extends': 'EXTENDS', 'import': 'IMPORT', 'local': 'LOCAL', 'imported': 'IMPORTED', 'user_input': 'USER_INPUT', } N
AME_TOK = 'NAME' tokens = [ 'INTEGER', 'STRING_LITERAL', 'COLON', NAME_TOK, 'COMMA', 'LCURL', 'RCURL', 'LBRAC', 'RBRAC', 'LPAREN', 'RPAREN', 'DOT', 'SEMICOLON', 'BOOLEAN', 'MINUS', 'COMPARISON_OP', 'PLUS', 'MUL_OP', 'BIT_SHIFT_OPS', 'BITWISE_OPS', 'INVERT', 'POW', 'FUNC', ] tokens.extend(reserved.values()) t_DOT = r'\.' t_LCURL = r'\{' t_RCURL = r'\}' t_COLON = r'\:' t_LBRAC = r'\[' t_RBRAC = r'\]' t_LPAREN = r'\(' t_RPAREN = r'\)' t_COMMA = ',' t_SEMICOLON = ';' def _create_operation_token(t): t.value = Operation(t.value) return t def t_BIT_SHIFT_OPS(t): """ <<|>> """ return _create_operation_token(t) def t_COMPARISON_OP(t): """ <|<=|==|!=|>= """ return _create_operation_token(t) def t_BITWISE_OPS(t): r""" &|\^|\| """ return _create_operation_token(t) def t_PLUS(t): r""" \+ """ return _create_operation_token(t) def t_MINUS(t): r""" - """ t.value = Operation(t.value, operator.sub) return t def t_POW(t): r""" \*\* """ return _create_operation_token(t) def t_MUL_OP(t): r""" \*|//|/|% """ return _create_operation_token(t) def t_INVERT(t): """ ~ """ return _create_operation_token(t) def t_FUNC(t): """ bool|abs """ return _create_operation_token(t) def t_INTEGER(t): r""" \d+ """ t.value = SimpleExpression(int(t.value)) return t def t_STRING_LITERAL(t): """ "[^"\n]*" """ t.value = SimpleExpression(str(t.value).strip('"')) return t def t_BOOLEAN(t): """ true|false """ t.value = SimpleExpression(t.value == 'true') return t def t_NAME(t): """ [a-zA-Z_][a-zA-Z_0-9]* """ t.type = reserved.get(t.value, NAME_TOK) # Check for reserved words return t def t_COMMENT(t): r""" \#.* """ # No return value. Token discarded pass def t_newline(t): r""" \n+ """ t.lexer.lineno += len(t.value) def t_error(_): return t_ignore = ' \t' def create_lexer(): return lex.lex(debug=False)
Marwari/spyChat
send_message.py
Python
mit
2,015
0.009429
# importing existing friend, steganography library, and datetime. from select_friend import select_friend from steganography.steganography import Steganography from datetime import datetime from spy_details import friends, ChatMessage #importing regular expression for proper validation import re # importing termcolor for colorful output. from termcolor import colored # function to send a secret message. def send_message(): # choose a friend from the list to communicate friend_choice = select_friend() # select an image in which you
want to hide a secret message. original_i
mage = raw_input("Provide the name of the image to hide the message : ") pattern_i = '^[a-zA-Z]+\.jpg$' # User validation for image files. if(re.match(pattern_i,original_image)!=None): print # Do Nothing here else: # Provide suggestions to user print colored("Please provide (.jpg) image type.","red") # name the output file output_image = raw_input("Provide the name of the output image : ") pattern_o = '^[a-zA-Z]+\.jpg$' # User validation for image files. if (re.match(pattern_o,output_image) != None): print # Do Nothing here else: # Provide suggestion to user. print colored("We can extract in only (.jpg) image type, please go for (.jpg).","red") # write the secret message text = raw_input("Enter your message here : ") # Encrypt the message using Steganography library Steganography.encode(original_image, output_image, text) # the message will be stored in chat message class new_chat = ChatMessage(text, True) # along the name of friend with whom we add message friends[friend_choice].chats.append(new_chat) # Successful message after encoding print (colored("Your message encrypted successfully.", 'red')) # name of the friend along which we add message. friends[friend_choice].chats.append(new_chat) print (colored("your secret message is ready.",'yellow'))
sdemyanov/tensorflow-worklab
classes/stats.py
Python
apache-2.0
3,985
0.01857
# -*- coding: utf-8 -*- """ Created on Mon May 16 17:14:41 2016 @author: sdemyanov """ import numpy as np from sklearn import metrics def get_prob_acc(probs, labels): return np.mean(np.argmax(probs, axis=1) == labels) def get_auc_score(scores, labels): fpr, tpr, thresholds = metrics.roc_curve(labels, scores, pos_label=0) return metrics.auc(fpr, tpr) def get_f1_score(confmat): assert confmat.shape[0] == 2 and confmat.shape[1] == 2 precision = float(confmat[0, 0]) / np.sum(confmat[:, 0]) recall = float(confmat[0, 0]) / np.sum(confmat[0, :]) print 'precision: %f' % precision print 'recall: %f' % recall return 2 * precision * recall / (precision + recall) def get_accuracy(confmat): correct = np.sum(np.diagonal(confmat)) overall = np.sum(confmat) return correct.astype(float) / overall def get_sensitivities(confmat): correct = np.diagonal(confmat) overall = np.sum(confmat, 1) return np.divide(np.array(correct, dtype=np.float), overall) def get_pred_confmat(classes, preds, labels): classnum = len(classes) mat = np.zeros((classnum, classnum), dtype=int) for pind in range(preds.shape[0]): labind = np.where(classes == labels[pind]) predind = np.where(classes == preds[p
ind]) mat[labind[0], predind[0]] += 1 # mat = np.transpose(mat) return mat def get_prob_confmat(probs, labels): classnum = probs.shape[1] mat = np.zeros((classnum, classnum), dtype=int) for pind in range(probs.shape[0]): mat[int(labels[pind]), np.argmax(probs[pind, :])] +=
1 #mat = np.transpose(mat) return mat def get_block_confmat(confmat, blocks): assert(confmat.shape[0] == confmat.shape[1]) classnum = confmat.shape[0] #assert(np.sum(blocks) == classnum) blocknum = len(blocks) blockconf = np.zeros((blocknum, blocknum)) for bi in range(blocknum): for bj in range(blocknum): blockconf[bi, bj] = 0 for i in blocks[bi]: for j in blocks[bj]: blockconf[bi, bj] += confmat[i, j] assert np.sum(blockconf) == np.sum(confmat), 'Blocks should represent a splitting of confmat' return blockconf def get_block_probs_labels(prob, labels, blocks): # IMPORTANT: blocks must not intersect, otherwise the result is not unique blocknum = len(blocks) assert prob.shape[0] == labels.shape[0] newprob = np.zeros((prob.shape[0], blocknum)) for i in range(blocknum): newprob[:, i] = np.sum(prob[:, blocks[i]], 1) #normalize to have sum = 1 mult_coefs = np.sum(newprob, 1, keepdims=True) newprob /= np.tile(mult_coefs, (1, blocknum)) newlab = np.zeros(prob.shape[0]) missing = [] for i in range(prob.shape[0]): is_missing = True for j in range(len(blocks)): if (labels[i] in blocks[j]): newlab[i] = j is_missing = False break if (is_missing): missing.append(i) newprob = np.delete(newprob, missing, axis=0) newlab = np.delete(newlab, missing, axis=0) return newprob, newlab def get_spec_for_sens(scores, labels, sens): fpr, tpr, thresholds = metrics.roc_curve(labels, scores, pos_label=0) curind = np.size(tpr) - 1 while (tpr[curind-1] >= sens): curind -= 1 return tpr[curind], 1 - fpr[curind], thresholds[curind] def get_sens_for_spec(scores, labels, spec): fpr, tpr, thresholds = metrics.roc_curve(labels, scores, pos_label=0) curind = 0 while (1 - fpr[curind+1] >= spec): curind += 1 return tpr[curind], 1 - fpr[curind], thresholds[curind] def get_average_precisions(probs, labels): print 'probshape:', np.shape(probs) classnum = np.size(probs, 1) labels_arr = np.zeros_like(probs) for i in xrange(classnum): labels_arr[labels == i, i] = 1 print 'macro:', metrics.average_precision_score(labels_arr, probs, average='macro') print 'weighted:', metrics.average_precision_score(labels_arr, probs, average='weighted') skap = metrics.average_precision_score(labels_arr, probs, average=None) return {i: round(skap[i] * 1000) / 10 for i in xrange(classnum)}
sadad111/leetcodebox
Minimum Time Difference.py
Python
gpl-3.0
1,137
0.002639
class Solution(object): def findMinDifference(self, timePoints): """ :type timePoints: List[str] :rtype: int """ def convert(time): return int(time[:2]) * 60 + int(time[3:]) minutes = map(convert, timePoints) minutes.sort() return min( (y - x) % (24 * 60) for x, y in zip(minutes, minutes[1:] + minutes[:1]) ) # public class Solution { # public int findMinDifference(List<String> timePoints) { # int mm = Integer.MAX_VALUE; # List<Integer> time = new ArrayList<>(); # # for(int i = 0; i < timePoints.size(); i++){ # Integ
er h = Integer.valueOf(timePoints.get(i).substring(0, 2)); # time.add(60 * h + Integer.valueOf(timePoints.get(i).substring(3, 5))); # } # # Collections.sort(time, (Integer a, Integer b) -> a - b); # # for(int i = 1; i < time.size(); i++){ # mm = Math.min(mm, tim
e.get(i) - time.get(i-1)); # } # # int corner = time.get(0) + (1440 - time.get(time.size()-1)); # return Math.min(mm, corner); # } # # }
jseabold/statsmodels
statsmodels/stats/tests/test_descriptivestats.py
Python
bsd-3-clause
6,216
0
import numpy as np from numpy.testing import assert_almost_equal, assert_equal import pandas as pd import pytest from statsmodels.iolib.table import SimpleTable from statsmodels.stats.descriptivestats import ( Describe, Description, describe, sign_test, ) pytestmark = pytest.mark.filterwarnings( "ignore::DeprecationWarning:statsmodels.stats.descriptivestats" ) @pytest.fixture(scope="function") def df(): a = np.random.RandomState(0).standard_normal(100) b = pd.Series(np.arange(100) % 10, dtype="category") return pd.DataFrame({"a": a, "b": b}) def test_sign_test(): x = [7.8, 6.6, 6.5, 7.4, 7.3, 7.0, 6.4, 7.1, 6.7, 7.6, 6.8] M, p = sign_test(x, mu0=6.5) # from R SIGN.test(x, md=6.5) # from R assert_almost_equal(p, 0.02148, 5) # not from R, we use a different convention assert_equal(M, 4) data5 = [ [25, "Bob", True, 1.2], [41, "John", False, 0.5], [30, "Alice", True, 0.3], ] data1 = np.array( [(1, 2, "a", "aa"), (2, 3, "b", "bb"), (2, 4, "b", "cc")], dtype=[ ("alpha", float), ("beta", int), ("gamma", "|S1"), ("delta", "|S2"), ], ) data2 = np.array( [(1, 2), (2, 3), (2, 4)], dtype=[("alpha", float), ("beta", float)] ) data3 = np.array([[1, 2, 4, 4], [2, 3, 3, 3], [2, 4, 4, 3]], dtype=float) data4 = np.array([[1, 2, 3, 4, 5, 6], [6, 5, 4, 3, 2, 1], [9, 9, 9, 9, 9, 9]]) class TestSimpleTable(object): # from statsmodels.iolib.table import SimpleTable, default_txt_fmt @pytest.mark.xfail(reason="Bad test") def test_basic_1(self): print("test_basic_1") t1 = Describe(data1) print(t1.summary()) def test_basic_2(self): print("test_basic_2") t2 = Describe(data2) print(t2.summary()) def test_describe_summary_float_ndarray(self): print("test_describe_summary_float_ndarray") t1 = Describe(data3) print(t1.summary()) def test_basic_4(self): print("test_basic_4") t1 = Describe(data4) print(t1.summary()) @pytest.mark.xfail(reason="Bad test") def test_basic_1a(self): print("test_basic_1a") t1 = Describe(data1) print(t1.summary(stats="basic", columns=["alpha"])) @pytest.mark.xfail(reason="Bad test") def test_basic_1b(self): print("test_basic_1b") t1 = Describe(data1) print(t1.summary(stats="basic", columns="all")) def test_basic_2a(self): print("test_basic_2a") t2 = Describe(data2) print(t2.summary(stats="all")) def test_basic_3(aself): t1 = Describe(data3) print(t1.summary(stats="all")) def test_basic_4a(self): t1 = Describe(data4) print(t1.summary(stats="all")) def test_description_exceptions(): df = pd.DataFrame( {"a": np.empty(100), "b": pd.Series(np.arange(100) % 10)}, dtype="category", ) with pytest.raises(ValueError): Description(df, stats=["unknown"]) with pytest.raises(ValueError): Description(df, alpha=-0.3) with pytest.raises(ValueError): Description(df, percentiles=[0, 100]) with pytest.raises(ValueError): Description(df, percentiles=[10, 20, 30, 10]) with pytest.raises(ValueError): Description(df, ntop=-3) with pytest.raises(ValueError): Description(df, numeric=False, categorical=False) def test_description_basic(df): res = Description(df) assert isinstance(res.frame, pd.DataFrame) assert isinstance(res.numeric, pd.DataFrame) assert isinstance(res.categorical, pd.DataFrame) assert isinstance(res.summary(), SimpleTable) assert isinstance(res.summary().as_text(), str) assert "Descriptive" in str(res) res = Description(df.a) assert isinstance(res.frame, pd.DataFrame) assert isinstance(res.numeric, pd.DataFrame) assert isinstance(res.categorical, pd.DataFrame) assert isinstance(res.summary(), SimpleTable) assert isinstance(res.summary().as_text(), str) assert "Descriptive" in str(res) res = Description(df.b) assert isinstance(res.frame, pd.DataFrame) assert isinstance(res.numeric, pd.DataFrame) assert isinstance(res.categorical, pd.DataFrame) assert isinstance(res.summary(), SimpleTable) assert isinstance(res.summary().as_text(), str) assert "Descriptive" in str(res) def test_odd_percentiles(df): percentiles = np.linspace(7.0, 93.0, 13) res = Description(df, percentiles=percentiles) print(res.frame.index) def test_large_ntop(df): res = Description(df, ntop=15) assert "top_15" in res.frame.index def test_use_t(df): res = Description(df) res_t = Description(df, use_t=True) assert res_t.frame.a.lower_ci < res.frame.a.lower_ci assert res_t.frame.a.upper_ci > res.frame.a.upper_ci SPECIAL = ( ("ci", ("lower_ci", "upper_ci")), ("jarque_bera", ("jarque_bera", "jarque_bera_pval")), ("mode", ("mode", "mode_freq")), ("top", tuple([f"top_{i}" for i in range(1, 6)])), ("freq", tuple([f"freq_{i}" for i in range(1, 6)])), ) @pytest.mark.parametrize("stat", SPECIAL, ids=[s[0] for s in SPECIAL]) def test_special_stats(df, stat): all_stats = [st for st in Description.default_statistics] all_stats.remove(stat[0]) res = Description(df, stats=all_stats) for val in stat[1]: assert val not in res.frame.
index def test_empty_columns(df): df["c"] = np.nan res = Description(df) dropped = res.frame.c.dropna() assert dropped.shape[0] == 2 assert "missing" in dropped assert "nobs" in dropped df["c"] = np.nan res = Description(df.c)
dropped = res.frame.dropna() assert dropped.shape[0] == 2 @pytest.mark.skipif(not hasattr(pd, "NA"), reason="Must support NA") def test_extension_types(df): df["c"] = pd.Series(np.arange(100.0)) df["d"] = pd.Series(np.arange(100), dtype=pd.Int64Dtype()) df.loc[df.index[::2], "c"] = np.nan df.loc[df.index[::2], "d"] = pd.NA res = Description(df) np.testing.assert_allclose(res.frame.c, res.frame.d) def test_describe(df): pd.testing.assert_frame_equal(describe(df), Description(df).frame)
Pikecillo/genna
external/4Suite-XML-1.0.2/test/Xml/Xslt/Borrowed/rt_20000515.py
Python
gpl-2.0
1,364
0.002199
#"Ron Ten-Hove" <rtenhove@forte.com>, by wondering why he doesn't get the expected result from passing params to unnamed templates, exposes a subtle gotcha. 15 May 2000 from Xml.Xslt import test_harness sheet_1 = """<?xml version="1.
0"?> <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0"> <xsl:output indent="yes"/> <xsl:template match="/"> <root> <xsl:apply-templates> <xsl:with-param name="param">List</xsl:with-param> </xsl:apply-templates> </root> </xsl:template>
<xsl:template match="chapter"> <xsl:param name="param">Unset</xsl:param> <chap> <xsl:attribute name="title"><xsl:value-of select="@name"/></xsl:attribute> <xsl:attribute name="cat"><xsl:value-of select="$param"/></xsl:attribute> </chap> </xsl:template> <xsl:template match="text()" /> </xsl:stylesheet>""" source_1 = """<?xml version="1.0"?> <doc> <chapter name="The beginning"> Alpha. </chapter> </doc> """ expected_1="""<?xml version='1.0' encoding='UTF-8'?> <root> <chap title='The beginning' cat='Unset'/> </root>""" def Test(tester): source = test_harness.FileInfo(string=source_1) sheet = test_harness.FileInfo(string=sheet_1) test_harness.XsltTest(tester, source, [sheet], expected_1) return
onoga/wm
src/gnue/forms/GFObjects/GFLayout.py
Python
gpl-2.0
3,034
0.018128
# GNU Enterprise Forms - GF Object Hierarchy - Layout # # Copyright 2001-2007 Free Software Foundation # # This file is part of GNU Enterprise # # GNU Enterprise is free software; you can redistribute it # and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation; either # version 2, or (at your option) any later version. # # GNU Enterprise is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied # warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR # PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public # License along with program; see the file COPYING. If not, # write to the Free Software Foundation, Inc., 59 Temple Place # - Suite 330, Boston, MA 02111-1307, USA. # # pylint: disable-msg=W0704 # # $Id: GFLayout.py,v 1.5 2008/11/04 20:14:16 oleg Exp $ """ Handles the <layout> tag. """ from GFContainer import GFContainer __all__ = ['GFLayout', 'LayoutConceptError'] # ============================================================================= # Class implementing the layout tag # ============================================================================= class GFLayout(GFContainer): """ Implementation of the <layout> tag """ # ------------------------------------------------------------------------- # Attributes #
--------------------------------------------------------------------
----- tabbed = 'none' name = 'layout' # ------------------------------------------------------------------------- # Constructor # ------------------------------------------------------------------------- def __init__(self, parent=None): GFContainer.__init__(self, parent, "GFLayout") self._triggerGlobal = 1 self._xmlchildnamespaces = {} self._triggerFunctions = { 'find_child': {'function': self.__trigger_find_child}, } def __trigger_find_child(self, name, childType = None, recursive = True): child = self.findChildNamed(name, childType, recursive) if child: return child.get_namespace_object() # ------------------------------------------------------------------------- # Implementation of virtual methods # ------------------------------------------------------------------------- def _phase_1_init_(self): """ Build a dictionary of all XML namespaces used by the layouts children """ GFContainer._phase_1_init_(self) self._xmlchildnamespaces = self.__find_namespaces(self) # ------------------------------------------------------------------------- # Find the XML namespace in use by any child objects # ------------------------------------------------------------------------- def __find_namespaces(self, gf_object): result = {} for child in gf_object._children: try: if child._xmlnamespaces: result.update(child._xmlnamespaces) else: result.update(self.__find_namespaces(child)) except AttributeError: pass return result
syed/PerfKitBenchmarker
perfkitbenchmarker/static_virtual_machine.py
Python
apache-2.0
10,858
0.005618
# Copyright 2014 PerfKitBenchmarker Authors. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to
in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Class to represent a Static Virtual Machine object. All static VMs provided in a given group will be used before any non-static VMs are provisioned. For example, in a test that uses 4 VMs, if 3 static VMs ar
e provided, all of them will be used and one additional non-static VM will be provisioned. The VM's should be set up with passwordless ssh and passwordless sudo (neither sshing nor running a sudo command should prompt the user for a password). All VM specifics are self-contained and the class provides methods to operate on the VM: boot, shutdown, etc. """ import collections import json import logging import threading from perfkitbenchmarker import disk from perfkitbenchmarker import flags from perfkitbenchmarker import linux_virtual_machine from perfkitbenchmarker import virtual_machine from perfkitbenchmarker import windows_virtual_machine WINDOWS = 'windows' DEBIAN = 'debian' RHEL = 'rhel' UBUNTU_CONTAINER = 'ubuntu_container' FLAGS = flags.FLAGS class StaticVmSpec(virtual_machine.BaseVmSpec): """Object containing all info needed to create a Static VM.""" def __init__(self, ip_address=None, user_name=None, ssh_private_key=None, internal_ip=None, ssh_port=22, install_packages=True, password=None, disk_specs=None, os_type=None, **kwargs): """Initialize the StaticVmSpec object. Args: ip_address: The public ip address of the VM. user_name: The username of the VM that the keyfile corresponds to. ssh_private_key: The absolute path to the private keyfile to use to ssh to the VM. internal_ip: The internal ip address of the VM. ssh_port: The port number to use for SSH and SCP commands. install_packages: If false, no packages will be installed. This is useful if benchmark dependencies have already been installed. password: The password used to log into the VM (Windows Only). disk_specs: A list of dictionaries containing kwargs used to create disk.BaseDiskSpecs. os_type: The OS type of the VM. See the flag of the same name for more information. """ super(StaticVmSpec, self).__init__(**kwargs) self.ip_address = ip_address self.user_name = user_name self.ssh_private_key = ssh_private_key self.internal_ip = internal_ip self.ssh_port = ssh_port self.install_packages = install_packages self.password = password self.os_type = os_type self.disk_specs = disk_specs class StaticDisk(disk.BaseDisk): """Object representing a static Disk.""" def _Create(self): """StaticDisks don't implement _Create().""" pass def _Delete(self): """StaticDisks don't implement _Delete().""" pass def Attach(self): """StaticDisks don't implement Attach().""" pass def Detach(self): """StaticDisks don't implement Detach().""" pass class StaticVirtualMachine(virtual_machine.BaseVirtualMachine): """Object representing a Static Virtual Machine.""" is_static = True vm_pool = collections.deque() vm_pool_lock = threading.Lock() def __init__(self, vm_spec): """Initialize a static virtual machine. Args: vm_spec: A StaticVmSpec object containing arguments. """ super(StaticVirtualMachine, self).__init__(vm_spec, None, None) self.ip_address = vm_spec.ip_address self.user_name = vm_spec.user_name self.ssh_private_key = vm_spec.ssh_private_key self.internal_ip = vm_spec.internal_ip self.zone = self.zone or ('Static - %s@%s' % (self.user_name, self.ip_address)) self.ssh_port = vm_spec.ssh_port self.install_packages = vm_spec.install_packages self.password = vm_spec.password if vm_spec.disk_specs: for spec in vm_spec.disk_specs: self.disk_specs.append(disk.BaseDiskSpec(**spec)) self.from_pool = False def _Create(self): """StaticVirtualMachines do not implement _Create().""" pass def _Delete(self): """Returns the virtual machine to the pool.""" if self.from_pool: with self.vm_pool_lock: self.vm_pool.appendleft(self) def CreateScratchDisk(self, disk_spec): """Create a VM's scratch disk. Args: disk_spec: virtual_machine.BaseDiskSpec object of the disk. """ spec = self.disk_specs[len(self.scratch_disks)] self.scratch_disks.append(StaticDisk(spec)) def DeleteScratchDisks(self): """StaticVirtualMachines do not delete scratch disks.""" pass def GetLocalDisks(self): """Returns a list of local disks on the VM.""" return [disk_spec.device_path for disk_spec in self.disk_specs if disk_spec.device_path] @classmethod def ReadStaticVirtualMachineFile(cls, file_obj): """Read a file describing the static VMs to use. This function will read the static VM information from the provided file, instantiate VMs corresponding to the info, and add the VMs to the static VM pool. The provided file should contain a single array in JSON-format. Each element in the array must be an object with required format: ip_address: string. user_name: string. keyfile_path: string. ssh_port: integer, optional. Default 22 internal_ip: string, optional. zone: string, optional. local_disks: array of strings, optional. scratch_disk_mountpoints: array of strings, optional os_type: string, optional (see package_managers) install_packages: bool, optional Args: file_obj: An open handle to a file containing the static VM info. Raises: ValueError: On missing required keys, or invalid keys. """ vm_arr = json.load(file_obj) if not isinstance(vm_arr, list): raise ValueError('Invalid static VM file. Expected array, got: %s.' % type(vm_arr)) required_keys = frozenset(['ip_address', 'user_name']) linux_required_keys = required_keys | frozenset(['keyfile_path']) required_keys_by_os = { WINDOWS: required_keys | frozenset(['password']), DEBIAN: linux_required_keys, RHEL: linux_required_keys, UBUNTU_CONTAINER: linux_required_keys, } required_keys = required_keys_by_os[FLAGS.os_type] optional_keys = frozenset(['internal_ip', 'zone', 'local_disks', 'scratch_disk_mountpoints', 'os_type', 'ssh_port', 'install_packages']) allowed_keys = required_keys | optional_keys def VerifyItemFormat(item): """Verify that the decoded JSON object matches the required schema.""" item_keys = frozenset(item) extra_keys = sorted(item_keys - allowed_keys) missing_keys = required_keys - item_keys if extra_keys: raise ValueError('Unexpected keys: {0}'.format(', '.join(extra_keys))) elif missing_keys: raise ValueError('Missing required keys: {0}'.format( ', '.join(missing_keys))) for item in vm_arr: VerifyItemFormat(item) ip_address = item['ip_address'] user_name = item['user_name'] keyfile_path = item.get('keyfile_path') internal_ip = item.get('internal_ip') zone = item.get('zone') local_disks = item.get('local_disks', []) password = item.get('password') if not isinstance(local_disks, list): raise ValueError('Expected a list of local disks, got: {0}'.format( local_disks)) scratch_disk_mountpoints = item.get('scratch_disk_mountpoints', []) if not isinstance(scratch
hexlism/css_platform
sleepyenv/lib/python2.7/site-packages/Flask_Admin-1.2.0-py2.7.egg/flask_admin/contrib/mongoengine/view.py
Python
apache-2.0
20,150
0.000893
import logging from flask import request, flash, abort, Response from flask_admin import expose from flask_admin.babel import gettext, ngettext, lazy_gettext from flask_admin.model import BaseModelView from flask_admin.model.form import wrap_fields_in_fieldlist from flask_admin.model.fields import ListEditableFieldList from flask_admin._compat import iteritems, string_types import mongoengine import gridfs from mongoengine.connection import get_db from bson.objectid import ObjectId from flask_admin.actions import action from .filters import FilterConverter, BaseMongoEngineFilter from .form import get_form, CustomModelConverter from .typefmt import DEFAULT_FORMATTERS from .tools import parse_like_term from .helpers import format_error from .ajax import process_ajax_references, create_ajax_loader from .subdoc import convert_subdocuments # Set up logger log = logging.getLogger("flask-admin.mongo") SORTABLE_FIELDS = set(( mongoengine.StringField, mongoengine.IntField, mongoengine.FloatField, mongoengine.BooleanField, mongoengine.DateTimeField, mongoengine.ComplexDateTimeField, mongoengine.ObjectIdField, mongoengine.DecimalField, mongoengine.ReferenceField, mongoengine.EmailField, mongoengine.UUIDField, mongoengine.URLField )) class ModelView(BaseModelView): """ MongoEngine model scaffolding. """ column_filters = None """ Collection of the column filters. Can contain either field names or instances of :class:`flask_admin.contrib.mongoengine.filters.BaseFilter` classes. For example:: class MyModelView(BaseModelView): column_filters = ('user', 'email') or:: class MyModelView(BaseModelView): column_filters = (BooleanEqualFilter(User.name, 'Name')) """ model_form_converter = CustomModelConverter """ Model form conversion class. Use this to implement custom field conversion logic. Custom class should be derived from the `flask_admin.contrib.mongoengine.form.CustomModelConverter`. For example:: class MyModelConverter(AdminModelConverter): pass class MyAdminView(ModelView): model_form_converter = MyModelConverter """ object_id_converter = ObjectId """ Mongodb ``_id`` value conversion function. Default is `bson.ObjectId`. Use this if you are using String, Binary and etc. For example:: class MyModelView(BaseModelView): object_id_converter = int or:: class MyModelView(BaseModelView): object_id_converter = str """ filter_converter = FilterConverter() """ Field to filter converter. Override this attribute to use a non-default converter. """ column_type_formatters = DEFAULT_FORMATTERS """ Customized type formatters for MongoEngine backend """ allowed_search_types = (mongoengine.StringField, mongoengine.URLField, mongoengine.EmailField) """ List of allowed search field types. """ form_subdocuments = None """ Subdocument configuration options. This field accepts dictionary, where key is field name and value is either dictionary or instance of the `flask_admin.contrib.EmbeddedForm`. Consider following example:: class Comment(db.EmbeddedDocument): name = db.StringField(max_length=20, required=True) value = db.StringField(max_length=20) class Post(db.Document): text = db.StringField(max_length=30) data = db.EmbeddedDocumentField(Comment) class MyAdmin(ModelView): form_subdocuments = { 'data': { 'form_columns': ('name',) } } In this example, `Post` model has child `Comment` subdocument. When generating form for `Comment` embedded document, Flask-Admin will only create `name` field. It is also possible to use class-based embedded document configuration:: class CommentEmbed(EmbeddedForm): form_columns = ('name',) class MyAdmin(ModelView): form_subdocuments = { 'data': CommentEmbed() } Arbitrary depth nesting is supported:: class SomeEmbed(EmbeddedForm): form_excluded_columns = ('test',) class CommentEmbed(EmbeddedForm): form_columns = ('name',) form_subdocuments = { 'inner': SomeEmbed() } class MyAdmin(ModelView): form_subdocuments = { 'data': CommentEmbed() } There's also support for forms embedded into `ListField`. All you have to do is to create nested rule with `None` as a name. Even though it is slightly confusing, but that's how Flask-MongoEngine creates
form fields embedded into ListField:: class Comment(db.EmbeddedDocument): nam
e = db.StringField(max_length=20, required=True) value = db.StringField(max_length=20) class Post(db.Document): text = db.StringField(max_length=30) data = db.ListField(db.EmbeddedDocumentField(Comment)) class MyAdmin(ModelView): form_subdocuments = { 'data': { 'form_subdocuments': { None: { 'form_columns': ('name',) } } } } """ def __init__(self, model, name=None, category=None, endpoint=None, url=None, static_folder=None, menu_class_name=None, menu_icon_type=None, menu_icon_value=None): """ Constructor :param model: Model class :param name: Display name :param category: Display category :param endpoint: Endpoint :param url: Custom URL :param menu_class_name: Optional class name for the menu item. :param menu_icon_type: Optional icon. Possible icon types: - `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon - `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon - `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory - `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL :param menu_icon_value: Icon glyph name or URL, depending on `menu_icon_type` setting """ self._search_fields = [] super(ModelView, self).__init__(model, name, category, endpoint, url, static_folder, menu_class_name=menu_class_name, menu_icon_type=menu_icon_type, menu_icon_value=menu_icon_value) self._primary_key = self.scaffold_pk() def _refresh_cache(self): """ Refresh cache. """ # Process subdocuments if self.form_subdocuments is None: self.form_subdocuments = {} self._form_subdocuments = convert_subdocuments(self.form_subdocuments) # Cache other properties super(ModelView, self)._refresh_cache() def _process_ajax_references(self): """ AJAX endpoint is exposed by top-level admin view class, but subdocuments might have AJAX references too. This method will recursively go over subdocument configuration and will precompute AJAX references for t
wonder-sk/inasafe
safe/impact_statistics/test/test_postprocessor_manager.py
Python
gpl-3.0
5,518
0.000363
# coding=utf-8 """ InaSAFE Disaster risk assessment tool developed by AusAid and World Bank - **GUI Test Cases.** Contact : ole.moller.nielsen@gmail.com .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = 'marco@opengis.ch' __revision__ = '$Format:%H$' __date__ = '19/05/2013' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') import unittest import os import logging from qgis.core import QgsMapLayerRegistry from safe.impact_functions import register_impact_functions from safe.test.utilities import ( set_canvas_crs, set_jakarta_extent, GEOCRS, load_standard_layers, setup_scenario, canvas_list, get_qgis_app) # AG: get_qgis_app() should be called before importing modules from # safe.gui.widgets.dock QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app() from safe.gui.widgets.dock import Dock DOCK = Dock(IFACE) LOGGER = logging.getLogger('InaSAFE') # noinspection PyArgumentList class PostprocessorManagerTest(unittest.TestCase): """Test the postprocessor manager""" # noinspection PyPep8Naming def setUp(self): """Fixture run before all tests""" os.environ['LANG'] = 'en' DOCK.show_only_visible_layers_flag = True load_standard_layers(DOCK) DOCK.cboHazard.setCurrentIndex(0) DOCK.cboExposure.setCurrentIndex(0) DOCK.cboFunction.setCurrentIndex(
0) DOCK.run_in_thread_flag = False DOCK.show_only_visible_layers_flag = False DOCK.set_layer_from_title_flag = False DOCK.zoom_to_impact_flag = False DOCK.hide_exposure_flag = False DOCK.show_intermediate_layers = False set_jakarta_extent() register_impact_functions() def tearDown(self): """Run after each test.""" # Let's
use a fresh registry, canvas, and dock for each test! QgsMapLayerRegistry.instance().removeAllMapLayers() DOCK.cboHazard.clear() DOCK.cboExposure.clear() # noinspection PyMethodMayBeStatic def test_check_postprocessing_layers_visibility(self): """Generated layers are not added to the map registry.""" # Explicitly disable showing intermediate layers DOCK.show_intermediate_layers = False # with KAB_NAME aggregation attribute defined in .keyword using # kabupaten_jakarta_singlepart.shp result, message = setup_scenario( DOCK, hazard='Continuous Flood', exposure='Population', function_id='FloodEvacuationRasterHazardFunction', aggregation_layer=u"Dístríct's of Jakarta") set_jakarta_extent(dock=DOCK) assert result, message # LOGGER.info("Registry list before:\n%s" % # QgsMapLayerRegistry.instance().mapLayers()) # one layer (the impact) should have been added expected_count = len(CANVAS.layers()) + 1 # # Press RUN DOCK.accept() # no KW dialog will popuo due to complete keywords after_count = len(CANVAS.layers()) # LOGGER.info("Registry list after:\n%s" % # QgsMapLayerRegistry.instance().mapLayers()) message = ( 'Expected %s items in canvas, got %s' % (expected_count, after_count)) assert expected_count == after_count, message # Now run again showing intermediate layers DOCK.show_intermediate_layers = True # Press RUN DOCK.accept() # no KW dialog will popup due to complete keywords # one layer (the impact) should have been added expected_count += 2 after_count = len(CANVAS.layers()) LOGGER.info("Canvas list after:\n %s" % canvas_list()) message = ( 'Expected %s items in canvas, got %s' % (expected_count, after_count)) # We expect two more since we enabled showing intermediate layers assert expected_count == after_count, message # noinspection PyMethodMayBeStatic def test_post_processor_output(self): """Check that the post processor does not add spurious report rows.""" # with KAB_NAME aggregation attribute defined in .keyword using # kabupaten_jakarta_singlepart.shp result, message = setup_scenario( DOCK, hazard='Continuous Flood', exposure='Population', function_id='FloodEvacuationRasterHazardFunction') # Enable on-the-fly reprojection set_canvas_crs(GEOCRS, True) set_jakarta_extent() assert result, message # Press RUN DOCK.accept() message = 'Spurious 0 filled rows added to post processing report.' result = DOCK.wvResults.page().currentFrame().toPlainText() for line in result.split('\n'): if 'Entire area' in line: tokens = str(line).split('\t') tokens = tokens[1:] total = 0 for token in tokens: total += float(token.replace(',', '')) assert total != 0, message if __name__ == '__main__': suite = unittest.makeSuite(PostprocessorManagerTest) runner = unittest.TextTestRunner(verbosity=2) runner.run(suite)
CLVsol/clvsol_odoo_api
__init__.py
Python
agpl-3.0
2,405
0
# -*- encoding: utf-8 -*- # -*- coding: utf-8 -*- ############################################################################### # # Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### from hr_department import * from hr_department_history import * from hr_employee import * from hr_employee_history import * from hr_employee_history_log import * from hr_employee_log import * from hr_job import * from hr_job_history import * from ir_sequence import * from l10n_br_base_city import * from res_country import * from res_cou
ntry_state import * from res_partner import * from res_users import * from survey_survey import * from clv_address import * from clv_address_category import * from clv_address_history import * from clv_address_history_log import * from clv_address_log import * from clv_document import * from clv_document_category import * from clv_document_log import * from clv_document_person import * from clv_event import * from clv_event_categor
y import * from clv_event_log import * from clv_global_tag import * from clv_history_marker import * from clv_lab_test_criterion import * from clv_lab_test_request import * from clv_lab_test_result import * from clv_lab_test_type import * from clv_lab_test_unit import * from clv_mfile import * from clv_person import * from clv_person_address import * from clv_person_address_history import * from clv_person_address_history_log import * from clv_person_address_role import * from clv_person_category import * from clv_person_history import * from clv_person_history_log import * from clv_person_log import * from clv_person_mng import * from clv_person_mng_log import * from clv_survey import *
IfcOpenShell/IfcOpenShell
src/ifcopenshell-python/ifcopenshell/api/structural/edit_structural_load_case.py
Python
lgpl-3.0
1,177
0
# IfcOpenShell - IFC toolkit and geometry engine # Copyright (C) 2021 Dion Moult <dion@thinkmoult.com> # # This file is part of IfcOpenShell. # # IfcOpenShell is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # IfcOpenShell is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General
Public License # along with IfcOpenShell. If not, see <http://www.gnu.org/licenses/>. class Usecase: def __init__(self, file, **settings): self.file = file self.settings = {"load_case": None, "attributes": {}} for key, value in settings.items(): self.settings[key] = value def execute(self): for name, value in self.settings["attributes"].
items(): setattr(self.settings["load_case"], name, value)
recipy/recipy
recipy/utils.py
Python
apache-2.0
1,587
0
import six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this
function, `codecs` is used to open the file with proper encoding. """ try: mode = args[1] except IndexError: mode = kwargs.get('mode', 'r') # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') # This if statement cannot be combined with the previous if statement, # bec
ause otherwise, files will be opened before they is logged. # This causes problems with logging of file diffs, because when a file is # opened for writing, its contents will be discarded. # TODO: add tests for this if six.PY3: f = __builtins__['open'](*args, **kwargs) else: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f)
Jortolsa/l10n-spain
l10n_es_toponyms/wizard/__init__.py
Python
agpl-3.0
1,118
0
# -*- coding: utf-8 -*- ############################################################################## #
# OpenERP, Open Source Management Solution # Copyright (c) 2013-2015 Serv. Tecnol. Avanzados # Pedro M. Baeza <pedro.baeza@serviciosbaeza.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be us
eful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import geonames_import from . import l10n_es_toponyms_wizard
darthdeus/dotfiles
c_ycm_conf.py
Python
mit
5,178
0.018733
# This file is NOT licensed under the GPLv3, which is the license for the rest # of YouCompleteMe. # # Here's the license text for this file: # # This is free and unencumbered software released into the public domain. # # Anyone is free to copy, modify, publish, use, compile
, sell, or # distribute this software, either in source code form or as a compiled # binary, for any purpose, commercial or non-commercial, and by any # means. # # In jurisdictions that recognize copyright laws, the author or authors # of this software dedicate any and all copyright interest in the # software to the public domain. We make this dedication for the benefit # of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of # relinquishment in perpetuity of all present and future rights to this # software under copyright law. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. # # For more information, please refer to <http://unlicense.org/> import os import ycm_core # These are the compilation flags that will be used in case there's no # compilation database set (by default, one is not set). # CHANGE THIS LIST OF FLAGS. YES, THIS IS THE DROID YOU HAVE BEEN LOOKING FOR. flags = [ '-Wall', '-Wextra', '-Werror', '-std=gnu11', '-x', 'c', '-isystem', '/usr/include', ] # Set this to the absolute path to the folder (NOT the file!) containing the # compile_commands.json file to use that instead of 'flags'. See here for # more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html # # You can get CMake to generate this file for you by adding: # set( CMAKE_EXPORT_COMPILE_COMMANDS 1 ) # to your CMakeLists.txt file. # # Most projects will NOT need to set this to anything; you can just change the # 'flags' list of compilation flags. Notice that YCM itself uses that approach. compilation_database_folder = '' if os.path.exists( compilation_database_folder ): database = ycm_core.CompilationDatabase( compilation_database_folder ) else: database = None SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ] def DirectoryOfThisScript(): return os.path.dirname( os.path.abspath( __file__ ) ) def MakeRelativePathsInFlagsAbsolute( flags, working_directory ): if not working_directory: return list( flags ) new_flags = [] make_next_absolute = False path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ] for flag in flags: new_flag = flag if make_next_absolute: make_next_absolute = False if not flag.startswith( '/' ): new_flag = os.path.join( working_directory, flag ) for path_flag in path_flags: if flag == path_flag: make_next_absolute = True break if flag.startswith( path_flag ): path = flag[ len( path_flag ): ] new_flag = path_flag + os.path.join( working_directory, path ) break if new_flag: new_flags.append( new_flag ) return new_flags def IsHeaderFile( filename ): extension = os.path.splitext( filename )[ 1 ] return extension in [ '.h', '.hxx', '.hpp', '.hh' ] def GetCompilationInfoForFile( filename ): # The compilation_commands.json file generated by CMake does not have entries # for header files. So we do our best by asking the db for flags for a # corresponding source file, if any. If one exists, the flags for that file # should be good enough. if IsHeaderFile( filename ): basename = os.path.splitext( filename )[ 0 ] for extension in SOURCE_EXTENSIONS: replacement_file = basename + extension if os.path.exists( replacement_file ): compilation_info = database.GetCompilationInfoForFile( replacement_file ) if compilation_info.compiler_flags_: return compilation_info return None return database.GetCompilationInfoForFile( filename ) def FlagsForFile( filename, **kwargs ): if database: # Bear in mind that compilation_info.compiler_flags_ does NOT return a # python list, but a "list-like" StringVec object compilation_info = GetCompilationInfoForFile( filename ) if not compilation_info: return None final_flags = MakeRelativePathsInFlagsAbsolute( compilation_info.compiler_flags_, compilation_info.compiler_working_dir_ ) # NOTE: This is just for YouCompleteMe; it's highly likely that your project # does NOT need to remove the stdlib flag. DO NOT USE THIS IN YOUR # ycm_extra_conf IF YOU'RE NOT 100% SURE YOU NEED IT. # try: # final_flags.remove( '-stdlib=libc++' ) # except ValueError: # pass else: relative_to = DirectoryOfThisScript() final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to ) return { 'flags': final_flags, 'do_cache': True }
AdamStelmaszczyk/pyechonest
doc/source/conf.py
Python
bsd-3-clause
8,757
0.006623
# -*- coding: utf-8 -*- # # pyechonest documentation build configuration file, created by # sphinx-quickstart on Thu Sep 30 15:51:03 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all
possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os, ins
pect # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) sys.path.insert(0,os.path.abspath("../../pyechonest")) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'contents' # General information about the project. project = u'pyechonest' copyright = u'2013, The Echo Nest' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '8.0.0' # The full version, including alpha/beta/rc tags. release = '8.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = ['themes/'] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = '200x160_lt.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. html_additional_pages = { "index": "index.html", } # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pyechonestdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'pyechonest.tex', u'pyechonest Documentation', u'The Echo Nest', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'pyechonest', u'pyechonest Documentation', [u'The Echo Nest'], 1) ] # -- Options for Epub output --------------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'pyechonest' epub_author = u'The Echo Nest' epub_publisher = u'The Echo Nest' epub_copyright = u'2012, The Echo Nest' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be
vmiklos/darcs-hooks
config.py
Python
gpl-2.0
88
0.011364
#!/usr/bin/env python class config: enabled_
plugins = ['cia', 'sendmail', 'sync
hook']
apdjustino/DRCOG_Urbansim
src/opus_core/tools/explore_model.py
Python
agpl-3.0
3,937
0.013462
# Opus/UrbanSim urban simulation software. # Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington # See opus_core/LICENSE from optparse import OptionParser from opus_core.misc import get_config_from_opus_path from opus_core.logger import logger from opus_core.
configurations.xml_configuration import XMLConfiguration from opus_core.simulation.model_explorer import ModelExplorer class ModelExplorerOptionGroup: def __init__(self, usage="python %prog [options] ", description="Runs the given model for the given year, using data from given directory. Options -y and -d are mandatory. Furth
ermore, either -c or -x must be given."): self.parser = OptionParser(usage=usage, description=description) self.parser.add_option("-m", "--model", dest="model_name", default = None, action="store", help="Name of the model to run.") self.parser.add_option("-y", "--year", dest="year", default = None, action="store", help="Year for which the model should run.") self.parser.add_option("-d", "--directory", dest="cache_directory", default = None, action="store", help="Cache directory to be used for the run. Use the keyword 'BASE', if the base year data should be used.") self.parser.add_option("-x", "--xml-configuration", dest="xml_configuration", default = None, action="store", help="Full path to an XML configuration file (must also provide a scenario name using -s). Either -x or -c must be given.") self.parser.add_option("-s", "--scenario_name", dest="scenario_name", default=None, help="Name of the scenario. Must be given if option -x is used.") self.parser.add_option("-c", "--configuration-path", dest="configuration_path", default=None, help="Opus path to Python module defining a configuration in dictionary format. Either -c or -x must be given.") self.parser.add_option("--group", dest="model_group", default = None, action="store", help="Name of the model group") def main(): import sys option_group = ModelExplorerOptionGroup() parser = option_group.parser (options, args) = parser.parse_args() if options.year is None: raise StandardError, "Year (argument -y) must be given." if options.cache_directory is None: raise StandardError, "Cache directory (argument -d) must be given." if (options.configuration_path is None) and (options.xml_configuration is None): raise StandardError, "Configuration path (argument -c) or XML configuration (argument -x) must be given." if (options.scenario_name is None) and (options.xml_configuration is not None): raise StandardError, "No scenario given (argument -s). Must be specified if option -x is used." if options.xml_configuration is not None: xconfig = XMLConfiguration(options.xml_configuration) else: xconfig = None if options.configuration_path is None: config = None else: config = get_config_from_opus_path(options.configuration_path) if options.cache_directory == 'BASE': cache_directory = None else: cache_directory = options.cache_directory explorer = ModelExplorer(model=options.model_name, year=int(options.year), scenario_name=options.scenario_name, model_group=options.model_group, configuration=config, xml_configuration=xconfig, cache_directory=cache_directory) explorer.run() return explorer if __name__ == '__main__': try: import wingdbstub except: pass ex = main()
wnormandin/bftest_cli
cli/dockcli.py
Python
mit
4,675
0.003422
# Basic command-line interface to manage docker containers which will use an # image stored in a dockerhub registry - 'pokeybill/bftest' import click from click.testing import CliRunner import docker import sys import time import requests this = sys.modules[__name__] BASE_URL = 'unix://var/run/docker.sock' REGISTRY = 'pokeybill/bftest' DIGEST = 'sha256:79215d32e5896c1ccd3f57d22ee6aaa7c9d79c9c87737f2b96673186de6ab060' @click.group() def default(): """ A basic docker container management wrapper """ pass @click.command() @click.argument('container') def run(container): """ attempts to start the docker container specified """ try: fetch_client() this.client.pull(REGISTRY) start_container(container) result = health_check(container) except docker.errors.APIError as e: click.echo('[!] Docker API Error: {}'.format(e)) sys.exit(1) except KeyboardInterrupt, SystemExit: click.echo('[!] Aborting') @click.command() @click.argument('container') def stop(container): """ attempts to stop the docker container specified """ try: fetch_client() this.client.stop(container) this.client.remove_container(container) except docker.errors.APIError as e: click.echo('[!] Error stopping container: {}'.format(e)) sys.exit(1) except KeyboardInterrupt, SystemExit: click.echo('[!] Aborting') @click.command() def test(): """ basic functional test to ensure containers can be managed """ click.echo('[*] Testing docker container creation/removal') cont_name = 'funky_aardvark' try: runner = CliRunner() # Test the RUN command result = runner.invoke(run, [cont_name]) result_txt = result.output.strip('\n') assert result.exit_code == 0, '[!] Application START failed: {}'.format(result_txt) assert 'Your app is running on' in result.output, \ '[!] Unexpected output: {}'.format(result.output) click.echo(result_txt) # Test container access click.echo('[*] Ensuring we can communicate with the containerized application') result = requests.get('http://127.0.0.1:8888/hello') assert result.status_code == 200, \ '[!] Unexpected HTTP response: {}'.format(result.status_code) click.echo('\t{}'.format(result.text)) # Test the STOP command result = runner.invoke(stop, [cont_name]) result_txt = result.output.strip('\n') assert result.exit_code == 0, '[!] Application STOP failed: {}'.format(result_txt) click.echo('[*] Container {} stopped'.format(cont_name)) except requests.exceptions.ConnectionError as e: click.echo('[!] Failed to communicate with the application') click.echo(e[0]) except AssertionError as e: click.echo('[*] Test failed - {}'.format(e)) except KeyboardInterrupt, SystemExit: click.echo('[!] Aborting') else: click.echo('[*] Test succeeded') default.add_command(run) default.add_command(stop) default.add_command(test) # Functions start here def health_check(inst_name): def __check_state(): cont_state = this.client.inspect_container(inst_name)['State'] if cont_state['Status']=='running': return cont_state['Health']['Status'] else: click.echo('[!] Container is not running!') repea
t = 0 while True: cont_status = __check_state() if cont_status == 'healthy': click.echo('[*] Your app is running on http://127.0.0.1:8888') return True eli
f cont_status == 'starting': if repeat > 6: return time.sleep(1) repeat += 1 else: click.echo('[!] Container status: {}'.format(cont_status)) return def start_container(inst_name): this.client.create_container( REGISTRY, detach=False, name=inst_name, ports=[8888], host_config=this.client.create_host_config( port_bindings={8888: ('127.0.0.1',8888)} ), ) this.client.start(inst_name) def fetch_client(base_url=BASE_URL): this.client = docker.APIClient(base_url=base_url, version='1.24') try: this.client.version() except requests.exceptions.ConnectionError as e: click.echo('[!] Unable to connect to Docker daemon @ {}'.format(BASE_URL)) sys.exit(1) if __name__=="__main__": default()
FDio/vpp
test/vpp_vxlan_tunnel.py
Python
apache-2.0
3,138
0
from vpp_interface import VppInterface from vpp_papi import VppEnum INDEX_INVALID = 0xffffffff DEFAULT_PORT = 4789 UNDEFINED_PORT = 0 def find_vxlan_tunnel(test, src, dst, s_port, d_port, vni): ts = test.vapi.vxlan_tunnel_v2_dump(INDEX_INVALID) src_port = DEFAULT_PORT if s_port != UNDEFINED_PORT: src_port = s_port dst_port = DEFAULT_PORT if d_port != UNDEFINED_PORT: dst_port = d_port for t in ts: if src == str(t.src_address) and \ dst == str(t.dst_address) and \ src_port == t.src_port and \ dst_port == t.dst_port and \ t.vni == vni: return t.sw_if_index return INDEX_INVALID class VppVxlanTunnel(VppInterface): """ VPP VXLAN interface """ def __init__(self, test, src, dst, vni, src_port=UNDEFINED_PORT, dst_port=UNDEFINED_PORT, mcast_itf=None, mcast_sw_if_index=INDEX_INVALID, decap_next_index=INDEX_INVALID, encap_vrf_id=None, instance=0xffffffff, is_l3=False): """ Create VXLAN Tunnel interface """ super(VppVxlanTunnel, self).__init__(test) self.src = src self.dst = dst self.vni = vni self.src_port = src_port self.dst_port = dst_port self.mcast_itf = mcast_itf self.mcast_sw_if_index = mcast_sw_if_index self.encap_vrf_id = encap_vrf_id self.decap_next_index = decap_next_index self.instance = instance self.is_l3 = is_l3 if (self.mcast_itf): self.mcast_sw_if_index = self.mcast_itf.sw_if_index def add_vpp_config(self): reply = self.test.vapi.vxlan_add_del_tunnel_v3( is_add=1, src_address=self.src, dst_address=self.dst, vni=self.vni, src_port=self.src_port, dst_port=self.dst_port, mcast_sw_if_index=self.mcast_sw_if_index, encap_vrf_id=self.encap_vrf_id, is_l3=self.is_l3, instance=self.instance, decap_next_index=self.decap_next_index) self.set_sw_if_index(reply.sw_if_index) self._test.registry.register(self, self._test.logger) def remove_vpp_config(self): self.test.vapi.vxlan_add_del_tunnel_v2( is_add=0, src_address=self.src, dst_address=self.dst, vni=self.vni, src_port=self.src_port, dst_port=self.dst_port, mcast_sw_if_index=self.mcast_sw_if
_index, encap_vrf_id=self.encap_vrf_id, instance=self.instance, decap_next_index=self.decap_next_index) def query_vpp_config(self): return (INDEX_INVALID != find_vxlan_tunnel(self._test, self.src, self.dst, self.src_port,
self.dst_port, self.vni)) def object_id(self): return "vxlan-%d-%d-%s-%s" % (self.sw_if_index, self.vni, self.src, self.dst)
karlch/vimiv
tests/window_test.py
Python
mit
1,521
0.001315
# vim: ft=python fileencoding=utf-8 sw=4 et sts=4 """Tests window.py for vimiv's test suite.""" import os from unittest import main, skipUnless from gi import require_version require_version('Gtk', '3.0') from gi.repository import Gdk from vimiv_testcase import VimivTestCase, refresh_gui class WindowTest(VimivTestCase): """Window Tests.""" @classmethod def setUpClass(cls): cls.init_test(cls, ["vimiv/testimages/"]) def test_fullscreen(self): """Toggle fullscreen.""" # Start without fullscreen self.assertFalse(self._is_fullscreen()) # Fullscreen self.vimiv["window"].toggle_fullscreen() refresh_gui(0.05) # Still not reliable # self.assertTrue(self._is_fullscreen()) # Unfullscreen self.vimiv["window"].toggle_fullscreen() refresh_gui(0.05) # self.assertFalse(self.vimiv["window"].is_fullscreen) self.vimiv["window"].fullscreen() def _is_fullscreen(self): state = self.vimiv["window"].get_window().get_state() return True if state & Gdk.WindowState.FULLS
CREEN else False @skipUnless(os.getenv("DISPLAY") == ":42", "Must run in Xvfb") def test_check_resize(self): """Resize window and check winsize.""" self.assertEqua
l(self.vimiv["window"].winsize, (800, 600)) self.vimiv["window"].resize(400, 300) refresh_gui() self.assertEqual(self.vimiv["window"].winsize, (400, 300)) if __name__ == "__main__": main()
aodag/WebDispatch
webdispatch/testing.py
Python
mit
523
0
""" utilities for testing """ def setup_environ(**kwargs): """ setup basic wsgi environ"""
environ = {} from wsgiref.util import setup_testing_defaults setup_testing_defaults(environ) environ.update(kwargs) return environ def make_env(path_info, script_name): """ set up basic wsgi environ""" from wsgiref.util impor
t setup_testing_defaults environ = { "PATH_INFO": path_info, "SCRIPT_NAME": script_name, } setup_testing_defaults(environ) return environ
ArtemBernatskyy/FundExpert.NET
mutual_funds/company/__init__.py
Python
gpl-3.0
66
0
defa
ult_app_config = 'mutual_funds.company.apps.CompanyAppConfig
'
cloudkick/libcloud
libcloud/compute/drivers/dummy.py
Python
apache-2.0
9,524
0.001575
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Dummy Driver @note: This driver is out of date """ import uuid import socket import struct from libcloud.base import ConnectionKey, NodeDriver, NodeSize, NodeLocation from libcloud.compute.base import NodeImage, Node from libcloud.compute.types import Provider,NodeState class DummyConnection(ConnectionKey): """ Dummy connection class """ def connect(self, host=None, port=None): pass class DummyNodeDriver(NodeDriver): """ Dummy node driver This is a fake driver which appears to always create or destroy nodes successfully. >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> node=driver.create_node() >>> node.public_ip[0] '127.0.0.3' >>> node.name 'dummy-3' If the credentials you give convert to an integer then the next node to be created will be one higher. Each time you create a node you will get a different IP address. >>> driver = DummyNodeDriver(22) >>> node=driver.create_node() >>> node.name 'dummy-23' """ name = "Dummy Node Provider" type = Provider.DUMMY def __init__(self, creds): self.creds = creds try: num = int(creds) except ValueError: num = None if num: self.nl = [] startip = _ip_to_int('127.0.0.1') for i in xrange(num): ip = _int_to_ip(startip + i) self.nl.append( Node(id=i, name='dummy-%d' % (i), state=NodeState.RUNNING, public_ip=[ip], private_ip=[], driver=self, extra={'foo': 'bar'}) ) else: self.nl = [ Node(id=1, name='dummy-1', state=NodeState.RUNNING, public_ip=['127.0.0.1'], private_ip=[], driver=self, extra={'foo': 'bar'}), Node(id=2, name='dummy-2', state=NodeState.RUNNING, public_ip=['127.0.0.1'], private_ip=[], driver=self, extra={'foo': 'bar'}), ] self.connection = DummyConnection(self.creds) def get_uuid(self, unique_field=None): return str(uuid.uuid4()) def list_nodes(self): """ List the nodes known to a particular driver; There are two default nodes created at the beginning >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> node_list=driver.list_nodes() >>> sorted([node.name for node in node_list ]) ['dummy-1', 'dummy-2'] each item in the list returned is a node object from which you can carry out any node actions you wish >>> node_list[0].reboot() True As more nodes are added, list_nodes will return them >>> node=driver.create_node() >>> sorted([node.name for node in driver.list_nodes()]) ['dummy-1', 'dummy-2', 'dummy-3'] """ return self.nl def reboot_node(self, node): """ Sets the node state to rebooting; in this dummy driver always returns True as if the reboot had been successful. >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> node=driver.create_node() >>> from libcloud.compute.types import NodeState >>> node.state == NodeState.RUNNING True >>> node.state == NodeState.REBOOTING False >>> driver.reboot_node(node) True >>> node.state == NodeState.REBOOTING True Please note, dummy nodes never recover from the reboot. """ node.state = NodeState.REBOOTING return True def destroy_node(self, node): """ Sets the node state to terminated and removes it from the node list >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> from libcloud.compute.types import NodeState >>> node = [node for node in driver.list_nodes() if node.name == 'dummy-1'][0] >>> node.state == NodeState
.RUNNING True >>> driver.destroy_node(node) True >>> node.state == NodeState.RUNNING False >>> [node for node in driver.list_nodes() if node.name == 'dummy-1'] []
""" node.state = NodeState.TERMINATED self.nl.remove(node) return True def list_images(self, location=None): """ Returns a list of images as a cloud provider might have >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> sorted([image.name for image in driver.list_images()]) ['Slackware 4', 'Ubuntu 9.04', 'Ubuntu 9.10'] """ return [ NodeImage(id=1, name="Ubuntu 9.10", driver=self), NodeImage(id=2, name="Ubuntu 9.04", driver=self), NodeImage(id=3, name="Slackware 4", driver=self), ] def list_sizes(self, location=None): """ Returns a list of node sizes as a cloud provider might have >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> sorted([size.ram for size in driver.list_sizes()]) [128, 512, 4096, 8192] """ return [ NodeSize(id=1, name="Small", ram=128, disk=4, bandwidth=500, price=4, driver=self), NodeSize(id=2, name="Medium", ram=512, disk=16, bandwidth=1500, price=8, driver=self), NodeSize(id=3, name="Big", ram=4096, disk=32, bandwidth=2500, price=32, driver=self), NodeSize(id=4, name="XXL Big", ram=4096*2, disk=32*4, bandwidth=2500*3, price=32*2, driver=self), ] def list_locations(self): """ Returns a list of locations of nodes >>> from libcloud.compute.drivers.dummy import DummyNodeDriver >>> driver = DummyNodeDriver(0) >>> sorted([loc.name + " in " + loc.country for loc in driver.list_locations()]) ['Island Datacenter in FJ', 'London Loft in GB', "Paul's Room in US"] """ return [ NodeLocation(id=1, name="Paul's Room", country='US', driver=self), NodeLocation(id=2, name="London Loft", country='GB', driver=self), NodeLocation(id=3, name="Island Datacenter", country='FJ',
sumyfly/vdebug
plugin/python/vdebug/opts.py
Python
mit
1,365
0.009524
class Options: instance = None def __init__(self,options): self.options = options @classmethod def set(cls,options): """Create an Options instance with the provided dictionary of options""" cls.instance = Options(options) @classmethod def inst(cls): """Get the Options instance. """ if cls.instance is None: raise OptionsError("No options have been set") return cls.instance @classmethod def get(cls,name,as_type = str): """Get an option by name. Raises an OptionsError if the option doesn't exist. """ inst = cls.inst() if name in inst.options: return as_type(inst.options[name]) else: raise OptionsError("No option with key '%s'"
% name) @classmethod def overwrite(cls,name,value): inst = cls.inst() inst.options[name] = value @classmethod def isset(cls,name): """Checks whether the option exists and is set. By set, it means whether the option has length. All the option values are strings.
""" inst = cls.inst() if name in inst.options and \ len(inst.options[name]) > 0: return True else: return False class OptionsError(Exception): pass
arunchandramouli/fanofpython
code/features/datatypes/lists1.py
Python
gpl-3.0
3,368
0.039489
''' Aim :: To demonstrate the use of a list Define a simple list , add values to it and iterate and print it A list consists of comma seperated values which could be of any type which is reprsented as [,,,,] .. all values are enclosed between '[' and ']' ** A list object is a mutable datatype which means it couldn't be hashed Anything that can be hashed can be set as a dictionary key ** Modifying an exisiting list will not result in a new list object, memory address will not be changed too. There are 2 scenarios of modification; -> Edit the existing item -> Both Mutable and Immutable datatypes can be edited, memory location not changed -> Replace the existing item -> Both mutable and immutable can be replaced ''' ''' Empty Mutable Types ... ''' list1 = [] dict1 = {} set1 = set() ''' Empty Immutable Types ... ''' tuple1 = () str1 = "" ''' Define a simple list with multiple datatypes ''' def_list = [1,2,"1","100","Python","Anne","A!@345<>_()",True,False,{1:100,2:200,3:300},range(10)] ''' Now create a variable ''' vara = def_list ''' M
odification of vara will result in modifying def_list ''' vara.append("Hero") print "Address of vara and def_list %s and %s "%(id(vara),id(def_list)),'\n\n' print "vara = %s "%(vara),'\n\n' print "def_list = %s "%(def_list),'\n\n' ''' Now creating a Partial Slice ... When a slice is created partially , we are actually breaking a container into pieces , hence it shall represent a new memory location. Hence modification of such will not affect the original container ''' getmeasliceofit = def
_list[3:] print "Address of getmeasliceofit and def_list %s and %s "%(id(getmeasliceofit),id(def_list)),'\n\n' print "getmeasliceofit = %s "%(getmeasliceofit),'\n\n' print "def_list = %s "%(def_list),'\n\n' ''' Now creating a Full Slice ... When a slice is created fully , we are actually creating a container which has its original values but represents the same address. Hence modification of such will affect the original container for eg :: If you verify all of the address below, but for getmeasliceofit, rest are all the same if I edit as def_list[0:] = range(5) , def_list will also get modified Meanwhile also If I edit as def_list[3:] = range(5), def_list will get modified But If I edit getmeasliceofit def_list will not get modified ''' getmeasliceofit = def_list[:] print "Address == ",id(def_list),'\n',id(def_list[3:]),'\n',id(getmeasliceofit),'\n',id(def_list[::]),'\n',id(def_list[0:]),'\n',id(def_list[:]),'\n' ''' Modifying def_list[3:] will affect def_list , but modifying getmeasliceofit doesn't This is because getmeasliceofit resides at a different memory location. ''' print '\n\n' , def_list , '\n\n' def_list[3:] = range(50) getmeasliceofit = None print def_list , '\n\n\n',def_list[3:],'\n\n' , getmeasliceofit,'\n\n\n' print 'Analyze memory locations of mutables examples ... ... ','\n\n' sayx = [1,2,3,4,5] print id(sayx),'\n' sayx = [4,5,6,7,8] print id(sayx),'\n' x = range(10) print id(x),'\n' x = range(10,50) print id(x),'\n' print 'Modify a mutable it shall still refer same location ... ... ','\n\n' ''' A Simple list ''' sayx = [1,2,3,4,5] print id(sayx),'\n' ''' A Simple list modified - change element @ position 4 ''' sayx[4] = range(10) print id(sayx),'\n'
NOAA-PMEL/PyFerret
pviewmod/cmndhelperpq.py
Python
unlicense
21,512
0.002836
''' CmndHelperPQ is a helper class for dealing with commands sent to a PyQt piped viewer. This package was developed by the Thermal Modeling and Analysis Project (TMAP) of the National Oceanographic and Atmospheric Administration's (NOAA) Pacific Marine Environmental Lab (PMEL). ''' import sys # First try to import PySide2, then try PyQt5 if that fails, and finally try PyQt4 if that fails try: import PySide2 PYTHONQT_VERSION = 'PySide2' except ImportError: try: import PyQt5 PYTHONQT_VERSION = 'PyQt5' except ImportError: import PyQt4 PYTHONQT_VERSION = 'PyQt4' # Now that the Python Qt version is determined, import the parts # allowing any import errors to propagate out if PYTHONQT_VERSION == 'PySide2': from PySide2.QtCore import Qt, QPointF, QSizeF from PySide2.QtGui import QBrush, QColor, QFont, QPainterPath, QPen elif PYTHONQT_VERSION == 'PyQt5': from PyQt5.QtCore import Qt, QPointF, QSizeF from PyQt5.QtGui import QBrush, QColor, QFont, QPainterPath, QPen else: from PyQt4.QtCore import Qt, QPointF, QSizeF from PyQt4.QtGui import QBrush, QColor, QFont, QPainterPath, QPen class SidesRectF(object): ''' Trivial helper class for defining a rectangle with floating point values for the left-x, top-y, right-x, and bottom-y edges. ''' def __init__(self, left, top, right, bottom): ''' Create a SidesRectF with the given left, top, right, and bottom as float values. ''' super(SidesRectF, self).__init__() self.__left = float(left) self.__top = float(top) self.__right = float(right) self.__bottom = float(bottom) def left(self): ''' Return the left value as a float. ''' return self.__left def setLeft(self, val): ''' Set the SidesRectF left as a float value of the argument. ''' self.__left = float(val) def top(self): ''' Return the top value as a float. ''' return self.__top def setTop(self, val): ''' Set the SidesRectF top as a float value of the argument. ''' self.__top = float(val) def right(self): ''' Return the right value as a float. ''' return self.__right def setRight(self, val): ''' Set the SidesRectF right as a float value of the argument. ''' self.__right = float(val) def bottom(self): ''' Return the bottom value as a float. ''' return self.__bottom def setBottom(self, val): ''' Set the SidesRectF bottom as a float value of the argument. ''' self.__bottom = float(val) class SymbolPath(object): ''' Trivial helper class for defining a symbol ''' def __init_
_(self, painterpath, isfilled): '
'' Create a SymbolPath representing a symbol. Arguments: painterpath: the QPainterPath representing this symbol isfilled: if True, the symbol should be drawn with a solid brush; if False, the symbol should be drawn with a solid pen ''' super(SymbolPath, self).__init__() self.__painterpath = painterpath self.__isfilled = isfilled if isfilled: try: self.__painterpath = painterpath.simplified() except: pass def painterPath(self): ''' Return the QPainterPath for this symbol ''' return self.__painterpath def isFilled(self): ''' Return True if the symbol should be drawn with a solid brush; return False if the symbol should be drawn with a solid pen. ''' return self.__isfilled class CmndHelperPQ(object): ''' Helper class of static methods for dealing with commands sent to a PyQt piped viewer. ''' def __init__(self, viewer): ''' Creates a cmndpipe command helper. The widget viewer is only used for determining the default font and for translation of error messages. ''' super(CmndHelperPQ, self).__init__() self.__viewer = viewer self.__symbolpaths = { } def getFontFromCmnd(self, fontinfo): ''' Returns a QFont based on the information in the dictionary fontinfo. Recognized keys in the font dictionary are: "family": font family name (string) "size": text size in points (1/72 inches) "italic": italicize? (False/True) "bold": make bold? (False/True) "underline": underline? (False/True) ''' try: myfont = QFont(fontinfo["family"]) except KeyError: myfont = self.__viewer.font() try: myfont.setPointSizeF(fontinfo["size"]) except KeyError: pass try: myfont.setItalic(fontinfo["italic"]) except KeyError: pass try: myfont.setBold(fontinfo["bold"]) except KeyError: pass try: myfont.setUnderline(fontinfo["underline"]) except KeyError: pass return myfont def getBrushFromCmnd(self, brushinfo): ''' Returns a QBrush based on the information in the dictionary brushinfo. A ValueError is raised if the value for the "style" key, if given, is not recognized. Recognized keys in the fill dictionary are: "color": color name or 24-bit RGB integer value (eg, 0xFF0088) "alpha": alpha value from 0 (transparent) to 255 (opaque) "style": brush style name ("solid", "dense1" to "dense7", "none", "hor", "ver", "cross", "bdiag", "fdiag", "diagcross") ''' try: mycolor = self.getColorFromCmnd(brushinfo) mybrush = QBrush(mycolor) except KeyError: mybrush = QBrush() try: mystyle = brushinfo["style"] if mystyle == "solid": mystyle = Qt.SolidPattern elif mystyle == "dense1": mystyle = Qt.Dense1Pattern elif mystyle == "dense2": mystyle = Qt.Dense2Pattern elif mystyle == "dense3": mystyle = Qt.Dense3Pattern elif mystyle == "dense4": mystyle = Qt.Dense4Pattern elif mystyle == "dense5": mystyle = Qt.Dense5Pattern elif mystyle == "dense6": mystyle = Qt.Dense6Pattern elif mystyle == "dense7": mystyle = Qt.Dense7Pattern elif mystyle == "none": mystyle = Qt.NoBrush elif mystyle == "hor": mystyle = Qt.HorPattern elif mystyle == "ver": mystyle = Qt.VerPattern elif mystyle == "cross": mystyle = Qt.CrossPattern elif mystyle == "bdiag": mystyle = Qt.BDiagPattern elif mystyle == "fdiag": mystyle = Qt.FDiagPattern elif mystyle == "diagcross": mystyle = Qt.DiagCrossPattern else: raise ValueError("Unknown brush style '%s'" % str(mystyle)) mybrush.setStyle(mystyle) except KeyError: pass return mybrush def getPenFromCmnd(self, peninfo): ''' Returns a QPen based on the information in the dictionary peninfo. A ValueError is raised if the value for the "style", "capstyle", or "joinstyle" key, if given, is not recognized. Recognized keys in the outline dictionary are: "color": color name or 24-bit RGB integer value (eg, 0xFF0088) "alpha": alpha value from 0 (transparent) to 255 (opaque) "width": pen width in points (1/72 inches); possibly
banglakit/spaCy
spacy/language_data/punctuation.py
Python
mit
2,781
0.001566
# encoding: utf8 from __future__ import unicode_literals import re _ALPHA_LOWER = """ a ä à á â ǎ æ ã å ā ă ą b c ç ć č ĉ ċ c̄ d ð ď e é è ê ë ė ȅ ȩ ẽ ę f g ĝ ğ h i ı î ï í ī ì ȉ ǐ į ĩ j k ķ l ł ļ m n ñ ń ň ņ o ö ó ò ő ô õ œ ø ō ő ǒ ơ p q r ř ŗ s ß ś š ş ŝ t ť u ú û ù ú ū ű ǔ ů ų ư v w ŵ x y ÿ ý ỳ ŷ ỹ z ź ž ż þ """ _ALPHA_UPPER = """ A Ä À Á  Ǎ Æ Ã Å Ā Ă Ą B C Ç Ć Č Ĉ Ċ C̄ D Ð Ď E É È Ê Ë Ė Ȅ Ȩ Ẽ Ę F G Ĝ Ğ H I İ Î Ï Í Ī Ì Ȉ Ǐ Į Ĩ J K Ķ L Ł Ļ M N Ñ Ń Ň Ņ O Ö Ó Ò Ő Ô Õ Œ Ø Ō Ő Ǒ Ơ P Q R Ř Ŗ S Ś Š Ş Ŝ T Ť U Ú Û Ù Ú Ū Ű Ǔ Ů Ų Ư V W Ŵ X Y Ÿ Ý Ỳ Ŷ Ỹ Z Ź Ž Ż Þ """ _UNITS = """ km km² km³ m
m² m³ dm dm² dm³ cm cm² cm³ mm mm² mm³ ha µm nm yd in ft kg g mg µg t lb oz m/s km/h kmh mph h
Pa Pa mbar mb MB kb KB gb GB tb TB T G M K """ _CURRENCY = r""" \$ £ € ¥ ฿ US\$ C\$ A\$ """ _QUOTES = r""" ' '' " ” “ `` ` ‘ ´ ‚ , „ » « """ _PUNCT = r""" … , : ; \! \? ¿ ¡ \( \) \[ \] \{ \} < > _ # \* & """ _HYPHENS = r""" - – — -- --- """ LIST_ELLIPSES = [ r'\.\.+', "…" ] LIST_CURRENCY = list(_CURRENCY.strip().split()) LIST_QUOTES = list(_QUOTES.strip().split()) LIST_PUNCT = list(_PUNCT.strip().split()) LIST_HYPHENS = list(_HYPHENS.strip().split()) ALPHA_LOWER = _ALPHA_LOWER.strip().replace(' ', '').replace('\n', '') ALPHA_UPPER = _ALPHA_UPPER.strip().replace(' ', '').replace('\n', '') ALPHA = ALPHA_LOWER + ALPHA_UPPER QUOTES = _QUOTES.strip().replace(' ', '|') CURRENCY = _CURRENCY.strip().replace(' ', '|') UNITS = _UNITS.strip().replace(' ', '|').replace('\n', '|') HYPHENS = _HYPHENS.strip().replace(' ', '|') # Prefixes TOKENIZER_PREFIXES = ( ['§', '%', '=', r'\+'] + LIST_PUNCT + LIST_ELLIPSES + LIST_QUOTES + LIST_CURRENCY ) # Suffixes TOKENIZER_SUFFIXES = ( LIST_PUNCT + LIST_ELLIPSES + LIST_QUOTES + [ r'(?<=[0-9])\+', r'(?<=°[FfCcKk])\.', r'(?<=[0-9])(?:{c})'.format(c=CURRENCY), r'(?<=[0-9])(?:{u})'.format(u=UNITS), r'(?<=[0-9{al}{p}(?:{q})])\.'.format(al=ALPHA_LOWER, p=r'%²\-\)\]\+', q=QUOTES), r'(?<=[{au}][{au}])\.'.format(au=ALPHA_UPPER), "'s", "'S", "’s", "’S" ] ) # Infixes TOKENIZER_INFIXES = ( LIST_ELLIPSES + [ r'(?<=[0-9])[+\-\*^](?=[0-9-])', r'(?<=[{al}])\.(?=[{au}])'.format(al=ALPHA_LOWER, au=ALPHA_UPPER), r'(?<=[{a}]),(?=[{a}])'.format(a=ALPHA), r'(?<=[{a}])[?";:=,.]*(?:{h})(?=[{a}])'.format(a=ALPHA, h=HYPHENS), r'(?<=[{a}"])[:<>=](?=[{a}])'.format(a=ALPHA) ] ) __all__ = ["TOKENIZER_PREFIXES", "TOKENIZER_SUFFIXES", "TOKENIZER_INFIXES"]
eduNEXT/edunext-platform
openedx/core/lib/command_utils.py
Python
agpl-3.0
1,739
0.002875
""" Useful utilities for management commands. """ from django.core.management.base import CommandError from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey def get_mutually
_exclusive_required_option(options, *selections): """ Validates that exactly one of the 2 given options is specified. Returns the name of the found option. """ selected = [sel for sel in selections if options.get(sel)] if len(selected) != 1: selection_string = ', '.join(f'--{selection}' for selection in selections) raise Comman
dError(f'Must specify exactly one of {selection_string}') return selected[0] def validate_mutually_exclusive_option(options, option_1, option_2): """ Validates that both of the 2 given options are not specified. """ if options.get(option_1) and options.get(option_2): raise CommandError(f'Both --{option_1} and --{option_2} cannot be specified.') def validate_dependent_option(options, dependent_option, depending_on_option): """ Validates that option_1 is specified if dependent_option is specified. """ if options.get(dependent_option) and not options.get(depending_on_option): raise CommandError(f'Option --{dependent_option} requires option --{depending_on_option}.') def parse_course_keys(course_key_strings): """ Parses and returns a list of CourseKey objects from the given list of course key strings. """ try: return [CourseKey.from_string(course_key_string) for course_key_string in course_key_strings] except InvalidKeyError as error: raise CommandError('Invalid key specified: {}'.format(str(error))) # lint-amnesty, pylint: disable=raise-missing-from
rmcdermo/sandbox
oflow.py
Python
mit
2,209
0.018108
#!/usr/bin/python #McDermott #15 Sep 2017 # # Calculations for compressible orifice flow # # Refs: # See my notes from 1996 # Munson, Young, Okishi. Fundamentals of Fluid Mechanics. Wiley, 1990. import
math HOC = 50010. # heat of combustion [kJ/kg] psig = 0.0003 T_F = 100. C_d = 0.85 # orifice discharge coefficient N = 1844 # number of holes D_in = 1./8. # diameter [in] D0_in = 8.*D_in # upstream manifold diameter [in] D = D_in*2.54/100. # fuel port diameter [m] A = N*math.pi*(D/2.)**2 # total flow ar
ea [m^2] D0 = D0_in*2.54/100. A0 = N*math.pi*(D0/2.)**2 # upstream flow area [m^2] beta = A/A0 # "beta ratio" k = 1.4 # isentropic coefficient W = 16. # molecular weight R = 8314.5 # universal gas constant [Pa*m3/(kmol*K)] T0 = 293. #(T_F+459.67)/1.8 # upstream absolute temperature [K] patm = 101325. # atmospheric pressure [Pa] pcon = 101325./14.696 # pressure units conversion factor p0 = (psig + patm/pcon)*pcon # upstream absolute pressure [Pa] pb = patm # downstream absolute pressure [Pa] print('T0 [K] = '+str(T0)) print('p0 [Pa] = '+str(p0)) print('A [m2] = '+str(A)) print('beta = '+str(beta)) # determine critical pressure for choked flow pstar = p0*(2./(k+1.))**(k/(k-1.)) # MYO (11.61) Tstar = T0*(pstar/p0)**(k/(k-1.)) # MYO (11.58) print('pb/p0 = '+str(pb/p0)) print('p*/p0 = '+str(pstar/p0)) if pb/p0 < pstar/p0: # sonic (choked) print('sonic') mdot = C_d*A*p0*math.sqrt( 2.*W/(R*T0) * (k/(k-1.)) * (1.-(2./(k+1.))) / ( ((k+1.)/2.)**(2./(k-1.)) - beta**4 ) ) # RJM notes (37) rho = pstar*W/(R*Tstar) else: # subsonic print('subsonic') mdot = C_d*A*p0*math.sqrt( 2.*W/(R*T0) * (k/(k-1.)) * ( 1.-(pb/p0)**((k-1.)/k) ) / ( (p0/pb)**(2./k) - beta**4 ) ) # RJM notes (39) rho = pb*W/(R*T0) print('mdot [kg/s] = '+str(mdot)) print('HOC [kJ/kg] = '+str(HOC)) print('HRR [kW] = '+str(mdot*HOC)) print('HRR [MBTU/h] = '+str(mdot*HOC*0.94783*3600/1.e6)) # determine velocity at nozzle exit vdot = mdot/rho vel = vdot/A print('vel [m/s] = '+str(vel))
TheArchives/Nexus
core/plugins/fetch.py
Python
bsd-2-clause
5,886
0.003738
# The Nexus software is licensed under the
BSD 2-Clause license. # # You should have recieved a copy of this license with the software. # If you did not, you can find one at the following
link. # # http://opensource.org/licenses/bsd-license.php from core.plugins import ProtocolPlugin from ConfigParser import RawConfigParser as ConfigParser from core.decorators import * class FetchPlugin(ProtocolPlugin): commands = { "fetch": "commandFetch", "bring": "commandFetch", "invite": "commandInvite", "fp": "commandFetchProtect", "fo": "commandFetchOverride", } hooks = { "chatmsg": "message" } def gotClient(self): self.client.var_fetchrequest = False self.client.var_fetchdata = () def message(self, message): if self.client.var_fetchrequest: self.client.var_fetchrequest = False if message in ["y", "yes"]: sender, world, rx, ry, rz = self.client.var_fetchdata if self.client.world == world: self.client.teleportTo(rx, ry, rz) else: self.client.changeToWorld(world.id, position=(rx, ry, rz)) self.client.sendServerMessage("You have accepted the fetch request.") sender.sendServerMessage("%s has accepted your fetch request." % self.client.username) elif message in ["n", "no"]: sender = self.client.var_fetchdata[0] self.client.sendServerMessage("You did not accept the fetch request.") sender.sendServerMessage("%s did not accept your request." % self.client.username) else: sender = self.client.var_fetchdata[0] self.client.sendServerMessage("You have ignored the fetch request.") sender.sendServerMessage("%s has ignored your request." % self.client.username) return return True @player_list @username_command def commandInvite(self, user, fromloc, overriderank): "/invite username - Guest\Invites a user to be where you are." # Shift the locations right to make them into block coords rx = self.client.x >> 5 ry = self.client.y >> 5 rz = self.client.z >> 5 user.var_prefetchdata = (self.client, self.client.world) if self.client.world.id == user.world.id: user.sendServerMessage("%s would like to fetch you." % self.client.username) else: user.sendServerMessage("%s would like to fetch you to %s." % (self.client.username, self.client.world.id)) user.sendServerMessage("Do you wish to accept? [y]es [n]o") user.var_fetchrequest = True user.var_fetchdata = (self.client, self.client.world, rx, ry, rz) self.client.sendServerMessage("The fetch request has been sent.") @mod_only def commandFetchProtect(self, parts, fromloc, overriderank): "/fp on|off - Mod\nToggles Fetch Protection for yourself." if len(parts) != 2: self.client.sendServerMessage("You must specify either \'on\' or \'off\'.") elif parts[1] == "on": config = ConfigParser() config.read('config/data/fprot.meta') config.add_section(self.client.username) fp = open('config/data/fprot.meta', "w") config.write(fp) fp.close() self.client.sendServerMessage("Fetch protection is now on.") elif parts[1] == "off": config = ConfigParser() config.read('config/data/fprot.meta') config.remove_section(self.client.username) fp = open('config/data/fprot.meta', "w") config.write(fp) fp.close() self.client.sendServerMessage("Fetch protection is now off.") else: self.client.sendServerMessage("You must specify either \'on\' or \'off\'.") @player_list @admin_only @username_command def commandFetchOverride(self, user, fromloc, overriderank): "/fo username - Mod\nTeleports a user to be where you are" # Shift the locations right to make them into block coords rx = self.client.x >> 5 ry = self.client.y >> 5 rz = self.client.z >> 5 if user.world == self.client.world: user.teleportTo(rx, ry, rz) else: if self.client.isModPlus(): user.changeToWorld(self.client.world.id, position=(rx, ry, rz)) else: self.client.sendServerMessage("%s cannot be fetched from '%s'" % (self.client.username, user.world.id)) return user.sendServerMessage("You have been fetched by %s" % self.client.username) @player_list @op_only @username_command def commandFetch(self, user, fromloc, overriderank): "/fetch username - Op\nAliases: bring\nTeleports a user to be where you are" # Shift the locations right to make them into block coords rx = self.client.x >> 5 ry = self.client.y >> 5 rz = self.client.z >> 5 config = ConfigParser() config.read('config/data/fprot.meta') if config.has_section(user.username): self.client.sendServerMessage("You can't fetch this person; they're Fetch Protected!") else: if user.world == self.client.world: user.teleportTo(rx, ry, rz) else: if self.client.isModPlus(): user.changeToWorld(self.client.world.id, position=(rx, ry, rz)) else: self.client.sendServerMessage("%s cannot be fetched from '%s'" % (self.client.username, user.world.id)) return user.sendServerMessage("You have been fetched by %s" % self.client.username)
ecoron/SerpScrap
examples/example_csv.py
Python
mit
266
0
#!/
usr/bin/python3 # -*- coding: utf-8 -*- import serpscrap keywords = ['stellar'] config = serpscrap.Config() config.set('scrape_urls', False) scrap = serpscrap.SerpScrap() scrap.init(config=config.get(), keywords=keywords) results = scrap.a
s_csv('/tmp/output')
sdague/home-assistant
tests/components/androidtv/patchers.py
Python
apache-2.0
6,084
0.00263
"""Define patches used for androidtv tests.""" from tests.async_mock import mock_open, patch KEY_PYTHON = "python" KEY_SERVER = "server" ADB_DEVICE_TCP_ASYNC_FAKE = "AdbDeviceTcpAsyncFake" DEVICE_ASYNC_FAKE = "DeviceAsyncFake" class AdbDeviceTcpAsyncFake: """A fake of the `adb_shell.adb_device_async.AdbDeviceTcpAsync` class.""" def __init__(self, *args, **kwargs): """Initialize a fake `adb_shell.adb_device_async.AdbDeviceTcpAsync` instance.""" self.available = False async def close(self): """Close the socket connection.""" self.available = False async def connect(self, *args, **kwargs): """Try to connect to a device.""" raise NotImplementedError async def shell(self, cmd, *args, **kwargs): """Send an ADB shell command.""" return None class ClientAsyncFakeSuccess: """A fake of the `ClientAsync` class when the connection and shell commands succeed.""" def __init__(self, host="127.0.0.1", port=5037): """Initialize a `ClientAsyncFakeSuccess` instance.""" self._devices = [] async def device(self, serial): """Mock the `ClientAsync.device` method when the device is connected via ADB.""" device = DeviceAsyncFake(serial) self._devices.append(device) return device class ClientAsyncFakeFail: """A fake of the `ClientAsync` class when the connection and shell commands fail.""" def __init__(self, host="127.0.0.1", port=5037): """Initialize a `ClientAsyncFakeFail` instance.""" self._devices = [] async def device(self, serial): """Mock the `ClientAsync.device` method when the device is not connected via ADB.""" self._devices = [] return None class DeviceAsyncFake: """A fake of the `DeviceAsync` class.""" def __init__(self, host): """Initialize a `DeviceAsyncFake` instance.""" self.host = host async def shell(self, cmd): """Send an ADB shell command.""" raise NotImplementedError def patch_connect(success): """Mock the `adb_shell.adb_device_async.AdbDeviceTcpAsync` and `ClientAsync` classes.""" async def connect_success_python(self, *args, **kwargs): """Mock the `AdbDeviceTcpAsyncFake.connect` method when it succeeds.""" self.available = True async def connect_fail_python(self, *args, **kwargs): """Mock the `AdbDeviceTcpAsyncFake.connect` method when it fails.""" raise OSError if success: return { KEY_PYTHON: patch( f"{__name__}.{ADB_DEVICE_TCP_ASYNC_FAKE}.connect", connect_success_python, ), KEY_SERVER: patch( "androidtv.adb_manager.adb_manager_async.ClientAsync", ClientAsyncFakeSuccess, ), } return { KEY_PYTHON: patch( f"{__name__}.{ADB_DEVICE_TCP_ASYNC_FAKE}.connect", connect_fail_python ), KEY_SERVER: patch( "androidtv.adb_manager.adb_manager_async.ClientAsync", ClientAsyncFakeFail ), } def patch_shell(response=None, error=False): """Mock the `AdbDeviceTcpAsyncFake.shell` and `DeviceAsyncFake.shell` methods.""" async def shell_success(self, cmd, *args, **kwargs): """Mock the `AdbDeviceTcpAsyncFake.shell` and `DeviceAsyncFake.shell` methods when they are successful.""" self.shell_cmd = cmd return response async def shell_fail_python(self, cmd, *args, **kwargs): """Mock the `AdbDeviceTcpAsyncFake.shell` method when it fails.""" self.shell_cmd = cmd raise ValueError async def shell_fail_server(self, cmd): """Mock the `DeviceAsyncFake.shell` method when it fails.""" self.shell_cmd = cmd raise ConnectionResetError if not error: return { KEY_PYTHON: patch( f"{__name__}.{ADB_DEVICE_TCP_ASYNC_FAKE}.shell", shell_success ), KEY_SERVER: patch(f"{__name__}.{DEVICE_ASYNC_FAKE}.shell", shell_success), } return { KEY_PYTHON: patch(
f"{__name__}.{ADB_DEVICE_TCP_ASYNC_FAKE}.shell", shell_fail_python ), KEY_SERVER
: patch(f"{__name__}.{DEVICE_ASYNC_FAKE}.shell", shell_fail_server), } PATCH_ADB_DEVICE_TCP = patch( "androidtv.adb_manager.adb_manager_async.AdbDeviceTcpAsync", AdbDeviceTcpAsyncFake ) PATCH_ANDROIDTV_OPEN = patch( "homeassistant.components.androidtv.media_player.open", mock_open() ) PATCH_KEYGEN = patch("homeassistant.components.androidtv.media_player.keygen") PATCH_SIGNER = patch( "homeassistant.components.androidtv.media_player.ADBPythonSync.load_adbkey", return_value="signer for testing", ) def isfile(filepath): """Mock `os.path.isfile`.""" return filepath.endswith("adbkey") PATCH_ISFILE = patch("os.path.isfile", isfile) PATCH_ACCESS = patch("os.access", return_value=True) def patch_firetv_update(state, current_app, running_apps, hdmi_input): """Patch the `FireTV.update()` method.""" return patch( "androidtv.firetv.firetv_async.FireTVAsync.update", return_value=(state, current_app, running_apps, hdmi_input), ) def patch_androidtv_update( state, current_app, running_apps, device, is_volume_muted, volume_level, hdmi_input ): """Patch the `AndroidTV.update()` method.""" return patch( "androidtv.androidtv.androidtv_async.AndroidTVAsync.update", return_value=( state, current_app, running_apps, device, is_volume_muted, volume_level, hdmi_input, ), ) PATCH_LAUNCH_APP = patch("androidtv.basetv.basetv_async.BaseTVAsync.launch_app") PATCH_STOP_APP = patch("androidtv.basetv.basetv_async.BaseTVAsync.stop_app") # Cause the update to raise an unexpected type of exception PATCH_ANDROIDTV_UPDATE_EXCEPTION = patch( "androidtv.androidtv.androidtv_async.AndroidTVAsync.update", side_effect=ZeroDivisionError, )
restudToolbox/package
respy/fortran/interface.py
Python
mit
11,491
0.002176
""" This module serves as the interface between the PYTHON code and the FORTRAN implementations. """ import pandas as pd import numpy as np import subprocess import os from respy.python.shared.shared_auxiliary import dist_class_attributes from respy.python.shared.shared_auxiliary import dist_model_paras from respy.python.shared.shared_constants import OPTIMIZERS_FORT from respy.python.shared.shared_constants import HUGE_FLOAT from respy.python.shared.shared_constants import EXEC_DIR def resfort_interface(respy_obj, request, data_array=None): """ This function provides the interface to the FORTRAN functionality. """ # Add mock specification for FORTRAN optimizers if not defined by user. # This is required so the initialization file for FORTRAN is complete. respy_obj = add_optimizers(respy_obj) # Distribute class attributes model_paras, num_periods, edu_start, is_debug, edu_max, delta, \ num_draws_emax, seed_emax, is_interpolated, num_points_interp, \ is_myopic, min_idx, tau, is_parallel, num_procs, \ num_agents_sim, num_draws_prob, num_agents_est, seed_prob, seed_sim, \ paras_fixed, optimizer_options, optimizer_used, maxfun, paras_fixed, \ derivatives, scaling = dist_class_attributes(respy_obj, 'model_paras', 'num_periods', 'edu_start', 'is_debug', 'edu_max', 'delta', 'num_draws_emax', 'seed_emax', 'is_interpolated', 'num_points_interp', 'is_myopic', 'min_idx', 'tau', 'is_parallel', 'num_procs', 'num_agents_sim', 'num_draws_prob', 'num_agents_est', 'seed_prob', 'seed_sim', 'paras_fixed', 'optimizer_options', 'optimizer_used', 'maxfun', 'paras_fixed', 'derivatives', 'scaling') dfunc_eps = derivatives[1] is_scaled, scale_minimum = scaling if request == 'estimate': # Check that selected optimizer is in line with version of program. if maxfun > 0: assert optimizer_used in OPTIMIZERS_FORT assert data_array is not None # If an evaluation is requested, then a specially formatted dataset is # written to a scratch file. This eases the reading of the dataset in # FORTRAN. write_dataset(data_array) # Distribute model parameters coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky = \ dist_model_paras(model_paras, is_debug) args = (coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky, is_interpolated, num_draws_emax, num_periods, num_points_interp, is_myopic, edu_start, is_debug, edu_max, min_idx, delta) args = args + (num_draws_prob, num_agents_est, num_agents_sim, seed_prob, seed_emax, tau, num_procs, request, seed_sim, optimizer_options, optimizer_used, maxfun, paras_fixed, dfunc_eps, is_scaled, scale_minimum) write_resfort_initialization(*args) # Call executable if not is_parallel: cmd = [EXEC_DIR + '/resfort_scalar'] subprocess.check_call(cmd) else: cmd = ['mpiexec', '-n', '1', EXEC_DIR + '/resfort_parallel_master'] subprocess.check_call(cmd) # Return arguments depends on the request. if request == 'simulate': results = get_results(num_periods, min_idx, num_agents_sim, 'simulate') args = (results[:-1], results[-1]) elif request == 'estimate': args = None else: raise AssertionError return args def add_optimizers(respy_obj): """ This function fills up missing information about optimizers to ensure a common interface. """ optimizer_options = respy_obj.get_attr('optimizer_options') for optimizer in ['FORT-NEWUOA', 'FORT-BFGS']: # Skip if defined by user. if optimizer in optimizer_options.keys(): continue if optimize
r in ['FORT-NEWUOA']: optimizer_options[optimizer] = dict() optimizer_options[optimizer]['npt'] = 40 optimizer_options[optimizer]['rhobeg'] = 0.1 optimizer_options[optimizer]['rhoend'] = 0.0001 optimizer_options[optimizer]['maxfun'] = 20 if optimizer in ['FORT-BFGS']: optimizer_options[optimizer] = dict() optimizer_options[optimizer]['gtol'] = 0.00001
optimizer_options[optimizer]['maxiter'] = 10 optimizer_options[optimizer]['stpmx'] = 100.0 respy_obj.unlock() respy_obj.set_attr('optimizer_options', optimizer_options) respy_obj.lock() return respy_obj def get_results(num_periods, min_idx, num_agents_sim, which): """ Add results to container. """ # Get the maximum number of states. The special treatment is required as # it informs about the dimensions of some of the arrays that are # processed below. max_states_period = int(np.loadtxt('.max_states_period.resfort.dat')) os.unlink('.max_states_period.resfort.dat') shape = (num_periods, num_periods, num_periods, min_idx, 2) mapping_state_idx = read_data('mapping_state_idx', shape).astype('int') shape = (num_periods,) states_number_period = \ read_data('states_number_period', shape).astype('int') shape = (num_periods, max_states_period, 4) states_all = read_data('states_all', shape).astype('int') shape = (num_periods, max_states_period, 4) periods_payoffs_systematic = read_data('periods_payoffs_systematic', shape) shape = (num_periods, max_states_period) periods_emax = read_data('periods_emax', shape) # In case of a simulation, we can also process the simulated dataset. if which == 'simulate': shape = (num_periods * num_agents_sim, 8) data_array = read_data('simulated', shape) else: raise AssertionError # Update class attributes with solution args = (periods_payoffs_systematic, states_number_period, mapping_state_idx, periods_emax, states_all, data_array) # Finishing return args def read_data(label, shape): """ Read results """ file_ = '.' + label + '.resfort.dat' # This special treatment is required as it is crucial for this data # to stay of integer type. All other data is transformed to float in # the replacement of missing values. if label == 'states_number_period': data = np.loadtxt(file_, dtype=np.int64) else: data = np.loadtxt(file_) data = np.reshape(data, shape) # Cleanup os.unlink(file_) # Finishing return data def write_resfort_initialization(coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky, is_interpolated, num_draws_emax, num_periods, num_points_interp, is_myopic, edu_start, is_debug, edu_max, min_idx, delta, num_draws_prob, num_agents_est, num_agents_sim, seed_prob, seed_emax, tau, num_procs, request, seed_sim, optimizer_options, optimizer_used, maxfun, paras_fixed, dfunc_eps, is_scaled, scale_minimum): """ Write out model request to hidden file .model.resfort.ini. """ # Write out to link file with open('.model.resfort.ini', 'w') as file_: # BASICS line = '{0:10d}\n'.format(num_periods) file_.write(line) line = '{0:15.10f}\n'.format(delta) file_.write(line) # WORK for num in [coeffs_a, coeffs_b]: fmt_ = ' {:15.10f}' * 6 + '\n' file_.write(fmt_.format(*num)) # EDUCATION num = coeffs_edu line = ' {:20.10f} {:20.10f} {:20.10f}\n'.format(*num) file_.write(line) line = '{0:10d} '.format(edu_start) file_.write(line) line = '{0:10d}\n'.format(edu_max) file_.write(line) # HOME line = ' {0:15.10f}\n'.format(coeffs_home[0]) file_.write(line) # SHOCKS for j in range(4): fmt_ = ' {:20.10f}' * 4 + '\n' file_.write(fmt_.format(*shocks_cholesky[j, :])) # SOLUTION line = '{0:10d}\n'.format(num_draws_emax) file_.write(line) line = '{0
shagabutdinov/sublime-semicolon
semicolon.py
Python
mit
3,677
0.018493
import sublime import sublime_plugin import re from Statement import statement from Expression import expression try: from SublimeLinter.lint import persist except ImportError as error: print("Dependency import failed; please read readme for " + "Semicolon plugin for installation instructions; to disable this " + "message remove this plugin; message: " + str(error) + "; can not import " + "persist from SublimeLinter.lint: add_all functionality will not be " + "avaiable") def add(view, edit, point): container = statement.get_root_statement(view, point) line = view.line(container[1]) next_char = view.substr(sublime.Region(line.b, line.b + 1)) prev_char_region = sublime.Region(line.a, line.b) prev_chars = view.substr(prev_char_region) prev_char_match = re.search(r'(\S)\s*$', prev_chars) prev_char = None if prev_char_match != None: prev_char = prev_char_match.group(1) is_semicolon_not_required = ( prev_char == ';' or prev_char == ':' or prev_char == ',' or prev_char == '>' or next_char == ';' ) is_source = ( 'source' not in view.scope_name(line.b) or 'source' not in view.scope_name(line.b + 1) ) if is_semicolon_not_required: return if is_source: return is_keyword = is_keyword_statement( view, line.a + prev_char_match.start(1) + 1 ) if prev_char == '}' and is_keyword: return is_keyword view.insert(edit, container[1], ';') new_sels = [] for current_sel in view.sel(): a, b = current_sel.a, current_sel.b if a - 1 == container[1]: a -= 1 if b - 1 == contai
ner[1]: b -= 1 new_sels.append(sublime.Region(a, b)) view.sel().clear() view.sel().add_al
l(new_sels) def is_keyword_statement(view, point): nesting = expression.get_nesting(view, point - 1, expression = r'{') if nesting == None: return False chars_before_nesting = view.substr(sublime.Region( max(nesting[0] - 512, 0), nesting[0] - 1 )) match = re.search(r'\)(\s*)$', chars_before_nesting) if match == None: return False parenthesis_nesting = expression.get_nesting(view, nesting[0] - 2 - len(match.group(1)), expression = r'\(') if parenthesis_nesting == None: return False chars_before_parenthesis = view.substr(sublime.Region( max(parenthesis_nesting[0] - 512, 0), parenthesis_nesting[0] - 1 )) keyword_regexp = r'(if|for|while|function\s+\w+)\s*$' return re.search(keyword_regexp, chars_before_parenthesis) != None def add_all(view, edit): if not view.id() in persist.errors: return errors = persist.errors[view.id()] for line in errors: for error in errors[line]: position, error_text = error point = view.text_point(line, position) - 1 is_semicolon_required = ( 'unexpected' in error_text or 'Missing semicolon' in error_text or 'missing semicolon' in error_text ) if is_semicolon_required: _add(view, edit, point) def _add(view, edit, point): statement_start = view.line(point).a statement_point = _get_previous_statement_point(view, statement_start) add(view, edit, statement_point) def _get_previous_statement_point(view, point): while True: if point <= 0: return None line = view.line(point) point = line.a - 1 text = view.substr(line) if text.strip() == '': continue scope_a_point = line.a + len(text) - len(text.lstrip()) scope_a = view.scope_name(scope_a_point) scope_b = view.scope_name(line.b - 1) if 'comment' in scope_b: if 'comment' in scope_a: continue else: return scope_a_point return line.b
robrocker7/h1z1map
server/wsgi.py
Python
apache-2.0
388
0.002577
""" WSGI config for h1z1map project. It exposes the WSGI callable as a module-level variable named ``application``. For
more information on this file, see https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "server.settings") from djang
o.core.wsgi import get_wsgi_application application = get_wsgi_application()
shaded-enmity/dnf
dnf/repo.py
Python
gpl-2.0
27,545
0.001307
# repo.py # DNF Repository objects. # # Copyright (C) 2013-2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. Any Red Hat trademarks that are incorporated in the # source code or documentation are not subject to the GNU General Public # License and may only be used or replicated with the express permission of # Red Hat, Inc. # from __future__ import absolute_import from __future__ import unicode_literals from dnf.i18n import ucd, _ import dnf.callback import dnf.conf.substitutions import dnf.const import dnf.crypto import dnf.exceptions import dnf.logging import dnf.pycomp import dnf.util import dnf.yum.config import dnf.yum.misc import functools import hashlib import hawkey import logging import librepo import operator import os import shutil import string import time import types _METADATA_RELATIVE_DIR = "repodata" _METALINK_FILENAME = "metalink.xml" _MIRRORLIST_FILENAME = "mirrorlist" _RECOGNIZED_CHKSUMS = ['sha512', 'sha256'] logger = logging.getLogger("dnf") def repo_id_invalid(repo_id): """Return index of an invalid character in the repo ID (if present). :api""" allowed_chars = ''.join((string.ascii_letters, string.digits, '-_.:')) invalids = (index for index, char in enumerate(repo_id) if char not in allowed_chars) return dnf.util.first(invalids) def _user_pass_str(user, password): if user is None: return None user = dnf.pycomp.urllib_quote(user) password = '' if password is None else dnf.pycomp.urllib_quote(password) return '%s:%s' % (user, password) def _metalink_path(dirname): return os.path.join(dirname, _METALINK_FILENAME) def _mirrorlist_path(dirname): return os.path.join(dirname, _MIRRORLIST_FILENAME) def _subst2tuples(subst_dct): return [(k, v) for (k, v) in subst_dct.items()] def pkg2payload(pkg, progress, *factories): for fn in factories: pload = fn(pkg, progress) if pload is not None: return pload raise ValueError('no matching payload factory for %s' % pkg) class _DownloadErrors(object): def __init__(self): self._irrecoverable = {} self._recoverable = {} self.fatal = None self.skipped = set() @property def irrecoverable(self): if self._irrecoverable: return self._irrecoverable if self.fatal: return {'': [self.fatal]} return {} @property def recoverable(self): return self._recoverable @recoverable.setter def recoverable(self, new_dct): self._recoverable = new_dct def bandwidth_used(self, pload): if pload.pkg in self.skipped: return 0 return pload.download_size def download_payloads(payloads, drpm): # download packages drpm.err.clear() targets = [pload.librepo_target() for pload in payloads]
errs = _DownloadErrors() try: librepo.download_packages(targets, failfast=True) except librepo.LibrepoException as e: e
rrs.fatal = e.args[1] or '<unspecified librepo error>' drpm.wait() # process downloading errors errs.recoverable = drpm.err.copy() for tgt in targets: err = tgt.err if err is None or err.startswith('Not finished'): continue payload = tgt.cbdata pkg = payload.pkg if err == 'Already downloaded': errs.skipped.add(pkg) continue errs.irrecoverable[pkg] = [err] return errs def update_saving(saving, payloads, errs): real, full = saving for pload in payloads: pkg = pload.pkg if pkg in errs: real += pload.download_size continue real += pload.download_size full += pload.full_size return real, full class _DetailedLibrepoError(Exception): def __init__(self, librepo_err, source_url): Exception.__init__(self) self.librepo_code = librepo_err.args[0] self.librepo_msg = librepo_err.args[1] self.source_url = source_url class _Handle(librepo.Handle): def __init__(self, gpgcheck, max_mirror_tries, max_parallel_downloads=None): super(_Handle, self).__init__() self.gpgcheck = gpgcheck self.maxmirrortries = max_mirror_tries self.interruptible = True self.repotype = librepo.LR_YUMREPO self.useragent = dnf.const.USER_AGENT self.maxparalleldownloads = max_parallel_downloads self.yumdlist = [ "primary", "filelists", "prestodelta", "group_gz", "updateinfo"] def __str__(self): return '_Handle: metalnk: %s, mlist: %s, urls %s.' % \ (self.metalinkurl, self.mirrorlisturl, self.urls) @classmethod def new_local(cls, subst_dct, gpgcheck, max_mirror_tries, cachedir): h = cls(gpgcheck, max_mirror_tries) h.varsub = _subst2tuples(subst_dct) h.destdir = cachedir h.urls = [cachedir] h.local = True return h @property def metadata_dir(self): return os.path.join(self.destdir, _METADATA_RELATIVE_DIR) @property def metalink_path(self): return _metalink_path(self.destdir) @property def mirrorlist_path(self): return _mirrorlist_path(self.destdir) def perform(self, result=None): try: return super(_Handle, self).perform(result) except librepo.LibrepoException as exc: source = self.metalinkurl or self.mirrorlisturl or \ ', '.join(self.urls) raise _DetailedLibrepoError(exc, source) class _NullKeyImport(dnf.callback.KeyImport): def confirm(self, _keyinfo): return True class Metadata(object): def __init__(self, res, handle): self.fresh = False # :api self.repo_dct = res.yum_repo self.repomd_dct = res.yum_repomd self._mirrors = handle.mirrors[:] @property def age(self): return self.file_age('primary') @property def comps_fn(self): return self.repo_dct.get("group_gz") or self.repo_dct.get("group") @property def content_tags(self): return self.repomd_dct.get('content_tags') @property def distro_tags(self): pairs = self.repomd_dct.get('distro_tags', []) return {k:v for (k, v) in pairs} def file_age(self, what): return time.time() - self.file_timestamp(what) def file_timestamp(self, what): try: return dnf.util.file_timestamp(self.repo_dct[what]) except OSError as e: raise dnf.exceptions.MetadataError(ucd(e)) @property def filelists_fn(self): return self.repo_dct.get('filelists') @property def mirrors(self): return self._mirrors @property def md_timestamp(self): """Gets the highest timestamp of all metadata types.""" timestamps = [content.get('timestamp') for (_, content) in self.repomd_dct.items() if isinstance(content, dict)] return max(timestamps) @property def presto_fn(self): return self.repo_dct.get('prestodelta') @property def primary_fn(self): return self.repo_dct.get('primary') def reset_age(self): dnf.util.touch(self.primary_fn, no_create=True) @property def repomd_fn(self): return self.repo_dct.get('repomd') @property def revision(self): return self.repomd_dct.get('revision') @property def timestamp(self
pythonbyexample/PBE
dbetut/conf.py
Python
bsd-3-clause
6,209
0.005315
# -*- coding: utf-8 -*- # # Test documentation build configuration file, created by # sphinx-quickstart on Sat Feb 21 22:42:03 2009. # # This file is execfile()d with the current directory set to its containing dir. # # The contents of this file are pickled, so don't put values in the namespace # that aren't pickleable (module imports are okay, they're removed automatically). # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If your extensions are in another directory, add it here. If the directory # is relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. #sys.path.append(os.path.abspath('.')) # General configuration # --------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['.templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Django By Example' copyright = u'2013, lightbird.net' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '' # The full version, including alpha/beta/rc tags. release = '' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ['.build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # Options for HTML output # ----------------------- # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. html_style = 'default.css' # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". html_title = "Django By Example" # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = "lb-logosm.jpg" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['.static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. html_use_index = False # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, the reST sources are included in the HTML build as _sources/<name>. #html_copy_source = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'Testdoc' # Options for LaTeX output # -------
----------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('
10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). latex_documents = [ ('index', 'Test.tex', ur'Django By Example', ur'lightbird.net', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True
Azure/azure-sdk-for-python
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_11_01/operations/_virtual_hub_route_table_v2_s_operations.py
Python
mit
22,766
0.005183
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class VirtualHubRouteTableV2SOperations(object): """VirtualHubRouteTableV2SOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2020_11_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def get( self, resource_group_name, # type: str virtual_hub_name, # type: str route_table_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.VirtualHubRouteTableV2" """Retrieves the details of a VirtualHubRouteTableV2. :param resource_group_name: The resource group name of the VirtualHubRouteTableV2. :type resource_group_name: str :param virtual_hub_name: The name of the VirtualHub. :type virtual_hub_name: str :param route_table_name: The name of the VirtualHubRouteTableV2. :type route_table_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualHubRou
teTableV2, or the result of cls(response) :rtype: ~azure.mgmt.network.v2020_11_01.models.VirtualHubRouteTableV2 :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHubRouteTableV2"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-11-01"
accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'), 'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('VirtualHubRouteTableV2', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/routeTables/{routeTableName}'} # type: ignore def _create_or_update_initial( self, resource_group_name, # type: str virtual_hub_name, # type: str route_table_name, # type: str virtual_hub_route_table_v2_parameters, # type: "_models.VirtualHubRouteTableV2" **kwargs # type: Any ): # type: (...) -> "_models.VirtualHubRouteTableV2" cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHubRouteTableV2"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2020-11-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'), 'routeTableName': self._serialize.url("route_table_name", route_table_name, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(virtual_hub_route_table_v2_parameters, 'VirtualHubRouteTableV2') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('VirtualHubRouteTableV2', pipeline_response) if response.status_code == 201: deser
FinnStutzenstein/OpenSlides
server/docker/settings.py
Python
mit
5,214
0.001534
""" Settings file for OpenSlides. For more information on this file, see https://github.com/OpenSlides/OpenSlides/blob/master/SETTINGS.rst """ import os import json from openslides.global_settings import * class MissingEnvironmentVariable(Exception): pass undefined = object() def get_env(name, default=undefined, cast=str): env = os.environ.get(name) default_extension = "" if not env: env = default default_extension = " (default)" if env is undefined: raise MissingEnvironmentVariable(name) if env is not None: if cast is bool: env = env in ("1", "true", "True") else: env = cast(env) if env is None: print(f"{name}={default_extension}", flush=True) else: print(f'{name}="{env}"{default_extension}', flush=True) return env # The directory for user specific data files OPENSLIDES_USER_DATA_DIR = "/app/personal_data/var" SECRET_KEY = get_env("SECRET_KEY") DEBUG = False # Controls the verbosity on errors during a reset password. If enabled, an error #
will be shown, if there does not exist a user with a given email address. So one # can check, if a email is registered. If
this is not wanted, disable verbose # messages. An success message will always be shown. RESET_PASSWORD_VERBOSE_ERRORS = get_env("RESET_PASSWORD_VERBOSE_ERRORS", True, bool) # OpenSlides specific settings AUTOUPDATE_DELAY = get_env("AUTOUPDATE_DELAY", 1, float) DEMO_USERS = get_env("DEMO_USERS", default=None) DEMO_USERS = json.loads(DEMO_USERS) if DEMO_USERS else None # Email settings # For an explaination and more settings values see https://docs.djangoproject.com/en/2.2/topics/email/#smtp-backend EMAIL_HOST = get_env("EMAIL_HOST", "postfix") EMAIL_PORT = get_env("EMAIL_PORT", 25, int) EMAIL_HOST_USER = get_env("EMAIL_HOST_USER", "") EMAIL_HOST_PASSWORD = get_env("EMAIL_HOST_PASSWORD", "") EMAIL_USE_SSL = get_env("EMAIL_USE_SSL", False, bool) EMAIL_USE_TLS = get_env("EMAIL_USE_TLS", False, bool) EMAIL_TIMEOUT = get_env("EMAIL_TIMEOUT", None, int) DEFAULT_FROM_EMAIL = get_env("DEFAULT_FROM_EMAIL", "noreply@example.com") # Increasing Upload size to 100mb (default is 2.5mb) DATA_UPLOAD_MAX_MEMORY_SIZE = 104857600 # Database # https://docs.djangoproject.com/en/1.10/ref/settings/#databases DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql", "NAME": get_env("DATABASE_NAME", "openslides"), "USER": get_env("DATABASE_USER", "openslides"), "PASSWORD": get_env("DATABASE_PASSWORD", "openslides"), "HOST": get_env("DATABASE_HOST", "db"), "PORT": get_env("DATABASE_PORT", "5432"), "USE_TZ": False, # Requires postgresql to have UTC set as default "DISABLE_SERVER_SIDE_CURSORS": True, }, "mediafiles": { "ENGINE": "django.db.backends.postgresql", "NAME": get_env("MEDIAFILE_DATABASE_NAME", "mediafiledata"), "USER": get_env("MEDIAFILE_DATABASE_USER", "openslides"), "PASSWORD": get_env("MEDIAFILE_DATABASE_PASSWORD", "openslides"), "HOST": get_env("MEDIAFILE_DATABASE_HOST", "db"), "PORT": get_env("MEDIAFILE_DATABASE_PORT", "5432"), }, } MEDIAFILE_DATABASE_TABLENAME = get_env("MEDIAFILE_DATABASE_TABLENAME", "mediafile_data") # Redis REDIS_HOST = get_env("REDIS_HOST", "redis") REDIS_PORT = get_env("REDIS_PORT", 6379, int) REDIS_SLAVE_HOST = get_env("REDIS_SLAVE_HOST", "redis-slave") REDIS_SLAVE_PORT = get_env("REDIS_SLAVE_PORT", 6379, int) # Collection Cache REDIS_ADDRESS = f"redis://{REDIS_HOST}:{REDIS_PORT}/0" REDIS_READ_ONLY_ADDRESS = f"redis://{REDIS_SLAVE_HOST}:{REDIS_SLAVE_PORT}/0" CONNECTION_POOL_LIMIT = get_env("CONNECTION_POOL_LIMIT", 100, int) # SAML integration ENABLE_SAML = get_env("ENABLE_SAML", False, bool) if ENABLE_SAML: INSTALLED_APPS += ["openslides.saml"] # Controls if electronic voting (means non-analog polls) are enabled. ENABLE_ELECTRONIC_VOTING = get_env("ENABLE_ELECTRONIC_VOTING", False, bool) # Enable Chat ENABLE_CHAT = get_env("ENABLE_CHAT", False, bool) # Jitsi integration JITSI_DOMAIN = get_env("JITSI_DOMAIN", None) JITSI_ROOM_NAME = get_env("JITSI_ROOM_NAME", None) JITSI_ROOM_PASSWORD = get_env("JITSI_ROOM_PASSWORD", None) TIME_ZONE = "Europe/Berlin" STATICFILES_DIRS = [os.path.join(OPENSLIDES_USER_DATA_DIR, "static")] + STATICFILES_DIRS STATIC_ROOT = os.path.join(OPENSLIDES_USER_DATA_DIR, "collected-static") MEDIA_ROOT = os.path.join(OPENSLIDES_USER_DATA_DIR, "media", "") LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "gunicorn": { "format": "{asctime} [{process:d}] [{levelname}] {name} {message}", "style": "{", "datefmt": "[%Y-%m-%d %H:%M:%S %z]", }, }, "handlers": { "console": {"class": "logging.StreamHandler", "formatter": "gunicorn",}, }, "loggers": { "django": { "handlers": ["console"], "level": get_env("DJANGO_LOG_LEVEL", "INFO"), }, "openslides": { "handlers": ["console"], "level": get_env("OPENSLIDES_LOG_LEVEL", "INFO"), }, }, } SETTINGS_FILEPATH = __file__
Stratoscale/rackattack-api
py/rackattack/tcp/node.py
Python
apache-2.0
1,797
0.001113
from rackattack import api class Node(api.Node): def __init__(self, ipcClient, allocation, name, info): assert 'id' in info assert 'primaryMACAddress' in info assert 'secondaryMACAddress' in info assert 'ipAddress' in info self._ipcClient = ipcClient self._allocation = allocation self._name = name self._info = info self._id = info['id'] def rootSSHCredentials(self): return self._ipcClient.call( "node__rootSSHCredentials", allocationID=self._allocation._idForNodeIPC(), nodeID=self._id) def id(self): return self._id def name(self): return self._name def primaryMACAddress(self): return self._info['primaryMACAddress'] def secondaryMACAddress(self): return self._info['secondaryMACAddress'] def NICBondings(self): return self._info.get('NICBondings', None) def getOtherMACAddresses(self): return self._info.get("otherMACAddresses", None) def getMacAddress(self, macName): return self._info[macName] def ipAddress(self): return self._info['ipAddress'] def coldRestart(self): return self._ipcClient.call( 'node__coldRestart', allocationID=self._allocation._idForNodeIPC(), nodeID=self._id) def fetch
SerialLog(self): connection = self._ipcClient.urlopen("/host/%s/serialLog" % self._id) try: return connection.read() finally: connection.close() def networkInfo(self): return self._info def answerDHCP(self, shou
ldAnswer): return self._ipcClient.call( 'node__answerDHCP', allocationID=self._allocation._idForNodeIPC(), nodeID=self._id, shouldAnswer=shouldAnswer)
AversivePlusPlus/AversivePlusPlus
tools/ik/src/kinematics/__init__.py
Python
bsd-3-clause
55
0
from chain import * from matr
ix_chain_element im
port *
albf/spitz
monitor/Exec.py
Python
gpl-2.0
245
0.008163
from MonitorData import * from Config import * a =
Config() Data = MonitorData(1, 10) Data.startAllNodes(a.subscription_id, a.certificate_path, a.lib_file, a.script, a.s
sh_user, a.ssh_pass, a.jm_address, a.jm_port, upgrade=True, verbose=True)
shumik/skencil-c
Sketch/UI/command.py
Python
gpl-2.0
7,045
0.042725
# Sketch - A Python-based interactive drawing program # Copyright (C) 1997, 1998, 2001 by Bernhard Herzog # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Library General Public # License as published by the Free Software Foundation; either # version 2 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty
of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Library General Public
License for more details. # # You should have received a copy of the GNU Library General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA from types import StringType, TupleType, FunctionType from Sketch import Publisher from Sketch.const import CHANGED, SELECTION from Sketch.warn import warn, warn_tb, INTERNAL # # Command Class # class Command(Publisher): def __init__(self, cmd_class, object): self.cmd_class = cmd_class self.object = object def __getattr__(self, attr): try: return getattr(self.cmd_class, attr) except AttributeError: if attr == 'button_name': return self.menu_name raise AttributeError, attr def get_method(self, path): if callable(path): return path method = self.object if type(path) != TupleType: path = (path,) for name in path: method = getattr(method, name) return method def Invoke(self, args = ()): if type(args) != TupleType: args = (args,) try: apply(self.get_method(self.command), self.args + args) except: warn_tb(INTERNAL) def Update(self): # XXX: bitmaps and key_strokes should probably be also changeable changed = self.set_name(self.get_name()) changed = self.set_sensitive(self.get_sensitive()) or changed changed = self.set_value(self.get_value()) or changed if changed: self.issue(CHANGED) def get_name(self): if self.name_cb: method = self.get_method(self.name_cb) if method: return method() return self.menu_name def set_name(self, menu_name = None): changed = self.menu_name != menu_name if changed: self.menu_name = menu_name return changed def get_sensitive(self): #print 'get_sensitive', self if self.sensitive_cb: method = self.get_method(self.sensitive_cb) if method: return method() else: warn(INTERNAL, 'no method for sensitive_cb (%s)', self.sensitive_cb) return 0 return 1 def set_sensitive(self, sensitive): changed = self.sensitive != sensitive if changed: self.sensitive = sensitive return changed def get_value(self): if self.value_cb: method = self.get_method(self.value_cb) if method: return method() return self.value def set_value(self, value): changed = self.value != value if changed: self.value = value return changed def GetKeystroke(self): return self.key_stroke def GetValue(self): return self.value def IsOn(self): return self.value == self.value_on def InContext(self): return 1 def set_bitmap(self, bitmap): if bitmap: changed = self.bitmap != bitmap self.bitmap = bitmap return changed return 0 def __repr__(self): return 'Command: %s' % self.name class CommandClass: cmd_class = Command # default attributes menu_name = '???' bitmap = None key_stroke = None name_cb = None sensitive_cb = None sensitive = 1 value_cb = None value = 0 value_on = 1 value_off = 0 is_command = 1 is_check = 0 invoke_with_keystroke = 0 callable_attributes = ('name_cb', 'sensitive_cb', 'value_cb') def __init__(self, name, command, subscribe_to = None, args = (), is_check = 0, **rest): self.name = name self.command = command self.subscribe_to = subscribe_to if type(args) != TupleType: self.args = (args,) else: self.args = args for key, value in rest.items(): setattr(self, key, value) if is_check: self.is_check = 1 self.is_command = 0 def InstantiateFor(self, object): cmd = self.cmd_class(self, object) if self.subscribe_to: if type(self.subscribe_to) == TupleType: attrs = self.subscribe_to[:-1] for attr in attrs: object = getattr(object, attr) subscribe_to = self.subscribe_to[-1] else: subscribe_to = self.subscribe_to object.Subscribe(subscribe_to, cmd.Update) return cmd def __repr__(self): return 'CommandClass: %s' % self.name class ObjectCommand(Command): def get_method(self, path): if type(path) == type(""): return self.object.document.GetObjectMethod(self.object_class,path) return Command.get_method(self, path) def Invoke(self, args = ()): if type(args) != TupleType: args = (args,) try: apply(self.object.document.CallObjectMethod, (self.object_class, self.menu_name, self.command) \ + self.args + args) except: warn_tb(INTERNAL) def get_sensitive(self): if self.object.document.CurrentObjectCompatible(self.object_class): return Command.get_sensitive(self) return 0 def GetKeystroke(self): return self.key_stroke def GetValue(self): return self.value def InContext(self): return self.object.document.CurrentObjectCompatible(self.object_class) def __repr__(self): return 'ObjectCommand: %s' % self.name class ObjectCommandClass(CommandClass): cmd_class = ObjectCommand object_class = None def SetClass(self, aclass): if self.object_class is None: self.object_class = aclass # # # class Commands: def Update(self): for item in self.__dict__.values(): item.Update() def __getitem__(self, key): return getattr(self, key) def Get(self, name): try: return getattr(self, name) except AttributeError: for item in self.__dict__.values(): if item.__class__ == Commands: cmd = item.Get(name) if cmd: return cmd else: return None # # # class Keymap: def __init__(self): self.map = {} def AddCommand(self, command): key_stroke = command.GetKeystroke() if key_stroke: if type(key_stroke) == StringType: key_stroke = (key_stroke,) for stroke in key_stroke: if self.map.has_key(stroke): # XXX: should be user visible if keybindings can be # changed by user warn(INTERNAL, 'Warning: Binding %s to %s replaces %s', command.name, stroke, self.map[stroke].name) self.map[stroke] = command def MapKeystroke(self, stroke): if self.map.has_key(stroke): return self.map[stroke] # # # def AddCmd(list, name, menu_name, method = None, **kw): if type(name) == FunctionType: name = name.func_name if method is None: method = name elif type(method) == FunctionType: method = method.func_name kw['menu_name'] = menu_name kw['subscribe_to'] = SELECTION cmd = apply(ObjectCommandClass, (name, method), kw) list.append(cmd)
Farforr/overlord
overlord/minions/api/v1/urls.py
Python
bsd-3-clause
478
0
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.conf.urls import url, include from rest_framework
.routers import DefaultRouter from rest_framework.urlpatterns import format_suffix_patterns from . import views router = DefaultRouter() router.register(r'minion', views.MinionViewSet, 'minion') router.register(r'data', views.MinionDataViewSet, 'data') urlpatterns = [ url( r'^',
include(router.urls) ), ]
arrayexpress/ae_auto
settings/settings_no_password.py
Python
apache-2.0
2,129
0.001879
__author__ = 'Ahmed G. Ali' ANNOTARE_DB = { 'name': 'annotare2', 'host': 'mysql-annotare-prod.ebi.ac.uk', 'port': 4444, 'username': '', 'password': '' } AE_AUTO_SUB_DB = { 'name': 'ae_autosubs', 'host': 'mysql-ae-autosubs-prod.ebi.ac.uk', 'port': 4091, 'username': '', 'password': '' } AE2 = { 'name': 'AE2PRO', 'host': 'ora-vm5-022.ebi.ac.uk', 'port': '1531', 'username': '', 'password': '' } BIOSTUDIES_DB = { 'name': 'BIOSDRO', 'host': 'ora-dlvm-010.ebi.ac.uk', 'port': '1521', 'username': '', 'password': '', 'is_service': True } ERA = { 'name': 'ERAPRO', 'host': 'ora-vm-009.ebi.ac.uk', 'port': '1541', 'username': '', 'password': '' } CONAN_DB = { 'name': 'AE2PRO', 'host': 'ora-vm5-022.ebi.ac.uk', 'port': '1531', 'username': '',
'password': '' } ANNOTARE_DIR = '/ebi/microarray/ma-exp/AutoSubmissions/annotare/' GEO_ACCESSIONS_PATH = '/ebi/microarray/home/fgpt/sw/lib/perl/supporting_files/geo_import_supporting_files/geo_accessions.yml' TEMP_FOLDER = '/nfs/ma/home/arrayexpress/ae_automation/ae_automation/tmp/' ADF_LOAD_DIR = '/nfs/ma/home/arrayexpress/ae2_production/data/ARRAY/GEOD' BASH_PATH = '/nfs/ma/home/arrayexpress/ae_automation/ae_automa
tion/env_bashrc' EXPERIMENTS_PATH = '/ebi/microarray/home/arrayexpress/ae2_production/data/EXPERIMENT/' ADF_DB_FILE = '/nfs/production3/ma/home/atlas3-production/sw/configs/adf_db_patterns.txt' ENA_SRA_URL = 'https://www.ebi.ac.uk/ena/submit/drop-box/submit/' \ '?auth=' ENA_SRA_DEV_URL = 'https://www-test.ebi.ac.uk/ena/submit/drop-box/submit/' \ '?auth=' ENA_FTP_URI = 'ftp://ftp.sra.ebi.ac.uk/vol1/fastq/' ENA_DIR = '/fire/staging/aexpress/' CONAN_URL = 'http://banana.ebi.ac.uk:14054/conan2/' CONAN_LOGIN_EMAIL = '' AUTOMATION_EMAIL = 'AE Automation<ae-automation@ebi.ac.uk>' SMTP = 'smtp.ebi.ac.uk' CURATION_EMAIL = '' GEO_SOFT_URL = 'ftp://ftp.ncbi.nih.gov/pub/geo/DATA/SOFT/by_%s/' ATLAS_CONTACT = {'name': 'Curators', 'email': ''} PMC_BASE_URL = 'https://www.ebi.ac.uk/europepmc/webservices/rest/'
ingkebil/trost
scripts/maintanance/count_all_tables.py
Python
gpl-2.0
217
0.018433
#!/usr/bin/env python
import sql import sys def main(argv): tables = sql.get_tables() for table in tables:
print "%s: %d" % (table, sql.count(table)) if __name__ == '__main__': main(sys.argv[1:])
willhardy/Adjax
website/urls.py
Python
bsd-3-clause
863
0.010429
from django.conf.urls.defaults import * urlpatterns = patterns('django.views.generic.simple', url(r'^$', 'redirect_to', {'url': '/what/'}, name="home"), url(r'^what/$', 'direct_to_template', {'template': 'what.html', 'extra_context': {'page': 'what'}}, name="what"), url(r'^how/$', 'direct_to_template', {'template': 'how.html', 'extra_context': {'page': 'how'}}, name="how"), url(r'^where/$', 'direct_to_template', {'template': 'where.html', 'extra_context': {'page': 'where'}}, name="where"), url(r'^who/$', 'direct_to_template', {'template': 'who.html', 'extra_context': {'page': 'who'}}, name="who"), (r'^demo/', include('basic.urls')), ) from django.conf import settings if settings.DEBUG: urlpatterns += patterns('django.views.static', (r'^media/(?P<path>.*)$', 'serve', {'document_root': settings.
MEDIA_ROOT}),
)
mikoim/funstuff
codecheck/codecheck-3608/app/plugins/gengo.py
Python
mit
676
0.003077
from datetime import datetime import rfGengou from . import PluginBase __all__ = ['Gengo'] class Gengo(PluginBase): def execute(self, args): if len(args) == 0: target
= datetime.now() elif len(args) == 1: target = datetime.strptime(args[0], '%Y/%m/%d') else: raise ValueError('wrong number of arguments are given') return '{:s}{:d}年{:d}月{:d}日'.format(*rfGengou.s2g(target)) def help(self
): return """[yyyy/mm/dd] Convert from string to Japanese Gengo. If string is not given, use current time. ex) > gengo 平成28年12月2日 > gengo 2000/01/01 平成12年1月1日 """
Teagan42/home-assistant
homeassistant/components/rflink/cover.py
Python
apache-2.0
5,550
0.000541
"""Support for Rflink Cover devices.""" import logging import voluptuous as vol from homeassistant.components.cover import PLATFORM_SCHEMA, CoverDevice from homeassistant.const import CONF_NAME, CONF_TYPE, STATE_OPEN import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity from . import ( CONF_ALIASES, CONF_DEVICE_DEFAULTS, CONF_DEVICES, CONF_FIRE_EVENT, CONF_GROUP, CONF_GROUP_ALIASES, CONF_NOGROUP_ALIASES, CONF_SIGNAL_REPETITIONS, DEVICE_DEFAULTS_SCHEMA, RflinkCommand, ) _LOGGER = logging.getLogger(__name__) TYPE_STANDARD = "standard" TYPE_INVERTED = "inverted" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional( CONF_DEVICE_DEFAULTS, default=DEVICE_DEFAULTS_SCHEMA({}) ): DEVICE_DEFAULTS_SCHEMA, vol.Optional(CONF_DEVICES, default={}): vol.Schema( { cv.string: { vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_TYPE): vol.Any(TYPE_STANDARD, TYPE_INVERTED), vol.Optional(CONF_ALIASES, default=[]): vol.All( cv.ensure_list, [cv.string] ), vol.Optional(CONF_GROUP_ALIASES, default=[]): vol.All( cv.ensure_list, [cv.string] ), vol.Optional(CONF_NOGROUP_ALIASES, default=[]): vol.All( cv.ensure_list, [cv.string] ), vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean, vol.Optional(CONF_SIGNAL_REPETITIONS): vol.Coerce(int), vol.Optional(CONF_GROUP, default=True): cv.boolean, } } ), } ) def entity_type_for_device_id(device_id): """Return entity class for protocol of a given device_id. Async friendly. """ entity_type_mapping = { # KlikAanKlikUit cover have the controls inverted "newkaku": TYPE_INVERTED } protocol = device_id.split("_")[0] return entity_type_mapping.get(protocol, TYPE_STANDARD) def entity_class_for_type(entity_type): """Translate entity type to entity class. Async friendly. """ entity_device_mapping = { # default cover implementation TYPE_STANDARD: RflinkCover, # cover with open/close commands inverted # like KAKU/COCO ASUN-650 TYPE_INVERTED: InvertedRflinkCover, } return entity_device_mapping.get(entity_type, RflinkCover) def devices_from_config(domain_config): """Parse configuration and add Rflink cover devices.""" devices = [] for device_id, config in domain_config[CONF_DEVICES].items(): # Determine what kind of entity to create, RflinkCover # or InvertedRflinkCover if CONF_TYPE in config: # Remove type from config to not pass it as and argument # to entity instantiation entity_type = config.pop(CONF_TYPE) else: entity_type = entity_type_for_device_id(device_id) entity_class = entity_class_for_type(entity_type) device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config) device = entity_class(device_id, **device_config) devices.append(device) return devices async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Rflink cover platform.""" async_add_entities(devices_from_config(config)) class RflinkCover(RflinkCommand, CoverDevice, RestoreEntity): """Rflink entity which can switch on/stop/off (eg: cover).""" async def async_added_to_hass(self): """Restore RFLink cover state (OPEN/CLOSE).""" await super().async_added_to_hass() old_state = await self.async_get_last_state() if old_state is not None: self._state = old_state.state == STATE_OPEN def _handle_event(self, event): """Adjust state if Rflink picks up a remote command for this device.""" self.cancel_queued_send_commands() command = event["command"] if command in ["on", "allon", "up"]: self._state = True elif command in ["off", "alloff", "down"]: self._state = False @property def should_poll(self): """No polling available in RFlink cover.""" return False @property def is_closed(self): """Return if the cover is closed.""" return not self._state @property def assumed_state(self): """Return True because covers can be stopped midway.""" return True async def async_close_cover(self, **kwargs): """Turn the device close.""" await self._async_handle_command("close_cover") async def async_open_cover(self, **kwargs): """Turn the device open.""" await self._async_handle_command("open_cover") async def async_stop_cover(self, **kwargs): """
Turn the device stop.""" await self._async_handle_command("stop_cover") class InvertedRflinkCover(RflinkCover): """Rflink c
over that has inverted open/close commands.""" async def _async_send_command(self, cmd, repetitions): """Will invert only the UP/DOWN commands.""" _LOGGER.debug("Getting command: %s for Rflink device: %s", cmd, self._device_id) cmd_inv = {"UP": "DOWN", "DOWN": "UP"} await super()._async_send_command(cmd_inv.get(cmd, cmd), repetitions)
culturagovbr/sistema-nacional-cultura
gestao/tests/test_componentes.py
Python
agpl-3.0
11,165
0.004498
import pytest import datetime from django.template import Context from django.template import Engine from django.template import Template from django.template import TemplateDoesNotExist from django.urls import reverse from django.core.files.uploadedfile import SimpleUploadedFile from planotrabalho.models import PlanoTrabalho from gestao.forms import DiligenciaForm, DiligenciaComponenteForm from gestao.models import Diligencia from adesao.models import SistemaCultura from model_mommy import mommy pytestmark = pytest.mark.django_db @pytest.fixture def context(login): """ Retorna um contexto básico necessário para rendereziar o template de diligência """ context = Context({'usuario_id': login.id}) return context @pytest.fixture def engine(): """ Configura a engine de Templates do Django """ engine = Engine.get_default() return engine @pytest.fixture def template(engine): """ Injeta o template 'gestao/diligencia/diligencia.html' como um objeto Template pronto para ser usado.""" template = engine.get_template(template_name='diligencia.html') return template def test_existencia_template_diligencia(engine, client): """ Testando existência do template para criação da diligência""" try: template = engine.get_template(template_name='diligencia.html') except TemplateDoesNotExist: template = '' assert isinstance(template, Template) def test_retorno_do_botao_cancelar_de_diligencia(client, template, context, sistema_cultura): """ Testa se o botão cancelar presente na página de diligência retorna para a página de detalhe do município correspondente""" context['sistema_cultura'] = sistema_cultura rendered_template = template.render(context) url = reverse('gestao:detalhar', kwargs={"cod_ibge": sistema_cultura.ente_federado.cod_ibge}) html = "<a href=\"{url}\" class=\"btn btn-secondary pull-right\">Cancelar</a>".format(url=url) assert html in rendered_template def test_botao_acao_enviar_diligencia_template(template, client, context, sistema_cultura): """Testa existencia dos botão de enviar no template de diligência""" context['sistema_cultura'] = sistema_cultura rendered_template = template.render(context) assert "<button class=\"btn btn-primary pull-right\" type=\"submit\">Enviar</button>" in rendered_template def test_gestao_template(template, client, context, sistema_cultura): """Testa se o template da gestão está sendo carregado""" context['sistema_cultura'] = sistema_cultura rendered_template = template.render(context) assert "<!DOCTYPE html>" in rendered_template def test_informacoes_arquivo_enviado(template, client, context, sistema_cultura): """Testa se o template exibe as informações do arquivo enviado"""
context['sistema_cultura'] = sistema_cultura rendered_template = template.render(context) assert context['sistema_cultura'].ente_federado.nome in rendered_template def test_opcoes_de_classificacao_da_diligencia(template, client, context, login, sistema_cultura): """Testa se a Classificação(Motivo) apresenta as opções conforme a especificação.""" opcoes = ("Arquivo danificado", "Arquivo incompleto", "Arquivo incorreto" )
form = DiligenciaComponenteForm(componente='orgao_gestor', arquivo="arquivo", usuario=login, sistema_cultura=sistema_cultura) context['form'] = form context['sistema_cultura'] = sistema_cultura context['componente'] = mommy.make("Componente") rendered_template = template.render(context) assert opcoes[0] in rendered_template assert opcoes[1] in rendered_template assert opcoes[2] in rendered_template def test_opcoes_em_um_dropdown(template, client, context, login, sistema_cultura): """Testa se as Classificações(Motivo) estão presentes dentro de um dropdown.""" opcoes = [ {"description": "Em preenchimento", "value": "0"}, {"description": "Avaliando anexo", "value": "1"}, {"description": "Conclu[ida", "value": "2"}, {"description": "Arquivo aprovado com ressalvas", "value": "3"}, {"description": "Arquivo danificado", "value": "4"}, {"description": "Arquivo incompleto", "value": "5"}, {"description": "Arquivo incorreto", "value": "6"} ] form = DiligenciaComponenteForm(componente='orgao_gestor', arquivo="arquivo", usuario=login, sistema_cultura=sistema_cultura) context['form'] = form context['sistema_cultura'] = sistema_cultura context['componente'] = mommy.make("Componente") rendered_template = template.render(context) assert "<select name=\"classificacao_arquivo\" class=\"form-control form-control-sm\" id=\"id_classificacao_arquivo\">" in rendered_template for opcao in opcoes: assert "<option value=\"{value}\">{description}</option>".format(value=opcao['value'], description=opcao['description']) assert "</select>" in rendered_template @pytest.mark.skip def test_informacoes_do_historico_de_diligecias_do_componente(template, client, context, sistema_cultura): """ Testa informações referente ao histórico de diligências do componente. """ diligencias = [ {"usuario": {"nome_usuario": "Jaozin Silva" }, "classificacao_arquivo": {"descricao": "Arquivo Danificado"}, "data_criacao": "10/08/2018", "texto_diligencia": "Arquivo danificado, corrompido"}, {"usuario": {"nome_usuario": "Pedrin Silva" }, "classificacao_arquivo": {"descricao": "Arquivo incompleto"}, "data_criacao": "10/08/2018", "texto_diligencia": "Arquivo incompleto, informações faltando"}, {"usuario": {"nome_usuario": "Luizin Silva" }, "classificacao_arquivo": {"descricao": "Arquivo incorreto"}, "data_criacao": "10/08/2018", "texto_diligencia": "Arquivo com informações incorretas"} ] context['historico_diligencias'] = diligencias context['sistema_cultura'] = sistema_cultura rendered_template = template.render(context) for diligencia in diligencias: assert diligencia['usuario']["nome_usuario"] in rendered_template assert diligencia['classificacao_arquivo']['descricao'] in rendered_template assert diligencia['data_criacao'] in rendered_template assert diligencia['texto_diligencia'] in rendered_template def test_formatacao_individual_das_diligencias_no_historico(template, client, context, sistema_cultura): """Testa a formatacao de cada uma das diligências dentro do bloco de Histórico de Diligências.""" sistema_cultura.legislacao.arquivo = SimpleUploadedFile( "componente.txt", b"file_content", content_type="text/plain" ) sistema_cultura.legislacao.save() sistema_cultura.legislacao.refresh_from_db() sistema_cultura.legislacao.diligencia = mommy.make("DiligenciaSimples", texto_diligencia="Arquivo com informações incorretas", data_criacao=datetime.date(2018, 6, 25)) context['historico_diligencias_componentes'] = [sistema_cultura.legislacao] context['sistema_cultura'] = sistema_cultura context['componente'] = sistema_cultura.legislacao rendered_template = template.render(context) diligencia = sistema_cultura.legislacao.diligencia assert "<li class=\"list-group-item\"><b>Componente:</b> {componente}</li>".format(componente='Lei Sistema') in rendered_template assert "<li class=\"list-group-item\"><b>Usuário:</b> {nome}</li>".format(nome=diligencia.usuario.nome_usuario) in rendered_template assert "<li class=\"list-group-item\"><b>Data:</b> 25 de Junho de 2018</li>" in rendered_template assert "<li class=\"list-group-item\"><b>Resumo:</b> {resumo}</li>".format(resumo=diligencia.texto_diligencia) in rendered_template assert "<li class=\"list-group-item\"><b>Motivo:</b> {motivo}</li>".format(motivo=diligencia.get_classificacao_arquivo_display()) in rendered_template def test_renderizacao_js_form_diligencia(template, client, context, sistema_cultura, login): """Testa se o javascript do form está sendo renderizado corretamente""" form =
PaulSec/API-Yatedo
yatedoAPI.py
Python
mit
2,867
0.002093
""" This is the (unofficial) Python API for Yatedo.com Website. Using this code, you can manage to retrieve employees from a specific company """ import requests from bs4 import BeautifulSoup import re class YatedoAPI(object): """ YatedoAPI Main Handler """ _instance = None _verbose = False def __init__(self, arg=None): pass def __new__(cls, *args, **kwargs): """ __new__ builtin """ if not cls._instance: cls._instance = super(YatedoAPI, cls).__new__( cls, *args, **kwargs) if (args and args[0] and args[0]['verbose']): cls._verbose = True return cls._instance def display_message(self, s): if (self._verbose): print '[verbose] %s' % s def get_number_of_results(self, company_name): url = 'http://www.yatedo.com/s/companyname:(%s)/normal' % (company_name) self.display_message(url) req = requests.get(url) if 'did not match any' in req.content: return 0 return re.search(r"<span id=\"snb_elm_m\">([\d\s]+)</span>", req.content).group(1).replace(' ', '') def search(self, company_name, start_index, page): url = 'http://www.yatedo.com/search/profil?c=normal&q=companyname:(%s)&rlg=en&uid=-1&start=%s&p=%s' % (company_name, start_index, page) self.display_message(url) req = requests.get(url) soup = BeautifulSoup(req.content) res = [] for contact in soup.findAll('div', attrs={'class': 'span4 spanalpha ycardholder'}): contact_name = contact.find('a', attrs={}) contact_job = contact.find('div', attrs={'class': 'ytdmgl'}) contact_name = contact_name.text contact_job = contact_job.text[:-1] self.display_message("%s (%s)" % (contact_name, contact_job)) # creating structure for the contact
contact = {} contact['name'] = contact_name contact['job'] = contact_job
res.append(contact) return res def get_employees(self, company_name): self.display_message('Fetching result for company "%s"' % (company_name)) num = int(self.get_number_of_results(company_name)) if num == 0: self.display_message('Stopping here, no results for %s' % company_name) return [] res = {} res['company_name'] = company_name res['employees'] = [] self.display_message('Found %s results, collecting them..' % (num)) i = 0 while i * 16 < num: new_employees = self.search(company_name, i * 16, i + 1) for employee in new_employees: res['employees'].append(employee) i = i + 1 return res # return json.dumps(res)
dimagi/commcare-hq
corehq/apps/domain/utils.py
Python
bsd-3-clause
3,824
0.001569
import logging import os import re import sys from collections import Counter import simplejson from django.conf import settings from memoized import memoized from corehq.apps.domain.dbaccessors import iter_all_domains_and_deleted_domains_with_name from corehq.apps.domain.extension_points import custom_domain_module from corehq.util.test_utils import unit_testing_only from corehq.apps.domain.models import Domain from corehq.apps.es import DomainES from corehq.util.quickcache import quickcache ADM_DOMAIN_KEY = 'ADM_ENABLED_DOMAINS' new_domain_re = r"(?:[a-z0-9]+\-)*[a-z0-9]+" # lowercase letters, numbers, and '-' (at most one between "words") grandfathered_domain_re = r"[a-z0-9\-\.:]+" legacy_domain_re = r"[\w\.:-]+" domain_url_re = re.compile(r'^/a/(?P<domain>%s)/' % legacy_domain_re) logger = logging.getLogger('domain') @memoized def get_custom_domain_module(domain): if domain in settings.DOMAIN_MODULE_MAP: return settings.DOMAIN_MODULE_MAP[domain] return custom_domain_module(domain) def normalize_domain_name(domain): if domain: normalized = domain.replace('_', '-').lower() if settings.DEBUG: assert(re.match('^%s$' % grandfathered_domain_re, normalized)) return normalized return domain def get_domain_from_url(path): try: domain, = domain_url_re.search(path).groups() except Exception: domain = None return domain @quickcache(['domain']) def domain_restricts_superusers(domain): domain_obj = Domain.get_by_name(domain) if not domain_obj: return False return domain_obj.restrict_superusers def get_domains_created_by_user(creating_user): query = DomainES().created_by_user(creating_user) data = query.run() return [d['name'] for d in data.hits] @quickcache([], timeout=3600) def domain_name_stop_words(): path = os.path.join(os.path.dirname(__file__), 'static', 'domain', 'json') with open(os.path.join(path, 'stop_words.yml')) as f: return {word.strip() for word in f.readlines() if word[0] != '#'} def get_domain_url_slug(hr_name, max_length=25, separator='-'): from dimagi.utils.name_to_url import name_to_url name = name_to_url(hr_name, "project") if len(name) <= max_length: return name stop_words = domain_name_stop_words() words = [word for word in name.split('-') if word not in stop_words] words = iter(words) try: text = next(words) except StopIteration: return '' for word in words: if len(text + separator + word) <= max_length: text += separator + word return text[:max_length] def guess_domain_language(domain_name): """ A domain does not have a default language, but its apps do. Return the language code of the most common default language across apps. """ domain_obj = Domain.get_by_name(domain_name) counter = Counter([app.default_language for app in domain_obj.applications() if not app.is_remote_app()]) return counter.most_common(1)[0][0] if counter else 'en' def silence_during_tests(): if settings.UNIT_TESTING: return open(os.devnull, 'w') else: return sys.stdout @unit_testing_only def clear_domain_names(*domain_names): for domain_names in domain_n
ames: for domain in iter_all_domains_and_deleted_domains_with_name(domain_names): domain.delete() def get_serializable_wire_invoice_general_credit(general_credit): if general_credit > 0: return [{ 'type': 'General Credits', 'amount': simplejson.dumps(general_
credit, use_decimal=True) }] return [] def log_domain_changes(user, domain, new_obj, old_obj): logger.info(f"{user} changed UCR permsissions {old_obj} to {new_obj} for domain {domain} ")
wzyy2/HackRFWebtools
func.py
Python
gpl-2.0
6,028
0.019741
import threading,signal ,traceback import random,ctypes,math,time,copy,Queue import numpy from dsp import common from GlobalData import * from common import Rx,Tx _funclist = {} def reg_func(func,param_types,param_defaults): ret = False try: _funclist[func.__name__]=(func,param_types,param_defaults) ret = True except: traceback.print_exc() return ret def get_func(name): if name in _funclist: return _funclist[name] else: return None def call_func(name,params): ret = None try: ret = _funclist[name][0](params) except: traceback.print_exc() return ret ### api - program # params:page as int ,count as int # ret:total_page as int,total as int,programs as array def hackrf_reconf(): hackrf.set_freq(hackrf_settings.centre_frequency) hackrf.set_sample_rate(hackrf_settings.sample_rate) hackrf_settings.bb_bandwidth = hackrf.compute_baseband_filter_bw_round_down_lt(hackrf_settings.sample_rate) hackrf.set_baseband_filter_bandwidth(hackrf_settings.bb_bandwidth) hackrf.set_amp_enable(False) hackrf.set_lna_gain(hackrf_settings.if_gain) hackrf.set_vga_gain(hackrf_settings.bb_gain) hackrf_settings.name = hackrf.NAME_LIST[hackrf.board_id_read()] hackrf_settings.version = hackrf.version_string_read() def test(params): ret = dict() ret['count'] = 100
ret['retstr'] = "hello word" return ret def reset(params): ret = dict() try: stop(None) hackrf_settings.current_status = 0 hackrf.close() hackrf.open() hackrf_reconf() ret['ret'] = 'ok' except: ret['ret'] = 'fail' return ret def get_board_data(params): ret = dict() ret['board_name'] = hackrf_settings.name ret['version'] = hackrf_settings.version ret['serial_nr'] = hackrf_settings.serial_num return ret
def set_centre_frequency(params): ret = dict() hackrf_settings.centre_frequency = int(params['centre_frequency']) hackrf.set_freq(hackrf_settings.centre_frequency) return ret def waterfall(params): ret = dict() ret['centre_frequency'] = hackrf_settings.centre_frequency ret['sample_rate'] = hackrf_settings.sample_rate ret['data'] = Rx.get_spectrum() ret['exit'] = 0 return ret def get_control_options(params): ret = dict() ret['centre_frequency'] =hackrf_settings.centre_frequency ret['sample_rate'] =hackrf_settings.sample_rate ret['rf_gain'] = hackrf_settings.rf_gain ret['if_gain'] = hackrf_settings.if_gain ret['bb_gain'] = hackrf_settings.bb_gain ret['demodulator'] = hackrf_settings.modulation ret['bb_bandwidth'] = hackrf_settings.bb_bandwidth ret['squelch_threshold'] = 10 ret['current_status'] = hackrf_settings.current_status ret['fft_rate'] = hackrf_settings.fft_rate ret['fft_size'] = hackrf_settings.fft_size return ret def demodulator(params): ret = dict() print params['demodulator'] hackrf_settings.modulation = params['demodulator'] return ret def set_bb_bandwidth(params): ret = dict() hackrf_settings.bb_bandwidth = int(params['value']) hackrf.set_baseband_filter_bandwidth(hackrf_settings.bb_bandwidth) return ret def set_sample_rate(params): ret = dict() hackrf_settings.sample_rate = int(params['value']) hackrf.set_sample_rate(hackrf_settings.sample_rate) #automatically set baseband bandwidth hackrf_settings.bb_bandwidth = hackrf.compute_baseband_filter_bw_round_down_lt(hackrf_settings.sample_rate) hackrf.set_baseband_filter_bandwidth(hackrf_settings.bb_bandwidth) return ret def set_rf_gain(params): ret = dict() hackrf_settings.rf_gain = int(params['value']) if hackrf_settings.rf_gain != 0: hackrf.set_amp_enable(True) else: hackrf.set_amp_enable(False) return ret def set_if_gain(params): ret = dict() hackrf_settings.if_gain = int(params['value']) hackrf.set_lna_gain(hackrf_settings.if_gain) return ret def set_bb_gain(params): ret = dict() hackrf_settings.bb_gain = int(params['value']) hackrf.set_vga_gain(hackrf_settings.bb_gain) return ret def set_fft_size(params): ret = dict() hackrf_settings.fft_size = int(params['value']) return ret def set_fft_rate(params): ret = dict() hackrf_settings.fft_rate = int(params['value']) return ret def start_rx(params): ret = dict() if hackrf_settings.current_status == 1: return hackrf_settings.rx_thread = Rx.RxThread() hackrf_settings.rx_thread.setDaemon(True) hackrf_settings.rx_thread.start() if hackrf_settings.current_status == 0: hackrf.start_rx_mode(Rx.rx_callback_fun) hackrf_settings.current_status = 1 return ret def start_tx(params): ret = dict() if hackrf_settings.current_status == 2: return if hackrf_settings.current_status == 0: hackrf.start_tx_mode(rx_callback_fun) hackrf_settings.current_status = 2 return ret def stop(params): ret = dict() if hackrf_settings.current_status == 1: hackrf_settings.rx_thread.running = False hackrf_settings.rx_thread.join() hackrf.stop_rx_mode() hackrf.close() hackrf.open() elif hackrf_settings.current_status == 2: hackrf.stop_tx_mode() hackrf_settings.current_status = 0 return ret reg_func(test,{},{}) reg_func(get_board_data,{},{}) reg_func(set_centre_frequency,{},{}) reg_func(set_sample_rate,{},{}) reg_func(get_control_options,{},{}) reg_func(demodulator,{},{}) reg_func(set_bb_bandwidth,{},{}) reg_func(set_rf_gain,{},{}) reg_func(set_if_gain,{},{}) reg_func(set_bb_gain,{},{}) reg_func(waterfall,{},{}) reg_func(set_fft_size,{},{}) reg_func(set_fft_rate,{},{}) reg_func(start_rx,{},{}) reg_func(start_tx,{},{}) reg_func(stop,{},{}) reg_func(reset,{},{})
joopert/home-assistant
tests/components/solarlog/test_config_flow.py
Python
apache-2.0
4,982
0.000602
"""Test the solarlog config flow.""" from unittest.mock import patch import pytest from homeassistant import data_entry_flow from homeassistant import config_entries, setup from homeassistant.components.solarlog import config_flow from homeassistant.components.solarlog.const import DEFAULT_HOST, DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME from tests.common import MockConfigEntry, mock_coro NAME = "Solarlog test 1 2 3" HOST = "http://1.1.1.1" async def test_form(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch( "homeassistant.components.solarlog.config_flow.SolarLogConfigFlow._test_connection", return_value=mock_coro({"title": "solarlog test 1 2 3"}), ), patch( "homeassistant.components.solarlog.async_setup", return_value=mock_coro(True) )
as mock_setup, patch( "homeassistant.components.solarlog.async_setup_entry", return_value=mock_coro(True), ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": HOST,
"name": NAME} ) assert result2["type"] == "create_entry" assert result2["title"] == "solarlog_test_1_2_3" assert result2["data"] == {"host": "http://1.1.1.1"} await hass.async_block_till_done() assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @pytest.fixture(name="test_connect") def mock_controller(): """Mock a successfull _host_in_configuration_exists.""" with patch( "homeassistant.components.solarlog.config_flow.SolarLogConfigFlow._test_connection", side_effect=lambda *_: mock_coro(True), ): yield def init_config_flow(hass): """Init a configuration flow.""" flow = config_flow.SolarLogConfigFlow() flow.hass = hass return flow async def test_user(hass, test_connect): """Test user config.""" flow = init_config_flow(hass) result = await flow.async_step_user() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "user" # tets with all provided result = await flow.async_step_user({CONF_NAME: NAME, CONF_HOST: HOST}) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solarlog_test_1_2_3" assert result["data"][CONF_HOST] == HOST async def test_import(hass, test_connect): """Test import step.""" flow = init_config_flow(hass) # import with only host result = await flow.async_step_import({CONF_HOST: HOST}) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solarlog" assert result["data"][CONF_HOST] == HOST # import with only name result = await flow.async_step_import({CONF_NAME: NAME}) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solarlog_test_1_2_3" assert result["data"][CONF_HOST] == DEFAULT_HOST # import with host and name result = await flow.async_step_import({CONF_HOST: HOST, CONF_NAME: NAME}) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solarlog_test_1_2_3" assert result["data"][CONF_HOST] == HOST async def test_abort_if_already_setup(hass, test_connect): """Test we abort if the device is already setup.""" flow = init_config_flow(hass) MockConfigEntry( domain="solarlog", data={CONF_NAME: NAME, CONF_HOST: HOST} ).add_to_hass(hass) # Should fail, same HOST different NAME (default) result = await flow.async_step_import( {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "already_configured" # Should fail, same HOST and NAME result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME}) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"] == {CONF_HOST: "already_configured"} # SHOULD pass, diff HOST (without http://), different NAME result = await flow.async_step_import( {CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solarlog_test_7_8_9" assert result["data"][CONF_HOST] == "http://2.2.2.2" # SHOULD pass, diff HOST, same NAME result = await flow.async_step_import( {CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["title"] == "solarlog_test_1_2_3" assert result["data"][CONF_HOST] == "http://2.2.2.2"
dubrayn/dubrayn.github.io
examples/multiprocessing/example9.py
Python
mit
747
0.037483
#
!/usr/bin/env python3 import logging import multiprocessing import time logging.basicConfig(level = logging.DEBUG, format = '%(asctime)s.%(msecs)03d [%(levelname)s] (%(process)d) %(message)s', datefmt='%Y-%m-%d %H:%M:%S') def worker(n, a, l): logging.debug("lock.acquire()") l.acquire() logging.debug(" lock acqui
red !") b = a.value time.sleep(0.2) a.value = b + 1 logging.debug(" worker %d: a = %d" % (n, a.value)) logging.debug(" lock.release()") l.release() logging.debug(" lock released !") logging.debug("start") lock = multiprocessing.Lock() a = multiprocessing.Value('i', 0, lock = False) for i in range(3): multiprocessing.Process(name = 'THREAD-%01d' % (i), target = worker, args = (i, a, lock)).start()