repo_name
stringlengths
5
100
path
stringlengths
4
231
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
6
947k
score
float64
0
0.34
prefix
stringlengths
0
8.16k
middle
stringlengths
3
512
suffix
stringlengths
0
8.17k
svastm/mbed
tools/export/codered.py
Python
apache-2.0
1,779
0.001124
""" mbed SDK Copyright (c) 2011-2016 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from exporters import Exporter from os.path import splitext, basename class CodeRed(Exporter): NAME = 'CodeRed' TOOLCHAIN = 'GCC_CR' MBED_CONFIG_HEADER_SUPPORTED = True TARGETS = [ 'LPC1768', 'LPC4088', 'LPC4088_DM', 'LPC4330_M4', 'LPC1114', 'LPC11U35_401', 'LPC11U35_501', 'UBLOX_C027', 'ARCH_PRO', 'LPC1549', 'LPC11U68', 'LPCCAPPUCCINO', 'LPC824', 'LPC11U37H_401', ] def generate(self): libraries = [] for lib in self.resources.libraries: l, _ = splitext(basename(lib)) libraries.append(l[3:]) ctx = {
'name': self.project_name, 'i
nclude_paths': self.resources.inc_dirs, 'linker_script': self.resources.linker_script, 'object_files': self.resources.objects, 'libraries': libraries, 'symbols': self.toolchain.get_symbols() } ctx.update(self.flags) self.gen_file('codered_%s_project.tmpl' % self.target.lower(), ctx, '.project') self.gen_file('codered_%s_cproject.tmpl' % self.target.lower(), ctx, '.cproject')
coxmediagroup/googleads-python-lib
examples/adwords/v201506/advanced_operations/add_click_to_download_ad.py
Python
apache-2.0
3,318
0.006329
#!/usr/bin/python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This code example creates a click-to-download ad in a given ad group. This type of ad is also known as an app promotion ad. To list ad groups, run get_ad_groups.py. The LoadFromStorage method is pulling credentials and properties from a "googleads.yaml" file. By default, it looks for this file in your home directory. For more information, see the "Caching authentication information" section of our README. Tags: AdGroupAdService.mutate Api: AdWordsOnly """ __author__ = 'Joseph DiLallo' from googleads import adwords AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE' def main(client, ad_group_id): # Initialize appropriate service. ad_group_ad_service = client.GetService('AdGroupAdService', version='v201506') # Create the template elements for the ad. You can refer to # https://developers.google.com/adwords/api/docs/appendix/templateads # for the list of available template fields. ad_data = { 'uniqueName': 'adData', 'fields': [ { 'name': 'headline', 'fieldText': 'Enjoy your drive in Mars', 'type': 'TEXT' }, { 'name': 'description1', 'fieldText': 'Realistic physics simulation', 'type': 'TEXT' }, { 'name': 'description2', 'fieldText': 'Race against players online', 'type': 'TEXT' },
{ 'nam
e': 'appId', 'fieldText': 'com.example.demogame', 'type': 'TEXT' }, { 'name': 'appStore', 'fieldText': '2', 'type': 'ENUM' } ] } # Create click to download ad. click_to_download_app_ad = { 'xsi_type': 'TemplateAd', 'name': 'Ad for demo game', 'templateId': '353', 'finalUrls': [ 'http://play.google.com/store/apps/details?id=com.example.demogame' ], 'displayUrl': 'play.google.com', 'templateElements': [ad_data] } # Create ad group ad. ad_group_ad = { 'adGroupId': ad_group_id, 'ad': click_to_download_app_ad, # Optional. 'status': 'PAUSED' } # Add ad. ads = ad_group_ad_service.mutate([ {'operator': 'ADD', 'operand': ad_group_ad} ]) # Display results. if 'value' in ads: for ad in ads['value']: print ('Added new click-to-download ad to ad group ID \'%s\' ' 'with URL \'%s\'.' % (ad['ad']['id'], ad['ad']['finalUrls'][0])) else: print 'No ads were added.' if __name__ == '__main__': # Initialize client object. adwords_client = adwords.AdWordsClient.LoadFromStorage() main(adwords_client, AD_GROUP_ID)
dcos/dcos-launch
dcos_launch/gcp.py
Python
apache-2.0
4,208
0.002852
""" Launcher functionality for the Google Compute Engine (GCE) """ import json import logging import os from dcos_launch import onprem, util from dcos_launch.platforms import gcp from dcos_test_utils.helpers import Host from googleapiclient.errors import HttpError log = logging.getLogger(__name__) def get_credentials(env=None) -> tuple: path = None if env is None: env = os.environ.copy() if 'GCE_CREDENTIALS' in env: json_credentials = env['GCE_CREDENTIALS'] elif 'GOOGLE_APPLICATION_CREDENTIALS' in env: path = env['GOOGLE_APPLICATION_CREDENTIALS'] json_credentials = util.read_file(path) else: raise util.LauncherError( 'MissingParameter', 'Either GCE_CREDENTIALS or GOOGLE_APPLICATION_CREDENTIALS must be set in env') return json_credentials, path class OnPremLauncher(onprem.AbstractOnpremLauncher): # Launches a homogeneous cluster of plain GMIs intended for onprem DC/OS def __init__(self, config: dict, env=None): creds_string, _ = get_credentials(env) self.gcp_wrapper = gcp.GcpWrapper(json.loads(creds_string)) self.config = config @property def deployment(self): """ Builds a BareClusterDeployment instance with self.config, but only returns it successfully if the corresponding real deployment (active machines) exists and doesn't contain any errors. """ try: deployment = gcp.BareClusterDeployment(self.gcp_wrapper, self.config['deployment_name'], self.config['gce_zone']) info = deployment.get_info() errors = info['operation'].get('error') if errors: raise util.LauncherError('DeploymentContainsErrors', str(errors)) return deployment except HttpError as e: if e.resp.status == 404: raise util.LauncherError('DeploymentNotFound', "The deployment you are trying to access doesn't exist") from e raise e def create(self) -> dict: self.key_helper() node_count = 1 + (self.config['num_masters'] + self.config['num_public_agents'] + self.config['num_private_agents']) gcp.BareClusterDeployment.create( self.gcp_wrapper, self.config['deployment_name'], self.config['gce_zone'], node_count, self.config['disk_size'], self.config['disk_type'], self.config['source_image'], self.config['machine_type'], self.config['image_project'], self.config['ssh_user'], self.config['ssh_public_key'], self.config['disable_updates'], self.config['use_preemptible_vms'], tags=self.config.get('tags')) return self.config def key_helper(self): """ Generates a public key and a private key and stores them in the config. The public key will be applied to all the instances in the deployment later on when wait() is called. """ if self.config['key_helper']: private_key,
public_key = util.generate_rsa_keypair() self.config['ssh_private_key'] = private_key.decode() self.config['ssh_public_key'] = public_key.decode() def get_cluster_hosts(self) -> [Host]: return list(self.d
eployment.hosts)[1:] def get_bootstrap_host(self) -> Host: return list(self.deployment.hosts)[0] def wait(self): """ Waits for the deployment to complete: first, the network that will contain the cluster is deployed. Once the network is deployed, a firewall for the network and an instance template are deployed. Finally, once the instance template is deployed, an instance group manager and all its instances are deployed. """ self.deployment.wait_for_completion() def delete(self): """ Deletes all the resources associated with the deployment (instance template, network, firewall, instance group manager and all its instances. """ self.deployment.delete()
Khushbu27/Tutorial
swift/obj/mem_server.py
Python
apache-2.0
4,314
0
# Copyright (c) 2010-2013 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ In-Memory Object Server for Swift """ import os from swift import gettext_ as _ from eventlet import Timeout from swift.common.bufferedhttp import http_connect from swift.common.exceptions import ConnectionTimeout from swift.common.http import is_success from swift.obj.mem_diskfile import InMemoryFileSystem from swift.obj import server class ObjectController(server.ObjectController): """ Implements the WSGI application for the Swift In-Memory Object Server. """ def setup(self, conf): """ Nothing specific to
do for the in-memory version. :param conf: WSGI configuration parameter """ self._filesystem = InMemoryFileSystem() def get_diskfile(self, device, partition, account, container, obj, **kwargs): """ Utility method for instantiating a DiskFile object supporting a given REST API. An implementation of the object server that wants to use a different DiskFile class would simply over-ride this method to provid
e that behavior. """ return self._filesystem.get_diskfile(account, container, obj, **kwargs) def async_update(self, op, account, container, obj, host, partition, contdevice, headers_out, objdevice, policy_idx): """ Sends or saves an async update. :param op: operation performed (ex: 'PUT', or 'DELETE') :param account: account name for the object :param container: container name for the object :param obj: object name :param host: host that the container is on :param partition: partition that the container is on :param contdevice: device name that the container is on :param headers_out: dictionary of headers to send in the container request :param objdevice: device name that the object is in :param policy_idx: the associated storage policy index """ headers_out['user-agent'] = 'obj-server %s' % os.getpid() full_path = '/%s/%s/%s' % (account, container, obj) if all([host, partition, contdevice]): try: with ConnectionTimeout(self.conn_timeout): ip, port = host.rsplit(':', 1) conn = http_connect(ip, port, contdevice, partition, op, full_path, headers_out) with Timeout(self.node_timeout): response = conn.getresponse() response.read() if is_success(response.status): return else: self.logger.error(_( 'ERROR Container update failed: %(status)d ' 'response from %(ip)s:%(port)s/%(dev)s'), {'status': response.status, 'ip': ip, 'port': port, 'dev': contdevice}) except (Exception, Timeout): self.logger.exception(_( 'ERROR container update failed with ' '%(ip)s:%(port)s/%(dev)s'), {'ip': ip, 'port': port, 'dev': contdevice}) # FIXME: For now don't handle async updates def REPLICATE(self, request): """ Handle REPLICATE requests for the Swift Object Server. This is used by the object replicator to get hashes for directories. """ pass def app_factory(global_conf, **local_conf): """paste.deploy app factory for creating WSGI object server apps""" conf = global_conf.copy() conf.update(local_conf) return ObjectController(conf)
manashmndl/LearningPyQt
pyqt/chap05/numberformatdlg2.py
Python
mit
4,502
0.002221
#!/usr/bin/env python # Copyright (c) 2008-14 Qtrac Ltd. All rights reserved. # This program or module is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 2 of the License, or # version 3 of the License, or (at your option) any later version. It is # provided for educational purposes and is distributed in the hope that # it will be useful, but WITHOUT ANY WARRANTY; without even the implied # warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See # the GNU General Public License for more details. from __future__ import division from __future__ import print_function from __future__ import unicode_literals from future_builtins import * from PyQt4.QtCore import (QRegExp, Qt) from PyQt4.QtCore import pyqtSignal as Signal from PyQt4.QtGui import (QCheckBox, QDialog, QDialogButtonBox, QGridLayout, QLabel, QLineEdit, QMessageBox, QRegExpValidator, QSpinBox) class NumberFormatDlg(QDialog): changed = Signal() def __init__(self, format, parent=None): super(NumberFormatDlg, self).__init__(parent) self.setAttribute(Qt.WA_DeleteOnClose) self.format = format self.create_widgets() self.layout_widgets() self.create_connections() self.setWindowTitle("Set Number Format (Modeless)") def create_widgets(self): punctuationRe = QRegExp(r"[ ,;:.]") self.thousandsLabel = QLabel("&Thousands separator") self.thousandsEdit = QLineEdit(self.format["thousandsseparator"]) self.thousandsLabel.setBuddy(self.thousandsEdit) self.thousandsEdit.setMaxLength(1) self.thousandsEdit.setValidator( QRegExpValidator(punctuationRe, self)) self.decimalMarkerLabel = QLabel("Decimal &marker") self.decimalMarkerEdit = QLineEdit(self.format["decimalmarker"]) self.decimalMarkerLabel.setBuddy(self.decimalMarkerEdit) self.decimalMarkerEdit.setMaxLength(1) self.decimalMarkerEdit.setValidator( QRegExpValidator(punctuationRe, self)) self.decimalMarkerEdit.setInputMask("X") self.decimalPlacesLabel = QLabel("&Decimal places") self.decimalPlacesSpinBox = QSpinBox() self.decimalPlacesLabel.setBuddy(self.decimalPlacesSpinBox) self.decimalPlacesSpinBox.setRange(0, 6) self.decimalPlacesSpinBox.setValue(self.format["decimalplaces"]) self.redNegativesCheckBox = QCheckBox("&Red negative numbers") self.redNegativesCheckBox.setChecked(self.format["rednegatives"]) self.buttonBox = QDialogButtonBox(QDialogButtonBox.Apply| QDialogButtonBox.Close) def layout_widgets(self): grid = QGridLayout() grid.addWidget(self.thousandsLabel, 0, 0) grid.addWidget(self.thousandsEdit, 0, 1) grid.addWidget(self.decimalMarkerLabel, 1, 0) grid.addWidget(self.decimalMarkerEdit, 1, 1) grid.addWidget(self.decimalPlacesLabel, 2, 0) grid.addWidget(self.decimalPlacesSpinBox, 2, 1) grid.addWidget(self.redNegativesCheckBox, 3, 0, 1, 2) grid.addWidget(self.buttonBox, 4, 0, 1, 2) self.setLayout(grid) def create_connections(self): self.buttonBox.button(QDialogButtonBox.Apply).clicked.connect( self.apply) self.buttonBox.rejected.connect(self.reject) def apply(self): thousands = unicode(self.thousandsEdit.text()) decimal = unicode(self.decimalMarkerEdit.text()) if thousands == decimal: QMessageBox.warning(self, "Format Error",
"The thousands separator and the decimal marker " "must be different.") self.thousandsEdit.selectAll() self.thousandsEdit.setFocus() return if len(decimal) == 0: QMessageBox.warning(self, "Format Error", "The decimal marker may not be empty.") self.decimalMarkerEdit.selectAll() self.decimalMarkerEdit.setFocus() return self.format["thousandsseparator"] = thousands self.format["decimalmarker"] = decimal self.format["decimalplaces"] = ( self.decimalPlacesSpinBox.value()) self.format["rednegatives"] = ( self.redNegativesCheckBox.isChecked()) self.changed.emit()
jayvdb/travis_log_fetch
tests/test_historical.py
Python
mit
5,569
0
"""Test loading historical builds and jobs.""" from __future__ import absolute_import, unicode_literals from travis_log_fetch.config import _get_travispy from travis_log_fetch._target import Target from travis_log_fetch.get import ( get_travis_repo, get_historical_builds, get_historical_build, get_historical_job, ) import pytest class TestHistorical(object): def test_latest(self): _travis = _get_travispy() repo = get_travis_repo(_travis, 'travispy/on_pypy') builds = get_historical_builds(_travis, repo) build = next(builds) assert build.repository_id == 2598880 assert build.id == repo.last_build_id def test_after(self): _travis = _get_travispy() repo = get_travis_repo(_travis, 'travispy/on_pypy') builds = get_historical_builds(_travis, repo, _after=3, _load_jobs=False) build = next(builds) assert build.repository_id == 2598880 assert build.number == '2' build = next(builds) assert build.repository_id == 2598880 assert build.number == '1' def test_all_small(self): _travis = _get_travispy() repo = get_travis_repo(_travis, 'travispy/on_pypy') builds = get_historical_builds(_travis, repo) ids = [] for build in builds: assert build.repository_id == 2598880 ids.append(build.id) assert ids == [53686685, 37521698, 28881355] def test_multiple_batches_menegazzo(self): """Test using a repository that has greater than 2*25 builds.""" # Ideally each has one or two jobs, so that doesnt slow down the test, # and the logs are small in case the log is fetched with the job. _travis = _get_travispy() repo = get_travis_repo(_travis, 'menegazzo/travispy') builds = get_historical_builds(_travis, repo, _load_jobs=False) ids = [] prev_number = None for build in builds: assert build.repository_id == 2419489 if int(build.number) in [80]: # There are two '80' # See https://github.com/travis-ci/travis-ci/issues/2582 print('duplicate build number {0}: {1}'.format( build.number, build.id)) assert build.id in [45019395, 45019396] if build.id == 45019395: assert prev_number == int(build.number) else: assert prev_number == int(build.number) + 1 elif prev_number: # All other build numbers decrease rather orderly assert prev_number == int(build.number) + 1 prev_number = int(build.number) if ids: assert build.id < ids[-1] ids.append(build.id) if len(ids) > 100: break assert len(ids) == len(set(ids)) def test_multiple_batches_bootstrap(self): """Test using a repository that has lots of builds, esp. PRs.""" _travis = _get_travispy() repo = get_travis_repo(_travis, 'twbs/bootstrap') builds = get_hist
orical_builds(_travis, repo, _after=12071, _load_jobs=False)
ids = [] prev_number = None for build in builds: assert build.repository_id == 12962 if int(build.number) in [12069, 12062, 12061, 12054, 12049, 12048, 12041, 12038, 12037, 12033]: # Many duplicates # See https://github.com/travis-ci/travis-ci/issues/2582 print('duplicate build number {0}: {1}'.format( build.number, build.id)) if build.id in [53437234, 53350534, 53350026, 53263731, 53263730, # two extra 12054 53180440, 53179846, 53062896, 53019568, 53004896, 52960766]: assert prev_number == int(build.number) else: assert prev_number == int(build.number) + 1 elif prev_number: # All other build numbers decrease rather orderly assert prev_number == int(build.number) + 1 prev_number = int(build.number) if ids: assert build.id < ids[-1] ids.append(build.id) # There are many more duplicates, so we stop here. if int(build.number) == 12033: break assert len(ids) == len(set(ids)) def test_logical_single_job_build(self): target = Target.from_extended_slug('travispy/on_pypy#1.1') _travis = _get_travispy() job = get_historical_job(_travis, target) assert job.repository_id == 2598880 assert job.number == '1.1' assert job.id == 28881356 def test_logical_multiple_job_build(self): target = Target.from_extended_slug('menegazzo/travispy#101.3') _travis = _get_travispy() job = get_historical_job(_travis, target) assert job.repository_id == 2419489 assert job.number == '101.3' assert job.id == 82131391 def test_logical_duplicate_build(self): target = Target.from_extended_slug('menegazzo/travispy#80.3') _travis = _get_travispy() pytest.raises(AssertionError, get_historical_build, _travis, target)
CyCraig/AtariRL
ale_python_interface/ale_python_interface.py
Python
gpl-2.0
10,369
0.001929
# ale_python_interface.py # Author: Ben Goodrich # This directly implements a python version of the arcade learning # environment interface. from ctypes import * import numpy as np from numpy.ctypeslib import as_ctypes import os ale_lib = cdll.LoadLibrary(os.path.join(os.path.dirname(__file__), 'libale_c.so')) ale_lib.ALE_new.argtypes = None ale_lib.ALE_new.restype = c_void_p ale_lib.ALE_del.argtypes = [c_void_p] ale_lib.ALE_del.restype = None ale_lib.getString.argtypes = [c_void_p, c_char_p] ale_lib.getString.restype = c_char_p ale_lib.getInt.argtypes = [c_void_p, c_char_p] ale_lib.getInt.restype = c_int ale_lib.getBool.argtypes = [c_void_p, c_char_p] ale_lib.getBool.restype = c_bool ale_lib.getFloat.argtypes = [c_void_p, c_char_p] ale_lib.getFloat.restype = c_float ale_lib.setString.argtypes = [c_void_p, c_char_p, c_char_p] ale_lib.setString.restype = None ale_lib.setInt.argtypes = [c_void_p, c_char_p, c_int] ale_lib.setInt.restype = None ale_lib.setBool.argtypes = [c_void_p, c_char_p, c_bool] ale_lib.setBool.restype = None ale_lib.setFloat.argtypes = [c_void_p, c_char_p, c_float] ale_lib.setFloat.restype = None ale_lib.loadROM.argtypes = [c_void_p, c_char_p] ale_lib.loadROM.restype = None ale_lib.act.argtypes = [c_void_p, c_int] ale_lib.act.restype = c_int ale_lib.game_over.argtypes = [c_void_p] ale_lib.game_over.restype = c_bool ale_lib.reset_game.argtypes = [c_void_p] ale_lib.reset_game.restype = None ale_lib.getLegalActionSet.argtypes = [c_void_p, c_void_p] ale_lib.getLegalActionSet.restype = None ale_lib.getLegalActionSize.argtypes = [c_void_p] ale_lib.getLegalActionSize.restype = c_int ale_lib.getMinimalActionSet.argtypes = [c_void_p, c_void_p] ale_lib.getMinimalActionSet.restype = None ale_lib.getMinimalActionSize.argtypes = [c_void_p] ale_lib.getMinimalActionSize.restype = c_int ale_lib.getFrameNumber.argtypes = [c_void_p] ale_lib.getFrameNumber.restype = c_int ale_lib.lives.argtypes = [c_void_p] ale_lib.lives.restype = c_int ale_lib.getEpisodeFrameNumber.argtypes = [c_void_p] ale_lib.getEpisodeFrameNumber.restype = c_int ale_lib.getScreen.argtypes = [c_void_p, c_void_p] ale_lib.getScreen.restype = None ale_lib.getRAM.argtypes = [c_void_p, c_void_p] ale_lib.getRAM.restype = None ale_lib.getRAMSize.argtypes = [c_void_p] ale_lib.getRAMSize.restype = c_int ale_lib.getScreenWidth.argtypes = [c_void_p] ale_lib.getScreenWidth.restype = c_int ale_lib.getScreenHeight.argtypes = [c_void_p] ale_lib.getScreenHeight.restype = c_int ale_lib.getScreenRGB.argtypes = [c_void_p, c_void_p] ale_lib.getScreenRGB.restype = None ale_lib.getScreenGrayscale.argtypes = [c_void_p, c_void_p] ale_lib.getScreenGrayscale.restype = None ale_lib.saveState.argtypes = [c_void_p] ale_lib.saveState.restype = None ale_lib.loadState.argtypes = [c_void_p] ale_lib.loadState.restype = None ale_lib.cloneState.argtypes = [c_void_p] ale_lib.cloneState.restype = c_void_p ale_lib.restoreState.argtypes = [c_void_p, c_void_p] ale_lib.restoreState.restype = None ale_lib.cloneSystemState.argtypes = [c_void_p] ale_lib.cloneSystemState.restype = c_void_p ale_lib.restoreSystemState.argtypes = [c_void_p, c_void_p] ale_lib.restoreSystemState.restype = None ale_lib.deleteState.argtypes = [c_void_p] ale_lib.deleteState.restype = None ale_lib.saveScreenPNG.argtypes = [c_void_p, c_char_p] ale_lib.saveScreenPNG.restype = None ale_lib.encodeState.argtypes = [c_void_p, c_void_p, c_int] ale_lib.encodeState.restype = None ale_lib.encodeStateLen.argtypes = [c_void_p] ale_lib.encodeStateLen.restype = c_int ale_lib.decodeState.argtypes = 
[c_void_p, c_int] ale_lib.decodeState.restype = c_void_p class ALEInterface(object): def __init__(self): self.obj = ale_lib.ALE_new() def getString(self, key): return ale_lib.getString(self.obj, key) def getInt(self, key): return ale_lib.getInt(self.obj, key) def getBool(self, key): return ale_lib.getBool(self.obj, key) def getFloat(self, key): return ale_lib.getFloat(self.obj, key) def setString(self, key, value): ale_lib.setString(self.obj, key, value) def setInt(self, key, value): ale_lib.setInt(self.obj, key, value) def setBool(self, key, value): ale_lib.setBool(self.obj, key, value) def setFloat(self, key, value): ale_lib.setFloat(self.obj, key, value) def loadROM(self, rom_file): ale_lib.loadROM(self.obj, rom_file) def act(self, action): return ale_lib.act(self.obj, int(action)) def game_over(self): return ale_lib.game_over(self.obj) def reset_game(self): ale_lib.reset_game(self.obj) def getLegalActionSet(self): act_size = ale_lib.getLegalActionSize(self.obj) act = np.zeros((act_size), dtype=np.intc) ale_lib.getLegalActionSet(self.obj, as_ctypes(act)) return act def getMinimalActionSet(self): act_size = ale_lib.getMinimalActionSize(self.obj) act = np.zeros((act_size), dtype=np.intc) ale_lib.getMinimalActionSet(self.obj, as_ctypes(act)) return act def getFrameNumber(self): return ale_lib.getFrameNumber(self.obj) def lives(self): return ale_lib.lives(self.obj) def getEpisodeFrameNumber(self): return ale_lib.getEpisodeFrameNumber(self.obj) def getScreenDims(self): """returns a tuple that contains (screen_width, screen_height) """ width = ale_lib.getScreenWidth(self.obj) height = ale_lib.getScreenHeight(self.obj) return (width, height) def getScreen(self, screen_data=None): """This function fills screen_data with the RAW Pixel data screen_data MUST be a numpy array of uint8/int8. This could be initialized like so: screen_data = np.empty(w*h, dtype=np.uint8) Notice, it must be width*height in size also If it is None, then this function will initialize it Note: This is the raw pixel values from the atari, before any RGB palette transformation takes place """ if(screen_data is None): width = ale_lib.getScreenWidth(self.obj) height = ale_lib.getScreenHeight(self.obj) screen_data = np.zeros(width*height, dtype=np.uint8) ale_lib.getScreen(self.obj, as_ctypes(screen_data)) return screen_data def getScreenRGB(self, screen_data=None): """This function fills screen_data with the data in RGB format screen_data MUST be a numpy array of uint8. This can be initialized like so: screen_data = np.empty((height,width,3), dtype=np.uint8) If it is None, then this function will initialize it. """ if(screen_data is None): width = ale_lib.getScreenWidth(self.obj) height = ale_lib.getSc
reenHeight(self.obj) screen_data = np.empty((height, width,3), dtype=np.uint8) ale_lib.getScreenRGB(self.obj, as_ctypes(screen_data[:])) return screen_data def getScreenGrayscale(self, screen_data=None): """This function fills screen_data with the data in grayscale screen_data MUST b
e a numpy array of uint8. This can be initialized like so: screen_data = np.empty((height,width,1), dtype=np.uint8) If it is None, then this function will initialize it. """ if(screen_data is None): width = ale_lib.getScreenWidth(self.obj) height = ale_lib.getScreenHeight(self.obj) screen_data = np.empty((height, width,1), dtype=np.uint8) ale_lib.getScreenGrayscale(self.obj, as_ctypes(screen_data[:])) return screen_data def getRAMSize(self): return ale_lib.getRAMSize(self.obj) def getRAM(self, ram=None): """This function grabs the atari RAM. ram MUST be a numpy array of uint8/int8. This can be initialized like so: ram = np.array(ram_size, dtype=uint8) Notice: It must be ram_size where ram_size can be retrieved via the getRAMSize function. If it is None, then this function will initialize it. """ if(ram is None): ram_size = ale_lib.getRAMSize(sel
Annelutfen/gittigidiyor-python
examples/applicationservice.py
Python
mit
526
0.003802
# Bu kod calismayacak, mantigi anlamak icin yazildi. from gittigidiyor.applicationservice import * from gittigidiyor.auth import * if __name__ == "__main__": # HTTP B
asic authentication credentials.. It blows up for the wrong credentials.. auth = Auth("testuser", "testpassword", None, None) api = ApplicationServi
ce(auth) result = api.createApplication("testdeveloper", "Test Application", "This is the test application", "C", "W", "", "xml", "xml", "tr") print result
auto-mat/klub
apps/pdf_storage/serializers.py
Python
gpl-3.0
1,144
0.000874
from django.db.models.expressions import F, Func from rest_framework import serializers fr
om .models import PdfStorage class PdfStorageListSerializer(serializers.ModelSerializer): author = serializers.SerializerMethodField("full_name") class Meta: model = PdfStorage fields = [ "id", "name", "topic", "author",
"created", ] def full_name(self, pdf): return pdf.author.person_name() class PaidPdfDownloadLinkSerializer(serializers.ModelSerializer): download_url = serializers.SerializerMethodField() class Meta: model = PdfStorage fields = ["download_url"] def get_download_url(self, obj): return obj.pdf_file.url class AllRelatedIdsSerializer(serializers.Serializer): ids = serializers.SerializerMethodField() class Meta: fields = ["ids"] def get_ids(self, obj): all_ids = ( PdfStorage.objects.annotate(ids=Func(F("related_ids"), function="unnest")) .values_list("ids", flat=True) .distinct() ) return all_ids
shamangeorge/beets
test/test_thumbnails.py
Python
mit
11,414
0.000088
# -*- coding: utf-8 -*- # This file is part of beets. # Copyright 2016, Bruno Cauet # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. from __future__ import division, absolute_import, print_function import os.path from mock import Mock, patch, call from tempfile import mkdtemp from shutil import rmtree import unittest from test.helper import TestHelper from beets.util import bytestring_path from beetsplug.thumbnails import (ThumbnailsPlugin, NORMAL_DIR, LARGE_DIR, write_metadata_im, write_metadata_pil, PathlibURI, GioURI) class ThumbnailsTest(unittest.TestCase, TestHelper): def setUp(self): self.setup_beets() def tearDown(self): self.teardown_beets() @patch('beetsplug.thumbnails.util') def test_write_metadata_im(self, mock_util): metadata = {"a": u"A", "b": u"B"} write_metadata_im("foo", metadata) try: command = u"convert foo -set a A -set b B foo".split(' ') mock_util.command_output.assert_called_once_with(command) except AssertionError: command = u"convert foo -set b B -set a A foo".split(' ') mock_util.command_output.assert_called_once_with(command) @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok') @patch('beetsplug.thumbnails.os.stat') def test_add_tags(self, mock_stat, _): plugin = ThumbnailsPlugin() plugin.write_metadata = Mock() plugin.get_uri = Mock(side_effect={b"/path/to/cover": "COVER_URI"}.__getitem__) album = Mock(artpath=b"/path/to/cover") mock_stat.return_value.st_mtime = 12345 plugin.add_tags(album, b"/path/to/thumbnail") metadata = {"Thumb::URI": "COVER_URI", "Thumb::MTime": u"12345"} plugin.write_metadata.assert_called_once_with(b"/path/to/thumbnail", metadata) mock_stat.assert_called_once_with(album.artpath) @patch('beetsplug.thumbnails.os') @patch('beetsplug.thumbnails.ArtResizer') @patch('beetsplug.thumbnails.get_im_version') @patch('beetsplug.thumbnails.get_pil_version') @patch('beetsplug.thumbnails.GioURI') def test_check_local_ok(self, mock_giouri, mock_pil, mock_im, mock_artresizer, mock_os): # test local resizing capability mock_artresizer.shared.local = False plugin = ThumbnailsPlugin() self.assertFalse(plugin._check_local_ok()) # test dirs creation mock_artresizer.shared.local = True def exists(path): if path == NORMAL_DIR: return False if path == LARGE_DIR: return True raise ValueError(u"unexpected path {0!r}".format(path)) mock_os.path.exists = exists plugin = ThumbnailsPlugin() mock_os.makedirs.assert_called_once_with(NORMAL_DIR) self.assertTrue(plugin._check_local_ok()) # test metadata writer function mock_os.path.exists = lambda _: True mock_pil.return_value = False mock_im.return_value = False with self.assertRaises(AssertionError): ThumbnailsPlugin() mock_pil.return_value = True self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_pil) mock_im.return_value = True self.assertEqual(ThumbnailsPlugin().write_metadata, write_metadata_im) mock_pil.return_value = False self.assertE
qual(ThumbnailsPlugin().write_metadata, write_metadata_im) self.assertTrue(ThumbnailsPlugin()._check_local_ok()) # test URI getter function giouri_inst = mock_giouri.return_value giouri_inst.available = True self.assertEqual(ThumbnailsPlu
gin().get_uri, giouri_inst.uri) giouri_inst.available = False self.assertEqual(ThumbnailsPlugin().get_uri.__self__.__class__, PathlibURI) @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok') @patch('beetsplug.thumbnails.ArtResizer') @patch('beetsplug.thumbnails.util') @patch('beetsplug.thumbnails.os') @patch('beetsplug.thumbnails.shutil') def test_make_cover_thumbnail(self, mock_shutils, mock_os, mock_util, mock_artresizer, _): thumbnail_dir = os.path.normpath(b"/thumbnail/dir") md5_file = os.path.join(thumbnail_dir, b"md5") path_to_art = os.path.normpath(b"/path/to/art") mock_os.path.join = os.path.join # don't mock that function plugin = ThumbnailsPlugin() plugin.add_tags = Mock() album = Mock(artpath=path_to_art) mock_util.syspath.side_effect = lambda x: x plugin.thumbnail_file_name = Mock(return_value=b'md5') mock_os.path.exists.return_value = False def os_stat(target): if target == md5_file: return Mock(st_mtime=1) elif target == path_to_art: return Mock(st_mtime=2) else: raise ValueError(u"invalid target {0}".format(target)) mock_os.stat.side_effect = os_stat plugin.make_cover_thumbnail(album, 12345, thumbnail_dir) mock_os.path.exists.assert_called_once_with(md5_file) mock_os.stat.has_calls([call(md5_file), call(path_to_art)], any_order=True) resize = mock_artresizer.shared.resize resize.assert_called_once_with(12345, path_to_art, md5_file) plugin.add_tags.assert_called_once_with(album, resize.return_value) mock_shutils.move.assert_called_once_with(resize.return_value, md5_file) # now test with recent thumbnail & with force mock_os.path.exists.return_value = True plugin.force = False resize.reset_mock() def os_stat(target): if target == md5_file: return Mock(st_mtime=3) elif target == path_to_art: return Mock(st_mtime=2) else: raise ValueError(u"invalid target {0}".format(target)) mock_os.stat.side_effect = os_stat plugin.make_cover_thumbnail(album, 12345, thumbnail_dir) self.assertEqual(resize.call_count, 0) # and with force plugin.config['force'] = True plugin.make_cover_thumbnail(album, 12345, thumbnail_dir) resize.assert_called_once_with(12345, path_to_art, md5_file) @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok') def test_make_dolphin_cover_thumbnail(self, _): plugin = ThumbnailsPlugin() tmp = bytestring_path(mkdtemp()) album = Mock(path=tmp, artpath=os.path.join(tmp, b"cover.jpg")) plugin.make_dolphin_cover_thumbnail(album) with open(os.path.join(tmp, b".directory"), "rb") as f: self.assertEqual( f.read().splitlines(), [b"[Desktop Entry]", b"Icon=./cover.jpg"] ) # not rewritten when it already exists (yup that's a big limitation) album.artpath = b"/my/awesome/art.tiff" plugin.make_dolphin_cover_thumbnail(album) with open(os.path.join(tmp, b".directory"), "rb") as f: self.assertEqual( f.read().splitlines(), [b"[Desktop Entry]", b"Icon=./cover.jpg"] ) rmtree(tmp) @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok') @patch('beetsplu
mic4ael/indico
indico/modules/users/views.py
Python
mit
1,654
0.000605
# This file is part of Indico. # Copyright (C) 2002 - 2020 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from __future__ import unicode_literals from flask import request from indico.modules.admin.views import WPAdmin from indico.modules.users import User from indico.util.i18n import _ from indico.web.breadcrumbs import render_breadcrumbs from indico.web.views import WPDecorated, WPJinjaMixin class WPUser(WPJinjaMixin, WPDecorated): """Base WP for user profile pages. Whenever you use this, you MUST include `user` in the params passed to `render_template`. Any RH using this should inherit from `RHUserBase` which already handles user/admin access. In this case, simply ad
d ``user=self.user`` to your `render_template` call. """ template_prefix = 'users/' def __init__(self, rh, active_menu_item, **kwargs): kwargs['active_menu_item'] = active_menu_item WPDecorated.__init__(self, rh, **kwargs) def _get_breadcrumbs(self): if 'user_id' in req
uest.view_args: user = User.get(request.view_args['user_id']) profile_breadcrumb = _('Profile of {name}').format(name=user.full_name) else: profile_breadcrumb = _('My Profile') return render_breadcrumbs(profile_breadcrumb) def _get_body(self, params): return self._get_page_content(params) class WPUserDashboard(WPUser): bundles = ('module_users.dashboard.js',) class WPUsersAdmin(WPAdmin): template_prefix = 'users/' bundles = ('module_users.js',)
tkettu/rokego
distances/migrations/0011_auto_20170602_1044.py
Python
mit
513
0.001949
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-06-02
07:44 from __future__ import unicode_literals import datetime from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('distances', '0010_auto_20170519_1604'), ] operations = [ migrations.AlterField( model_name='dates', name='startDate', field=models.DateField(default=datetime.datetime(2017, 5, 26, 10, 44, 7, 194576)),
), ]
MADindustries/WhatManager2
bibliotik/utils.py
Python
mit
7,895
0.002027
import re import shutil import time import os import os.path from pyquery.pyquery import PyQuery import requests import requests.utils from WhatManager2.settings import MEDIA_ROOT from bibliotik import manage_bibliotik from bibliotik.models import BibliotikTorrent, BibliotikFulltext from bibliotik.settings import BIBLIOTIK_UPLOAD_URL, BIBLIOTIK_DOWNLOAD_TORRENT_URL from home.models import DownloadLocation def extract_torrents(html): result = [] pq = PyQuery(html) for row in pq('#torrents_table tbody tr.torrent').items(): data = { 'id': row.attr('id')[len('torrent-'):], 'type': row('td:eq(0) img').attr('title'), 'title': row('td:eq(1) span.title').text(), 'publishers': [], 'authors': [], 'year': row('td:eq(1) span.torYear').text()[1:-1], 'format': row('td:eq(1) span.torFormat').text()[1:-1], 'retail': bool(row('td:eq(1) span.torRetail')), 'tags': [] } for dlink in row('td:eq(1) > a').items(): href = dlink.attr('href') if '/creators/' in href: data['authors'].append({ 'id': href[href.rfind('/') + 1:], 'name': dlink.text() }) elif '/publishers/' in href: d
ata['publishers'].append({ 'id': href[href.rfind('/') + 1:], 'name': dlink.text()
}) for tag in row('td:eq(1) > span.taglist > a').items(): href = tag.attr('href') data['tags'].append({ 'id': href[href.rfind('/') + 1:], 'name': tag.text() }) result.append(data) return result class BibliotikClient(object): def __init__(self, session_id): self.session_id = session_id self.session = requests.Session() requests.utils.add_dict_to_cookiejar(self.session.cookies, { 'id': session_id }) self.auth_key = None def get_auth_key(self): if self.auth_key: return self.auth_key for i in xrange(3): try: response = self.session.get('https://bibliotik.me/upload/ebooks') response.raise_for_status() break except Exception: pass response.raise_for_status() pq = PyQuery(response.content) self.auth_key = pq('input[name="authkey"]').val() if not self.auth_key: raise Exception('Could not get the authkey') return self.auth_key def send_upload(self, payload, payload_files): return self.session.post(BIBLIOTIK_UPLOAD_URL, data=payload, files=payload_files, allow_redirects=False) def download_torrent(self, torrent_id): torrent_page = BIBLIOTIK_DOWNLOAD_TORRENT_URL.format(torrent_id) for i in xrange(3): try: r = self.session.get(torrent_page, allow_redirects=False) r.raise_for_status() if r.status_code == 200 and 'application/x-bittorrent' in r.headers['content-type']: filename = re.search('filename="(.*)"', r.headers['content-disposition']).group(1) return filename, r.content else: raise Exception('Wrong status_code or content-type') except Exception as ex: print u'Error while download bibliotik torrent. Will retry: {0}'.format(ex) time.sleep(3) download_exception = ex raise download_exception def search(self, query): url = 'https://bibliotik.me/torrents/' response = self._search_request(url, query) if not response.url.startswith(url): raise Exception(u'Search redirected to {0}. Probably invalid id. Was {1}.'.format( response.url, self.session_id )) return { 'results': extract_torrents(response.content), } def _search_request(self, url, query): for i in xrange(3): try: response = self.session.get(url, params={ 'search': query }) response.raise_for_status() return response except Exception as ex: time.sleep(3) exception = ex raise exception def upload_book_to_bibliotik(bibliotik_client, book_upload): print 'Sending request for upload to bibliotik.me' payload_files = dict() payload_files['TorrentFileField'] = ('torrent.torrent', book_upload.bibliotik_torrent_file) payload = dict() payload['upload'] = '' payload['authkey'] = bibliotik_client.get_auth_key() payload['AuthorsField'] = book_upload.author payload['TitleField'] = book_upload.title payload['IsbnField'] = book_upload.isbn or '' payload['PublishersField'] = book_upload.publisher payload['PagesField'] = book_upload.pages or '' payload['YearField'] = book_upload.year payload['FormatField'] = { 'AZW3': '21', 'EPUB': '15', 'PDF': '2', }[book_upload.format] payload['LanguageField'] = '1' # English if book_upload.retail: payload['RetailField'] = '1' payload['TagsField'] = ','.join(book_upload.tag_list) payload['ImageField'] = book_upload.cover_url payload['DescriptionField'] = book_upload.description response = bibliotik_client.send_upload(payload, payload_files) response.raise_for_status() if response.status_code == requests.codes.ok: with open(os.path.join(MEDIA_ROOT, 'bibliotik_upload.html'), 'wb') as f: f.write(response.content) raise Exception('Bibliotik does not want this. 
Written to media/') redirect_match = re.match('^https://bibliotik.me/torrents/(?P<id>\d+)$', response.headers['location']) if not redirect_match: raise Exception('Could not get new torrent ID.') torrent_id = redirect_match.groupdict()['id'] book_upload.bibliotik_torrent = BibliotikTorrent.get_or_create(bibliotik_client, torrent_id) book_upload.save() # Add the torrent to the client location = DownloadLocation.get_bibliotik_preferred() download_dir = os.path.join(location.path, unicode(book_upload.bibliotik_torrent.id)) book_path = os.path.join(download_dir, book_upload.target_filename) if not os.path.exists(download_dir): os.mkdir(download_dir) os.chmod(download_dir, 0777) shutil.copyfile( book_upload.book_data.storage.path(book_upload.book_data), book_path ) os.chmod(book_path, 0777) manage_bibliotik.add_bibliotik_torrent( book_upload.bibliotik_torrent.id, location=location, bibliotik_client=bibliotik_client ) return book_upload def search_torrents(query): b_fulltext = BibliotikFulltext.objects.only('id').all() b_fulltext = b_fulltext.extra(where=['MATCH(`info`, `more_info`) AGAINST (%s IN BOOLEAN MODE)'], params=[query]) b_fulltext = b_fulltext.extra(select={'score': 'MATCH (`info`) AGAINST (%s)'}, select_params=[query]) b_fulltext = b_fulltext.extra(order_by=['-score']) b_torrents_dict = BibliotikTorrent.objects.in_bulk([b.id for b in b_fulltext]) b_torrents = list() for i in b_fulltext: b_torrent = b_torrents_dict[i.id] coef = 1.0 if b_torrent.retail: coef *= 1.2 if b_torrent.format == 'EPUB': coef *= 1.1 elif b_torrent.format == 'PDF': coef *= 0.9 b_torrent.score = i.score * coef b_torrents.append(b_torrent) return b_torrents
stewartsmith/bzr
bzrlib/tests/commands/test_cat.py
Python
gpl-2.0
1,932
0.000518
# Copyright (C) 2007, 2009, 2010 Canonical Ltd # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import sys from bzrlib.builtins import cmd_cat from bzrlib.tests import StringIOWrapper from bzrlib.tests.transport_util import TestCaseWithConnectionHookedTransport class TestCat(TestCaseWithConnectionHookedTransport): def setUp(self): super(TestCat, self).setUp() # Redirect sys.stdout as this is what cat uses self.outf = StringIOWrapper() self.overrideAttr(sys, 'stdout', self.outf) def test_cat(self): # FIXME: sftp raises ReadError instead of NoSuchFile when probing for # branch/foo/.bzr/branch-format when used with the paramiko test # server. from bzrlib.tests import TestSkipped raise TestSkipped('SFTPTransport raises incorrect exception' ' when reading from paramiko server') wt1 = self.make_branch_and_tree('branch') self.build_tree_contents([('branch/foo', '
foo')]
) wt1.add('foo') wt1.commit('add foo') self.start_logging_connections() cmd = cmd_cat() cmd.run(self.get_url('branch/foo')) self.assertEquals(1, len(self.connections)) self.assertEquals('foo', self.outf.getvalue())
automatthias/aubio
waflib/fixpy2.py
Python
gpl-3.0
1,110
0.067568
#! /usr/bin/env python # encoding: utf-8 # WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file import os all_modifs={} def fixdir(dir): global all_modifs for k in all_modifs: for v in all_modifs[k]: modif(os.path.join(dir,'waflib'),k,v) def modif(dir,name,fun): if name=='*': lst=[] for y in'. Tools extras'.split():
for x in os.listdir(os.path.join(dir,y)): if x.endswith('.py'): lst.append(y+os.
sep+x) for x in lst: modif(dir,x,fun) return filename=os.path.join(dir,name) f=open(filename,'r') try: txt=f.read() finally: f.close() txt=fun(txt) f=open(filename,'w') try: f.write(txt) finally: f.close() def subst(*k): def do_subst(fun): global all_modifs for x in k: try: all_modifs[x].append(fun) except KeyError: all_modifs[x]=[fun] return fun return do_subst @subst('*') def r1(code): code=code.replace(',e:',',e:') code=code.replace("",'') code=code.replace('','') return code @subst('Runner.py') def r4(code): code=code.replace('next(self.biter)','self.biter.next()') return code
lbryio/lbry
torba/torba/server/text.py
Python
mit
3,433
0
import time from torba.server import util def sessions_lines(data): """A generator returning lines for a list of sessions. data is the return value of rpc_sessions().""" fmt = ('{:<6} {:<5} {:>17} {:>5} {:>5} {:>5} ' '{:>7} {:>7} {:>7} {:>7} {:>7} {:>9} {:>21}') yield fmt.format('ID', 'Flags', 'Client', 'Proto', 'Reqs', 'Txs', 'Subs', 'Recv', 'Recv KB', 'Sent', 'Sent KB', 'Time', 'Peer') for (id_, flags, peer, client, proto, reqs, txs_sent, subs, recv_count, recv_size, send_count, send_size, time) in data: yield fmt.format(id_, flags, client, proto, '{:,d}'.format(reqs), '{:,d}'.format(txs_sent), '{:,d}'.format(subs), '{:,d}'.format(recv_count), '{:,d}'.format(recv_size // 1024), '{:,d}'.format(send_count), '{:,d}'.format(send_size // 1024), util.formatted_time(time, sep=''), peer) def groups_lines(data): """A generator returning lines for a list of groups. data is the return value of rpc_groups().""" fmt = ('{:<6} {:>9} {:>9} {:>6} {:>6} {:>8}' '{:>7} {:>9} {:>7} {:>9}') yield fmt.format('ID', 'Sessions', 'Bwidth KB', 'Reqs', 'Txs', 'Subs', 'Recv', 'Recv KB', 'Sent', 'Sent KB') for (id_, session_count, bandwidth, reqs, txs_sent, subs, recv_count, recv_size, send_count, send_size) in data: yield fmt.format(id_, '{:,d}'.format(session_count), '{:,d}'.format(bandwidth // 1024), '{:,d}'.format(reqs), '{:,d}'.format(txs_sent), '{:,d}'.format(subs), '{:,d}'.format(recv_count), '{:,d}'.format(recv_size
// 1024), '{:,d}'.format(send_count), '{:,d}'.format(send_size // 1024)) def peers_lines(data): """A generator returning lines for a list of peers. data is the return value of rpc_peers().""" def time_fmt(t): if not t: return 'Never' return util.formatted_time(now - t) now = time.time() fmt = ('{:<30} {:<6} {:>5} {:>5} {:<17} {:>4} ' '{:>4} {:>8} {:>11} {:>11}
{:>5} {:>20} {:<15}') yield fmt.format('Host', 'Status', 'TCP', 'SSL', 'Server', 'Min', 'Max', 'Pruning', 'Last Good', 'Last Try', 'Tries', 'Source', 'IP Address') for item in data: features = item['features'] hostname = item['host'] host = features['hosts'][hostname] yield fmt.format(hostname[:30], item['status'], host.get('tcp_port') or '', host.get('ssl_port') or '', features['server_version'] or 'unknown', features['protocol_min'], features['protocol_max'], features['pruning'] or '', time_fmt(item['last_good']), time_fmt(item['last_try']), item['try_count'], item['source'][:20], item['ip_addr'] or '')
stargaser/spscviz
spscinspector.py
Python
bsd-3-clause
15,570
0.008157
#!/usr/bin/env python # -*- coding: utf-8 -*- # vispy: gallery 30 # ----------------------------------------------------------------------------- # Copyright (c) 2015, California Institute of Technology. # Distributed under the (new) BSD License. See LICENSE.txt for more info. # ----------------------------------------------------------------------------- """ Simple use of SceneCanvas to display an Image. """ from __future__ import print_function import sys import os from vispy import scene, app, visuals, gloo from vispy.visuals.transforms import STTransform, TransformSystem import numpy as np import astropy.io.fits as fits from astropy.table import Table from astropy.wcs import WCS from scipy.misc import bytescale from time import sleep from vispy.geometry import Rect from vispy.scene import PanZoomCamera from vispy.util import keys from vispy.app import Timer import warnings is_pacs = False warnings.filterwarnings('ignore') class SourceInspectCamera(PanZoomCamera): """ """ _state_props = PanZoomCamera._state_props + ('index', ) def __init__(self, image, img_data, sources, poslist, index=0, **kwargs): PanZoomCamera.__init__(self, **kwargs) self.index = index self.image = image self.img_data = img_data self.sources = sources self.poslist = poslist #self.smin = 0.9*np.nanmin(self.img_data) #self.smax = 1.02*np.nanmax(self.img_data) pcts = np.nanpercentile(self.img_data, [5.0, 99.0]) if np.all(np.isfinite(pcts)): self.smin = pcts[0] self.smax = pcts[1] self.accelerator = 5.0 self.nsrc = len(poslist) self._keymap = { keys.UP: +1, keys.DOWN: -1, keys.LEFT: -1, keys.RIGHT: +1, keys.SPACE: +1 } self._timer = Timer(0.2, start=False, connect=self.on_timer) @property def keymap(self): """ """ return self._keymap def update_index(self, val): self.index += val if (self.index > self.nsrc-1): self.index = 0 if (self.index < 0): self.index = self.nsrc - 1 def update_pan(self): newX = self.poslist[self.index][0] newY = self.poslist[self.index][1] curX = self.rect.left + self.rect.width/2.0 curY = self.rect.bottom + self.rect.height/2.0 self.pan((newX-curX,newY-curY)) # update image data imsect = self.img_data[int(self.rect.bottom):int(self.rect.top), int(self.rect.left):int(self.rect.right)] pcts = np.nanpercentile(imsect, [5.0, 99.0]) if np.all(np.isfinite(pcts)): self.smin = pcts[0] #cmin = -0.01 + 1.2*self.sources['background'][self.sources.index==self.index].values[0] if (is_pacs): self.smax = 1.2*self.sources['susflux']\ [self.sources.index==self.index].values[0]/1000.0/10.0 + self.smin else: self.smax = 1.2*self.sources['fluxtml']\ [self.sources.index==self.index].values[0]/1000.0/0.95 + self.smin self.update_scale() def update_scale(self): self.image.set_data(bytescale(self.img_data, cmin=self.smin, cmax=self.smax)) self.view_changed() def on_timer(self, event): """Timer event handler Parameters ---------- event : instance of Event The event. """ self.update_index(1) self.update_pan() self.view_changed() def viewbox_key_event(self, event): """ViewBox key event handler Parameters ---------- event : instance of Event The event. """
PanZoomCamera.viewbox_key_event(self, event)
if event.handled or not self.interactive: return if event.type == 'key_press': if event.key in self._keymap: val = self._keymap[event.key] self.update_index(val) self.update_pan() self.view_changed() elif event.key == 'M': self._timer.start() elif event.key == 'S': self._timer.stop() #elif event.key == 'X': # ind = np.argsort(self.poslist[:,0]) # self.poslist = self.poslist[ind] #elif event.key == 'Y': # ind = np.argsort(self.poslist[:,1]) # self.poslist = self.poslist[ind] elif event.key == 'L': print(self.sources[self.sources.sourceid==self.sources['sourceid'][self.index]]) elif event.key == 'T': sdiff = self.accelerator*(self.smax - self.smin)/255.0 self.smax += sdiff self.smin += sdiff self.update_scale() elif event.key == 'B': sdiff = self.accelerator*(self.smax - self.smin)/255.0 self.smax -= sdiff self.smin -= sdiff self.update_scale() elif event.key == 'N': sdiff = self.accelerator*(self.smax - self.smin)/255.0 self.smax -= sdiff self.smin += sdiff self.update_scale() elif event.key == 'W': sdiff = self.accelerator*(self.smax - self.smin)/255.0 self.smax += sdiff self.smin -= sdiff self.update_scale() elif event.key == 'U': print("Current stretch limits: %10.4g, %10.4g"%(self.smin, self.smax)) self.smin = float(input("New lower value?")) self.smax = float(input("New upper value?")) self.update_scale() def find_map(obsid, band, mapdir, template="{}{}_map.fits.zip"): """ Walk the map directory and return the map data and marker size Parameters: ----------- obsid (int): observation id (10-digit integer) band (string) : blue, green, red, PSW, PMW or PLW mapdir (string) : top-level of map directory template (string) : how to format obsid and filter into a map name Returns: -------- img_data : numpy array of image data filter : 'blue', 'green', 'red' for PACS, 'PSW', 'PMW', 'PSW' for SPIRE mrkr_size : size of markers in pixels wcs : astropy.wcs object for the image """ fname = template.format(obsid, band) fullname = fname for root, dir, files in os.walk(os.path.expanduser(mapdir), followlinks=True): for name in files: if name.endswith(fname): fullname = os.path.join(root, fname) break elif name.endswith(fname.replace('map','pmd')): fullname = os.path.join(root, fname.replace('map','pmd')) break elif name.endswith(fname.replace('L25','L3')): fullname = os.path.join(root, fname.replace('L25','L3')) break elif name.endswith(fname.replace('L25','L2').replace('JSMAP','PMAP')): fullname = os.path.join(root, fname.replace('L25','L2').replace('JSMAP','PMAP')) break # Get the data hdu = fits.open(fullname) img_data = hdu[1].data filter = band if (band == 'B'): if (hdu[0].header['WAVELNTH'] == 100.0): filter = 'green' else: filter = 'blue' elif (band == 'R'): filter = 'red' # Handle illegal CUNITn in PACS SPG12 and earlier maps for key in ['CUNIT1', 'CUNIT2']: if key in hdu[1].header.keys(): del hdu[1].header[key] img_wcs = WCS(hdu[1].header) deg_per_pix = np.sqrt(np.abs(np.linalg.det(img_wcs.pixel_scale_matrix))) beams = {'blue':5.5, 'green':7.0, 'red':11.5, 'PSW':17.0, 'PMW':32.0, 'PLW':42.0} beam_size = beams[filter]/3600. mrkr_size = beam_size/deg_per_pix return(img_data, filter, mrkr_size, img_wcs) def sourcelist_pscdb(obsid, filter, sql_statement, dbname, username, hostname, por
Ilias95/lib389
lib389/cli_conf/plugin.py
Python
gpl-3.0
4,136
0.005561
# --- BEGIN COPYRIGHT BLOCK --- # Copyright (C) 2016 Red Hat, Inc. # All rights reserved. # # License: GPL (version 3 or any later version). # See LICENSE for details. # --- END COPYRIGHT BLOCK --- from lib389.plugins import Plugin, Plugins import argparse from lib389.cli_base import ( _generic_list, _generic_get, _generic_get_dn, _generic_create, _generic_delete, _get_arg, _get_args, _get_attributes, _warn, ) SINGULAR = Plugin MANY = Plugins RDN = 'cn' def plugin_list(inst, basedn, log, args): _generic_list(inst, basedn, log.getChild('plugin_list'), MANY) def plugin_get(inst, basedn, log, args): rdn = _get_arg( args.selector, msg="Enter %s to retrieve" % RDN) _generic_get(inst, basedn, log.getChild('plugin_get'), MANY, rdn) def plugin_get_dn(inst, basedn, log, args): dn = _get_arg( args.dn, msg="Enter dn to retrieve") _generic_get_dn(inst, basedn, log.getChild('plugin_get_dn'), MANY, dn) # Plugin enable def plugin_enable(inst, basedn, log, args): dn = _get_arg( args.dn, msg="Enter plugin dn to enable") mc = MANY(inst, basedn) o = mc.get(dn=dn) o.enable() o_str = o.display() log.info('Enabled %s', o_str) # Plugin disable def plugin_disable(inst, basedn, log, args, warn=True): dn = _get_arg( args.dn, msg="Enter plugin dn to disable") if warn:
_warn(dn, msg="Disabling %s %s" % (SINGULAR.__name__, dn)) mc = MANY(inst, basedn) o = mc.get(dn=dn) o.disable() o_str = o.display() log.info('Disabled %s', o_str) # Plugin configure? def plugin_configure(inst, basedn, log, args): pass def generic_show(inst, basedn, log, args): """Display plugin configurat
ion.""" plugin = args.plugin_cls(inst) log.info(plugin.display()) def generic_enable(inst, basedn, log, args): plugin = args.plugin_cls(inst) plugin.enable() log.info("Enabled %s", plugin.rdn) def generic_disable(inst, basedn, log, args): plugin = args.plugin_cls(inst) plugin.disable() log.info("Disabled %s", plugin.rdn) def generic_status(inst, basedn, log, args): plugin = args.plugin_cls(inst) if plugin.status() == True: log.info("%s is enabled", plugin.rdn) else: log.info("%s is disabled", plugin.rdn) def add_generic_plugin_parsers(subparser, plugin_cls): show_parser = subparser.add_parser('show', help='display plugin configuration') show_parser.set_defaults(func=generic_show, plugin_cls=plugin_cls) enable_parser = subparser.add_parser('enable', help='enable plugin') enable_parser.set_defaults(func=generic_enable, plugin_cls=plugin_cls) disable_parser = subparser.add_parser('disable', help='disable plugin') disable_parser.set_defaults(func=generic_disable, plugin_cls=plugin_cls) status_parser = subparser.add_parser('status', help='display plugin status') status_parser.set_defaults(func=generic_status, plugin_cls=plugin_cls) def create_parser(subparsers): plugin_parser = subparsers.add_parser('plugin', help="Manage plugins available on the server") subcommands = plugin_parser.add_subparsers(help="action") list_parser = subcommands.add_parser('list', help="List current configured (enabled and disabled) plugins") list_parser.set_defaults(func=plugin_list) get_parser = subcommands.add_parser('get', help='get') get_parser.set_defaults(func=plugin_get) get_parser.add_argument('selector', nargs='?', help='The plugin to search for') get_dn_parser = subcommands.add_parser('get_dn', help='get_dn') get_dn_parser.set_defaults(func=plugin_get_dn) get_dn_parser.add_argument('dn', nargs='?', help='The plugin dn to get') enable_parser = subcommands.add_parser('enable', help='enable a plugin in the server') enable_parser.set_defaults(func=plugin_enable) enable_parser.add_argument('dn', nargs='?', help='The dn to enable') disable_parser = subcommands.add_parser('disable', help='disable the plugin configuration') disable_parser.set_defaults(func=plugin_disable) disable_parser.add_argument('dn', nargs='?', help='The dn to disable')
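A sketch of how the create_parser hook above is typically mounted on a top-level CLI; the program name and the dispatch site are illustrative assumptions, not lib389's actual entry point.

import argparse

# Illustrative wiring only: register the 'plugin' subcommands and dispatch.
parser = argparse.ArgumentParser(prog='dsconf-sketch')
subparsers = parser.add_subparsers(help='resources')
create_parser(subparsers)  # defined above

args = parser.parse_args(['plugin', 'list'])
# args.func(inst, basedn, log, args)  # inst/basedn/log are supplied by the caller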
petrjasek/superdesk-server
superdesk/io/commands/update_ingest.py
Python
agpl-3.0
9,268
0.002266
# -*- coding: utf-8; -*- # # This file is part of Superdesk. # # Copyright 2013, 2014 Sourcefabric z.u. and contributors. # # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license import logging import superdesk from flask import current_app as app from settings import DAYS_TO_KEEP from datetime import timedelta from werkzeug.exceptions import HTTPException from superdesk.notification import push_notification from superdesk.io import providers from superdesk.celery_app import celery from superdesk.utc import utcnow from superdesk.workflow import set_default_state from superdesk.errors import ProviderError from superdesk.stats import stats from superdesk.upload import url_for_media from superdesk.media.media_operations import download_file_from_url, process_file from superdesk.media.renditions import generate_renditions UPDATE_SCHEDULE_DEFAULT = {'minutes': 5} LAST_UPDATED = 'last_updated' STATE_INGESTED = 'ingested' logger = logging.getLogger(__name__) superdesk.workflow_state(STATE_INGESTED) superdesk.workflow_action( name='ingest' ) def is_valid_type(provider, provider_type_filter=None): """Test if given provider has valid type and should be updated. :param provider: provider to be updated :param provider_type_filter: active provider type filter """ provider_type = provider.get('type') if provider_type not in providers: return False if provider_type_filter and provider_type != provider_type_filter: return False return True def is_scheduled(provider): """Test if given provider should be scheduled for update. :param provider: ingest provider """ now = utcnow() last_updated = provider.get(LAST_UPDATED, now - timedelta(days=100)) # if never updated run now update_schedule = provider.get('update_schedule', UPDATE_SCHEDULE_DEFAULT) return last_updated + timedelta(**update_schedule) < now def is_closed(provider): """Test if provider is closed. 
:param provider: ingest provider """ return provider.get('is_closed', False) def filter_expired_items(provider, items): try: days_to_keep_content = provider.get('days_to_keep', DAYS_TO_KEEP) expiration_date = utcnow() - timedelta(days=days_to_keep_content) return [item for item in items if item.get('versioncreated', utcnow()) > expiration_date] except Exception as ex: raise ProviderError.providerFilterExpiredContentError(ex, provider) def get_provider_rule_set(provider): if provider.get('rule_set'): return superdesk.get_resource_service('rule_sets').find_one(_id=provider['rule_set'], req=None) def get_task_ttl(provider): update_schedule = provider.get('update_schedule', UPDATE_SCHEDULE_DEFAULT) return update_schedule.get('minutes', 0) * 60 + update_schedule.get('hours', 0) * 3600 def get_task_id(provider): return 'update-ingest-{0}-{1}'.format(provider.get('name'), provider.get('_id')) class UpdateIngest(superdesk.Command): """Update ingest providers.""" option_list = ( superdesk.Option('--provider', '-p', dest='provider_type'), ) def run(self, provider_type=None): for provider in superdesk.get_resource_service('ingest_providers').get(req=None, lookup={}): if is_valid_type(provider, provider_type) and is_scheduled(provider) and not is_closed(provider): kwargs = { 'provider': provider, 'rule_set': get_provider_rule_set(provider) } update_provider.apply_async( task_id=get_task_id(provider), expires=get_task_ttl(provider), kwargs=kwargs) @celery.task def update_provider(provider, rule_set=None): """ Fetches items from ingest provider as per the configuration, ingests them into Superdesk and updates the provider. """ superdesk.get_resource_service('ingest_providers').update(provider['_id'], { LAST_UPDATED: utcnow(), # Providing the _etag as system updates to the documents shouldn't override _etag. app.config['ETAG']: provider.get(app.config['ETAG']) }) for items in providers[provider.get('type')].update(provider): ingest_items(items, provider, rule_set) stats.incr('ingest.ingested_items', len(items)) logger.info('Provider {0} updated'.format(provider['_id'])) push_notification('ingest:update') def process_anpa_category(item, provider): try: anpa_categories = superdesk.get_resource_service('vocabularies').find_one(req=None, _id='categories') if anpa_categories: for anpa_category in anpa_categories['items']: if anpa_category['is_active'] is True \ and item['anpa-category']['qcode'].lower() == anpa_category['value'].lower(): item['anpa-category'] = {'qcode': item['anpa-category']['qcode'], 'name': anpa_category['name']} break except Exception as ex: raise ProviderError.anpaError(ex, provider) def apply_rule_set(item, provider, rule_set=None): """ Applies rules set on the item to be ingested into the system. If there's no rule set then the item will be returned without any change. :param item: Item to be ingested :param provider: provider object from whom the item was received :return: item """ try: if rule_set is None and provider.get('rule_set') is not None: rule_set = superdesk.get_resource_service('rule_sets').find_one(_id=provider['rule_set'], req=None) if rule_set and 'body_html' in item: body = item['body_html'] for rule in rule_set['rules']: body = body.replace(rule['old'], rule['new']) item['body_html'] = body return item except Exception as ex: raise ProviderError.ruleError(ex, provider) def ingest_items(items, provider, rule_
set=None): all_items = filter_expired_items(provider, items) items_dict = {doc['guid']: doc for doc in all_items} for item in [doc for doc in all_items if doc.get('type') != 'composite']: ingest_item(item, provider, rule_set) for item in [doc for doc in all_items if doc.get('type
') == 'composite']: for ref in [ref for group in item.get('groups', []) for ref in group.get('refs', []) if 'residRef' in ref]: ref.setdefault('location', 'ingest') itemRendition = items_dict.get(ref['residRef'], {}).get('renditions') if itemRendition: ref.setdefault('renditions', itemRendition) ingest_item(item, provider, rule_set) def ingest_item(item, provider, rule_set=None): try: item.setdefault('_id', item['guid']) providers[provider.get('type')].provider = provider item['ingest_provider'] = str(provider['_id']) item.setdefault('source', provider.get('source', '')) set_default_state(item, STATE_INGESTED) if 'anpa-category' in item: process_anpa_category(item, provider) apply_rule_set(item, provider, rule_set) ingest_service = superdesk.get_resource_service('ingest') if item.get('ingest_provider_sequence') is None: ingest_service.set_ingest_provider_sequence(item, provider) rend = item.get('renditions', {}) if rend: baseImageRend = rend.get('baseImage') or next(iter(rend.values())) if baseImageRend: href = providers[provider.get('type')].prepare_href(baseImageRend['href']) update_renditions(item, href) old_item = ingest_service.find_one(_id=item['guid'], req=None) if old_item: ingest_service.put(item['guid'], item) else: try: ingest_service.post([item]) except HTTPException as e: logger.error("Exception while persisting item in ingest collection
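A small worked example of apply_rule_set from this module: when the rule set is passed explicitly, no database lookup is made and the rules act as plain string replacements on body_html. The item, provider and rules below are made up for illustration.

item = {'body_html': 'Sourcefabric announced a new release today.'}
provider = {'type': 'reuters'}  # hypothetical provider record
rules = {'rules': [{'old': 'Sourcefabric', 'new': 'Superdesk'}]}

item = apply_rule_set(item, provider, rule_set=rules)
print(item['body_html'])  # -> 'Superdesk announced a new release today.'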
ocket8888/slackbot
slackbot/modules/math/math.py
Python
gpl-3.0
1,975
0.022785
""" Contains a function to generate and upload a LaTeX-rendered math image. """ import subprocess import sys import typing def uploadLatex(math: typing.List[str], slackAPI: object, channel: object, users: list) -> str: """
    Generates a LaTeX math image from the LaTeX source contained in `math`,
    and posts it via the API client `slackAPI` in channel `channel`.
    Returns a string describing any errors that occurred.
    """
    toParse =
"".join(math).replace("&amp;","&") # create a temporary directory response = subprocess.run(["mktemp", "-d"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) # check for errors if response.returncode != 0 or response.stderr.decode() != '': return "EE: latex: couldn't make temp. dir: '"+response.stderr.decode()+"'" # Decode and store the temporary directory name latexdir = response.stdout.decode().splitlines()[0] # Generate the image using l2p response = subprocess.run(["l2p", "-i", toParse, "-o", latexdir+"/latex_output.png"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Check for errors, both posting to the channel (because it's probable that a user messed up) # as well as logging to the logfile if response.stderr.decode() != '': msg = "Unable to parse expression: %s: %s" slackAPI.chat.post_message(channel['name'], msg % ("`%s` because" % toParse, "`%s`" % response.stderr.decode())) return "EE: latex: " + msg % ("'%s'" % toParse, "'%s'" % response.stderr.decode()) # If all went well, upload then delete the file slackAPI.files.upload(latexdir+"/latex_output.png", channels=channel['id']) retstr = "II: latex: uploaded image to slack (input: '%s')" % toParse response = subprocess.run(["rm", "-r", "-f", latexdir], stderr=subprocess.PIPE) if response.returncode != 0 or response.stderr.decode() != "": return retstr+"\nEE: latex: error encountered during cleanup: '%s'" % response.stderr.decode() return retstr
nirgal/ngw
core/migrations/0010_auto_drop_proxy_models.py
Python
bsd-2-clause
1,407
0
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('ngw', '0009_config_eventdefaultperms'), ] operations = [ migrat
ions.DeleteModel( name='ChoiceContactField', ), migrations.DeleteModel( name='DateContactField', ), migrations.DeleteModel(
name='DateTimeContactField', ), migrations.DeleteModel( name='EmailContactField', ), migrations.DeleteModel( name='FileContactField', ), migrations.DeleteModel( name='ImageContactField', ), migrations.DeleteModel( name='LongTextContactField', ), migrations.DeleteModel( name='MultipleChoiceContactField', ), migrations.DeleteModel( name='MultipleDoubleChoiceContactField', ), migrations.DeleteModel( name='NumberContactField', ), migrations.DeleteModel( name='PasswordContactField', ), migrations.DeleteModel( name='PhoneContactField', ), migrations.DeleteModel( name='RibContactField', ), migrations.DeleteModel( name='TextContactField', ), ]
jordanemedlock/psychtruths
temboo/core/Library/Google/Drive/Changes/__init__.py
Python
apache-2.0
223
0.008969
from temboo.Library.Google.Drive.Changes.Get import Get, GetInputSet, GetResultSet, GetChoreographyExecution from temboo.Library.Google.Drive.Changes.List import List, ListInputSet, ListResultSet, ListChoreographyExecuti
on
xdevelsistemas/taiga-back-community
taiga/projects/issues/apps.py
Python
agpl-3.0
3,000
0.001668
# -*- coding: utf-8 -*- # Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz> # Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com> # Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com> # Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have rece
ived a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.apps import AppConfig from django.apps import apps from django.db.models import signals def connect_issues_signals(): from taiga.projects.tagging import signals as tagging_handlers from . import signals as handlers # Finished date signals.pre_save.connect(handlers.set_finished_date_when_edit_issue, sender=apps.get_model("iss
ues", "Issue"), dispatch_uid="set_finished_date_when_edit_issue") # Tags signals.pre_save.connect(tagging_handlers.tags_normalization, sender=apps.get_model("issues", "Issue"), dispatch_uid="tags_normalization_issue") def connect_issues_custom_attributes_signals(): from taiga.projects.custom_attributes import signals as custom_attributes_handlers signals.post_save.connect(custom_attributes_handlers.create_custom_attribute_value_when_create_issue, sender=apps.get_model("issues", "Issue"), dispatch_uid="create_custom_attribute_value_when_create_issue") def connect_all_issues_signals(): connect_issues_signals() connect_issues_custom_attributes_signals() def disconnect_issues_signals(): signals.pre_save.disconnect(sender=apps.get_model("issues", "Issue"), dispatch_uid="set_finished_date_when_edit_issue") signals.pre_save.disconnect(sender=apps.get_model("issues", "Issue"), dispatch_uid="tags_normalization_issue") def disconnect_issues_custom_attributes_signals(): signals.post_save.disconnect(sender=apps.get_model("issues", "Issue"), dispatch_uid="create_custom_attribute_value_when_create_issue") def disconnect_all_issues_signals(): disconnect_issues_signals() disconnect_issues_custom_attributes_signals() class IssuesAppConfig(AppConfig): name = "taiga.projects.issues" verbose_name = "Issues" def ready(self): connect_all_issues_signals()
pranjan77/narrative
src/scripts/test_data_uploader/populate_mini_ws.py
Python
mit
6,648
0.002106
from biokbase.workspace.client import Workspace import requests import json import sys from time import time from fix_workspace_info import fix_all_workspace_info from pprint import pprint kb_port = 9999 mini_ws_url = f"http://localhost:{kb_port}/services/ws" mini_auth_url = f"http://localhost:{kb_port}/services/auth/testmode" mini_ws_admin = "wsadmin" narrative_spec_file = '../../../narrative_object.spec' old_narrative_spec_file = 'old_narrative_object.spec' test_narrative_data = 'narrative_test_data.json' test_user = "kbasetest" #### # BEFORE YOU RUN THIS: # 1. Spin up mini_kb with the workspace env pointed to my branch: # that is, the "-env" line in the ws command points to # "https://raw.githubusercontent.com/briehl/mini_kb/master/deployment/conf/workspace-minikb.ini" # # 2. When this starts up, the workspace will complain. Auth is in testmode, and there's no test user/token set up # for the Shock configuration. Do the following: # a. enter the mongo container # > docker exec -it mini_kb_ci-mongo_1 /bin/bash # b. start mongo (just "mongo" at the prompt) # c. Run the following to use gridFS: # > use workspace # > db.settings.findAndModify({ query: {backend: "shock"}, update: { $set: {"backend": "gridFS"} } }) # d. Exit that container, and restart the workspace container # > docker-compose restart ws # # With the setup done, this script should do the job of creating accounts, importing the Narrative type, # loading test data, etc. def create_user(user_id): """ Returns a token for that user. """ headers = { "Content-Type": "application/json" } r = requests.post(mini_auth_url + '/api/V2/testmodeonly/user', headers=headers, data=json.dumps({'user': user_id, 'display': "User {}".format(user_id)})) if r.status_code != 200 and r.status_code != 400: print("Can't create dummy user!") r.raise_for_status() r = requests.post(mini_auth_url + '/api/V2/testmodeonly/token', headers=headers, data=json.dumps({'user': user_id, 'type': 'Login'})) if r.status_code != 200: print("Can't make dummy token!") r.raise_for_status() token = json.loads(r.text) return token['token'] def load_narrative_type(ws): """ Loads the KBaseNarrative.Narrative type info into mini kb. ws = Workspace client configured for admin """ ws.request_module_ownership("KBaseNarrative") ws.administer({ 'command': 'approveModRequest', 'module': 'KBaseNarrative' }) with open(old_narrative_spec_file, "r") as f: old_spec = f.read() ws.register_typespec({ 'spec': old_spec, 'dryrun': 0, 'new_types': [ 'Narrative', 'Cell', 'Worksheet', 'Metadata' ] }) ws.release_module('KBaseNarrative') for n in ws.get_module_info({'mod': 'KBaseNarrative'})['types'].keys(): if '.Narrative' in n: old_ver = n.split('-')[-1] with open(narrative_spec_file, "r") as f: spec = f.read() ws.register_typespec({ 'spec': spec, 'dryrun': 0, 'new_types': [] }) ws.release_module('KBaseNarrative') for n in ws.get_module_info({'mod': 'KBaseNarrative'})['types'].keys(): if '.Narrative' in n: new_ver = n.split('-')[-1] return { 'old_ver': old_ver, 'new_ver': new_ver } def load_narrative_test_data(ws, vers): """ Loads the test data set into mini kb ws. Returns this structure: wsid: { narrative_id: int correct_ws_meta: {} correct_ws_perms: {} } there's more than 1 wsid (should be ~7-10), but that's it. 
""" with open(test_narrative_data, 'r') as f: test_data = json.loads(f.read().strip()) uploaded_data = list() for ws_data in test_data["old"]: uploaded_data.append(_load_workspace_data(ws, ws_data, len(uploaded_data), vers['old_ver'])) for ws_data in test_data["new"]: uploaded_data.append(_load_workspace_data(ws, ws_data, len(uploaded_data), vers['new_ver'])) return uploaded_data def _load_workspace_data(ws, ws_data, idx, narrative_ver): """ Loads up a single workspa
ce with data and returns a dict about it. Dict contains: id = the workspace id perms = the workspace permissions correct_meta = the correct workspace metadata (for validation) """ print
(ws_data.keys()) narratives = ws_data['narratives'] ws_meta = ws_data['ws_meta'] ws_info = ws.create_workspace({"workspace": "NarrativeWS-{}-{}".format(idx, int(time()*1000))}) ws_id = ws_info[0] info = { "ws_id": ws_id, "ws_info": ws_info, "nar_info": [], "perms": ws_data["perms"], "correct_meta": ws_data["correct_meta"], "loaded_meta": ws_meta } if len(narratives): for idx, nar in enumerate(narratives): objects = ws.save_objects({ 'id': ws_id, 'objects': [{ 'type': 'KBaseNarrative.Narrative-{}'.format(narrative_ver), 'data': nar, 'name': 'Narrative-{}'.format(idx) }] }) info['nar_info'].append(objects[0]) if len(ws_meta): ws.alter_workspace_metadata({ 'wsi': {'id': ws_id}, 'new': ws_meta }) perms = ws_data["perms"] if len(perms) > 1: admin_perm = perms['wsadmin'] ws.set_permissions({ 'id': ws_id, 'new_permission': admin_perm, 'users': ['wsadmin'] }) return info def main(): admin_token = create_user(mini_ws_admin) admin_ws = Workspace(url=mini_ws_url, token=admin_token) versions = load_narrative_type(admin_ws) versions = { 'old_ver': '1.0', 'new_ver': '2.0' } user_token = create_user(test_user) user_ws = Workspace(url=mini_ws_url, token=user_token) loaded_info = load_narrative_test_data(user_ws, versions) pprint(loaded_info) # fix_all_workspace_info(mini_ws_url, mini_auth_url, admin_token, 100) # for ws_data in loaded_info: # ws_id = ws_data['ws_id'] # ws_meta = user_ws.get_workspace_info({'id': ws_id})[8] # try: # assert(ws_meta == ws_data['correct_meta']) # except: # print("WS: {}".format(ws_id)) # pprint(ws_meta) # print("doesn't match") # pprint(ws_data['correct_meta']) if __name__ == '__main__': sys.exit(main())
germn/python-for-android
pythonforandroid/bootstraps/webview/__init__.py
Python
mit
1,982
0.001514
from pythonforandroid.toolchain import Bootstrap, current_directory, info, info_main, shprint from pythonforandroid.util import ensure_dir from os.path import join import sh class WebViewBootstrap(Bootstrap): name = 'webview' recipe_depends = list( set(Bootstrap.recipe_depends).union({'genericndkbuild'}) ) def assemble_distri
bution(self): info_m
ain('# Creating Android project from build and {} bootstrap'.format( self.name)) shprint(sh.rm, '-rf', self.dist_dir) shprint(sh.cp, '-r', self.build_dir, self.dist_dir) with current_directory(self.dist_dir): with open('local.properties', 'w') as fileh: fileh.write('sdk.dir={}'.format(self.ctx.sdk_dir)) arch = self.ctx.archs[0] if len(self.ctx.archs) > 1: raise ValueError('built for more than one arch, but bootstrap cannot handle that yet') info('Bootstrap running with arch {}'.format(arch)) with current_directory(self.dist_dir): info('Copying python distribution') self.distribute_libs(arch, [self.ctx.get_libs_dir(arch.arch)]) self.distribute_aars(arch) self.distribute_javaclasses(self.ctx.javaclass_dir, dest_dir=join("src", "main", "java")) python_bundle_dir = join('_python_bundle', '_python_bundle') ensure_dir(python_bundle_dir) site_packages_dir = self.ctx.python_recipe.create_python_bundle( join(self.dist_dir, python_bundle_dir), arch) if 'sqlite3' not in self.ctx.recipe_build_order: with open('blacklist.txt', 'a') as fileh: fileh.write('\nsqlite3/*\nlib-dynload/_sqlite3.so\n') if not self.ctx.with_debug_symbols: self.strip_libraries(arch) self.fry_eggs(site_packages_dir) super().assemble_distribution() bootstrap = WebViewBootstrap()
Anthrocon-Reg/ubersystem
uber/tests/models/test_getter.py
Python
gpl-3.0
1,761
0.004543
from uber.tests import * @pytest.fixture def attendee_id(): with Session() as session: return session.query(Attendee).filter_by(first_name='Regular', last_name='Attendee').one().id @pytest.fixture(autouse=True) def mock_apply(monkeypatch): monkeypatch.setattr(Attendee, 'apply', Mock()) return Attendee.apply def test_invalid_gets(): with Session() as session: pytest.raises(Exception, session.attendee) pytest.raises(Exception, session.attendee, '') pytest.raises(Exception, session.attendee, []) pytest.raises(Exception, session.attendee, None) pytest.raises(Exception, session.attendee, str(uuid4())) pytest.raises(Exception, session.attendee, {'id': str(uuid4())}) def test_basic_get(attendee_id, mock_apply): with Session() as session: assert session.attendee(attendee_id).first_name == 'Regular' assert not mock_apply.called assert session.attendee(id=attendee_id).first_name == 'Regular' assert not mock_apply.called
assert session.attendee({'id': attendee_id}).first_name == 'Regular' assert mock_apply.called def test_empty_get(mock_apply): with Session() as session: assert session.attendee({}).paid == NOT_PAID # basic sanity check assert mock_apply.called def test_ignore_csrf(request): with Session() as session: pytest.raises(Exception, session.attendee, {'paid': NEED_NOT_PAY}) session.attendee({'paid': NEED_NOT_PAY}, i
gnore_csrf=True) session.attendee({'paid': NEED_NOT_PAY}, allowed=['paid']) request.addfinalizer(lambda: setattr(cherrypy.request, 'method', 'GET')) cherrypy.request.method = 'POST' session.attendee({'paid': NEED_NOT_PAY})
Williams224/davinci-scripts
ksteta3pi/Consideredbkg/MC_12_11104124_MagUp.py
Python
mit
12,177
0.02825
#-- GAUDI jobOptions generated on Fri Jul 17 16:39:48 2015 #-- Contains event types : #-- 11104124 - 106 files - 1087377 events - 233.68 GBytes #-- Extra information about the data processing phases: #-- Processing Pass Step-124620 #-- StepId : 124620 #-- StepName : Digi13 with G4 dE/dx #-- ApplicationName : Boole #-- ApplicationVersion : v26r3 #-- OptionFiles : $APPCONFIGOPTS/Boole/Default.py;$APPCONFIGOPTS/Boole/DataType-2012.py;$APPCONFIGOPTS/Boole/Boole-SiG4EnergyDeposit.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConfig.v3r164 #-- Visible : Y #-- Processing Pass Step-124630 #-- StepId : 124630 #-- StepName : Stripping20-NoPrescalingFlagged for Sim08 #-- ApplicationName : DaVinci #-- ApplicationVersion : v32r2p1 #-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConfig.v3r164 #-- Visible : Y #-- Processing Pass Step-125877 #-- StepId : 125877 #-- StepName : L0 emulation - TCK 003d #-- ApplicationName : Moore #-- ApplicationVersion : v20r4 #-- OptionFiles : $APPCONFIGOPTS/L0App/L0AppSimProduction.py;$APPCONFIGOPTS/L0App/L0AppTCK-0x003d.py;$APPCONFIGOPTS/L0App/DataType-2012.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConfig.v3r200 #-- Visible : N #-- Processing Pass Step-127200 #-- StepId : 127200 #-- StepName : TCK-0x4097003d Flagged for Sim08 2012 #-- ApplicationName : Moore #-- ApplicationVersion : v14r2p1 #-- OptionFiles : $APPCONFIGOPTS/Moore/MooreSimProductionForSeparateL0AppStep.py;$APPCONFIGOPTS/Conditions/TCK-0x4097003d.py;$APPCONFIGOPTS/Moore/DataType-2012.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConf
ig.v3r206 #-- Visible : Y #-- Processin
g Pass Step-124834 #-- StepId : 124834 #-- StepName : Reco14a for MC #-- ApplicationName : Brunel #-- ApplicationVersion : v43r2p7 #-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py #-- DDDB : fromPreviousStep #-- CONDDB : fromPreviousStep #-- ExtraPackages : AppConfig.v3r164 #-- Visible : Y #-- Processing Pass Step-127148 #-- StepId : 127148 #-- StepName : Sim08g - 2012 - MU - Pythia8 #-- ApplicationName : Gauss #-- ApplicationVersion : v45r9 #-- OptionFiles : $APPCONFIGOPTS/Gauss/Sim08-Beam4000GeV-mu100-2012-nu2.5.py;$DECFILESROOT/options/@{eventType}.py;$LBPYTHIA8ROOT/options/Pythia8.py;$APPCONFIGOPTS/Gauss/G4PL_FTFP_BERT_EmNoCuts.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py #-- DDDB : dddb-20130929-1 #-- CONDDB : sim-20130522-1-vc-mu100 #-- ExtraPackages : AppConfig.v3r205;DecFiles.v27r37 #-- Visible : Y from Gaudi.Configuration import * from GaudiConf import IOHelper IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000001_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000002_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000003_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000004_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000005_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000006_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000007_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000008_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000009_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000010_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000011_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000012_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000013_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000014_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000015_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000016_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000017_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000018_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000019_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000020_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000021_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000022_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000023_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000024_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000025_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000026_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000027_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000032_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000033_1.allstreams.dst', 
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000034_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000045_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000057_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000058_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000062_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000073_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000074_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000075_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000076_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000077_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000078_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000079_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000080_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000081_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000082_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000083_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000084_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000085_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000086_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000087_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000088_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000089_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000090_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000091_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000092_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000093_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000094_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000095_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000096_1.allstreams.dst', 'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00043567/0000/00043567_00000097_1.allstreams.dst', 'LFN:/lhcb/M
anhstudios/swganh
data/scripts/templates/object/mission/base/shared_base_mission.py
Python
mit
435
0.048276
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Missi
on() result.template = "object/mission/base/shared_base_mission.iff" result.attribute_template_id = -1 result.stfName("string_id_table","") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
rhdedgar/openshift-tools
openshift/installer/vendored/openshift-ansible-3.4.12-1/library/modify_yaml.py
Python
apache-2.0
3,141
0.000955
#!/usr/bin/python # -*- coding: utf-8 -*- # vim: expandtab:tabstop=4:shiftwidth=4 ''' modify_yaml ansible module ''' import yaml DOCUMENTATION = ''' --- module: modify_yaml short_description: Modify yaml key value pairs author: Andrew Butcher requirements: [ ] ''' EXAMPLES = ''' - modify_yaml: dest: /etc/origin/master/master-config.yaml yaml_key: 'kubernetesMasterConfig.masterCount' yaml_value: 2 ''' # pylint: disable=missing-docstring def set_key(yaml_data, yaml_key, yaml_value): changes = [] ptr = yaml_data for key in yaml_key.split('.'): if key not in ptr and key != yaml_key.split('.')[-1]: ptr[key] = {} ptr = ptr[key] elif key == yaml_key.split('.')[-1]: if (key in ptr and module.safe_eval(ptr[key]) != yaml_value) or (key not in ptr): ptr[key] = yaml_value changes.append((yaml_key, yaml_value)) else: ptr = ptr[key] return changes def main(): ''' Modify key (supplied in jinja2 dot notation) in yaml file, setting the key to the desired value. ''' # disabling pylint errors for global-variable-undefined and invalid-name # for 'global module' usage, since it is required to use ansible_facts # pylint: disable=global-variable-undefined, invalid-name, # redefined-outer-name global module module = AnsibleModule( argument_spec=dict( dest=dict(required=True), yaml_key=dict(required=True), yaml_value=dict(required=True), backup=dict(required=False, default=True, type='bool'), ), supports_check_mode=True, ) dest = module.params['dest'] yaml_key = module.params['yaml_key'] yaml_value = module.safe_eval(module.params['yaml_value']) backup = module.params['backup'] # Represent null values as an empty string. # pylint: disable=missing-docstring, unused-argument def none_representer(dumper, data): return yaml.ScalarNode(tag=u'tag:yaml.org,2002:null', value=u'') yaml.add_representer(type(None), none_representer) try: yaml_file = open(dest)
yaml_data = yaml.safe_load(yaml_file.read()) yaml_file.close() changes = set_key(yaml_data, yaml_key, yaml_value) if len(changes) > 0: if backup: module.backup_local(dest) yaml_file = open(dest, 'w') yaml_string = yaml.dump(yaml_data, default_flow_style=False) yaml_string = yaml_string.replace('\'\'', '""') yaml_file.write(yaml_string) yaml_
file.close() return module.exit_json(changed=(len(changes) > 0), changes=changes) # ignore broad-except error to avoid stack trace to ansible user # pylint: disable=broad-except except Exception, e: return module.fail_json(msg=str(e)) # ignore pylint errors related to the module_utils import # pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
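A worked example of set_key's dotted-path behaviour on a plain dict. For keys that do not exist yet, the condition short-circuits before touching the global module object, so this runs standalone:

yaml_data = {'kubernetesMasterConfig': {}}
changes = set_key(yaml_data, 'kubernetesMasterConfig.masterCount', 2)
print(changes)    # [('kubernetesMasterConfig.masterCount', 2)]
print(yaml_data)  # {'kubernetesMasterConfig': {'masterCount': 2}}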
gdhungana/desispec
py/desispec/scripts/fiberflat.py
Python
bsd-3-clause
2,238
0.011171
""" desispec.fiberflat ================== Utility functions to compute a fiber flat correction and apply it We try to keep all the (fits) io separated. """ from __future__ import absolute_import, division import numpy as np from desispec.io import read_frame from desispec.io import write_fiberflat from desispec.fiberflat import compute_fiberflat from desispec.log import get_logger from desispec.io.qa import load_qa_frame from desispec.io import write_qa_frame from desispec.qa import qa_plots import argparse def parse(options=None): parser = argparse.ArgumentParser(description="Compute the fiber flat field correction from a DESI continuum lamp frame") parser.add_argument('--infile', type = str, default = None, required=True, help = 'path of DESI frame fits file corresponding to a continuum lamp exposure') parser.add_argument('--outfile', type = str, default = None, required=True, help = 'path of DESI fiberflat fits file') p
arser.add_argument('--qafile', type=str, default=None, required=False, help
='path of QA file') parser.add_argument('--qafig', type = str, default = None, required=False, help = 'path of QA figure file') args = None if options is None: args = parser.parse_args() else: args = parser.parse_args(options) return args def main(args) : log=get_logger() log.info("starting") # Process frame = read_frame(args.infile) fiberflat = compute_fiberflat(frame) # QA if (args.qafile is not None): log.info("performing fiberflat QA") # Load qaframe = load_qa_frame(args.qafile, frame, flavor=frame.meta['FLAVOR']) # Run qaframe.run_qa('FIBERFLAT', (frame, fiberflat)) # Write if args.qafile is not None: write_qa_frame(args.qafile, qaframe) log.info("successfully wrote {:s}".format(args.qafile)) # Figure(s) if args.qafig is not None: qa_plots.frame_fiberflat(args.qafig, qaframe, frame, fiberflat) # Write write_fiberflat(args.outfile, fiberflat, frame.meta) log.info("successfully wrote %s"%args.outfile)
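Because of the parse/main split above, the script can also be driven programmatically; a hypothetical invocation (the file names are placeholders for a real DESI continuum-lamp frame):

args = parse(['--infile', 'frame-b0-00000001.fits',
              '--outfile', 'fiberflat-b0-00000001.fits'])
main(args)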
wufangjie/leetcode
335. Self Crossing.py
Python
gpl-3.0
1,217
0.012325
class Solution(object): def isSelfCrossing(self, x): """ :type x: List[int] :rtype: bool """ inf = float('inf') n = len(x) if n < 3: return False ruld = [0, 0, 0, 0] # right, up, left, down next_max = inf current = [-x[1], x[0]] for i, elem in enumerate(x[2:], 2): i %= 4 if elem >= next_max: return True xy = 1 if i in {0, 2} else 0
pn =
1 if i in {0, 3} else -1 new = current[xy] + pn * elem if pn * new > pn * ruld[i - 3]: next_max = inf else: if next_max is inf and pn * new >= pn * ruld[i - 1]: ruld[i - 2] = ruld[i] next_max = abs(ruld[i - 2] - current[xy ^ 1]) ruld[i - 1], current[xy] = current[xy], new return False assert Solution().isSelfCrossing([2, 1, 1, 2]) assert not Solution().isSelfCrossing([1, 2, 3, 4]) assert Solution().isSelfCrossing([1, 1, 1, 1]) assert not Solution().isSelfCrossing([3,3,4,2,2]) assert Solution().isSelfCrossing([1,1,2,1,1]) assert not Solution().isSelfCrossing([3,3,3,2,1,1])
hangarunderground/tiempo
tiempo/__init__.py
Python
gpl-2.0
183
0
PROJECT_PATH = __path__[0] TIEMPO_REGISTRY = {} REDIS_GROUP_NAMESPACE = 'tiempogroup'
RECENT_KEY = 'tiempo:recent_tasks' RESULT_PREFIX = 'tiempo:task_result' __version__ = "1.
2.3"
Phonebooth/depot_tools
presubmit_canned_checks.py
Python
bsd-3-clause
40,994
0.011538
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Generic presubmit checks that can be reused by other presubmit checks.""" import os as _os _HERE = _os.path.dirname(_os.path.abspath(__file__)) ### Description checks def CheckChangeHasTestField(input_api, output_api): """Requires that the changelist have a TEST= field.""" if input_api.change.TEST: return [] else: return [output_api.PresubmitNotifyResult( 'If this change requires manual test instructions to QA team, add ' 'TEST=[instructions].')] def CheckChangeHasBugField(input_api, output_api): """Requires that the changelist have a BUG= field.""" if input_api.change.BUG: return [] else: return [output_api.PresubmitNotifyResult( 'If this change has an associated bug, add BUG=[bug number].')] def CheckChangeHasTestedField(input_api, output_api): """Requires that the changelist have a TESTED= field.""" if input_api.change.TESTED: return [] else: return [output_api.PresubmitError('Changelist must have a TESTED= field.')] def CheckChangeHasQaField(input_api, output_api): """Requires that the changelist have a QA= field.""" if input_api.change.QA: return [] else: return [output_api.PresubmitError('Changelist must have a QA= field.')] def CheckDoNotSubmitInDescription(input_api, output_api): """Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description. """ keyword = 'DO NOT ''SUBMIT' if keyword in input_api.change.DescriptionText(): return [output_api.PresubmitError( keyword + ' is present in the changelist description.')] else: return [] def CheckChangeHasDescription(input_api, output_api): """Checks the CL description is not empty.""" text = input_api.change.DescriptionText() if text.strip() == '': if input_api.is_committing: return [output_api.PresubmitError('Add a description to the CL.')] else: return [output_api.PresubmitNotifyResult('Add a description to the CL.')] return [] def CheckChangeWasUploaded(input_api, output_api): """Checks that the issue was uploaded before committing.""" if input_api.is_committing and not input_api.change.issue: return [output_api.PresubmitError( 'Issue wasn\'t uploaded. Please upload first.')] return [] ### Content checks def CheckDoNotSubmitInFiles(input_api, output_api): """Checks that the user didn't add 'DO NOT ''SUBMIT' to any files.""" # We want to check every text file, not just source files. file_filter = lambda x : x keyword = 'DO NOT ''SUBMIT' errors = _FindNewViolationsOfRule(lambda _, line : keyword not in line, input_api, file_filter) text = '\n'.join('Found %s in %s' % (keyword, loc) for loc in errors) if text: return [output_api.PresubmitError(text)] return [] def CheckChangeLintsClean(input_api, output_api, source_file_filter=None): """Checks that all '.cc' and '.h' files pass cpplint.py.""" _RE_IS_TEST = input_api.re.compile(r'.*tests?.(cc|h)$') result = [] cpplint = input_api.cpplint # Access to a protected member _XX of a client class # pylint: disable=W0212 cpplint._cpplint_state.ResetErrorCounts() # Justifications for each filter: # # - build/include : Too many; fix in the future. # - build/include_order : Not happening; #ifdefed includes. # - build/namespace : I'm surprised by how often we violate this rule. # - readability/casting : Mistakes a whole bunch of function pointer. # - runtime/int : Can be fixed long term; volume of errors too high # - runtime/virtual : Broken now, but can be fixed in the future? 
# - whitespace/braces : We have a lot of explicit scoping in chrome code. cpplint._SetFilters('-build/include,-build/include_order,-build/namespace,' '-readability/casting,-runtime/int,-runtime/virtual,' '-whitespace/braces') # We currently are more strict with normal code than unit tests; 4 and 5 are # the verbosity level that would normally be passed to cpplint.py through # --verbose=#. Hopefully, in the future, we can be more verbose. files = [f.AbsoluteLocalPath() for f in input_api.AffectedSourceFiles(source_file_filter)] for file_name in files: if _RE_IS_TEST.match(file_name): level = 5 else: level = 4 cpplint.ProcessFile(file_name, level) if cpplint._cpplint_state.error_count > 0: if input_api.is_committing: res_type = output_api.PresubmitError else: res_type = output_api.PresubmitPromptWarning result = [res_type('Changelist failed cpplint.py check.')] return result def CheckChangeHasNoCR(input_api, output_api, source_file_filter=None): """Checks no '\r' (CR) character is in any source files.""" cr_files = [] for f in input_api.AffectedSourceFiles(source_file_filter): if '\r' in input_api.ReadFile(f, 'rb'): cr_files.append(f.LocalPath()) if cr_files: return [output_api.PresubmitPromptWarning( 'Found a CR character in these files:', items=cr_files)] return [] def CheckSvnModifiedDirectories(input_api, output_api, source_file_filter=None): """Checks for files in svn modified directories. They will get submitted on accident because svn commits recursively by default, and that's very dangerous. """ if input_api.change.scm != 'svn': return [] errors = [] current_cl_files = input_api.change.GetModifiedFiles() all_modified_files = input_api.change.GetAllModifiedFiles() # Filter out files in the current CL. modified_files = [f for f in all_modified_files if f not in current_cl_files] modified_abspaths = [input_api.os_path.abspath(f) for f in modified_files] for f in input_api.AffectedFiles(file_filter=source_file_filter): if f.Action() == 'M' and f.IsDirectory(): curpath = f.AbsoluteLocalPath() bad_files = [] # Check if any of the modified files in other CLs are under curpath. for i in xrange(len(modified_files)): abspath = modified_abspaths[i] if input_api.os_path.commonprefix([curpath, abspath]) == curpath: bad_files.append(modified_files[i]) if bad_files: if input_api.is_committing: error_type = output_api.PresubmitPromptWarning else: error_type = output_api.PresubmitNotifyResult errors.append(error_type( 'Potential accidental commits in changelist %s:' % f.LocalPath(), items=bad_files)) return errors def CheckChangeHasOnlyOneEol(input_api, output_api, source_file_filter=None): """Checks the files ends with one and only one \n (LF).""" eof_files = [] for f in input_api.AffectedSourceFiles(source_file_filter): contents = input_api.ReadFile(f, 'rb') # Check that the file ends in one and only one newline character. if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'): eof_files.append(f.LocalPath()) if eof_files: return [output_api.PresubmitPromptWarning( 'These files should end in one (and only one) newline character:', items=eof_files)] return [] def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, output_api, source_file_filter=None): """Runs both CheckChangeHasNoCR and CheckChangeHasOnlyOneEOL in one pass. It is faster because it is reading the file only once. """ cr_files = [] eof_files = [] for f in input_api.AffectedSourceFiles(source_file_filter): contents = input_api.ReadFile(f, 'rb') if '\r' in contents: cr_files.append(f.LocalPath())
# Check that the file ends in one and only one newline character. if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'): eof_files.a
ppend(f.LocalPath()) outputs = [] if cr_files: outputs.append(output_api.PresubmitPromptWarning( 'Found a CR character in these files:', items=cr_files)) if eof_files: outputs.append(output_api.PresubmitPromptWarning( 'These fil
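These canned checks are conventionally invoked from a project's PRESUBMIT.py via input_api.canned_checks; a sketch of that standard pattern (not part of this file):

# PRESUBMIT.py sketch: run a couple of the canned checks on upload.
def CheckChangeOnUpload(input_api, output_api):
    results = []
    results.extend(input_api.canned_checks.CheckChangeHasBugField(
        input_api, output_api))
    results.extend(input_api.canned_checks.CheckChangeHasNoCrAndHasOnlyOneEol(
        input_api, output_api))
    return results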
prescott66/Cnchi
src/encfs.py
Python
gpl-3.0
3,865
0.001035
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# encfs.py
#
# Copyright 2013 Antergos
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.

""" Configures Antergos to encrypt user's home with encFS """

#import logging
import os
import shutil
import subprocess

def setup(username, dest_dir):
    """ Encrypt user's home folder """
    # encfs pam_mount packages are needed
    # pam_encfs from AUR
    # https://wiki.debian.org/TransparentEncryptionForHomeFolder

    # Edit configuration files
    name = os.path.join(dest_dir, "etc/security/pam_encfs.conf")
    shutil.copy(name, name + ".cnchi")
    with open(name, "r") as pam_encfs:
        lines = pam_encfs.readlines()
        i = len(lines) - 1
        lines[i] = "# " + lines[i]
    with open(name, "w") as pam_encfs:
        # readlines() returns a list, so writelines() (not write()) is needed
        pam_encfs.writelines(lines)
        pam_encfs.write("# Added by Cnchi - Antergos Installer\n")
        pam_encfs.write("-\t/home/.encfs\t-\t-v\t-\n")

    name = os.path.join(dest_dir, "etc/security/pam_env.conf")
    shutil.copy(name, name + ".cnchi")
    with open(name, "a") as pam_env:
        pam_env.write("# Added by Cnchi - Antergos Installer\n")
        pam_env.write("# Set the ICEAUTHORITY file location to allow GNOME to start on encfs $HOME\n")
        pam_env.write("ICEAUTHORITY DEFAULT=/tmp/.ICEauthority_@{PAM_USER}\n")

    name = os.path.join(dest_dir, "etc/fuse.conf")
    shutil.copy(name, name + ".cnchi")
    with open(name, "a") as fuse_conf:
        fuse_conf.write("# Added by Cnchi - Antergos Installer\n")
        fuse_conf.write("user_allow_other\n")

    name
= os.path.join(dest_dir, "etc/pam.d/system-login") shutil.copy(name, name + ".cnchi") with open(name, "a") as system_login: system_login.write("# Added by Cnchi - Antergos Installer\n") system_login.write("session required\tpam_encfs.so\n") system_login.write("session optional\tpam_mount.so\n") name = os.path.join(dest_dir,
"etc/pam.d/system-auth") shutil.copy(name, name + ".cnchi") with open(name, "a") as system_auth: system_auth.write("# Added by Cnchi - Antergos Installer\n") system_auth.write("auth sufficient\tpam_encfs.so\n") system_auth.write("auth optional\tpam_mount.so\n") # Setup finished # Move user home dir out of the way mounted_dir = os.path.join(self.dest_dir, "home/", username) backup_dir = os.path.join(self.dest_dir, "var/tmp/", username) subprocess.check_call(['mv', src_dir, backup_dir]) # Create necessary dirs, encrypted and mounted(unecrypted) encrypted_dir = os.path.join(self.dest_dir, "home/.encfs/", username) subprocess.check_call(['mkdir', '-p', encrypted_dir, mounted_dir]) # Set owner subprocess.check_call(['chown', '%s:users' % username, encrypted_dir, mounted_dir]) # Create encrypted directory subprocess.check_call(['encfs', '-v', encrypted_dir, mounted_dir]) # Restore user home files src = os.path.join(backup_dir, "*") subprocess.check_call(['mv', src, mounted_dir]) src = os.path.join(backup_dir, ".[A-Za-z0-9]*") subprocess.check_call(['mv', src, mounted_dir]) # Delete home backup subprocess.check_call(['rmdir', backup_dir])
ctag/cpe453
JMRI/jython/Jynstruments/Launchers/DecoderPro.jyn/DecoderPro.py
Python
gpl-2.0
744
0.021505
import jmri.jmrit.jython.Jynstrument as Jynstrument import jmri.jmrit.catalog.NamedIcon as NamedIcon import jmri.jmrit.symbolicprog.tabbedframe.PaneOpsProgAction as PaneOpsProgAction import javax.swing.JButton as JButton class DecoderPro(Jynstrument): def getExpectedContextClassName(self): return "javax.swing.JComponent" def init(self): jbNew = JButton( PaneOpsProgAction() ) jbNew.setIcon( NamedIcon("resources/decoderpro.gif","resources/decoderpro.gif") ) jbNew.addMouseListener(self.getMouseList
eners()[0]) # In order to get the popupmenu on the button too jbNew.setToolT
ipText( jbNew.getText() ) jbNew.setText( None ) self.add(jbNew) def quit(self): pass
vivekanand1101/pontoon
pontoon/base/__init__.py
Python
bsd-3-clause
561
0
"""Application base, containing global templates.""" default_app_config = 'pontoon.base.apps.BaseConfig' MOZILLA_REPOS = ( 'ssh://hg.mozilla.org/users/m_owca.info/firefox-aur
ora/', 'ssh://hg.mozilla.org/users/m_owca.info/firefox-for-android-aurora/', 'ssh://hg
.mozilla.org/users/m_owca.info/thunderbird-aurora/', 'ssh://hg.mozilla.org/users/m_owca.info/lightning-aurora/', 'ssh://hg.mozilla.org/users/m_owca.info/seamonkey-aurora/', ) class SyncError(RuntimeError): """Error class for errors relating to the project sync process."""
danmt/NEO
Codigo_Fuente/etapa4/Instrucciones/Arbol_Sintactico_Abstracto.py
Python
gpl-3.0
428
0.044393
from Estructura import espaceado class Arbol_Sintactico_Abstracto: def __init__(self,alc
ance,hijos): self.hijos = hijos self.alcance = alcance self.cont = 1 def imprimir(self,tabulacion): if (len(self.hijos) > 1): print tabulacion + "SECUENCIA" for hijo in self.hijos: hijo.nivel = 1 hijo.imprimir(espaceado(tabulacion)) def ejecutar(self): for hijo in self.hijos
: hijo.nivel = 1 hijo.ejecutar()
IBMStreams/streamsx.health
test/test-builds.py
Python
apache-2.0
792
0.001263
import glob from subprocess import call test_failures = {} test_successes = {} files = [file for file in glob.glob('../**/build.gradle', recursive=True)] for f in files: if f.startswith('../test'): continue # clean all projects in the platform before executing build print("Cleaning all projects first...") call(['../gradlew', '-p', '../', 'clean'])
print("Executing " + f + "...") rc = call(['../gradlew', '-b', f, 'build']) if rc == 0: test_successes[f] = rc else: test_failure
s[f] = rc print("Return code: " + str(rc)) print("FAILURES:") for key in test_failures: print(key + ": " + "FAILED(rc=" + str(test_failures[key]) + ")!") print("\n\n") print("SUCCESSES:") for key in test_successes: print(key + ": PASS")
JulienMcJay/eclock
windows/Python27/Lib/site-packages/pygments/filters/__init__.py
Python
gpl-2.0
11,486
0.000871
# -*- coding: utf-8 -*-
"""
    pygments.filters
    ~~~~~~~~~~~~~~~~

    Module containing filter lookup functions and default
    filters.

    :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \
    string_to_tokentype
from pygments.filter import Filter
from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \
    get_choice_opt, ClassNotFound, OptionError
from pygments.plugin import find_plugin_filters


def find_filter_class(filtername):
    """
    Lookup a filter by name. Return None if not found.
    """
    if filtername in FILTERS:
        return FILTERS[filtername]
    for name, cls in find_plugin_filters():
        if name == filtername:
            return cls
    return None


def get_filter_by_name(filtername, **options):
    """
    Return an instantiated filter. Options are passed to the filter
    initializer if wanted. Raise a ClassNotFound if not found.
    """
    cls = find_filter_class(filtername)
    if cls:
        return cls(**options)
    else:
        raise ClassNotFound('filter %r not found' % filtername)


def get_all_filters():
    """
    Return a generator of all filter names.
    """
    for name in FILTERS:
        yield name
    for name, _ in find_plugin_filters():
        yield name


def _replace_special(ttype, value, regex, specialttype,
                     replacefunc=lambda x: x):
    last = 0
    for match in regex.finditer(value):
        start, end = match.start(), match.end()
        if start != last:
            yield ttype, value[last:start]
        yield specialttype, replacefunc(value[start:end])
        last = end
    if last != len(value):
        yield ttype, value[last:]


class CodeTagFilter(Filter):
    """
    Highlight special code tags in comments and docstrings.

    Options accepted:

    `codetags` : list of strings
       A list of strings that are flagged as code tags.  The default is to
       highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        tags = get_list_opt(options, 'codetags',
                            ['XXX', 'TODO', 'BUG', 'NOTE'])
        self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([
            re.escape(tag) for tag in tags if tag
        ]))

    def filter(self, lexer, stream):
        regex = self.tag_re
        for ttype, value in stream:
            if ttype in String.Doc or \
               ttype in Comment and \
               ttype not in Comment.Preproc:
                for sttype, svalue in _replace_special(ttype, value, regex,
                                                       Comment.Special):
                    yield sttype, svalue
            else:
                yield ttype, value


class KeywordCaseFilter(Filter):
    """
    Convert keywords to lowercase or uppercase or capitalize them, which
    means first letter uppercase, rest lowercase.

    This can be useful e.g. if you highlight Pascal code and want to adapt the
    code to your styleguide.

    Options accepted:

    `case` : string
       The casing to convert keywords to. Must be one of ``'lower'``,
       ``'upper'`` or ``'capitalize'``.  The default is ``'lower'``.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        case = get_choice_opt(options, 'case',
                              ['lower', 'upper', 'capitalize'], 'lower')
        self.convert = getattr(unicode, case)

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype in Keyword:
                yield ttype, self.convert(value)
            else:
                yield ttype, value


class NameHighlightFilter(Filter):
    """
    Highlight a normal Name token with a different token type.

    Example::

        filter = NameHighlightFilter(
            names=['foo', 'bar', 'baz'],
            tokentype=Name.Function,
        )

    This would highlight the names "foo", "bar" and "baz"
    as functions. `Name.Function` is the default token type.

    Options accepted:

    `names` : list of strings
      A list of names that should be given the different token type.
      There is no default.
    `tokentype` : TokenType or string
      A token type or a string containing a token type name that is
      used for highlighting the strings in `names`.  The default is
      `Name.Function`.
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.names = set(get_list_opt(options, 'names', []))
        tokentype = options.get('tokentype')
        if tokentype:
            self.tokentype = string_to_tokentype(tokentype)
        else:
            self.tokentype = Name.Function

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype is Name and value in self.names:
                yield self.tokentype, value
            else:
                yield ttype, value


class ErrorToken(Exception):
    pass


class RaiseOnErrorTokenFilter(Filter):
    """
    Raise an exception when the lexer generates an error token.

    Options accepted:

    `excclass` : Exception class
       The exception class to raise.
       The default is `pygments.filters.ErrorToken`.

    *New in Pygments 0.8.*
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        self.exception = options.get('excclass', ErrorToken)
        try:
            # issubclass() will raise TypeError if first argument is not a class
            if not issubclass(self.exception, Exception):
                raise TypeError
        except TypeError:
            raise OptionError('excclass option is not an exception class')

    def filter(self, lexer, stream):
        for ttype, value in stream:
            if ttype is Error:
                raise self.exception(value)
            yield ttype, value


class VisibleWhitespaceFilter(Filter):
    """
    Convert tabs, newlines and/or spaces to visible characters.

    Options accepted:

    `spaces` : string or bool
      If this is a one-character string, spaces will be replaced by this
      string.  If it is another true value, spaces will be replaced by ``·``
      (unicode MIDDLE DOT).  If it is a false value, spaces will not be
      replaced.  The default is ``False``.
    `tabs` : string or bool
      The same as for `spaces`, but the default replacement character is ``»``
      (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK).  The default value
      is ``False``.  Note: this will not work if the `tabsize` option for the
      lexer is nonzero, as tabs will already have been expanded then.
    `tabsize` : int
      If tabs are to be replaced by this filter (see the `tabs` option), this
      is the total number of characters that a tab should be expanded to.
      The default is ``8``.
    `newlines` : string or bool
      The same as for `spaces`, but the default replacement character is ``¶``
      (unicode PILCROW SIGN).  The default value is ``False``.
    `wstokentype` : bool
      If true, give whitespace the special `Whitespace` token type.  This
      allows styling the visible whitespace differently (e.g. greyed out),
      but it can disrupt background colors.  The default is ``True``.

    *New in Pygments 0.8.*
    """

    def __init__(self, **options):
        Filter.__init__(self, **options)
        for name, default in {'spaces': u'·', 'tabs': u'»', 'newlines': u'¶'}.items():
            opt = options.get(name, False)
            if isinstance(opt, basestring) and len(opt) == 1:
                setattr(self, name, opt)
            else:
                setattr(self, name, (opt and default or ''))
        tabsize = get_int_opt(options, 'tabsize', 8)
        if self.tabs:
            self.tabs += ' ' * (tabsize - 1)
        if self.newlines:
            self.newlines += '\n'
        self.wstt = get_bool_opt(options, 'wstokentype', True)

    def filter(self, lexer, stream):
        if self.wstt:
            spaces = self.spaces or ' '
            tabs = self.tabs or '\t'
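For orientation, a brief usage sketch of the filter API above (not part of the original file; the sample source string is invented):

from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter

lexer = PythonLexer()
lexer.add_filter(KeywordCaseFilter(case='upper'))   # def -> DEF, class -> CLASS
lexer.add_filter(CodeTagFilter(codetags=['TODO']))  # flag TODO as Comment.Special
print highlight("def f():\n    pass  # TODO: implement\n",
                lexer, TerminalFormatter())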
collingreen/yaib
plugins/baseplugin.py
Python
mit
5,580
0
class BasePlugin(object):
    """
    Extend this/copy its structure to create plugins.

    Your plugin class must be named `Plugin` to be loaded. It can include
    commands (command_*) and admin commands (admin_*). Additionally, yaib
    will look for functions for many of the connection events.

    Any commands with a docstring will be automatically added to the help
    command output, categorized by plugin name. Command docstrings can
    include {nick} and {command_prefix} which will automatically be
    replaced in the help text with the current values.
    """
    name = 'BasePlugin'

    def __init__(self, yaib, configuration):
        self.yaib = yaib

        # save a shortcut to just this plugin's settings
        self.settings = self.yaib.getPluginSettings(self.name)

        # configure the plugin
        self.configure(configuration)

        # create any default settings
        self.createDefaultSettings()

    @property
    def command_prefix(self):
        # this is a property so it stays updated, even if the setting changes
        return self.yaib.command_prefix

    @property
    def nick(self):
        return self.yaib.nick

    def configure(self, configuration):
        """
        Overwrite this to handle configuration.
        @param configuration: (object) the entire yaib config file.
        """
        pass

    def createDefaultSettings(self):
        """
        Called during initialization. Use
        self.settings.setMulti({...}, initial=True)
        """
        pass

    def getDbSession(self):
        return self.yaib.persistence.getDbSession()

    def formatDoc(self, message):
        """Formats the given message with the {nick} and {command_prefix}."""
        return self.yaib.formatDoc(message)

    def callLater(self, delay, func, *args, **kwargs):
        """Wait for the delay (in seconds) then call the function
        with the given arguments."""
        return self.yaib.callLater(delay, func, *args, **kwargs)

    def onShutdown(self):
        """Called when yaib is shutting down. Clean anything up and
        save all the settings necessary."""
        pass

    def send(self, channel, message):
        """Send a message in the given channel."""
        return self.yaib.sendMessage(channel, message)

    def reply(self, channel, nick, message):
        """
        If the channel is the bot (ie, was a private message to the bot)
        sends a message back to the sender, otherwise sends to the channel.
        """
        return self.send(
            channel if channel != self.nick else nick,
            message
        )

    def action(self, channel, action):
        """Send an action in the given channel."""
        return self.yaib.action(channel, action)

    def onPluginsLoaded(self):
        """Called when ALL the plugins are loaded."""
        pass

    def onNickChange(self, nick, old_nick):
        """Called when {nick}'s nick changes."""
        pass

    def onConnected(self):
        """Called when connected to a server."""
        pass

    def onMessageOfTheDay(self, message):
        """Called with the server's message of the day."""
        pass

    def onNotification(self, user, nick, channel, message):
        """Called when noticed"""
        pass

    def onUserAction(self, user, nick, channel, action):
        """Called when a user performs an action."""
        pass

    def onPrivateMessage(self, user, nick, message):
        """Called when a user sends {nick} a private message"""
        pass

    def onMessage(self, user, nick, channel, message, highlight):
        """Called when something is said in a channel"""
        pass

    def onSend(self, channel, message):
        """Called when {nick} sends a message to a channel (can be PM)."""
        pass

    def onAction(self, channel, action):
        """Called when {nick} does an action in a channel"""
        pass

    def onCommand(self, user, nick, channel, command, more):
        """Called when {nick} runs a command on behalf of a user."""
        pass

    def onAdminCommand(self, user, nick, channel, command, more):
        """Called when {nick} runs an admin command on behalf of a user."""
        pass

    def onJoined(self, channel):
        """Called after joining a channel."""
        pass

    def onLeave(self, channel):
        """Called after leaving a channel."""
        pass

    def onKicked(self, kicker_user, kicker, channel, message):
        """Called when {nick} is kicked from a channel."""
        pass

    def onUserJoined(self, user, nick, channel):
        """Called when a user joins a channel."""
        pass

    def onUserLeave(self, user, nick, channel):
        """Called when a user leaves a channel."""
        pass

    def onUserQuit(self, user, nick, quitMessage):
        """Called when a user disconnects from the server."""
        pass

    def onUserKicked(self, kickee, channel, kicker_user, kicker, message):
        """Called when a user is kicked from a channel"""
        pass

    def onUserRenamed(self, user, old_nick, new_nick):
        """Called when a user changes their nick"""
        pass

    def onUserList(self, channel_type, channel_name, user_list):
        """
        Called when user_list is given for a channel
        (ie, upon joining the channel).
        NOTE: this is a list of nicks, not user strings.
        """
        pass
DXCanas/content-curation
contentcuration/contentcuration/migrations/0039_auto_20161101_1555.py
Python
mit
1,022
0.001957
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-11-01 22:55
from __future__ import unicode_literals

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):

    dependencies = [
        ('contentcuration', '0038_contentnode_author'),
    ]

    operations = [
        migrations.AlterField(
            model_name='formatpreset',
            name='id',
            field=models.CharField(choices=[('high_res_video', 'High Resolution'), ('low_res_video', 'Low Resolution'), ('vector_video', 'Vectorized'), ('video_thumbnail', 'Thumbnail'), ('video_subtitle', 'Subtitle'), ('audio', 'Audio'), ('audio_thumbnail', 'Thumbnail'), ('document', 'Document'), ('document_thumbnail', 'Thumbnail'), ('exercise', 'Exercise'), ('exercise_thumbnail', 'Thumbnail'), ('exercise_image', 'Exercise Image'), ('exercise_graphie', 'Exercise Graphie'), ('channel_thumbnail', 'Channel Thumbnail')], max_length=150, primary_key=True, serialize=False),
        ),
    ]
bdang2012/taiga-back-casting
taiga/base/api/urlpatterns.py
Python
agpl-3.0
3,316
0.000302
# Copyright (C) 2014-2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014-2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

# This code is partially taken from django-rest-framework:
# Copyright (c) 2011-2014, Tom Christie

from django.core.urlresolvers import RegexURLResolver
from django.conf.urls import patterns, url, include

from .settings import api_settings


def apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required):
    ret = []
    for urlpattern in urlpatterns:
        if isinstance(urlpattern, RegexURLResolver):
            # Set of included URL patterns
            regex = urlpattern.regex.pattern
            namespace = urlpattern.namespace
            app_name = urlpattern.app_name
            kwargs = urlpattern.default_kwargs
            # Add in the included patterns, after applying the suffixes
            patterns = apply_suffix_patterns(urlpattern.url_patterns,
                                             suffix_pattern,
                                             suffix_required)
            ret.append(url(regex, include(patterns, namespace, app_name), kwargs))
        else:
            # Regular URL pattern
            regex = urlpattern.regex.pattern.rstrip("$") + suffix_pattern
            view = urlpattern._callback or urlpattern._callback_str
            kwargs = urlpattern.default_args
            name = urlpattern.name
            # Add in both the existing and the new urlpattern
            if not suffix_required:
                ret.append(urlpattern)
            ret.append(url(regex, view, kwargs, name))

    return ret


def format_suffix_patterns(urlpatterns, suffix_required=False, allowed=None):
    """
    Supplement existing urlpatterns with corresponding patterns that also
    include a ".format" suffix. Retains urlpattern ordering.

    urlpatterns:
        A list of URL patterns.

    suffix_required:
        If `True`, only suffixed URLs will be generated, and non-suffixed
        URLs will not be used. Defaults to `False`.

    allowed:
        An optional tuple/list of allowed suffixes. eg ["json", "api"]
        Defaults to `None`, which allows any suffix.
    """
    suffix_kwarg = api_settings.FORMAT_SUFFIX_KWARG
    if allowed:
        if len(allowed) == 1:
            allowed_pattern = allowed[0]
        else:
            allowed_pattern = "(%s)" % "|".join(allowed)
        suffix_pattern = r"\.(?P<%s>%s)$" % (suffix_kwarg, allowed_pattern)
    else:
        suffix_pattern = r"\.(?P<%s>[a-z0-9]+)$" % suffix_kwarg

    return apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required)
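A minimal sketch of wiring this helper into a urls.py (the view and route names are invented):

from django.conf.urls import url
from django.http import HttpResponse

def task_list(request, format=None):  # hypothetical view
    return HttpResponse("[]")

urlpatterns = format_suffix_patterns([
    url(r"^tasks/$", task_list, name="task-list"),
], allowed=["json", "api"])
# both /tasks/ and /tasks.json now resolve; the suffix arrives in the kwarg
# named by api_settings.FORMAT_SUFFIX_KWARG (usually "format").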
hassaanm/business-articles
downloadReturns.py
Python
apache-2.0
3,618
0.004422
#!/usr/local/bin/python

import sys
import urllib
import urllib2
import json
import datetime

YAHOO_URL = 'http://query.yahooapis.com/v1/public/yql?env=http%3A%2F%2Fdatatables.org%2Falltables.env&format=json&diagnostics=true&q='


def getJSON(fileName):
    f = open(fileName)
    jsonData = json.load(f)
    f.close()
    return jsonData


def writeJSON(jsonData, fileName):
    f = open(fileName, 'w')
    json.dump(jsonData, f)
    f.close()


def fixSymbol(symbol):
    if len(symbol) > 1 and symbol[-2] == "/":
        symbol = symbol[:-2] + '-' + symbol[-1]
    if '/' in symbol:
        symbol = symbol.split('/')[0]
    return symbol.replace('^', '-P').rstrip()


def getReturn(returns):
    if len(returns.keys()) == 0:
        return 0
    firstDate = returns.keys()[0]
    lastDate = returns.keys()[0]
    for date in returns.keys():
        if date < firstDate:
            firstDate = date
        if date > lastDate:
            lastDate = date
    openPrice = float(returns[firstDate][0])
    closePrice = float(returns[lastDate][1])
    return (closePrice - openPrice) / openPrice


def getReturnForCompany(symbol, date, numOfDays):
    endDate = datetime.datetime.strptime(date, '%Y-%m-%d') + datetime.timedelta(days=numOfDays)
    sym = fixSymbol(symbol)
    query = 'select * from yahoo.finance.historicaldata where symbol = "' + sym + '" and startDate = "' + str(date) + '" and endDate = "' + str(endDate.date()) + '"'
    encoded_query = urllib.quote(query)
    try:
        url = YAHOO_URL + encoded_query
        jsonRawData = urllib2.urlopen(url)
        jsonData = json.load(jsonRawData)
        if jsonData['query']['results'] == None:
            return 0.0
        if type(jsonData['query']['results']['quote']) == type({}):
            quotes = [jsonData['query']['results']['quote']]
        else:
            quotes = jsonData['query']['results']['quote']
        returns = {}
        for data in quotes:
            returns[data['Date']] = (data['Open'], data['Close'])
        return getReturn(returns)
    except:
        return 0.0


def returnsJSONSnippet(jsonData, days):
    returns = {}
    progress = 0
    size = float(len(jsonData.keys()))
    for article in jsonData.keys():
        date = jsonData[article]['date']
        companies = jsonData[article]['company']
        articleReturns = []
        for company in companies:
            articleReturns.append(getReturnForCompany(company, date, days))
        articleReturn = sum(articleReturns) / len(articleReturns)
        returns[article] = articleReturn
        if progress % 100 == 0:
            print progress / size, progress, 'out of', size
        progress += 1
    return returns


def returnsJSONFull(jsonData, days):
    returns = {}
    progress = 0
    size = float(len(jsonData))
    for article in jsonData:
        date = article['date']
        companies = article['company']
        articleReturns = []
        for company in companies:
            articleReturns.append(getReturnForCompany(company, date, days))
        articleReturn = sum(articleReturns) / len(articleReturns)
        key = article['title'][0] + ' ' + article['text']
        returns[key] = articleReturn
        if progress % 100 == 0:
            print progress / size, progress, 'out of', size
        progress += 1
    return returns


inputFile = sys.argv[2]
outputFile = sys.argv[3]
days = int(sys.argv[4])
jsonData = getJSON(inputFile)

if sys.argv[1] == 'snippet':
    jsonToWrite = returnsJSONSnippet(jsonData, days)
elif sys.argv[1] == 'full':
    jsonToWrite = returnsJSONFull(jsonData, days)

writeJSON(jsonToWrite, outputFile)
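An illustrative invocation of the script and a one-off check of the per-company helper (file names, symbol and date are invented; the YQL endpoint must still be reachable for a non-zero result):

# python downloadReturns.py snippet articles.json returns.json 30
#   computes the average 30-day return of the companies each article mentions
print getReturnForCompany('AAPL', '2013-05-01', 30)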
tao12345666333/Talk-Is-Cheap
ansible/group.py
Python
mit
227
0
#!/usr/bin/env python
# coding=utf-8

import json
import sys

data = {
    'g1': {
        'hosts': [
            '172.17.0.2'
        ]
    }
}

with open('w.log', 'w') as f:
    f.write(str(sys.argv))

print json.dumps(data)
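A quick way to see what Ansible receives from this inventory script, assuming group.py sits in the current directory (Ansible itself calls it with --list):

import json
import subprocess

out = subprocess.check_output(['python', 'group.py', '--list'])
print json.loads(out)  # {u'g1': {u'hosts': [u'172.17.0.2']}}
# typical use from a shell: ansible -i group.py g1 -m ping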
gutosurrex/feed2twister
flask_app/captcha.py
Python
gpl-3.0
1,492
0.032842
import random
import Image
import ImageFont
import ImageDraw
import ImageFilter
import hashlib
from random_words import RandomWords


def gen_captcha(text, fnt, fnt_sz, file_name, fmt='JPEG'):
    """Generate a captcha image"""
    # randomly select the foreground color
    fgcolor = random.randint(0, 0xffff00)
    # make the background color the opposite of fgcolor
    bgcolor = fgcolor ^ 0xffffff
    # create a font object
    font = ImageFont.truetype(fnt, fnt_sz)
    # determine dimensions of the text
    dim = font.getsize(text)
    # create a new image slightly larger than the text
    im = Image.new('RGB', (dim[0] + 5, dim[1] + 5), bgcolor)
    d = ImageDraw.Draw(im)
    x, y = im.size
    r = random.randint
    # draw 100 random colored boxes on the background
    for num in range(100):
        d.rectangle((r(0, x), r(0, y), r(0, x), r(0, y)), fill=r(0, 0xffffff))
    # add the text to the image
    d.text((3, 3), text, font=font, fill=fgcolor)
    im = im.filter(ImageFilter.EDGE_ENHANCE_MORE)
    # save the image to a file
    im.save(file_name, format=fmt)


def new_word():
    rw = RandomWords()
    word = rw.random_word()
    return word, hashlib.sha224(word).hexdigest()


if __name__ == '__main__':
    """Example: This grabs a random word from the dictionary 'words'
    (one word per line) and generates a jpeg image named 'test.jpg'
    using the truetype font 'porkys.ttf' with a font size of 25.
    """
    words = open('static/words').readlines()
    # random.randint is inclusive on both ends, so index 0..len(words)-1
    word = words[random.randint(0, len(words) - 1)]
    gen_captcha(word.strip(), 'static/porkys.ttf', 25, "captchas/test.jpg")
JTarball/docker-django-polymer
docker/app/app/backend/apps/accounts/test_views.py
Python
gpl-2.0
42,275
0.005417
""" accounts.test_views =================== Tests the REST API calls. Add more specific social registration tests """ import responses from django.core.urlresolvers import reverse from django.core import mail from django.contrib.sites.models import Site from django.contrib.auth import get_user_model from django.test.utils import override_settings from rest_framework import status from rest_framework.test import APIClient, APITestCase from allauth.account import app_settings from allauth.socialaccount.models import SocialApp from allauth.socialaccount.providers.facebook.provider import GRAPH_API_URL from .serializers import LoginSerializer class TestAccounts(APITestCase): """ Tests normal use - non social login. """ def setUp(self): self.login_url = reverse('accounts:rest_login') self.logout_url = reverse('accounts:rest_logout') self.register_url = reverse('accounts:rest_register') self.password_reset_url = reverse('accounts:rest_password_reset') self.rest_password_reset_confirm_url = reverse('accounts:rest_password_reset_confirm') self.password_change_url = reverse('accounts:rest_password_change') self.verify_url = reverse('accounts:rest_verify_email') self.user_url = reverse('accounts:rest_user_details') self.client = APIClient() self.reusable_user_data = {'username': 'admin', 'email': 'admin@email.com', 'password': 'password12'} self.reusable_user_data_change_password = {'username': 'admin', 'email': 'admin@email.com', 'password': 'password_same'} self.reusable_register_user_data = {'username': 'admin', 'email': 'admin@email.com', 'password1': 'password12', 'password2': 'password12'} self.reusable_register_user_data1 = {'username': 'admin1', 'email': 'admin1@email.com', 'password1': 'password12', 'password2': 'password12'} self.reusable_register_user_data_no_username = {'email': 'admin@email.com', 'password1': 'password12', 'password2': 'password12'} self.reusable_register_user_data_no_email = {'username': 'admin', 'password1': 'password12', 'password2': 'password12'} self.change_password_data_incorrect = {"new_password1": "password_not_same", "new_password2": "password_same"} self.change_password_data = {"new_password1": "password_same", "new_password2": "password_same"} self.change_password_data_old_password_field_enabled = {"old_password": "password12", "new_password1": "password_same", "new_password2": "password_same"} def create_user_and_login(self): """ Helper function to create a basic user, login and assign token cred
entials. """ get_user_model().objects.create_user('admin', 'admin@email.com', 'password12') response = self.client.post(self.login_url, self.reusable_user_data, format='json') self.assertEquals(response.status_code, status.HTTP_200_OK, "Snap! Basic Login has failed with a helper
function 'create_user_and_login'. Something is really wrong here.") self.client.credentials(HTTP_AUTHORIZATION='Token ' + response.data['key']) def _generate_uid_and_token(self, user): result = {} from django.utils.encoding import force_bytes from django.contrib.auth.tokens import default_token_generator from django import VERSION if VERSION[1] == 5: from django.utils.http import int_to_base36 result['uid'] = int_to_base36(user.pk) else: from django.utils.http import urlsafe_base64_encode result['uid'] = urlsafe_base64_encode(force_bytes(user.pk)) result['token'] = default_token_generator.make_token(user) return result def cleanUp(self): pass @override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME) def test_login_basic_username_auth_method(self): """ Tests basic functionality of login with authentication method of username. """ # Assumes you provide username,password and returns a token get_user_model().objects.create_user('admin3', '', 'password12') data = {"username": 'admin3', "email": "", "password": 'password12'} response = self.client.post(self.login_url, data, format='json') self.assertEquals(response.status_code, status.HTTP_200_OK) self.assertIn('key', response.content) @override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL, ACCOUNT_EMAIL_REQUIRED=True) def test_login_basic_email_auth_method(self): """ Tests basic functionality of login with authentication method of email. """ # Assumes you provide username,password and returns a token get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12') data = {"username": '', "email": "email.login@gmail.com", "password": 'password12'} response = self.client.post(self.login_url, data, format='json') self.assertEquals(response.status_code, status.HTTP_200_OK) self.assertIn('key', response.content) @override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL) def test_login_basic_username_email_auth_method(self): """ Tests basic functionality of login with authentication method of username or email. """ # Assumes you provide username,password and returns a token get_user_model().objects.create_user('admin', 'email.login@gmail.com', 'password12') # Check email data = {"username": '', "email": "email.login@gmail.com", "password": 'password12'} response = self.client.post(self.login_url, data, format='json') self.assertEquals(response.status_code, status.HTTP_200_OK) # Check username data = {"username": 'admin', "email": '', "password": 'password12'} response = self.client.post(self.login_url, data, format='json') self.assertEquals(response.status_code, status.HTTP_200_OK) self.assertIn('key', response.content) @override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME) def test_login_auth_method_username_fail_no_users_in_db(self): """ Tests login fails with a 400 when no users in db for login auth method of 'username'. """ serializer = LoginSerializer({'username': 'admin', 'password': 'password12'}) response = self.client.post(self.login_url, serializer.data, format='json') self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST) @override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.EMAIL) def test_login_email_auth_method_fail_no_users_in_db(self): """ Tests login fails with a 400 when no users in db for login auth method of 'email'. 
""" serializer = LoginSerializer({'username': 'admin', 'password': 'password12'}) response = self.client.post(self.login_url, serializer.data, format='json') self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST) @override_settings(ACCOUNT_AUTHENTICATION_METHOD=app_settings.AuthenticationMethod.USERNAME_EMAIL) def test_login_username_email_auth_method_fail_no_users_in_db(self): """ Tests login fails with a 400 when no users in db for login auth method of 'username_email'. """ serializer = LoginSerializer({'username': 'admin', 'password': 'password12'}) response = self.client.post(self.login_url, serializer.data, format='json') self.assertEquals(response.status_code, status.HTTP_400_BAD_REQUEST) def common_test_login_fail_incorrect_change(self): # Create user, login and try and change password INCORRECTLY self.create_user_and_login() self.client.post(self.password_change_url, data=self.change_password_data_incorrect, format='json') # Remove credentials self.client.credentials() response = self.client.post(self.login_url, self.reusable_user_data, format='json') self.assertEquals(response.status_code, status.HTTP_200_OK) self.assertIn('key', response.con
mateoqac/unqTip
language/vgbs/web/forms.py
Python
gpl-3.0
6,865
0.004661
import web

import base
import local


def orNone(a, b):
    if a is None:
        return b
    return a


class Field(object):

    def __init__(self, name, description, primary=False, validator=None):
        self.name = name
        self.primary = primary
        self.description = description
        if validator == None:
            validator = lambda *args: (True, '')
        self.validator = validator
        self.record = None  # yet

    def full_name(self):
        return self.record.record_name + '_' + self.name

    def validate(self, value):
        return self.validator(value)

    def html_input(self, default_values):
        return "<input type='text' id='%s' name='%s' value='%s'>" % (
            self.full_name(),
            self.full_name(),
            default_values.get(self.full_name(), '')
        )


class PasswordField(Field):

    def __init__(self, *args, **kwargs):
        Field.__init__(self, *args, **kwargs)

    def validate(self, value):
        #if len(value) < 8:
        #    return False, 'pass_too_short'
        return True, ''

    def html_input(self, default_values):
        return "<input type='password' id='%s' name='%s'>" % (
            self.full_name(),
            self.full_name(),
        )


class Record(object):

    def __init__(self, record_name, table=None, fields=[], role=None,
                 add_title=None, admin_title=None,
                 add_action=None, remove_action=None):
        self.record_name = record_name
        self.table = table
        self.fields = fields
        self.add_action = add_action
        self.remove_action = remove_action
        self.primary_key = None
        self._check_primary_key()
        for field in self.fields:
            field.record = self
        self.role = role
        self.add_title = orNone(add_title, 'Dar de alta %s' % (self.record_name,))
        self.admin_title = orNone(admin_title, 'Administrar %s' % (self.record_name,))

    def all_elements(self):
        if self.table is None:
            return []
        return local.db.select(self.table)

    def _check_primary_key(self):
        nprimary = 0
        for field in self.fields:
            if field.primary:
                nprimary += 1
                self.primary_key = field
        if nprimary != 1:
            raise Exception('Warning: %s should have exactly one primary key' % (
                self.record_name)
            )

    def class_admin(self, parent_class=base.Content):
        metaself = self

        class C(parent_class):
            role_required = self.role

            def request(self):
                "Main administration page."
                return local.render.admin_list(
                    record=metaself,
                )
        return C

    def class_add_service(self, parent_class=base.Content):
        metaself = self

        class C(parent_class):
            role_required = self.role

            def request(self):
                "Render the form for creating instances of this record."
                cookies = web.cookies()
                input = web.input()

                # get the default value for each field
                # from the cookies
                default_values = {}
                for field in metaself.fields:
                    default = cookies.get('last_' + field.full_name(), None)
                    if default is None:
                        default = ''
                    default_values[field.full_name()] = default

                if input.get('errfield', False):
                    focus_on = input.errfield
                else:
                    focus_on = metaself.fields[0].full_name()

                return local.render.add_form(
                    input=web.input(),
                    action='/%s/add' % (metaself.record_name,),
                    description=metaself.add_title,
                    fields=metaself.fields,
                    default_values=default_values,
                    focus=focus_on
                )
        return C

    def class_add(self, parent_class=base.Action):
        metaself = self

        class C(parent_class):
            role_required = self.role

            def request(self):
                "Add an instance of this record."
                data = web.input()

                # Check that the values for each field are valid
                bad_fields = []
                errmsg = False
                any_error = False
                dictionary = {}
                for field in metaself.fields:
                    value = data.get(field.full_name())
                    dictionary[field.name] = value
                    web.setcookie('last_' + field.full_name(), value)
                    ok, message = field.validate(value)
                    if not ok:
                        any_error = True
                        bad_fields.append('error_' + field.full_name())
                        if not errmsg:
                            errmsg = message

                if any_error:
                    raise web.seeother('/%s/add_service?errmsg=%s%s' % (
                        metaself.record_name,
                        errmsg,
                        ''.join(['&%s=1' % (f,) for f in bad_fields])
                    ))

                if metaself.table is not None:
                    # Check that there are no repeated keys
                    primary_value = dictionary[metaself.primary_key.name]
                    it = local.db.query('select count(*) as total from ' + metaself.table + \
                                        ' where ' + metaself.primary_key.name + '=$primary_value',
                                        vars=locals())
                    if it[0].total > 0:
                        raise web.seeother('/%s/add_service?errmsg=already_exists&error_%s=1' % (
                            metaself.record_name,
                            metaself.primary_key.full_name()
                        ))

                if metaself.table is not None and metaself.add_action is None:
                    local.db.insert(metaself.table, **dictionary)

                if metaself.add_action is not None:
                    metaself.add_action(dictionary)
                else:
                    raise web.seeother('/%s/admin' % (metaself.record_name,))
        return C

    def class_remove(self, parent_class=base.Action):
        metaself = self

        class C(parent_class):
            role_required = self.role

            def request(self):
                dictionary = {}
                if metaself.remove_action is not None:
                    metaself.remove_action(dictionary)
        return C
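A sketch of declaring a concrete record with these helpers (table, field and role names are invented; validators return (ok, error_key) pairs):

def no_vacio(value):
    # reject empty input; the error key is made up for illustration
    return (bool(value), 'empty_field')

usuario = Record(
    'usuario',
    table='usuarios',
    fields=[
        Field('nombre', 'Nombre de usuario', primary=True, validator=no_vacio),
        PasswordField('clave', 'Clave'),
    ],
    role='admin',
)
# usuario.class_admin() / class_add_service() / class_add() / class_remove()
# produce the page classes that get mapped to URLs in the web.py application.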
unintended/Cohen
tests/backends/test_ampache_storage.py
Python
mit
4,553
0.001098
# -*- coding: utf-8 -*-

# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php

# Copyright 2014, Hartmut Goebel <h.goebel@goebel-consult.de>

"""
Test cases for L{backends.ampache_storage}
"""

from lxml import etree

from twisted.trial import unittest

from coherence.backends import ampache_storage


SONG = '''
<!-- taken from https://github.com/ampache/ampache/wiki/XML-API
  but the original was not valid XML, so we can not trust it -->
<root>
<song id="3180">
        <title>Hells Bells</title>
        <artist id="129348">AC/DC</artist>
        <album id="2910">Back in Black</album>
        <tag id="2481" count="3">Rock &amp; Roll</tag>
        <tag id="2482" count="1">Rock</tag>
        <tag id="2483" count="1">Roll</tag>
        <track>4</track>
        <time>234</time>
        <url>http://localhost/play/index.php?oid=123908...</url>
        <size>654321</size>
        <art>http://localhost/image.php?id=129348</art>
        <preciserating>3</preciserating>
        <rating>2.9</rating>
</song>
</root>
'''

SONG_370 = '''
<!-- real-world example from Ampache 3.7.0 -->
<root>
<song id="3440">
        <title><![CDATA[Achilles Last Stand]]></title>
        <artist id="141"><![CDATA[Led Zeppelin]]></artist>
        <album id="359"><![CDATA[Presence]]></album>
        <tag id="" count="0"><![CDATA[]]></tag>
        <filename><![CDATA[/mnt/Musique/Led Zeppelin/Presence/01 - Achilles Last Stand.mp3]]></filename>
        <track>1</track>
        <time>625</time>
        <year>1976</year>
        <bitrate>248916</bitrate>
        <mode>vbr</mode>
        <mime>audio/mpeg</mime>
        <url><![CDATA[http://songserver/ampache/play/index.php?ssid=1e11a4&type=song&oid=3440&uid=4&name=Led%20Zeppelin%20-%20Achilles%20Last%20Stand.mp3]]></url>
        <size>19485595</size>
        <mbid></mbid>
        <album_mbid></album_mbid>
        <artist_mbid></artist_mbid>
        <art><![CDATA[http://songserver/ampache/image.php?id=359&object_type=album&auth=1e11a40&name=art.]]></art>
        <preciserating>0</preciserating>
        <rating>0</rating>
        <averagerating></averagerating>
</song>
</root>
'''


class DummyStore:

    def __init__(self):
        pass

    proxy = False


class TestAmpache(unittest.TestCase):

    def setUp(self):
        pass

    def test_song(self):
        """Test songs with XML from the Ampache wiki example"""
        doc = etree.fromstring(SONG)
        song = doc.find('song')
        store = DummyStore()
        track = ampache_storage.Track(store, song)
        self.assertEqual(track.get_id(), 'song.3180')
        self.assertEqual(track.parent_id, 'album.2910')
        self.assertEqual(track.duration, '0:03:54')
        self.assertEqual(track.get_url(),
                         'http://localhost/play/index.php?oid=123908...')
        self.assertEqual(track.get_name(), 'Hells Bells')
        self.assertEqual(track.title, 'Hells Bells')
        self.assertEqual(track.artist, 'AC/DC')
        self.assertEqual(track.album, 'Back in Black')
        self.assertEqual(track.genre, None)
        self.assertEqual(track.track_nr, '4')
        self.assertEqual(track.cover, 'http://localhost/image.php?id=129348')
        self.assertEqual(track.mimetype, 'audio/mpeg')  # guessed
        self.assertEqual(track.size, 654321)
        self.assertIs(track.get_path(), None)
        self.assertEqual(track.get_children(), [])
        self.assertEqual(track.get_child_count(), 0)

    def test_song_370(self):
        """Test songs with XML from Ampache 3.7.0"""
        doc = etree.fromstring(SONG_370)
        song = doc.find('song')
        store = DummyStore()
        track = ampache_storage.Track(store, song)
        self.assertEqual(track.get_id(), 'song.3440')
        self.assertEqual(track.parent_id, 'album.359')
        self.assertEqual(track.duration, '0:10:25')
        self.assertEqual(track.get_url(),
                         'http://songserver/ampache/play/index.php?ssid=1e11a4&type=song&oid=3440&uid=4&name=Led%20Zeppelin%20-%20Achilles%20Last%20Stand.mp3')
        self.assertEqual(track.get_name(), 'Achilles Last Stand')
        self.assertEqual(track.title, 'Achilles Last Stand')
        self.assertEqual(track.artist, 'Led Zeppelin')
        self.assertEqual(track.album, 'Presence')
        self.assertEqual(track.genre, None)
        self.assertEqual(track.track_nr, '1')
        self.assertEqual(track.cover,
                         'http://songserver/ampache/image.php?id=359&object_type=album&auth=1e11a40&name=art.')
        self.assertEqual(track.mimetype, 'audio/mpeg')
        self.assertEqual(track.size, 19485595)
        self.assertIs(track.get_path(), None)
        self.assertEqual(track.get_children(), [])
        self.assertEqual(track.get_child_count(), 0)
r0qs/chubby
Fonte/Command.py
Python
gpl-3.0
1,775
0.023662
from pygame import K_UP, K_DOWN, K_LEFT, K_RIGHT

from Caracter import Caracter


class CommandHandler(object):
    #                         0   1   2   3   4   5   6   7   8   9  10  11  12  13
    _automata_transitions = [[11, 11,  0,  4,  0,  0, 11, 11,  0, 11,  0, 11, 13,  0],  # up
                             [ 9,  2,  0,  0,  0,  0,  9,  9,  0,  0,  0, 12,  0,  0],  # down
                             [ 0,  6,  3,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0],  # left
                             [ 1,  0,  0,  0,  5,  0,  7,  0,  0,  0,  0,  1,  0,  0]]  # right

    # The final states
    final_list = [3, 7, 9, 11, 13]
    final_state = 0

    def __init__(self, caracter):
        self.caracter = caracter
        self.actual_state = 0

    def refresh_state(self, in_key):
        self.final_state = 0
        input_code = -1
        if in_key == K_UP:
            input_code = 0
        elif in_key == K_DOWN:
            input_code = 1
        elif in_key == K_LEFT:
            input_code = 2
        elif in_key == K_RIGHT:
            input_code = 3

        self.actual_state = self._automata_transitions[input_code][self.actual_state]

        if self.actual_state == 3:
            if self.caracter.onGround == False:
                self.caracter.pendingRoll = True
        elif self.actual_state == 7:
            self.caracter.doSprint()
        elif self.actual_state == 9:
            if self.caracter.onGround == False:
                self.caracter.pendingGetDown = True
            else:
                self.caracter.doGetDown()
        elif self.actual_state == 11:
            self.caracter.doJump()
        elif self.actual_state == 13:
            self.caracter.doClimb()

        #print "current state:" + str(self.actual_state)
        if self.final_state in self.final_list:
            self.actual_state = 0
            return self.final_state
        return self.actual_state
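Tracing the transition table with a stand-in for the real Caracter (a worked example, not part of the original file): RIGHT, DOWN, LEFT walks states 0 -> 1 -> 2 -> 3, and state 3 queues a roll while the character is airborne.

from pygame import K_DOWN, K_LEFT, K_RIGHT

class _StubCaracter(object):
    onGround = False
    pendingRoll = False

stub = _StubCaracter()
handler = CommandHandler(stub)
for key in (K_RIGHT, K_DOWN, K_LEFT):
    handler.refresh_state(key)
assert stub.pendingRoll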
goyalsid/phageParser
parserscripts/crispr_db_parser.py
Python
mit
3,791
0.000528
""" CRISPR_db_parser Madeleine Bonsma March 7, 2015 Updated May 3, 2016 This script takes a list of spacers downloaded from the CRISPRdb website and splits them into individual files, one file per organism. Result files are saved in "data/spacers". """ import linecache import os # CRISPR db parser # MB Mar 07 2015 filename = "data/spacerdatabase.txt" # File from CRISPRdb to sort spacer_db = open(filename, "r") # check if directory for saving exists directory = "data/spacers" if not os.path.exists(directory): os.makedirs(directory) # places to dump accession numbers during execution refseq_list = [] refseq_dict = {} for num, line in enumerate(spacer_db, 1): check = True # awkward while loop if line[0] == ">": # use the headers, indicated by >, to sort # delete 1st character to make loop same each time around line = line[1:] counter = 0 while check: c
ounter += 1 # this part of the header is the NCBI accession refseq = line[0:9] if refseq not in refseq_list: # open new file if it's a new bacteria refseq_dict[refseq] = open( "data/spacers/%s.fasta" % refseq, "w" ) if "|" in line: # if more than one bacteria contain spacer i = line.index("|")
# include in header the locus identifier and spacer # position identifier writeline = line[10:i] writeline2 = writeline.replace('_', '.') else: # if it's only one bacteria writeline = line[10:] writeline2 = writeline.replace('_', '.') # write header and spacer to file refseq_dict[refseq].write(">" + writeline2 + "\n") refseq_dict[refseq].write( linecache.getline("%s" % filename, num + 1) ) # since the file is organized alphabetically by the # first bacteria in the header, if we see a different # first bacteria we can close the previous file to free # up space. This might be buggy. if counter == 1: try: refseq_prev = linecache.getline( "%s" % filename, num - 2 )[1:10] refseq_dict[refseq_prev].close() except: # throws exception on the first time through, # otherwise wouldn't pass refseq_list.append(refseq) if refseq in refseq_list: if "|" in line: i = line.index("|") # include in header the locus identifier and spacer # position identifier writeline = line[10:i] writeline2 = writeline.replace('_', '.') else: writeline = line[10:] writeline2 = writeline.replace('_', '.') refseq_dict[refseq].write(">" + writeline2 + "\n") refseq_dict[refseq].write( linecache.getline("%s" % filename, num + 1) ) try: i = line.index("|") # change the header so that the next bacteria is up for # the loop line = line[i + 1:] except: check = False for key in refseq_dict: if not refseq_dict[key].closed: refseq_dict[key].close() spacer_db.close()
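A worked example of the header slicing above (accession and locus numbers are invented): the 9-character accession selects the output file, and the piece after it becomes the FASTA header with '_' turned into '.'.

line = "NC_002737_9_3|NC_004606_16_1"
refseq = line[0:9]                    # 'NC_002737' -> data/spacers/NC_002737.fasta
i = line.index("|")
print(line[10:i].replace('_', '.'))   # prints '9.3'
line = line[i + 1:]                   # the loop then repeats for 'NC_004606_16_1'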
rahulunair/nova
nova/db/sqlalchemy/api_migrations/migrate_repo/versions/059_add_consumer_generation.py
Python
apache-2.0
1,178
0.000849
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import text


def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    consumers = Table("consumers", meta, autoload=True)

    if not hasattr(consumers.c, "generation"):
        # This is adding a column to an existing table, so the server_default
        # bit will make existing rows 0 for that column.
        consumers.create_column(Column("generation", Integer, default=0,
                                       server_default=text("0"),
                                       nullable=False))
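A sketch of exercising the migration against a scratch database; it assumes sqlalchemy-migrate is installed (importing migrate.changeset is what adds Table.create_column) and that an earlier migration created the table:

import migrate.changeset  # noqa: F401, activates Table.create_column
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
# stand-in for the earlier migration that creates the consumers table:
engine.execute("CREATE TABLE consumers (id INTEGER PRIMARY KEY)")

upgrade(engine)  # adds consumers.generation, defaulting existing rows to 0
upgrade(engine)  # second run is a no-op thanks to the hasattr() guard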
libo/Enigma2
lib/python/Screens/ScanSetup.py
Python
gpl-2.0
45,681
0.032924
from Screen import Screen
from Screens.DefaultWizard import DefaultWizard
from ServiceScan import ServiceScan
from Components.config import config, ConfigSubsection, ConfigSelection, \
    ConfigYesNo, ConfigInteger, getConfigListEntry, ConfigSlider, ConfigEnableDisable
from Components.ActionMap import NumberActionMap, ActionMap
from Components.ConfigList import ConfigListScreen
from Components.NimManager import nimmanager, getConfigSatlist
from Components.Label import Label
from Tools.Directories import resolveFilename, SCOPE_DEFAULTPARTITIONMOUNTDIR, SCOPE_DEFAULTDIR, SCOPE_DEFAULTPARTITION
from Tools.HardwareInfo import HardwareInfo
from Screens.MessageBox import MessageBox
from enigma import eTimer, eDVBFrontendParametersSatellite, eComponentScan, \
    eDVBSatelliteEquipmentControl, eDVBFrontendParametersTerrestrial, \
    eDVBFrontendParametersCable, eConsoleAppContainer, eDVBResourceManager


def buildTerTransponder(frequency, inversion=2, bandwidth=3, fechigh=6, feclow=6,
                        modulation=2, transmission=2, guard=4, hierarchy=4):
    # print "freq", frequency, "inv", inversion, "bw", bandwidth, "fech", fechigh, "fecl", feclow, "mod", modulation, "tm", transmission, "guard", guard, "hierarchy", hierarchy
    parm = eDVBFrontendParametersTerrestrial()
    parm.frequency = frequency
    parm.inversion = inversion
    parm.bandwidth = bandwidth
    parm.code_rate_HP = fechigh
    parm.code_rate_LP = feclow
    parm.modulation = modulation
    parm.transmission_mode = transmission
    parm.guard_interval = guard
    parm.hierarchy = hierarchy
    return parm


def getInitialTransponderList(tlist, pos):
    list = nimmanager.getTransponders(pos)
    for x in list:
        if x[0] == 0:  #SAT
            parm = eDVBFrontendParametersSatellite()
            parm.frequency = x[1]
            parm.symbol_rate = x[2]
            parm.polarisation = x[3]
            parm.fec = x[4]
            parm.inversion = x[7]
            parm.orbital_position = pos
            parm.system = x[5]
            parm.modulation = x[6]
            parm.rolloff = x[8]
            parm.pilot = x[9]
            tlist.append(parm)


def getInitialCableTransponderList(tlist, nim):
    list = nimmanager.getTranspondersCable(nim)
    for x in list:
        if x[0] == 1:  #CABLE
            parm = eDVBFrontendParametersCable()
            parm.frequency = x[1]
            parm.symbol_rate = x[2]
            parm.modulation = x[3]
            parm.fec_inner = x[4]
            parm.inversion = parm.Inversion_Unknown
            #print "frequency:", x[1]
            #print "symbol_rate:", x[2]
            #print "modulation:", x[3]
            #print "fec_inner:", x[4]
            #print "inversion:", 2
            tlist.append(parm)


def getInitialTerrestrialTransponderList(tlist, region):
    list = nimmanager.getTranspondersTerrestrial(region)

    #self.transponders[self.parsedTer].append((2,freq,bw,const,crh,crl,guard,transm,hierarchy,inv))

    #def buildTerTransponder(frequency, inversion = 2, bandwidth = 3, fechigh = 6, feclow = 6,
    #        modulation = 2, transmission = 2, guard = 4, hierarchy = 4):

    for x in list:
        if x[0] == 2:  #TERRESTRIAL
            parm = buildTerTransponder(x[1], x[9], x[2], x[4], x[5], x[3], x[7], x[6], x[8])
            tlist.append(parm)


cable_bands = {
    "DVBC_BAND_EU_VHF_I": 1 << 0,
    "DVBC_BAND_EU_MID": 1 << 1,
    "DVBC_BAND_EU_VHF_III": 1 << 2,
    "DVBC_BAND_EU_SUPER": 1 << 3,
    "DVBC_BAND_EU_HYPER": 1 << 4,
    "DVBC_BAND_EU_UHF_IV": 1 << 5,
    "DVBC_BAND_EU_UHF_V": 1 << 6,
    "DVBC_BAND_US_LO": 1 << 7,
    "DVBC_BAND_US_MID": 1 << 8,
    "DVBC_BAND_US_HI": 1 << 9,
    "DVBC_BAND_US_SUPER": 1 << 10,
    "DVBC_BAND_US_HYPER": 1 << 11,
}


class CableTransponderSearchSupport:
#    def setCableTransponderSearchResult(self, tlist):
#        pass

#    def cableTransponderSearchFinished(self):
#        pass

    def tryGetRawFrontend(self, feid):
        res_mgr = eDVBResourceManager.getInstance()
        if res_mgr:
            raw_channel = res_mgr.allocateRawChannel(self.feid)
            if raw_channel:
                frontend = raw_channel.getFrontend()
                if frontend:
                    frontend.closeFrontend()  # immediate close...
                    del frontend
                    del raw_channel
                    return True
        return False

    def cableTransponderSearchSessionClosed(self, *val):
        print "cableTransponderSearchSessionClosed, val", val
        self.cable_search_container.appClosed.remove(self.cableTransponderSearchClosed)
        self.cable_search_container.dataAvail.remove(self.getCableTransponderData)
        if val and len(val):
            if val[0]:
                self.setCableTransponderSearchResult(self.__tlist)
            else:
                self.cable_search_container.sendCtrlC()
                self.setCableTransponderSearchResult(None)
        self.cable_search_container = None
        self.cable_search_session = None
        self.__tlist = None
        self.cableTransponderSearchFinished()

    def cableTransponderSearchClosed(self, retval):
        print "cableTransponderSearch finished", retval
        self.cable_search_session.close(True)

    def getCableTransponderData(self, str):
        #prepend any remaining data from the previous call
        str = self.remainingdata + str
        #split in lines
        lines = str.split('\n')
        #'str' should end with '\n', so when splitting, the last line should be empty. If this is not the case, we received an incomplete line
        if len(lines[-1]):
            #remember this data for next time
            self.remainingdata = lines[-1]
            lines = lines[0:-1]
        else:
            self.remainingdata = ""
        for line in lines:
            data = line.split()
            if len(data):
                if data[0] == 'OK':
                    print str
                    parm = eDVBFrontendParametersCable()
                    qam = {"QAM16": parm.Modulation_QAM16,
                           "QAM32": parm.Modulation_QAM32,
                           "QAM64": parm.Modulation_QAM64,
                           "QAM128": parm.Modulation_QAM128,
                           "QAM256": parm.Modulation_QAM256}
                    inv = {"INVERSION_OFF": parm.Inversion_Off,
                           "INVERSION_ON": parm.Inversion_On,
                           "INVERSION_AUTO": parm.Inversion_Unknown}
                    fec = {"FEC_AUTO": parm.FEC_Auto,
                           "FEC_1_2": parm.FEC_1_2,
                           "FEC_2_3": parm.FEC_2_3,
                           "FEC_3_4": parm.FEC_3_4,
                           "FEC_5_6": parm.FEC_5_6,
                           "FEC_7_8": parm.FEC_7_8,
                           "FEC_8_9": parm.FEC_8_9,
                           "FEC_NONE": parm.FEC_None}
                    parm.frequency = int(data[1])
                    parm.symbol_rate = int(data[2])
                    parm.fec_inner = fec[data[3]]
                    parm.modulation = qam[data[4]]
                    parm.inversion = inv[data[5]]
                    self.__tlist.append(parm)
                tmpstr = _("Try to find used Transponders in cable network.. please wait...")
                tmpstr += "\n\n"
                tmpstr += data[1]
                tmpstr += " kHz "
                tmpstr += data[0]
                self.cable_search_session["text"].setText(tmpstr)

    def startCableTransponderSearch(self, nim_idx):
        if not self.tryGetRawFrontend(nim_idx):
            self.session.nav.stopService()
            if not self.tryGetRawFrontend(nim_idx):
                if self.session.pipshown:  # try to disable pip
                    self.session.pipshown = False
                    del self.session.pip
                if not self.tryGetRawFrontend(nim_idx):
                    self.cableTransponderSearchFinished()
                    return
        self.__tlist = []
        self.remainingdata = ""
        self.cable_search_container = eConsoleAppContainer()
        self.cable_search_container.appClosed.append(self.cableTransponderSearchClosed)
        self.cable_search_container.dataAvail.append(self.getCableTransponderData)
        cableConfig = config.Nims[nim_idx].cable
        tunername = nimmanager.getNimName(nim_idx)
        try:
            bus = nimmanager.getI2CDevice(nim_idx)
            if bus is None:
                print "ERROR: could not get I2C device for nim", nim_idx, "for cable transponder search"
                bus = 2
        except:
            # older API
            if nim_idx < 2:
                if HardwareInfo().get_device_name() == "dm500hd":
                    bus = 2
                else:
                    bus = nim_idx
            else:
                if nim_idx == 2:
                    bus = 2  # DM8000 first nim is /dev/i2c/2
                else:
                    bus = 4  # DM8000 second num is /dev/i2c/4

        if tunername == "CXD1981":
            cmd = "cxd1978 --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
        else:
            cmd = "tda1002x --init --scan --verbose --wakeup --inv 2 --bus %d" % bus
        if cableConfig.scan_type.value == "bands":
            cmd += " --scan-bands "
            bands = 0
            if cableConfig.scan_band_EU_VHF_I.value:
                bands |= cable_bands["DVBC_BAND_EU_VHF_I"]
            if cableConfig.scan_band_EU_MID.value:
                bands |= cable_bands["DVBC_BAND_EU_MID"]
            if cableConfig.scan_band_EU_VHF_III.value:
                bands |= cable_bands["DVBC_BAND_EU_VHF_III"]
            if cableConfig.scan_band_EU_UHF_IV.value:
                bands |= cable_bands["DVBC_BAND_EU_UHF_IV"]
            if cableConfig.scan_band_EU_UHF_V.value:
                bands |= cable_bands["DVBC
nikesh-mahalka/nova
nova/tests/unit/virt/libvirt/fake_os_brick_connector.py
Python
apache-2.0
1,443
0
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def get_connector_properties(root_helper, my_ip, multipath, enforce_multipath,
                             host=None):
    """Fake os-brick."""

    props = {}
    props['ip'] = my_ip
    props['host'] = host
    iscsi = ISCSIConnector('')
    props['initiator'] = iscsi.get_initiator()
    props['wwpns'] = ['100010604b019419']
    props['wwnns'] = ['200010604b019419']
    props['multipath'] = multipath
    props['platform'] = 'x86_64'
    props['os_type'] = 'linux2'
    return props


class ISCSIConnector(object):
    """Mimic the iSCSI connector."""

    def __init__(self, root_helper, driver=None, execute=None,
                 use_multipath=False, device_scan_attempts=3,
                 *args, **kwargs):
        self.root_helper = root_helper
        self.execute = execute

    def get_initiator(self):
        return "fake_iscsi.iqn"
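How the fake is exercised from a test, with illustrative argument values:

props = get_connector_properties(None, '192.168.0.10', False, False,
                                 host='compute-1')
assert props['initiator'] == 'fake_iscsi.iqn'
assert props['wwpns'] == ['100010604b019419']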
tseaver/google-cloud-python
asset/google/cloud/asset_v1p2beta1/gapic/transports/asset_service_grpc_transport.py
Python
apache-2.0
7,923
0.001262
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import google.api_core.grpc_helpers
import google.api_core.operations_v1

from google.cloud.asset_v1p2beta1.proto import asset_service_pb2_grpc


class AssetServiceGrpcTransport(object):
    """gRPC transport class providing stubs for
    google.cloud.asset.v1p2beta1 AssetService API.

    The transport provides access to the raw gRPC stubs,
    which can be used to take advantage of advanced
    features of gRPC.
    """

    # The scopes needed to make gRPC calls to all of the methods defined
    # in this service.
    _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    def __init__(
        self, channel=None, credentials=None, address="cloudasset.googleapis.com:443"
    ):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                "The `channel` and `credentials` arguments are mutually " "exclusive."
            )

        # Create the channel.
        if channel is None:
            channel = self.create_channel(
                address=address,
                credentials=credentials,
                options={
                    "grpc.max_send_message_length": -1,
                    "grpc.max_receive_message_length": -1,
                }.items(),
            )

        self._channel = channel

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            "asset_service_stub": asset_service_pb2_grpc.AssetServiceStub(channel)
        }

        # Because this API includes a method that returns a
        # long-running operation (proto: google.longrunning.Operation),
        # instantiate an LRO client.
        self._operations_client = google.api_core.operations_v1.OperationsClient(
            channel
        )

    @classmethod
    def create_channel(
        cls, address="cloudasset.googleapis.com:443", credentials=None, **kwargs
    ):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            kwargs (dict): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return google.api_core.grpc_helpers.create_channel(
            address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
        )

    @property
    def channel(self):
        """The gRPC channel used by the transport.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return self._channel

    @property
    def export_assets(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.export_assets`.

        Exports assets with time and resource types to a given Cloud Storage
        location. The output format is newline-delimited JSON. This API
        implements the ``google.longrunning.Operation`` API allowing you to
        keep track of the export.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].ExportAssets

    @property
    def batch_get_assets_history(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.batch_get_assets_history`.

        Batch gets the update history of assets that overlap a time window.
        For RESOURCE content, this API outputs history with asset in both
        non-delete or deleted status. For IAM\_POLICY content, this API
        outputs history when the asset and its attached IAM POLICY both exist.
        This can create gaps in the output history.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].BatchGetAssetsHistory

    @property
    def create_feed(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.create_feed`.

        Creates a feed in a parent project/folder/organization to listen to
        its asset updates.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].CreateFeed

    @property
    def get_feed(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.get_feed`.

        Gets details about an asset feed.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].GetFeed

    @property
    def list_feeds(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.list_feeds`.

        Lists all asset feeds in a parent project/folder/organization.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].ListFeeds

    @property
    def update_feed(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.update_feed`.

        Updates an asset feed configuration.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].UpdateFeed

    @property
    def delete_feed(self):
        """Return the gRPC stub for :meth:`AssetServiceClient.delete_feed`.

        Deletes an asset feed.

        Returns:
            Callable: A callable which accepts the appropriate
                deserialized request object and returns a
                deserialized response object.
        """
        return self._stubs["asset_service_stub"].DeleteFeed
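A minimal sketch of constructing the transport and grabbing a stub; it assumes application default credentials are discoverable in the environment:

transport = AssetServiceGrpcTransport()
list_feeds = transport.list_feeds  # raw gRPC stub method, ready to call
channel = transport.channel        # the underlying grpc.Channel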
letsencrypt/letsencrypt
certbot-nginx/tests/http_01_test.py
Python
apache-2.0
10,269
0.003019
"""Tests for certbot_nginx._internal.http_01""" import unittest import josepy as jose try: import mock except ImportError: # pragma: no cover from unittest import mock # type: ignore from acme import challenges from certbot import achallenges from certbot.tests import acme_util from certbot.tests import util as test_util from certbot_nginx._internal.obj import Addr import test_util as util AUTH_KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem")) class HttpPerformTest(util.NginxTest): """Test the NginxHttp01 challenge.""" account_key = AUTH_KEY achalls = [ achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01(token=b"kNdwjwOeX0I_A8DXt9Msmg"), "pending"), domain="www.example.com", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01( token=b"\xba\xa9\xda?<m\xaewmx\xea\xad\xadv\xf4\x02\xc9y" b"\x80\xe2_X\t\xe7\xc7\xa4\t\xca\xf7&\x945" ), "pending"), domain="ipv6.com", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01( token=b"\x8c\x8a\xbf_-f\\cw\xee\xd6\xf8/\xa5\xe3\xfd" b"\xeb9\xf1\xf5\xb9\xefVM\xc9w\xa4u\x9c\xe1\x87\xb4" ), "pending"), domain="www.example.org", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"), domain="migration.com", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"), domain="ipv6ssl.com", account_key=account_key), ] def setUp(self): super().setUp() config = self.get_nginx_configurator( self.config_path, self.config_dir, self.work_dir, self.logs_dir) from certbot_nginx._internal import http_01 self.http01 = http_01.NginxHttp01(config) def test_perform0(self): responses = self.http01.perform() self.assertEqual([], responses) @mock.patch("certbot_nginx._internal.configurator.NginxConfigurator.save") def test_perform1(self, mock_save): self.http01.add_chall(self.achalls[0]) response = self.achalls[0].response(self.account_key) responses = self.http01.perform() self.assertEqual([response], responses) self.assertEqual(mock_save.call_count, 1) def test_perform2(self): acme_responses = [] for achall in self.achalls: self.http01.add_chall(achall) acme_responses.append(achall.response(self.account_key)) http_responses = self.http01.perform() self.assertEqual(len(http_responses), 5) for i in range(5): self.assertEqual(http_responses[i], acme_responses[i]) def test_mod_config(self): self.http01.add_chall(self.achalls[0]) self.http01.add_chall(self.achalls[2]) self.http01._mod_config() # pylint: disable=protected-access self.http01.configurator.save() self.http01.configurator.parser.load() # vhosts = self.http01.configurator.parser.get_vhosts() # for vhost in vhosts: # pass # if the name matches # check that the location block is in there and is correct # if vhost.addrs == set(v_addr1): # response = self.achalls[0].response(self.account_key) # else: # response = self.achalls[2].response(self.account_key) # self.assertEqual(vhost.addrs, set(v_addr2_print)) # self.assertEqual(vhost.names, set([response.z_domain.decode('ascii')])) @mock.patch('certbot_nginx._internal.parser.NginxParser.add_server_directives') def test_mod_config_http_and_https(self, mock_add_server_directives): """A server_name with both HTTP and HTTPS vhosts should get modded in 
both vhosts""" self.configuration.https_port = 443 self.http01.add_chall(self.achalls[3]) # migration.com self.http01._mod_config() # pylint: disable=protected-access # Domain has an HTTP and HTTPS vhost # 2 * 'rewrite' + 2 * 'return 200 keyauthz' = 4 self.assertEqual(mock_add_server_directives.call_count, 4) @mock.patch('certbot_nginx._internal.parser.nginxparser.dump') @mock.patch('certbot_nginx._internal.parser.NginxParser.add_server_directives') def test_mod_config_only_https(self, mock_add_server_directives, mock_dump): """A server_name with only an HTTPS vhost should get modded""" self.http01.add_chall(self.achalls[4]) # ipv6ssl.com self.http01._mod_config() # pylint: disable=protected-access # It should modify the existing HTTPS vhost self.assertEqual(mock_add_server_directives.call_count, 2) # since there was no suitable HTTP vhost or default HTTP vhost, a non-empty one # should have been created and written to the challenge conf file self.assertNotEqual(mock_dump.call_args[0][0], []) @mock.patch('certbot_nginx._internal.parser.NginxParser.add_server_directives') def test_mod_config_deduplicate(self, mock_add_server_directives): """A vhost that appears in both HTTP and HTTPS vhosts only gets modded once""" achall = achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"), domain="ssl.both.com", account_key=AUTH_KEY) self.http01.add_chall(achall) self.http01._mod_config() # pylint: disable=protected-access # Should only get called 5 times, rather than 6, because two vhosts are the same self.assertEqual(mock_add_server_directi
ves.call_count, 5*2) def test_mod_config_insert_bucket_directive(self): nginx_conf = self.http01.configurator.parser.abs_path('nginx.conf')
expected = ['server_names_hash_bucket_size', '128'] original_conf = self.http01.configurator.parser.parsed[nginx_conf] self.assertFalse(util.contains_at_depth(original_conf, expected, 2)) self.http01.add_chall(self.achalls[0]) self.http01._mod_config() # pylint: disable=protected-access self.http01.configurator.save() self.http01.configurator.parser.load() generated_conf = self.http01.configurator.parser.parsed[nginx_conf] self.assertTrue(util.contains_at_depth(generated_conf, expected, 2)) def test_mod_config_update_bucket_directive_in_included_file(self): # save old example.com config example_com_loc = self.http01.configurator.parser.abs_path('sites-enabled/example.com') with open(example_com_loc) as f: original_example_com = f.read() # modify example.com config modified_example_com = 'server_names_hash_bucket_size 64;\n' + original_example_com with open(example_com_loc, 'w') as f: f.write(modified_example_com) self.http01.configurator.parser.load() # run change self.http01.add_chall(self.achalls[0]) self.http01._mod_config() # pylint: disable=protected-access self.http01.configurator.save() self.http01.configurator.parser.load() # not in nginx.conf expected = ['server_names_hash_bucket_size', '128'] nginx_conf_loc = self.http01.configurator.parser.abs_path('nginx.conf') nginx_conf = self.http01.configurator.parser.parsed[nginx_conf_loc] self.assertFalse(util.contains_at_depth(nginx_conf, expected, 2)) # is updated in example.com conf generated_conf = self.http01.configurator.parser.parsed[example_com_loc]
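For orientation, a minimal sketch of the flow these tests exercise; config is assumed to be an already-configured NginxConfigurator and achall an annotated HTTP01 challenge like the ones built above (both names are placeholders):

from certbot_nginx._internal import http_01

auth = http_01.NginxHttp01(config)   # config: assumed NginxConfigurator instance
auth.add_chall(achall)               # queue one challenge per domain
responses = auth.perform()           # writes challenge server blocks into the
                                     # nginx config, returns one ACME response each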
EasyCTF/easyctf-2015
api/problems/programming/addition/addition_grader.py
Python
mit
225
0.04
def grade(tid, answer): if answer.find("'twas_sum_EZ_programming,_am_I_rite?") != -1: return { "correct": True, "message": "Nice job!"
} return { "
correct": False, "message": "If you're confused, read some tutorials :)" }
BrutusTT/pyJD
pyJD/EZModule.py
Python
agpl-3.0
8,093
0.011121
#################################################################################################### # Copyright (C) 2016 by Ingo Keller, Katrin Lohan # # <brutusthetschiepel@gmail.com> # # # # This file is part of pyJD (Python/Yarp Tools for the JD robot). # # # # pyJD is free software: you can redistribute it and/or modify it under the terms of the # # GNU Affero General Public License as published by the Free Software Foundation, either # # version 3 of the License, or (at your option) any later version. # # # # pyJD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; # # without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # # See the GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with pyJD. If not, see <http://www.gnu.org/licenses/>. # #################################################################################################### import argparse import socket import time import yarp EMSG_YARP_NOT_FOUND = "Could not connect to the yarp server. Try running 'yarp detect'." EMSG_ROBOT_NOT_FOUND = 'Could not connect to the robot at %s:%s' class EZModule(yarp.RFModule): """ The EZBModule class provides a base class for developing modules for the JD robot. """ # Default IP Address and Port for the JD Humanoid Robot. TCP_IP = '192.168.1.1' TCP_PORT = 23 # Existing motor ID's are D0-D9, D12-D14 and D16-D18 there are more limits LIMITS = [ (30, 180), (70, 170), (0, 170), (0, 170), (0, 60), (0, 180), (0, 90), (0, 60), (0, 180), (0, 180), (0, 180), (0, 160), (0, 180), (0, 130), (0, 180), (0, 160), (0, 180), (50, 130), (0, 180), (0, 180), (0, 180) ] def __init__(self, ip, port, prefix): yarp.RFModule.__init__(self) self.ip = ip self.port = int(port) self.prefix = prefix # self.last_pos = [-1] * len(EZModule.LIMITS) def configure(self, rf): name = self.__class__.__name__ if self.prefix: name = self.prefix + '/' + name self.setName(name) # RPC Port self.rpc_port = yarp.RpcServer() # name settings port_name = '/%s/%s' % (name, 'rpc') if not self.rpc_port.open(port_name): raise RuntimeError, EMSG_YARP_NOT_FOUND self.attach_rpc_server(self.rpc_port) return True def interruptModule(self): self.rpc_port.interrupt() for x in dir(self): if x.endswith('Port') and 'interrupt' in dir(getattr(self, x)): getattr(self, x).interrupt() return True def close(self): self.rpc_port.close() for x in dir(self): if x.endswith('Port') and 'close' in dir(getattr(self, x)): getattr(self, x).close() return True def getPeriod(self): return 0.1 def updateModule(self): # XXX: I do not know why we need that, but if method is empty the module gets stuck time.sleep(0.000001) return True def createInputPort(self, name, mode = 'unbuffered'): """ This method returns an input port. @param obj - the object that the port is created for @param name - if a name is provided it gets appended to the modules name @param buffered - if buffered is True a buffered port will be used otherwise not; default is True. @result port """ return self.__createPort(name + ':i', None, mode) def __createPort(self, name, target = None, mode = 'unbuffered'): """ This method returns a port object. @param name - yarp name for the port @param obj - object for which the port is created @param buffered - if buffered is True a buffered port will be used otherwise not; default is True. 
@result port """ # create port if mode == 'buffered': port = yarp.BufferedPortBottle() elif mode == 'rpcclient': port = yarp.RpcClient() elif mode == 'rpcserver': port = yarp.RpcServer() else: port = yarp.Port() # build port name port_name = [''] # prefix handling if hasattr(self, 'prefix') and self.prefix: port_name.append(self.prefix) port_name.append(self.__class__.__name__) port_name.append(name) # open port if not port.open('/'.join(port_name)): raise RuntimeError, EMSG_YARP_NOT_FOUND # add output if given if target: port.addOutput(target) if hasattr(self, '_ports'): self._ports.append(port) return port def createOutputPort(self, name, target = None, mode = 'unbuffered'): """ This method returns an output port. @param obj - the object that the port is created for @param name - if a name is provided it gets appended to the modules name @param buffered - if buffered is True a buffered port will be used otherwise not; default is True. @result port """ return self.__createPort(name + ':o', target, mode) #################################################################################################### # # Default methods for running the modules standalone # #################################################################################################### def createArgParser(): """ This method creates a base argument parser. @return Argument Parser object """ parser = argparse.ArgumentParser(description='Create a JDModule to control the JD robot.') parser.add_argument( '-i', '--ip', dest = 'ip', default = str(EZModule.TCP_IP), help = 'IP address for the JD robot.') parser.add_argument( '-p', '--port', dest = 'port', default = str(EZModule.TCP_PORT), help = 'Port for the JD robot') parser.add_argument( '-n', '--name', dest = 'name', default = '', help = 'Name prefix for Yarp port names') return parser.parse_args() def main(module_cls): """ This is a main method to run a module from command line. @param module_cls - an EZModule based class that can be started as a standalone module. """ args = createArgParser() yarp.Network.init()
resource_finder = yarp.ResourceFinder() resource_finder.setVerbose(True) # resource_finder.configure(argc,argv); module = module_cls(args.ip, args.port, args.name) module.runModule(res
ource_finder) yarp.Network.fini()
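A minimal sketch of a concrete module built on EZModule, using only the helpers defined above; EchoModule and its port name are hypothetical:

class EchoModule(EZModule):

    def configure(self, rf):
        EZModule.configure(self, rf)                  # sets the name, opens the RPC port
        self.inPort = self.createInputPort('data')    # opens /EchoModule/data:i
        self.outPort = self.createOutputPort('data')  # opens /EchoModule/data:o
        return True

if __name__ == '__main__':
    main(EchoModule)   # parses --ip/--port/--name and runs the module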
udoyen/pythonlearning
1-35/ex14.py
Python
mit
808
0
from sys import argv script, user_name = argv # Declare the text or prompt to be seen by the user # for all requests for input prompt = '> ' print "Hi %s, I'm the %s script." % (user_name, script) print "I'd like to ask you a few questions." print "Do you like me %s?" % user_name # The 'prompt = >' is seen by the user as they are asked for some input likes = raw_input(prompt) print "Where do you live %s?" % user_name # The 'prompt = >' is seen by the user as they are asked for some input lives = raw_input(prompt) print "What kind of computer do you have?" # The 'prompt = >' is seen by the user as they are
asked for some input computer = raw_input(prompt) print ""
" Alright, so you said %r about liking me. You live in %r. Not sure where that is. And you have a %r computer. Nice. """ % (likes, lives, computer)
NervanaSystems/neon
neon/data/text.py
Python
apache-2.0
22,858
0.001181
# ****************************************************************************** # Copyright 2017-2018 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ****************************************************************************** """ Defines text dataset handling. """ import logging import numpy as np import os import h5py from neon.data.dataiterator import NervanaDataIterator, ArrayIterator from neon.data.datasets import Dataset from neon.data.text_preprocessing import pad_sentences, pad_data logger = logging.getLogger(__name__) class Text(NervanaDataIterator): """ This class defines methods for loading and iterating over text datasets. """ def __init__(self, time_steps, path, vocab=None, tokenizer=None, onehot_input=True, reverse_target=False, get_prev_target=False): """ Construct a text dataset object. Arguments: time_steps (int) : Length of a sequence. path (str) : Path to text file. vocab (python.set) : A set of unique tokens. tokenizer (function) : Tokenizer function. onehot_input (boolean): One-hot representation of input reverse_target (boolean): for sequence to sequence models, set to True to reverse target sequence. Also disables shifting target by one. get_prev_target (boolean): for sequence to sequence models, set to True for training data to provide correct target from previous time step as decoder input. If set, shape will be a tuple of shapes, corresponding to encoder and decoder inputs. 
""" super(Text, self).__init__(name=None) self.seq_length = time_steps self.onehot_input = onehot_input self.batch_index = 0 self.reverse_target = reverse_target self.get_prev_target = get_prev_target X, y = self._get_data(path, tokenizer, vocab) # reshape to preserve sentence continuity across batches self.X = X.reshape(self.be.bsz, self.nbatches, time_steps) self.y = y.reshape(self.be.bsz, self.nbatches, time_steps) # stuff below this comment needs to be cleaned up and commented self.nout = self.nclass if self.onehot_input: self.shape = (self.nout, time_steps) self.dev_X = self.be.iobuf((self.nout, time_steps)) if self.get_prev_target: self.dev_Z = self.be.iobuf((self.nout, time_steps)) else: self.shape = (time_steps, 1) self.dev_X = self.be.iobuf(time_steps, dtype=np.int32) if self.get_prev_target: self.dev_Z = self.be.iobuf(time_steps, dtype=np.int32) self.decoder_shape = self.shape self.dev_y = self.be.iobuf((self.nout, time_steps)) self.dev_lbl = self.be.iobuf(time_steps, dtype=np.int32) self.dev_lblflat = self.dev_lbl.reshape((1, -1)) def _get_data(self, path, tokenizer, vocab): text = open(path).read() tokens = self.get_tokens(text, tokenizer) # make this a static method extra_tokens = len(tokens) % (self.be.bsz * self.seq_length) if extra_tokens: tokens = tokens[:-extra_tokens] self.nbatches = len(tokens) // (self.be.bsz * self.seq_length) self.ndata = self.nbatches * self.be.bsz # no leftovers self.vocab = sorted(self.get_vocab(tokens, vocab)) self.nclass = len(self.vocab) # vocab dicts self.token_to_index = dict((t, i) for i, t in enumerate(self.vocab)) self.index_to_token = dict((i, t) for i, t in enumerate(self.vocab)) # map tokens to indices X = np.asarray([self.token_to_index[t] for t in tokens], dtype=np.uint32) if self.reverse_target: y = X.copy() else: y = np.concatenate((X[1:], X[:1])) return X, y @staticmethod def create_valid_file(path, valid_split=0.1): """ Create separate files for training and validation. Arguments: path(str): Path to data file. valid_split(float, optional): Fraction of data to set aside for validation.
Returns: str, str : Paths to train file and validation file """ text = open(path).read() # create train and valid paths filename, ext = os.path.splitext(path) train_path = filename + '_train' + ext
valid_path = filename + '_valid' + ext # split data train_split = int(len(text) * (1 - valid_split)) train_text = text[:train_split] valid_text = text[train_split:] # write train file with open(train_path, 'w') as train_file: train_file.write(train_text) # write valid file with open(valid_path, 'w') as valid_file: valid_file.write(valid_text) return train_path, valid_path @staticmethod def get_tokens(string, tokenizer=None): """ Map string to a list of tokens. Arguments: string(str): String to be tokenized. token(object): Tokenizer object. tokenizer (function) : Tokenizer function. Returns: list : A list of tokens """ # (if tokenizer is None, we have a list of characters) if tokenizer is None: return string else: return tokenizer(string) @staticmethod def get_vocab(tokens, vocab=None): """ Construct vocabulary from the given tokens. Arguments: tokens(list): List of tokens. vocab: (Default value = None) Returns: python.set : A set of unique tokens """ # (if vocab is not None, we check that it contains all tokens) if vocab is None: return set(tokens) else: vocab = set(vocab) assert vocab >= set(tokens), "the predefined vocab must contain all the tokens" return vocab @staticmethod def pad_sentences(sentences, sentence_length=None, dtype=np.int32, pad_val=0.): """ Deprecated, use neon.data.text_preprocessing.pad_sentences. """ logger.error('pad_sentences in the Text class is deprecated. This function ' 'is now in neon.data.text_preprocessing.') return pad_sentences(sentences, sentence_length=sentence_length, dtype=dtype, pad_val=pad_val) @staticmethod def pad_data(path, vocab_size=20000, sentence_length=100, oov=2, start=1, index_from=3, seed=113, test_split=0.2): """ Deprecated, use neon.data.text_preprocessing.pad_data. """ logger.error('pad_data in the Text class is deprecated. This function' 'is now in neon.data.text_preprocessing') return pad_data(path, vocab_size=vocab_size, sentence_length=sentence_length, oov=oov, start=start, index_from=index_from, seed=seed, test_split=test_split) def reset(self): """ Reset the starting index of this dataset back to zero. Relevant fo
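A hedged usage sketch for the class above, assuming neon's usual backend bootstrap (gen_backend) and a plain-text corpus at corpus.txt; both are assumptions, not part of the record:

from neon.backends import gen_backend
from neon.data.text import Text

be = gen_backend(backend='cpu', batch_size=32)   # Text reads be.bsz internally
train_path, valid_path = Text.create_valid_file('corpus.txt', valid_split=0.1)
train_set = Text(time_steps=64, path=train_path)   # character-level when tokenizer=None
valid_set = Text(time_steps=64, path=valid_path, vocab=train_set.vocab)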
RalpH-himself/MusicLibrarySyncForMSC
mlsPlaylist.py
Python
gpl-3.0
3,927
0.003056
# Playlist.py # # reads all available playlists, adjusts song paths, removes songs that were not copied, # writes resulting playlist to destination import mlsSong as sng import config import glob import os import sys import codecs def Playlist(): # get a list of all playlists playlists = glob.glob(config.SOURCE_PLAYLISTFOLDER + "\\*.m3u*") # keep only the file name for (i, playlist) in enumerate(playlists): (filepath, filename) = os.path.split(playlist) playlists[i] = filename # Winamp fail: playlists are saved with pretty random-looking names. # Look up the new names in a look-up file. Playlists that are not found # won't be copied. for oldPlaylist in playlists: newPlaylist = "" for lutPlaylist in config.PLAYLIST_LUT: print oldPlaylist print lutPlaylist
[0] == oldPlaylist: newPlaylist = lutPlaylist[1] print "Playlist name conversion: from", oldPlaylist, "to", newPlaylist break if newPlaylist == "": print "No playlist name conversion found for", oldPlaylist break # "s" as in Source_playlist # ------------------------- # open source playlist
try: s = codecs.open(config.SOURCE_PLAYLISTFOLDER + "\\" + oldPlaylist, 'r', encoding='UTF-8') ## s = open(config.SOURCE_PLAYLISTFOLDER + "\\" + oldPlaylist, 'r') except: print "Playlist", oldPlaylist, "could not be read!" continue # "d" as in Destination_playlist # ------------------------------ # check if destination playlist file already exists try: d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'r') except: # file does not exist, create it d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'w') else: # file already exists, delete it and create a new one d.close() os.remove(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist) d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'w') # write header line d.write("#EXTM3U\n") # read first line, it should be '#EXTM3U' b = s.readline() print b if b == '#EXTM3U\r\n': print "EXTM3U playlist." extm3u = True else: extm3u = False # I'm pretty sure b is already the first song, so don't read another # line before properly processing it skipFirst = True for lines in s: if extm3u: a = s.readline() # 'EXTINF:' song.trackLength,Artist - Title # This line can be left unchanged. if not skipFirst: b = s.readline() # file path: strip SOURCE_MUSICFOLDER, replace it with DEST_MUSICFOLDER print b b = b.replace(config.SOURCE_MUSICFOLDER, config.DEST_MUSICFOLDER) print b else: skipFirst = False # process b: # - if b is a relative path, convert it to absolute # ... TO DO # - find the matching song; b has already been rewritten to the destination tree, # so match on fileNameNew x = None for (i, song) in enumerate(config.songList): if song.fileNameNew == b.strip(): x = i break # - if the song is unknown or was not copied, don't add it to the playlist if x is None or config.songList[x].added == 0: continue # write new path to b b = config.songList[x].fileNameNew + "\n" if not extm3u: # create line a a = "EXTINF:" + config.songList[x].trackLength + "," a = a + config.songList[x].trackArtist + " - " a = a + config.songList[x].trackTitle + "\n" d.write(a) d.write(b) s.close() d.close()
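The config module this script leans on is not shown; from the attribute accesses above it would have to look roughly like this (all values hypothetical, names inferred from usage):

# Inferred shape of config.py for mlsPlaylist (illustrative values only)
SOURCE_PLAYLISTFOLDER = r"C:\Winamp\Plugins\ml\playlists"
DEST_PLAYLISTFOLDER = r"E:\Music\playlists"
SOURCE_MUSICFOLDER = r"C:\Users\me\Music"
DEST_MUSICFOLDER = r"E:\Music"
PLAYLIST_LUT = [("plf48A3.m3u8", "Favourites.m3u8")]  # Winamp name -> readable name
songList = []  # filled elsewhere with song objects carrying fileNameOld,
               # fileNameNew, added, trackLength, trackArtist and trackTitle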
mozilla/firefox-flicks
vendor-local/lib/python/celery/utils/mail.py
Python
bsd-3-clause
5,402
0
# -*- coding: utf-8 -*- """ celery.utils.mail ~~~~~~~~~~~~~~~~~ How task error emails are formatted and sent. """ from __future__ import absolute_import import sys import smtplib import socket import traceback import warnings from email.mime.text import MIMEText from .functional import maybe_list from .imports import symbol_by_name supports_timeout = sys.version_info >= (2, 6) _local_hostname = None def get_local_hostname(): global _local_hostname if _local_hostname is None: _local_hostname = socket.getfqdn() return _local_hostname class SendmailWarning(UserWarning): """Problem happened while sending the email message.""" class Message(object): def __init__(self, to=None, sender=None, subject=None, body=None, charset='us-ascii'): self.to = maybe_list(to) self.sender = sender self.subject = subject self.body = body self.charset = charset def __repr__(self): return '<Email: To:%r Subject:%r>' % (self.to, self.subject) def __str__(self): msg = MIMEText(self.body, 'plain', self.charset) msg['Subject'] = self.subject msg['From'] = self.sender msg['To'] = ', '.join(self.to) return msg.as_string() class Mailer(object): supports_timeout = supports_timeout def __init__(self, host='localhost', port=0, user=None, password=None, timeout=2, use_ssl=False, use_tls=False): self.host = host self.port = port self.user = user self.password = password self.timeout = timeout self.use_ssl = use_ssl self.use_tls = use_tls def send(self, message, fail_silently=False): try: if self.supports_timeout: self._send(message, timeout=self.timeout) else: import socket old_timeout = socket.getdefaulttimeout() socket.setdefaulttimeout(self.timeout) try: self._send(message) finally: socket.setdefaulttimeout(old_timeout) except Exception, exc: if not fail_silently: raise warnings.warn(SendmailWarning( 'Mail could not be sent: %r %r\n%r' % ( exc, {'To': ', '.join(message.to), 'Subject': message.subject}, traceback.format_stack()))) def _send(self, message, **kwargs): Client = smtplib.SMTP_SSL if self.use_ssl else smtplib.SMTP client = Client(self.host, self.port, local_hostname=get_local_hostname(), **kwargs) if self.use_tls: client.ehlo() client.starttls() client.ehlo() if self.user and self.password: client.login(self.user, self.password) client.sendmail(message.sender, message.to, str(message)) try: client.quit() except socket.sslerror: client.close() class ErrorMail(object): """Defines how and when task error e-mails should be sent. :param task: The task instance that raised the error. :attr:`subject` and :attr:`body` are format strings which are passed a context containing the following keys: * name Name of the task. * id UUID of the task. * exc String representation of the exception. * args Positional arguments. * kwargs Keyword arguments. * traceback String representation of the traceback. * hostname Worker hostname. """ # pep8.py borks on a inline signature separator and # says "trailing whitespace" ;) EMAIL_SIGNATURE_SEP = '-- ' #: Format string used to generate error email subjects. subject = """\ [celery@%(hostname)s] Error: Task %(name)s (%(id)s): %(exc)s """ #: Format string used to generate error email content. body = """ Task %%(name)s with id %%(id)s raised exception:\n%%(exc)r Task was called with args: %%(args)s kwargs: %%(kwargs)s. The contents of the full traceback was: %%(traceback)s %(EMAIL_SIGNATURE_SEP)s Just to let you know, py-celery at %%(hostname)s. 
""" % {'EMAIL_SIGNATURE_SEP': EMAIL_SIGNATURE_SEP} error_whitelist = None def __init__(self, task, **kwargs): self.task = task self.email_subject = kwargs.get('subject', self.subject) self.email_body = kwargs.get('body', self.body) self.error_whitelist = getattr(task, 'error
_whitelist', None) or () def should_send(self, context, exc): """Returns true or false depending on whether a task error mail should be sent for this type of error.""" allow_classes = tuple(map(symbol_by_name, self.error_whitelist))
return not self.error_whitelist or isinstance(exc, allow_classes) def format_subject(self, context): return self.subject.strip() % context def format_body(self, context): return self.body.strip() % context def send(self, context, exc, fail_silently=True): if self.should_send(context, exc): self.task.app.mail_admins(self.format_subject(context), self.format_body(context), fail_silently=fail_silently)
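Putting Message and Mailer together, a direct use would look like this; the host and addresses are placeholders:

msg = Message(to=['ops@example.com'], sender='celery@example.com',
              subject='worker alert', body='task failed')
mailer = Mailer(host='smtp.example.com', timeout=5)
mailer.send(msg, fail_silently=True)   # warns instead of raising on failure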
IceflowRE/MR-eBook-Downloader
tests/tools_test.py
Python
gpl-3.0
762
0
from unidown.tools import unlink_dir_rec class TestDeleteDirRec: def test_non_existence(self, tmp_path): no_folder = tmp_path.joinp
ath("./donotexist/") assert not no_folder.exists() unlink_dir_rec(no_folder) assert not no_folder.exists() def test_recursive(self, tmp_path): for number in range(1, 4): with tmp_path.joinpath(str(number)).open('w'): pass sub_folder = tmp_path.joinpath("sub") sub_folder.mkdir(parents=True, exist_ok=True) for number in range(1, 4):
with sub_folder.joinpath(str(number)).open('w'): pass tmp_path.joinpath("sub2").mkdir() unlink_dir_rec(tmp_path) assert not tmp_path.exists()
janol77/flask-app
app/libs/validators.py
Python
gpl-3.0
775
0
"""Validators class.""" # -*- coding: utf-8 -*- from wtforms import ValidationError class UniqueValidator(object): """Validador para chequear variables unicas.""" def __init__(self, model, field, message=None): self.model = model self.fie
ld = field if not message: message = u'Another element with the same value already exists.' self.message = message def
__call__(self, form, field): _id = None params = {self.field: field.data, 'deleted': False} existing = self.model.objects.filter(**params).first() if 'id' in form.data: _id = str(form.id.data) if existing and (_id is None or _id != str(existing.id)): raise ValidationError(self.message)
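In use the validator is attached to a form field; the User document and UserForm below are illustrative only, assuming a MongoEngine-style model as implied by model.objects.filter:

from wtforms import Form, StringField

class UserForm(Form):
    # User: assumed MongoEngine document with an 'email' field and a 'deleted' flag
    email = StringField('email', validators=[
        UniqueValidator(User, 'email', message=u'That email is already taken.')
    ])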
nathanielvarona/airflow
tests/providers/amazon/aws/hooks/test_s3.py
Python
apache-2.0
25,423
0.002085
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import gzip as gz import os import tempfile from unittest import mock from unittest.mock import Mock import boto3 import pytest from botocore.exceptions import ClientError, NoCredentialsError from airflow.exceptions import AirflowException from airflow.models import Connection from airflow.providers.amazon.aws.hooks.s3 import S3Hook, provide_bucket_name, unify_bucket_name_and_key try: from moto import mock_s3 except ImportError: mock_s3 = None # This class needs to be separated out because if there are earlier mocks in the same class # the tests will fail on teardown. class TestAwsS3HookNoMock: def test_check_for_bucket_raises_error_with_invalid_conn_id(self, monkeypatch): monkeypatch.delenv('AWS_PROFILE', raising=False) monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False) monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False) hook = S3Hook(aws_conn_id="does_not_exist") with pytest.raises(NoCredentialsError): hook.check_for_bucket("test-non-existing-bucket") @pytest.mark.skipif(mock_s3 is None, reason='moto package not present') class TestAwsS3Hook: @mock_s3 def test_get_conn(self): hook = S3Hook() assert hook.get_conn() is not None @mock_s3 def test_use_threads_default_value(self): hook = S3Hook() assert hook.transfer_config.use_threads is True @mock_s3 def test_use_threads_set_value(self): hook = S3Hook(transfer_config_args={"use_threads": False}) assert hook.transfer_config.use_threads is False def test_parse_s3_url(self): parsed = S3Hook.parse_s3_url("s3://test/this/is/not/a-real-key.txt") assert parsed == ("test", "this/is/not/a-real-key.txt"), "Incorrect parsing of the s3 url" def test_parse_s3_object_directory(self): parsed = S3Hook.parse_s3_url("s3://test/this/is/not/a-real-s3-directory/") assert parsed == ("test", "this/is/not/a-real-s3-directory/"), "Incorrect parsing of the s3 url" def test_check_for_bucket(self, s3_bucket): hook = S3Hook() assert hook.check_for_bucket(s3_bucket) is True assert hook.check_for_bucket('not-a-bucket') is False @mock_s3 def test_get_bucket(self): hook = S3Hook() assert hook.get_bucket('bucket') is not None @mock_s3 def test_create_bucket_default_region(self): hook = S3Hook() hook.create_bucket(bucket_name='new_bucket') assert hook.get_bucket('new_bucket') is not None @mock_s3 def test_create_bucket_us_standard_region(self, monkeypatch): monkeypatch.delenv('AWS_DEFAULT_REGION', raising=False) hook = S3Hook() hook.create_bucket(bucket_name='new_bucket', region_name='us-east-1') bucket = hook.get_bucket('new_bucket') assert bucket is not None region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint') # https://github.com/spulec/moto/pull/1961 # If location is "us-east-1", LocationConstraint should be None assert region is None @mock_s3 def 
test_create_bucket_other_region(self): hook = S3Hook() hook.create_bucket(bucket_name='new_bucket', region_name='us-east-2') bucket = hook.get_bucket('new_bucket') assert bucket is not None region = bucket.meta.client.get_bucket_location(Bucket=bucket.name).get('LocationConstraint') assert region == 'us-east-2' def test_check_for_prefix(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='a', Body=b'a') bucket.put_object(Key='dir/b', Body=b'b') assert hook.check_for_prefix(bucket_name=s3_bucket, prefix='dir/', delimiter='/') is True assert hook.check_for_prefix(bucket_name=s3_bucket, prefix='a', delimiter='/') is False def test_list_prefixes(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='a', Body=b'a') bucket.put_object(Key='dir/b', Body=b'b') assert [] == hook.list_prefixes(s3_bucket, prefix='non-existent/') assert ['dir/'] == hook.list_prefixes(s3_bucket, delimiter='/') assert ['a'] == hook.list_keys(s3_bucket, delimiter='/') assert ['dir/b'] == hook.list_keys(s3_bucket, prefix='dir/') def test_list_prefixes_paged(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) # we don't need to test the paginator that's covered by boto tests keys = [f"{i}/b" for i in range(2)] dirs = [f"{i}/" for i in range(2)] for key in keys: bucket.put_object(Key=key, Body=b'a') assert sorted(dirs) == sorted(hook.list_prefixes(s3_bucket, delimiter='/', page_size=1)) def test_list_keys(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='a', Body=b'a') bucket.put_object(Key='dir/b', Body=b'b') assert [] == hook.list_keys(s3_bucket, prefix='non-existent/') assert ['a', 'dir/b'] == hook.list_keys(s3_bucket) assert ['a'] == hook.list_keys(s3_bucket, delimiter='/'
) assert ['dir/b'] == hook.list_keys(s3_bucket, prefix='dir/') def test_list_keys_paged(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) keys = [str(i) for i in range(2)]
for key in keys: bucket.put_object(Key=key, Body=b'a') assert sorted(keys) == sorted(hook.list_keys(s3_bucket, delimiter='/', page_size=1)) def test_check_for_key(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='a', Body=b'a') assert hook.check_for_key('a', s3_bucket) is True assert hook.check_for_key(f's3://{s3_bucket}//a') is True assert hook.check_for_key('b', s3_bucket) is False assert hook.check_for_key(f's3://{s3_bucket}//b') is False def test_get_key(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='a', Body=b'a') assert hook.get_key('a', s3_bucket).key == 'a' assert hook.get_key(f's3://{s3_bucket}/a').key == 'a' def test_read_key(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='my_key', Body=b'Cont\xC3\xA9nt') assert hook.read_key('my_key', s3_bucket) == 'Contént' # As of 1.3.2, Moto doesn't support select_object_content yet. @mock.patch('airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook.get_client_type') def test_select_key(self, mock_get_client_type, s3_bucket): mock_get_client_type.return_value.select_object_content.return_value = { 'Payload': [{'Records': {'Payload': b'Cont\xC3\xA9nt'}}] } hook = S3Hook() assert hook.select_key('my_key', s3_bucket) == 'Contént' def test_check_for_wildcard_key(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) bucket.put_object(Key='abc', Body=b'a') bucket.put_object(Key='a/b', Body=b'a') assert hook.check_for_wildcard_key('a*', s3_bucket) is True assert hook.check_for_wildcard_key('abc', s3_bucket) is
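Outside the test suite the same hook surface reads naturally; a sketch with a placeholder bucket name, assuming credentials come from the environment (or a moto mock):

hook = S3Hook()
hook.create_bucket(bucket_name='my-bucket', region_name='us-east-2')
hook.get_bucket('my-bucket').put_object(Key='greeting', Body=b'hello')
assert hook.check_for_key('greeting', 'my-bucket')
print(hook.read_key('greeting', 'my-bucket'))   # -> 'hello'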
thelabnyc/django-oscar-wfrs
src/wellsfargo/migrations/0008_fraudscreenresult.py
Python
isc
1,998
0.001502
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-10-02 21:26 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ("order", "0004_auto_20160111_1108"), ("wellsfargo", "0007_financingplan_advertising_enabled"), ] operations = [ migrations.CreateModel( name="FraudScreenResult", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "screen_type", models.CharField(max_length=25, verbose_name="Fraud Screen Type"), ), ( "decision", models.CharField( choices=[ ("REJECT", "Transaction was rejected"), ("ACCEPT", "Transaction was accepted"), ("ERROR", "Error occurred while running fraud screen"), ], max_length=25, verbose_name="Decision", ), ), ("message", models.TextField(verbose_name="Message")), ("created_datetime", models.DateTimeField(auto_now_add=True)), ("modified_datetime"
, models.DateTimeField(auto_now=True)), ( "order", models.ForeignKey( on_delete=django.db.models.dele
tion.CASCADE, to="order.Order" ), ), ], options={ "ordering": ("-created_datetime", "-id"), }, ), ]
qinggeng/tools
MatrixSkatch/shell.py
Python
unlicense
2,810
0.041637
#-*- coding: utf-8 -*- import os, re from traceback import format_exc as fme from exprParser import Parser, ParserContext class Shell: echo = False echoStrs = { 'on': True, 'off': False, 'true': True, 'false': False, } commands = {} history = [] values = [] ops = [] def processEchoCommand(self, args): if len(args) == 0: return echoStateStr = args[0].lower() try: self.echo = self.echoStrs[echoStateStr] print 'echo = %r' % (self.echo, ) except KeyError: self.error('invalid echo setting value %s' % (echoStateStr, )) def processExitCommand(self, args): self.msg('bye!') exit() def
makeHistoryCommandArgs(self, args): h = self.history if len(args) > 0: arg = args[0] if arg.isdigit(): return int(arg) return len(h) def processHistoryCommand(self, args): h = self.history historyLen = self.makeHistoryCommandArgs(args) for item, i in zip(h, reversed(range(historyLen))): self.msg('%d. %s' % (i + 1, item)) def processOps(self, args): ops = self.ops for op in self.ops: self.msg(op)
def msg(self, txt): print txt def error(self, msg): print msg def installCommands(self): c = self.commands c[':echo'] = self.processEchoCommand c[':exit'] = self.processExitCommand c[':history'] = self.processHistoryCommand c[':ops'] = self.processOps def inputOperation(self, userInput): parser = Parser() context = ParserContext() context.unnamedVariables = self.values parser.context = context parser.parse(userInput) d = parser.ret self.values.append(d) self.msg('$%d=' % (len(self.values), )) self.msg(str(d)) #self.printDeterminant(self.values[-1]) return True def isValidDeterminant(self, d): rl = -1 for r in d: if rl == -1: rl = len(r) elif len(r) != rl: self.msg('invalid determinant') return False return True def printDeterminant(self, d): msg = '' for r in d: msg += '|' for e in r: msg += str(e) + '\t' msg += '|\n' self.msg(msg) def processOperationInput(self, userInput): userInput = userInput.strip() return self.inputOperation(userInput) def runShell(self): self.installCommands() while 1: userInput = raw_input('>>') if len(userInput.strip()) == 0: continue if True == self.echo: self.msg(userInput) inputs = userInput.split(' ') if len(inputs) > 0: cmdName = inputs[0] if cmdName in self.commands: try: self.history.append(userInput) self.commands[cmdName](inputs[1:]) except Exception, e: print e print fme() elif self.processOperationInput(userInput): self.ops.append(userInput) else: self.error('unknown command/operation "%s"' % (userInput)) if __name__ == '__main__': s = Shell() s.runShell()
bparzella/secsgem
secsgem/secs/variables/u1.py
Python
lgpl-2.1
1,183
0
##################################################################### # u1.py # # (c) Copyright 2021, Benjamin Parzella. All rights reserved. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # T
his software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. ##################################################################### """SECS 1 byte unsigned integer variable type.""" from .base_number import BaseNumber class U1(BaseNumber): """
Secs type for 1 byte unsigned data. :param value: initial value :type value: list/integer :param count: number of items this value :type count: integer """ format_code = 0o51 text_code = "U1" _base_type = int _min = 0 _max = 255 _bytes = 1 _struct_code = "B" preferred_types = [int]
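Construction follows the docstring; a minimal sketch, assuming the BaseNumber constructor accepts the forms the docstring describes:

single = U1(42)            # one unsigned byte, validated against _min/_max (0..255)
several = U1([1, 2, 3])    # list of items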
ansp-2015/arquea
menuextension/models.py
Python
mpl-2.0
652
0.001534
# -*
- coding: utf-8 -*- # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from django.db import models from django.utils.transl
ation import ugettext_lazy as _ from django.template import Library from treemenus.models import MenuItem register = Library() class MenuItemExtension(models.Model): menu_item = models.OneToOneField(MenuItem, related_name="extension") visivel = models.BooleanField(default=False) css = models.CharField(_(u'CSS style'), null=True, blank=True, max_length=300)
Banjong1990/honey
dpkt/dpkt/icmp.py
Python
gpl-2.0
4,112
0.034776
# $Id: icmp.py,v 1.1.1.1 2005/10/29 18:20:48 provos Exp $ from dpkt import Packet, in_cksum as _icmp_cksum import ip # Types (icmp_type) and codes (icmp_code) - # http://www.iana.org/assignments/icmp-parameters ICMP_CODE_NONE = 0 # for types without codes ICMP_ECHOREPLY = 0 # echo reply ICMP_UNREACH = 3 # dest unreachable, codes: ICMP_UNREACH_NET = 0 # bad net ICMP_UNREACH_HOST = 1 # bad host ICMP_UNREACH_PROTO = 2 # bad protocol ICMP_UNREACH_PORT = 3 # bad port ICMP_UNREACH_NEEDFRAG = 4 # IP_DF caused drop ICMP_UNREACH_SRCFAIL = 5 # src route failed ICMP_UNREACH_NET_UNKNOWN = 6 # unknown net ICMP_UNREACH_HOST_UNKNOWN = 7 # unknown host ICMP_UNREACH_ISOLATED = 8 # src host isolated ICMP_UNREACH_NET_PROHIB = 9 # for crypto devs ICMP_UNREACH_HOST_PROHIB = 10 # ditto ICMP_UNREACH_TOSNET = 11 # bad tos for net ICMP_UNREACH_TOSHOST = 12 # bad tos for host ICMP_UNREACH_FILTER_PROHIB = 13 # prohibited access ICMP_UNREACH_HOST_PRECEDENCE = 14 # precedence error ICMP_UNREACH_PRECEDENCE_CUTOFF = 15 # precedence cutoff ICMP_SRCQUENCH = 4 # packet lost, slow down ICMP_REDIRECT = 5 # shorter route, codes: ICMP_REDIRECT_NET = 0 # for network ICMP_REDIRECT_HOST = 1 # for host ICMP_REDIRECT_TOSNET = 2 # for tos and net ICMP_REDIRECT_TOSHOST = 3 # for tos and host ICMP_ALTHOSTADDR = 6 # alternate host address ICMP_ECHO = 8 # echo
service ICMP_RTRADVERT = 9 # router advertise, codes: ICMP_RTRADVERT_NORMAL = 0 # normal ICMP_RTRADVERT_NOROUTE_COMMON = 16 # selective routing ICMP_RTRSOLICIT = 10 # router solicitation ICMP_TIMEXCEED = 11 # time exceeded, code: ICMP_TIMEXCEED_INTRANS = 0 # ttl==0 in transit ICMP_TIMEXCEED_REASS = 1 # ttl==0 in reass ICMP_PARAMPROB = 12 # ip header bad ICMP_PARAMPROB_ERRATPTR = 0 # req. opt. absent ICMP_PARAMPROB_OPTABSENT = 1 # req. opt. absent ICMP_PARA
MPROB_LENGTH = 2 # bad length ICMP_TSTAMP = 13 # timestamp request ICMP_TSTAMPREPLY = 14 # timestamp reply ICMP_INFO = 15 # information request ICMP_INFOREPLY = 16 # information reply ICMP_MASK = 17 # address mask request ICMP_MASKREPLY = 18 # address mask reply ICMP_TRACEROUTE = 30 # traceroute ICMP_DATACONVERR = 31 # data conversion error ICMP_MOBILE_REDIRECT = 32 # mobile host redirect ICMP_IP6_WHEREAREYOU = 33 # IPv6 where-are-you ICMP_IP6_IAMHERE = 34 # IPv6 i-am-here ICMP_MOBILE_REG = 35 # mobile registration req ICMP_MOBILE_REGREPLY = 36 # mobile registration reply ICMP_DNS = 37 # domain name request ICMP_DNSREPLY = 38 # domain name reply ICMP_SKIP = 39 # SKIP ICMP_PHOTURIS = 40 # Photuris ICMP_PHOTURIS_UNKNOWN_INDEX = 0 # unknown sec index ICMP_PHOTURIS_AUTH_FAILED = 1 # auth failed ICMP_PHOTURIS_DECOMPRESS_FAILED = 2 # decompress failed ICMP_PHOTURIS_DECRYPT_FAILED = 3 # decrypt failed ICMP_PHOTURIS_NEED_AUTHN = 4 # no authentication ICMP_PHOTURIS_NEED_AUTHZ = 5 # no authorization ICMP_TYPE_MAX = 40 class ICMP(Packet): """Internet Control Message Protocol.""" __hdr__ = ( ('type', 'B', 8), ('code', 'B', 0), ('sum', 'H', 0) ) class Echo(Packet): __hdr__ = (('id', 'H', 0), ('seq', 'H', 0)) class Quote(Packet): __hdr__ = (('pad', 'I', 0),) def unpack(self, buf): Packet.unpack(self, buf) self.data = self.ip = ip.IP(self.data) class Unreach(Quote): __hdr__ = (('pad', 'H', 0), ('mtu', 'H', 0)) class Quench(Quote): pass class Redirect(Quote): __hdr__ = (('gw', 'I', 0),) class ParamProbe(Quote): __hdr__ = (('ptr', 'B', 0), ('pad1', 'B', 0), ('pad2', 'H', 0)) class TimeExceed(Quote): pass _typesw = { 0:Echo, 3:Unreach, 4:Quench, 5:Redirect, 8:Echo, 11:TimeExceed } def unpack(self, buf): Packet.unpack(self, buf) try: self.data = self._typesw[self.type](self.data) setattr(self, self.data.__class__.__name__.lower(), self.data) except: self.data = buf def __str__(self): if not self.sum: self.sum = _icmp_cksum(Packet.__str__(self)) return Packet.__str__(self)
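Round-tripping an echo request with these classes, written Python 2 style to match the module; a minimal sketch:

echo = ICMP(type=ICMP_ECHO, data=ICMP.Echo(id=1, seq=1, data='ping'))
wire = str(echo)      # __str__ fills in the checksum lazily
parsed = ICMP(wire)   # unpack() dispatches on type and exposes .echo
assert parsed.echo.seq == 1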
dkentw/robotframework
atest/robot/standard_libraries/builtin/listener_printing_start_end_kw.py
Python
apache-2.0
306
0
import sys ROBOT_LISTENER_API_VERSION = 2 def start_keyword(name, attrs): sys.stdout.write('start keyword %s\n' % name) sys.stderr.write(
'start keyword %s\n' % name) def end_keyword(name, attrs): sys.stdout.write('end keyword %s\n' % name) sys.stderr.write('end key
word %s\n' % name)
lavish205/olympia
src/olympia/search/middleware.py
Python
bsd-3-clause
434
0
from elasticsearch import TransportError import olympia.core.logger from olympia.amo.utils import render log = olympia.core.logger.getLogger('z.es') class ElasticsearchExceptionMiddleware(object): def proces
s_exception(self, request, exception): if issubclass(exception.__class__, Transpor
tError): log.exception(u'Elasticsearch error') return render(request, 'search/down.html', status=503)
kantel/processingpy
sketches/modes/PythonMode/examples/Basics/Objects/CompositeObjects/egg.py
Python
mit
858
0.001166
class Egg(object): def __init__(self, xpos, ypos, t, s): self.x = xpos # x-coordinate self.y = ypos # y-coordinate self.tilt = t # Left and right angle offset self.angle = 0 # Used to define the tilt self.scalar = s / 100.0 # Height of the egg def wobble(self): self.tilt = cos(self.angle) / 8 self.angle += 0.1 def display(self): noStroke() fill(255) with pushMatrix(): translate(self.x, self.y) rotate(self.tilt) scale(self.scalar) with beginShape(): vertex(0, -100) bezierVertex(25, -100, 40, -65, 40, -40)
bezierVertex(40, -15
, 25, 0, 0, 0) bezierVertex(-25, 0, -40, -15, -40, -40) bezierVertex(-40, -65, -25, -100, 0, -100)
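The class is meant to be driven from a sketch's setup/draw loop, along these lines (canvas size and placement are arbitrary):

def setup():
    global egg
    size(200, 200)
    egg = Egg(width / 2, height * 0.75, 0, 80)   # centered, 80 px tall

def draw():
    background(102)
    egg.wobble()    # advance the tilt animation
    egg.display()   # render the egg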
qsnake/gpaw
gpaw/utilities/cg.py
Python
gpl-3.0
1,346
0.003715
import numpy as np def CG(A, X, B, maxiter=20, tolerance=1.0e-10, verbose=False): """Solve X*A=B using the conjugate gradient method. ``X`` and ``B`` are ``ndarray``s of shape ``(m, nx, ny, nz)`` corresponding to matrices of size ``m*n`` (``n=nx*ny*nz``) and ``A`` is a callable representing an ``n*n`` matrix:: A(X, Y) will store ``X*A`` in the ou
tput array ``Y``. On return ``X`` will be the solution to ``X*A=B`` within ``tolerance``.""" m = len(X) shape
= (m, 1, 1, 1) R = np.empty(X.shape, X.dtype.char) Q = np.empty(X.shape, X.dtype.char) A(X, R) R -= B P = R.copy() c1 = A.sum(np.reshape([abs(np.vdot(r, r)) for r in R], shape)) for i in range(maxiter): error = sum(c1.ravel()) if verbose: print 'CG-%d: %e' % (i, error) if error < tolerance: return i, error A(P, Q) #alpha = c1 / reshape([vdot(p, q) for p, q in zip(P, Q)], shape) alpha = c1 / A.sum(np.reshape([np.vdot(q,p) for p, q in zip(P, Q)], shape)) X -= alpha * P R -= alpha * Q c0 = c1 c1 = A.sum(np.reshape([abs(np.vdot(r, r)) for r in R], shape)) beta = c1 / c0 P *= beta P += R raise ArithmeticError('Did not converge!')
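Note that A must be callable and also expose a sum hook (presumably gpaw's domain-decomposition reduction; it reduces the per-row scalars). A toy serial example, assuming the CG above is in scope:

import numpy as np

class ScaledIdentity:
    """Toy operator A = 2*I, so X*A = B has the solution X = B/2."""
    def __call__(self, X, Y):
        Y[:] = 2.0 * X      # elementwise for a scaled identity
    def sum(self, a):
        return a            # trivial reduction in the serial case

B = np.ones((2, 4, 4, 4))
X = np.zeros_like(B)
niter, error = CG(ScaledIdentity(), X, B)   # converges in one step; X becomes B/2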
jaygoswami2303/course_dashboard_api
v2/GradeAPI/urls.py
Python
mit
492
0.006098
""" Grade API v1 URL specification """ from django.conf.urls import url, patterns import views urlpatterns = patterns( '', url(r'^grades/courses
/$', views.CourseGradeList.as_view()), url(r'^grades/courses/(?P<org>[A-Za-z0-9_.-]+)[+](?P<name>
[A-Za-z0-9_.-]+)[+](?P<run>[A-Za-z0-9_.-]+)/$', views.CourseGradeDetail.as_view()), url(r'^grades/students/$', views.StudentList.as_view()), url(r'^grades/students/(?P<student_id>[0-9]+)/$', views.StudentGradeDetail.as_view()), )
iilab/ltfhc-next
system/python-requests/test_requests.py
Python
apache-2.0
40,903
0.001076
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for Requests.""" from __future__ import division import json import os import pickle import unittest import requests import pytest from requests.adapters import HTTPAdapter from requests.auth import HTTPDigestAuth from requests.compat import ( Morsel, cookielib, getproxies, str, urljoin, urlparse) from requests.cookies import cookiejar_from_dict, morsel_to_cookie from requests.exceptions import InvalidURL, MissingSchema from requests.structures import CaseInsensitiveDict try: import StringIO except ImportError: import io as StringIO HTTPBIN = os.environ.get('HTTPBIN_URL', 'http://httpbin.org/') # Issue #1483: Make sure the URL always has a trailing slash HTTPBIN = HTTPBIN.rstrip('/') + '/' def httpbin(*suffix): """Returns url for HTTPBIN resource.""" return urljoin(HTTPBIN, '/'.join(suffix)) class RequestsTestCase(unittest.TestCase): _multiprocess_can_split_ = True def setUp(self): """Create simple data set with headers.""" pass def tearDown(self): """Teardown.""" pass def test_entry_points(self): requests.session requests.session().get requests.session().head requests.get requests.head requests.put requests.patch requests.post def test_invalid_url(self): with pytest.raises(MissingSchema): requests.get('hiwpefhipowhefopw') with pytest.raises(InvalidURL): requests.get('http://') def test_basic_building(self): req = requests.Request() req.url = 'http://kennethreitz.org/' req.data = {'life': '42'} pr = req.prepare() assert pr.url == req.url assert pr.body == 'life=42' def test_no_content_length(self): get_req = requests.Request('GET', httpbin('get')).prepare() assert 'Content-Length' not in get_req.headers head_req = requests.Request('HEAD', httpbin('head')).prepare() assert 'Content-Length' not in head_req.headers def test_path_is_not_double_encoded(self): request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare() assert request.path_url == '/
get/test%20case' def test_params_are_added_before_fragment(self): request = requests.Request('GET', "http://example.com/path#fragment", params={"a": "b"}).prepare() assert request.url == "htt
p://example.com/path?a=b#fragment" request = requests.Request('GET', "http://example.com/path?key=value#fragment", params={"a": "b"}).prepare() assert request.url == "http://example.com/path?key=value&a=b#fragment" def test_mixed_case_scheme_acceptable(self): s = requests.Session() s.proxies = getproxies() parts = urlparse(httpbin('get')) schemes = ['http://', 'HTTP://', 'hTTp://', 'HttP://', 'https://', 'HTTPS://', 'hTTps://', 'HttPs://'] for scheme in schemes: url = scheme + parts.netloc + parts.path r = requests.Request('GET', url) r = s.send(r.prepare()) assert r.status_code == 200, 'failed for scheme {0}'.format(scheme) def test_HTTP_200_OK_GET_ALTERNATIVE(self): r = requests.Request('GET', httpbin('get')) s = requests.Session() s.proxies = getproxies() r = s.send(r.prepare()) assert r.status_code == 200 def test_HTTP_302_ALLOW_REDIRECT_GET(self): r = requests.get(httpbin('redirect', '1')) assert r.status_code == 200 # def test_HTTP_302_ALLOW_REDIRECT_POST(self): # r = requests.post(httpbin('status', '302'), data={'some': 'data'}) # self.assertEqual(r.status_code, 200) def test_HTTP_200_OK_GET_WITH_PARAMS(self): heads = {'User-agent': 'Mozilla/5.0'} r = requests.get(httpbin('user-agent'), headers=heads) assert heads['User-agent'] in r.text assert r.status_code == 200 def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self): heads = {'User-agent': 'Mozilla/5.0'} r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads) assert r.status_code == 200 def test_set_cookie_on_301(self): s = requests.session() url = httpbin('cookies/set?foo=bar') r = s.get(url) assert s.cookies['foo'] == 'bar' def test_cookie_sent_on_redirect(self): s = requests.session() s.get(httpbin('cookies/set?foo=bar')) r = s.get(httpbin('redirect/1')) # redirects to httpbin('get') assert 'Cookie' in r.json()['headers'] def test_cookie_removed_on_expire(self): s = requests.session() s.get(httpbin('cookies/set?foo=bar')) assert s.cookies['foo'] == 'bar' s.get( httpbin('response-headers'), params={ 'Set-Cookie': 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT' } ) assert 'foo' not in s.cookies def test_cookie_quote_wrapped(self): s = requests.session() s.get(httpbin('cookies/set?foo="bar:baz"')) assert s.cookies['foo'] == '"bar:baz"' def test_cookie_persists_via_api(self): s = requests.session() r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'}) assert 'foo' in r.request.headers['Cookie'] assert 'foo' in r.history[0].request.headers['Cookie'] def test_request_cookie_overrides_session_cookie(self): s = requests.session() s.cookies['foo'] = 'bar' r = s.get(httpbin('cookies'), cookies={'foo': 'baz'}) assert r.json()['cookies']['foo'] == 'baz' # Session cookie should not be modified assert s.cookies['foo'] == 'bar' def test_request_cookies_not_persisted(self): s = requests.session() s.get(httpbin('cookies'), cookies={'foo': 'baz'}) # Sending a request with cookies should not add cookies to the session assert not s.cookies def test_generic_cookiejar_works(self): cj = cookielib.CookieJar() cookiejar_from_dict({'foo': 'bar'}, cj) s = requests.session() s.cookies = cj r = s.get(httpbin('cookies')) # Make sure the cookie was sent assert r.json()['cookies']['foo'] == 'bar' # Make sure the session cj is still the custom one assert s.cookies is cj def test_param_cookiejar_works(self): cj = cookielib.CookieJar() cookiejar_from_dict({'foo' : 'bar'}, cj) s = requests.session() r = s.get(httpbin('cookies'), cookies=cj) # Make sure the cookie was sent assert r.json()['cookies']['foo'] == 'bar' def 
test_requests_in_history_are_not_overridden(self): resp = requests.get(httpbin('redirect/3')) urls = [r.url for r in resp.history] req_urls = [r.request.url for r in resp.history] assert urls == req_urls def test_user_agent_transfers(self): heads = { 'User-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)' } r = requests.get(httpbin('user-agent'), headers=heads) assert heads['User-agent'] in r.text heads = { 'user-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)' } r = requests.get(httpbin('user-agent'), headers=heads) assert heads['user-agent'] in r.text def test_HTTP_200_OK_HEAD(self): r = requests.head(httpbin('get')) assert r.status_code == 200 def test_HTTP_200_OK_PUT(self): r = requests.put(httpbin('put')) assert r.status_code == 200 def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self): auth = ('user', 'pass') url = httpbin('basic-auth', 'user', 'pass') r = requests.get(url, auth=auth) assert r.status_code == 200 r = requests.get(url) assert r.status_code == 401 s = requests.session() s.auth = auth r = s.get(url) assert r.status_code == 200 def test_basicauth_with_netrc(self):
chrisspen/homebot
src/ros/src/ros_homebot_python/src/ros_homebot_python/constants.py
Python
mit
14,480
0.003384
from __future__ import print_function import os import threading import re import pint ureg = pint.UnitRegistry() MM = ureg.millimeter METER = ureg.meter SEC = ureg.second DEG = ureg.degree RAD = ureg.radian DRAW_LOCK = threading.RLock() DEBOUNCE = 1 HEAD = NAME_HEAD = 'HEAD' TORSO = NAME_TORSO = 'TORSO' NAME_PAN = 'PAN' NAME_TILT = 'TILT' INDEX_HEAD = 1 INDEX_TORSO = 2 SYSTEM_STARTUP_SPEECH = 'Hello.' SYSTEM_SHUTDOWN_SPEECH = 'Goodbye.' NAME_TO_INDEX = { NAME_HEAD: INDEX_HEAD, NAME_TORSO: INDEX_TORSO, } INDEX_TO_NAME = { INDEX_HEAD: NAME_HEAD, INDEX_TORSO: NAME_TORSO, } # Things to expect from running `udevadm info --query=all --name=/dev/ttyACM*` DEVICE_SIGNATURES = { NAME_TORSO: [ #'ID_MODEL_FROM_DATABASE=Uno R3 (CDC ACM)', 'arduino__www.arduino.cc__0043_854363236313514132d0', ], NAME_HEAD: [ #'ID_MODEL=Arduino_Leonardo', #'leonardo', 'arduino__www.arduino.cc__0043_5533330393435171b041', ], } PALETTE = [ ('banner', 'black', 'light gray'), ('streak', 'black', 'dark red'), ('bg', 'black', 'dark blue'), ] HIGHLIGHT_COLOR = 'banner' LINE_LASER_PIN = 20 ID_NULL = '' ID_PAN_ANGLE = 'a' ID_BUMPER = 'b' ID_PAN_SPEED = 'c' ID_TILT_ANGLE = 'd' ID_EDGE = 'e' ID_STATUS_BUTTON = 'f' ID_BATTERY_VOLTAGE = 'g' ID_BATTERY_TEMP = 'h' ID_IDENTIFY = 'i' ID_EXTERNAL_POWER = 'j' ID_POWER_BUTTON = 'k' ID_LED = 'l' ID_LED_AUTO = 'm' ID_MOTOR_SPEED = 'n' ID_GO_TO_CENTER = 'o' ID_PING = 'p' ID_GET_VALUE = 'q' ID_PAN_FULL_REV_COUNT = 'r' ID_ALL_STOP = 's' ID_CALIBRATE = 't' ID_ULTRASONIC = 'u' ID_PONG = 'v' ID_FORCE_SENSORS = 'w' ID_TWIST = 'x' ID_PAN_CENTERMARK = 'y' ID_SET_VALUE = 'z' ID_PAN_POWER = 'A' ID_TILT_POWER = 'B' ID_SONAR_POWER = 'C' ID_ARDUINO_TEMP = 'D' ID_IMU_EULER = 'E' ID_RECHARGE_POWERDOWN = 'F' ID_BATTERY_CHARGE_RATIO = 'G' ID_IMU_ACCELEROMETER = 'H' #ID_MICROPHONE_ENABLE = 'I' ID_IMU_GYROSCOPE = 'J' ID_IMU_MAGNETOMETER = 'K' ID_LOG = 'L' ID_MOTOR_ACCEL = 'M' ID_IMU_CALIBRATION = 'N' ID_MOTOR_CALIBRATION = 'O' ID_MOTOR_ENCODER = 'P' ID_TWIST_DONE = 'Q' ID_MOTOR_ERROR = 'R' ID_GO_TO_SLEEP = 'S' ID_SHUTDOWN = 'T' # 'U' # 'V' # 'W' ID_CRASH = 'X' # 'Y' ID_HASH = 'Z' # These are used to lookup callbacks. Do Not Change. 
ALL_IDS = { ID_PAN_ANGLE: 'pan angle', ID_BUMPER: 'bumper', ID_PAN_SPEED: 'pan speed', ID_TILT_ANGLE: 'tilt angle', ID_EDGE: 'edge', ID_STATUS_BUTTON: 'status button', ID_BATTERY_VOLTAGE: 'battery voltage', ID_BATTERY_TEMP: 'battery temperature', ID_IDENTIFY: 'identify', ID_EXTERNAL_POWER: 'external power', ID_POWER_BUTTON: 'power button', ID_LED: 'led', ID_LED_AUTO: 'led auto', ID_MOTOR_SPEED: 'motor speed', ID_MOTOR_ACCEL: 'motor acceleration', ID_PING: 'ping', ID_FORCE_SENSORS: 'force sensors', ID_GO_TO_CENTER: 'go to center', ID_GET_VALUE: 'get value', ID_PAN_FULL_REV_COUNT: 'pan full rev count', ID_CALIBRATE: 'calibrate', ID_ALL_STOP: 'all stop', ID_ULTRASONIC: 'ultrasonic', ID_PONG: 'pong', ID_PAN_CENTERMARK: 'pan centermark', ID_SET_VALUE: 'set value', ID_PAN_POWER: 'pan power', ID_TILT_POWER: 'tilt power', ID_SONAR_POWER: 'sonar power', ID_ARDUINO_TEMP: 'arduino temperature', ID_RECHARGE_POWERDOWN: 'recharge powerdown', ID_BATTERY_CHARGE_RATIO: 'battery charge ratio', ID_LOG: 'log', ID_GO_TO_SLEEP: 'sleep', ID_SHUTDOWN: 'shutdown', ID_CRASH: 'crash', ID_HASH: 'hash', ID_IMU_EULER: 'imu euler', ID_IMU_ACCELEROMETER: 'imu accelerometer', ID_IMU_GYROSCOPE: 'imu gyroscope', ID_IMU_MAGNETOMETER: 'imu magnetometer', ID_IMU_CALIBRATION: 'imu calibration', ID_MOTOR_CALIBRATION: 'motor calibration', ID_MOTOR_ENCODER: 'motor encoder', ID_MOTOR_ERROR: 'motor error', ID_TWIST: 'twist', ID_TWIST_DONE: 'twist done', } NAME_TO_IDS = dict((re.sub(r'[^a-z]+', '_', v.lower()), k) for k, v in ALL_IDS.iteritems()) MOVEMENT_ERROR_NONE = 0 MOVEMENT_ERROR_EDGE = 1 MOVEMENT_ERROR_ULTRASONIC = 2 MOVEMENT_ERROR_TILT = 3 MOVEMENT_ERROR_ACCEL = 4 MOVEMENT_ERROR_ENCODER = 5 MOVEMENT_ERROR_BUMPER = 6 # Movement will be halted if ultrasonics detect we're this distance from an obstacle. MOVEMENT_ULTRASONIC_THRESHOLD_CM = 5 # This amount of tilt will be allowed on the logic x or y axis before movement is cancelled. MOVEMENT_MAX_TILT = 10 # These map to ROS messages. BOTH_FORMATS_OUT = { # ID_ALL_STOP: [], # ID_IDENTIFY: [], # ID_LED: [('state', bool)], # ID_LED_AUTO: [('state', bool)], # ID_PING: [], # ID_GET_VALUE: [('id', int)], ID_PONG: [('total', int)], ID_ARDUINO_TEMP: [('temperature', float)], ID_MOTOR_CALIBRATION: [('name', str), ('state', int)], } HEAD_FORMATS_OUT = { ID_PAN_ANGLE: [('angle', int)], ID_PAN_FULL_REV_COUNT: [('count', int)], ID_PAN_CENTERMARK: [('state', int)], ID_TILT_ANGLE: [('angle', int)], } TORSO_FORMATS_OUT = { ID_BUMPER: [('index', 'uint8'), ('state', int)], ID_EDGE: [('index', 'uint8'), ('state', int)], ID_BATTERY_VOLTAGE: [('voltage', float)], ID_BATTERY_TEMP: [('temperature', float)], ID_EXTERNAL_POWER: [('state1', int), ('state2', int)], # ID_LED: [('state', bool)], # ID_LED_AUTO: [('state', bool)], # ID_MOTOR_SPEED: [('left',
int), ('right', int)], ID_ULTRASONIC: [('index', 'uint8'), ('distance', int)], # ID_SONAR_POWER: [('state', bool)], ID_IMU_EULER: [('
x', float), ('y', float), ('z', float)], ID_IMU_ACCELEROMETER: [('x', float), ('y', float), ('z', float)], ID_IMU_GYROSCOPE: [('x', float), ('y', float), ('z', float)], ID_IMU_MAGNETOMETER: [('x', float), ('y', float), ('z', float)], ID_IMU_CALIBRATION: [ ('system', int), ('gyroscope', int), ('accelerometer', int), ('magnetometer', int), ], # ID_RECHARGE_POWERDOWN: [], ID_BATTERY_CHARGE_RATIO: [('charge', int)], # ID_GO_TO_SLEEP: [('duration', int)], # ID_SHUTDOWN: [], # ID_MOTOR_ACCEL: [('acceleration', float)], ID_STATUS_BUTTON: [('state', int)], ID_MOTOR_ENCODER: [('channel', int), ('count', int)], ID_MOTOR_ERROR: [('error', int)],# single byte ID_TWIST_DONE: [('error', int)], # 0=no error, 1=edge, 2=ultrasonic, 3=tilt, 4=accel, 5=encoder } BOTH_FORMATS_IN = { ID_ALL_STOP: [], ID_LED: [('index', int), ('state', int)], ID_LED_AUTO: [('state', int)], ID_GET_VALUE: [('id', str)], ID_FORCE_SENSORS: [('state', int)], } HEAD_FORMATS_IN = { ID_PAN_SPEED: [('speed', 'int32')], ID_GO_TO_CENTER: [('type', str)], ID_CALIBRATE: [('type', str)], ID_PAN_ANGLE: [('angle', int)], ID_TILT_ANGLE: [('angle', int)], ID_TILT_POWER: [('enabled', int)], ID_PAN_POWER: [('enabled', int)], # ID_MICROPHONE_ENABLE: [('state', int)], } TORSO_FORMATS_IN = { ID_SONAR_POWER: [('enabled', int)], ID_MOTOR_SPEED: [('left', int), ('right', int)], ID_MOTOR_ACCEL: [('acceleration', int)], ID_RECHARGE_POWERDOWN: [], ID_GO_TO_SLEEP: [('duration', int)], ID_SHUTDOWN: [], # Mimics Twist format. http://docs.ros.org/api/geometry_msgs/html/msg/Twist.html # Linear is linear.x, Angular is angular.z. ID_TWIST: [('linear', float), ('angular', float), ('seconds', float), ('force', int)], } # Packets using these IDs will require acknowledgement. ACK_IDS = set([ ID_LED, ID_LED_AUTO, ID_SONAR_POWER, ID_MOTOR_SPEED, ID_MOTOR_ACCEL, ID_GO_TO_CENTER, ID_TILT_ANGLE, ID_PAN_ANGLE, ]) MOTOR_FORWARD = 'forward' MOTOR_REVERSE = 'reverse' MOTOR_TURN_CW = 'turn_cw' MOTOR_TURN_CCW = 'turn_ccw' MOTOR_BREAK = 'break' MOTOR_PIVOT_LEFT_CW = 'pivot_left_cw' MOTOR_PIVOT_LEFT_CCW = 'pivot_left_ccw' MOTOR_PIVOT_RIGHT_CW = 'pivot_right_cw' MOTOR_PIVOT_RIGHT_CCW = 'pivot_right_ccw' # ComMotion Manual, Page 4 # The desired speed from -255 to +255. Positive values are forward, negative values are reverse. MOTOR_MAX_SPEED = 255 MOTOR_EIGTH_SPEED = int(round(MOTOR_MAX_SPEED * 0.125)) MOTOR_QUARTER_SPEED = int(round(MOTOR_MAX_SPEED * 0.25)) MOTOR_HALF_SPEED = int(round(MOTOR_MAX_SPEED * 0.5)) MOTOR_THREE_QUARTER_SPEED = int(round(MOTOR_MAX
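# A minimal sketch of how the ID/format tables above could drive parsing of
# an incoming serial line. The wire format assumed here (one ID character,
# then space-separated fields) is an illustration, not the source's actual
# framing; only the shape of the format tables mirrors the code above.
def parse_packet(line, formats):
    """Return (packet_id, {field_name: value}) for one 'i arg1 arg2' line."""
    packet_id, _, rest = line.partition(' ')
    out = {}
    for (name, typ), raw in zip(formats[packet_id], rest.split()):
        # Integer-like type tags ('uint8', 'int32') all cast through int.
        cast = int if typ in (int, 'uint8', 'int32') else typ
        out[name] = cast(raw)
    return packet_id, out

# Example with the ultrasonic format from TORSO_FORMATS_OUT:
# parse_packet('u 2 35', {'u': [('index', 'uint8'), ('distance', int)]})
# -> ('u', {'index': 2, 'distance': 35})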
nficano/python-lambda
aws_lambda/aws_lambda.py
Python
isc
26,779
0
import hashlib import json import logging import os import subprocess import sys import time from collections import defaultdict from shutil import copy from shutil import copyfile from shutil import copystat from shutil import copytree from tempfile import mkdtemp import boto3 import botocore import yaml import sys from .helpers import archive from .helpers import get_environment_variable_value from .helpers import LambdaContext from .helpers import mkdir from .helpers import read from .helpers import timestamp ARN_PREFIXES = { "cn-north-1": "aws-cn", "cn-northwest-1": "aws-cn", "us-gov-west-1": "aws-us-gov", } log = logging.getLogger(__name__) def load_source(module_name, module_path): """Loads a python module from the path of the corresponding file.""" if sys.version_info[0] == 3 and sys.version_info[1] >= 5: import importlib.util spec = importlib.util.spec_from_file_location(module_name, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) elif sys.version_info[0] == 3 and sys.version_info[1] < 5: import importlib.machinery loader = importlib.machinery.SourceFileLoader(module_name, module_path) module = loader.load_module() return module def cleanup_old_versions( src, keep_last_versions, config_file="config.yaml", profile_name=None, ): """Deletes old deployed versions of the function in AWS Lambda. Won't delete $Latest and any aliased version :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param int keep_last_versions: The number of recent versions to keep and not delete """ if keep_last_versions <= 0: print("Won't delete all versions. Please do this manually") else: path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) profile_name = cfg.get("profile") aws_access_key_id = cfg.get("aws_access_key_id") aws_secret_access_key = cfg.get("aws_secret_access_key") client = get_client( "lambda", profile_name, aws_access_key_id, aws_secret_access_key, cfg.get("region"), ) response = client.list_versions_by_function( FunctionName=cfg.get("function_name"), ) versions = response.get("Versions") if len(response.get("Versions")) < keep_last_versions: print("Nothing to delete. (Too few versions published)") else: version_numbers = [ elem.get("Version") for elem in versions[1:-keep_last_versions] ] for version_number in version_numbers: try: client.delete_function( FunctionName=cfg.get("function_name"), Qualifier=version_number, ) except botocore.exceptions.ClientError as e: print(f"Skipping Version {version_number}: {e}") def deploy( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function to AWS Lambda. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. 
path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, preserve_vpc=preserve_vpc ) else: create_function(cfg, path_to_zip_file) def deploy_s3( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, preserve_vpc=False, ): """Deploys a new function via AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) use_s3 = True s3_file = upload_s3(cfg, path_to_zip_file, use_s3) existing_config = get_function_config(cfg) if existing_config: update_function( cfg, path_to_zip_file, existing_config, use_s3=use_s3, s3_file=s3_file, preserve_vpc=preserve_vpc, ) else: create_function(cfg, path_to_zip_file, use_s3=use_s3, s3_file=s3_file) def upload( src, requirements=None, local_package=None, config_file="config.yaml", profile_name=None, ): """Uploads a new function to AWS S3. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str local_package: The path to a local package with should be included in the deploy as well (and/or is not available on PyPi) """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Copy all the pip dependencies required to run your code into a temporary # folder then add the handler file in the root of this directory. # Zip the contents of this folder into a single file and output to the dist # directory. path_to_zip_file = build( src, config_file=config_file, requirements=requirements, local_package=local_package, ) upload_s3(cfg, path_to_zip_file) def invoke( src, event_file="event.json", config_file="config.yaml", profile_name=None, verbose=False, ): """Simulates a call to your function. :param str src: The path to your Lambda ready project (folder must contain a valid config.yaml and handler module (e.g.: service.py). :param str alt_event: An optional argument to override which event file to use. :param bool verbose: Whether to print out verbose details. """ # Load and parse the config file. path_to_config_file = os.path.join(src, config_file) cfg = read_cfg(path_to_config_file, profile_name) # Set AWS_PROFIL
E en
vironment variable based on `--profile` option. if profile_name: os.environ["AWS_PROFILE"] = profile_name # Load environment variables from the config file into the actual # environment. env_vars = cfg.get("environment_variables") if env_vars: for key, value in env_vars.items(): os.environ[key] = get_environment_variable_value(value) # Load and parse event file. path_to_event_file = os.path.join(src, e
bluemini/kuma
vendor/packages/translate/storage/xml_name.py
Python
mpl-2.0
2,748
0.000364
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#


class XmlNamespace(object):

    def __init__(self, namespace):
        self._namespace = namespace

    def name(self, tag):
        return "{%s}%s" % (self._namespace, tag)


class XmlNamer(object):
    """Initialize me with a DOM node or a DOM document node (the
    toplevel node you get when parsing an XML file). Then use me
    to generate fully qualified XML names.

    >>> xml = '<office:document-styles xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0"></office:document-styles>'
    >>> from lxml import etree
    >>> namer = XmlNamer(etree.fromstring(xml))
    >>> namer.name('office', 'blah')
    {urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah
    >>> namer.name('office:blah')
    {urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah

    I can also give you XmlNamespace objects if you give me the
    abbreviated namespace name. These are useful if you need to
    reference a namespace continuously.

    >>> office_ns = namer.namespace('office')
    >>> office_ns.name('foo')
    {urn:oasis:names:tc:opendocument:xmlns:office:1.0}foo
    """

    def __init__(self, dom_node):
        # Allow the user to pass a dom node or the
        # XML document node.
        if hasattr(dom_node, 'nsmap'):
            self.nsmap = dom_node.nsmap
        else:
            self.nsmap = dom_node.getroot().nsmap

    def name(self, namespace_shortcut, tag=None):
        # If the user doesn't pass an argument into 'tag'
        # then namespace_shortcut contains a tag of the form
        # 'short-namespace:tag'
        if tag is None:
            try:
                namespace_shortcut, tag = namespace_shortcut.split(':')
            except ValueError:
                # If there is no namespace in namespace_shortcut.
                tag = namespace_shortcut.lstrip("{}")
                return tag
        return "{%s}%s" % (self.nsmap[namespace_shortcut], tag)

    def namespace(self, namespace_shortcut):
        return XmlNamespace(self.nsmap[namespace_shortcut])
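# A short usage sketch for the classes above (assumes lxml is available;
# the XML snippet is illustrative):
if __name__ == '__main__':
    from lxml import etree
    xml = ('<office:document-styles xmlns:office='
           '"urn:oasis:names:tc:opendocument:xmlns:office:1.0"/>')
    namer = XmlNamer(etree.fromstring(xml))
    print(namer.name('office', 'styles'))  # {urn:...:office:1.0}styles
    print(namer.name('office:styles'))     # same name via the shortcut form
    print(namer.namespace('office').name('font-face'))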
imposeren/django-messages-extends
test_settings.py
Python
mit
4,967
0.000604
# Django settings for temp project. DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS DATABASES = { 'default': {'ENGINE': 'django.db.backends.sqlite3'} } # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = [] # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/var/www/example.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://example.com/media/", "http://media.example.com/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; s
tore your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/var/www/example.com/static/" STATIC_ROOT = '' # URL prefix for static files. # Example: "http://example.com/static/", "http://static.example.com/" STATIC_URL = '/static/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not rel
ative paths. ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = '1s^z*4c6clc@+)c8dstu#eh4bi5907+&h_$2_&=y!3=a_!))u6' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'test_urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'temp.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: # 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'messages_extends', ) MESSAGE_STORAGE = 'messages_extends.storages.FallbackStorage' # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } }
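# A hedged usage sketch for the storage configured above: messages_extends
# layers persistent message levels over django.contrib.messages. The view
# below is illustrative and does not belong in this settings module, hence
# the comments.
#
# from django.contrib import messages
# from messages_extends import constants as messages_constants
#
# def my_view(request):
#     messages.add_message(request, messages_constants.WARNING_PERSISTENT,
#                          "Stays visible until the user dismisses it.")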
stiell/hyde
hyde/ext/plugins/folders.py
Python
mit
1,263
0.002375
# -*- coding: utf-8 -*-
"""
Plugins related to folders and paths
"""

from hyde.plugin import Plugin
from hyde.fs import Folder


class FlattenerPlugin(Plugin):
    """
    The plugin class for flattening nested folders.
    """
    def __init__(self, site):
        super(FlattenerPlugin, self).__init__(site)

    def begin_site(self):
        """
        Finds all the folders that need flattening and changes the
        relative deploy path of all resources in those folders.
        """
        items = []
        try:
            items = self.site.config.flattener.items
        except AttributeError:
            pass

        for item in items:
            node = None
            target = ''
            try:
                node = self.site.content.node_from_relative_path(item.source)
                target = Folder(item.target)
            except AttributeError:
                continue
            if node:
                for resource in node.walk_resources():
                    target_path = target.child(resource.name)
                    self.logger.debug(
                        'Flattening resource path [%s] to [%s]' %
                        (resource, target_path))
                    resource.relative_deploy_path = target_path
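# The plugin reads `flattener.items` from the site config; each item needs
# `source` and `target` attributes. A hypothetical site.yaml fragment
# (folder names are illustrative):
#
#   flattener:
#       items:
#           - source: blog/2014/drafts
#             target: blog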
jonathanmeier5/teamstore
saleor/teamstore/migrations/0004_teamstore_shipping_method.py
Python
bsd-3-clause
672
0.001488
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-06-25 15:19
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('shipping', '0005_auto_20170616_1351'),
        ('teamstore', '0003_auto_20170624_1533'),
    ]

    operations = [
        migrations.AddField(
            model_name='teamstore',
            name='shipping_method',
            field=models.ForeignKey(default=2, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='shipping.ShippingMethod', verbose_name='team shipping method'),
        ),
    ]
teoliphant/numpy-refactor
numpy/polynomial/__init__.py
Python
bsd-3-clause
951
0
""" A sub-package for efficiently dealing with polynomials. Within the documentation for this sub-package, a "finite power series," i.e., a polynomial (also referred to simply as a "series") is represented by a 1-D numpy array of the polynomial's coefficients, ordered from lowest order ter
m to highest. For example, array([1,2,3]) represents ``P_0 + 2*P_1 + 3*P_2``, where P_n is the n-th order basis polynomial applicable to the specific module in question, e.g., `polynomial` (which "wraps" the "standard" basis) or `chebyshev`. For optimal performance, all operations on polynomials, including evaluation at an argument, are implemented as operations on the coefficients. Additional (module-speci
fic) information can be found in the docstring for the module of interest. """ from polynomial import * from chebyshev import * from polyutils import * from numpy.testing import Tester test = Tester(__file__).test bench = Tester(__file__).bench
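# A small worked example of the coefficient convention described above,
# written against the modern `numpy.polynomial` API (the implicit relative
# imports above are Python 2 era):
def _example_coefficient_convention():
    import numpy.polynomial.polynomial as P
    import numpy.polynomial.chebyshev as C
    coef = [1, 2, 3]
    power = P.polyval(2.0, coef)  # 1 + 2*2 + 3*2**2 = 17
    cheb = C.chebval(2.0, coef)   # T_0 + 2*T_1 + 3*T_2 at x=2: 1 + 4 + 21 = 26
    return power, cheb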
gasabr/AtoD
atod/__init__.py
Python
mit
806
0.001241
'''
This is the library for getting DotA2 insight information.

Problem: there was no way to get dota internal data about heroes, their
abilities... in a form suitable for further work.

Solution: this library gives you access to all the data in the in-game
files. Information about a single hero does not have much use on its own,
so there is also a way to get stats for a selection of heroes or
information about a certain match.
'''

from atod.meta import meta_info
from atod.models.interfaces import Member, Group
from atod.models.ability import Ability
from atod.models.abilities import Abilities
from atod.models.hero import Hero
from atod.models.heroes import Heroes
from atod.models.match import Match
from atod.utils.pick import get_recommendations
# from atod.utils import dota_api
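# Hypothetical usage based only on the names imported above; constructor
# signatures are not visible from this module, so every call below is an
# assumption rather than documented API:
#
# from atod import Hero, Heroes, get_recommendations
# hero = Hero(1)                       # assumed: build a hero from an id
# picked = Heroes.from_ids([1, 25])    # assumed helper name
# suggestions = get_recommendations(picked)  # argument shape assumed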
cypreess/django-plans
plans/urls.py
Python
mit
2,070
0.006763
from django.conf import settings
from django.conf.urls import url

from plans.views import CreateOrderView, OrderListView, InvoiceDetailView, AccountActivationView, \
    OrderPaymentReturnView, CurrentPlanView, UpgradePlanView, OrderView, BillingInfoRedirectView, \
    BillingInfoCreateView, BillingInfoUpdateView, BillingInfoDeleteView, CreateOrderPlanChangeView, ChangePlanView, \
    PricingView, FakePaymentsView

urlpatterns = [
    url(r'^pricing/$', PricingView.as_view(), name='pricing'),
    url(r'^account/$', CurrentPlanView.as_view(), name='current_plan'),
    url(r'^account/activation/$', AccountActivationView.as_view(), name='account_activation'),
    url(r'^upgrade/$', UpgradePlanView.as_view(), name='upgrade_plan'),
    url(r'^order/extend/new/(?P<pk>\d+)/$', CreateOrderView.as_view(), name='create_order_plan'),
    url(r'^order/upgrade/new/(?P<pk>\d+)/$', CreateOrderPlanChangeView.as_view(), name='create_order_plan_change'),
    url(r'^change/(?P<pk>\d+)/$', ChangePlanView.as_view(), name='change_plan'),
    url(r'^order/$', OrderListView.as_view(), name='order_list'),
    url(r'^order/(?P<pk>\d+)/$', OrderView.as_view(), name='order'),
    url(r'^order/(?P<pk>\d+)/payment/success/$', OrderPaymentReturnView.as_view(status='success'), name='order_payment_success'),
    url(r'^order/(?P<pk>\d+)/payment/failure/$', OrderPaymentReturnView.as_view(status='failure'), name='order_payment_failure'),
    url(r'^billing/$', BillingInfoRedirectView.as_view(), name='billing_info'),
    url(r'^billing/create/$', BillingInfoCreateView.as_view(), name='billing_info_create'),
    url(r'^billing/update/$', BillingInfoUpdateView.as_view(), name='billing_info_update'),
    url(r'^billing/delete/$', BillingInfoDeleteView.as_view(), name='billing_info_delete'),
    url(r'^invoice/(?P<pk>\d+)/preview/html/$', InvoiceDetailView.as_view(), name='invoice_preview_html'),
]

if getattr(settings, 'DEBUG', False):
    urlpatterns += [
        url(r'^fakepayments/(?P<pk>\d+)/$', FakePaymentsView.as_view(), name='fake_payments'),
    ]
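# The named routes above can be resolved with reverse(); illustrative only,
# and the resulting paths assume this urlconf is included at the site root:
#
# from django.urls import reverse
# reverse('pricing')                              # -> '/pricing/'
# reverse('create_order_plan', kwargs={'pk': 1})  # -> '/order/extend/new/1/'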
robobrobro/foe
test/command/interpret/test_get_input.py
Python
mit
153
0.006536
""" Tests for foe.command.interpre
t._setup_readline """ from foe.command.interpret import _get_input def test_no
_config(): assert not _get_input()
JshWright/home-assistant
homeassistant/components/tts/__init__.py
Python
apache-2.0
16,893
0
""" Provide functionality to TTS. For more details about this component, please refer to the documentation at https://home-assistant.io/components/tts/ """ import asyncio import ctypes import functools as ft import hashlib import logging import mimetypes import os import re import io from aiohttp import web import voluptuous as vol from homeassistant.const import ATTR_ENTITY_ID from homeassistant.setup import async_prepare_setup_platform from homeassistant.core import callback from homeassistant.config import load_yaml_config_file from homeassistant.components.http import HomeAssistantView from homeassistant.components.media_player import ( SERVICE_PLAY_MEDIA, MEDIA_TYPE_MUSIC, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, DOMAIN as DOMAIN_MP) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_per_platform import homeassistant.helpers.config_validation as cv REQUIREMENTS = ["mutagen==1.37.0"] DOMAIN = 'tts' DEPENDENCIES = ['http'] _LOGGER = logging.getLogger(__name__) MEM_CACHE_FILENAME = 'filename' MEM_CACHE_VOICE = 'voice' CONF_LANG = 'language' CONF_CACHE = 'cache' CONF_CACHE_DIR = 'cache_dir' CONF_TIME_MEMORY = 'time_memory' DEFAULT_CACHE = True DEFAULT_CACHE_DIR = "tts" DEFAULT_TIME_MEMORY = 300 SERVICE_SAY = 'say' SERVICE_CLEAR_CACHE = 'clear_cache' ATTR_MESSAGE = 'message' ATTR_CACHE = 'cache' ATTR_LANGUAGE = 'language' ATTR_OPTIONS = 'options' _RE_VOICE_FILE = re.compile( r"([a-f0-9]{40})_([^_]+)_([^_]+)_([a-z_]+)\.[a-z0-9]{3,4}") KEY_PATTERN = '{0}_{1}_{2}_{3}' PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend({ vol.Optional(CONF_CACHE, default=DEFAULT_CACHE): cv.boolean, vol.Optional(CONF_CACHE_DIR, default=DEFAULT_CACHE_DIR): cv.string, vol.Optional(CONF_TIME_MEMORY, default=DEFAULT_TIME_MEMORY): vol.All(vol.Coerce(int), vol.Range(min=60, max=57600)), }) SCHEMA_SERVICE_SAY = vol.Schema({ vol.Required(ATTR_MESSAGE): cv.string, vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, vol.Optional(ATTR_CACHE): cv.boolean, vol.Optional(ATTR_LANGUAGE): cv.string, vol.Optional(ATTR_OPTIONS): dict, }) SCHEMA_SERVICE_CLEAR_CACHE = vol.Schema({}) @asyncio.coroutine def async_setup(hass, config): """Set up TTS.""" tts = SpeechManager(hass) try: conf = config[DOMAIN][0] if config.get(DOMAIN, []) else {} use_cache = conf.get(CONF_CACHE, DEFAULT_CACHE) cache_dir = conf.get(CONF_CACHE_DIR, DEFAULT_CACHE_DIR) time_memory = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY) yield from tts.async_init_cache(use_cache, cache_dir, time_memory) except (HomeAssistantError, KeyError) as err: _LOGGER.error("Error on cache init %s", err) return False hass.http.register_view(TextToSpeechView(tts)) descriptions = yield from hass.loop.run_in_executor( None, load_yaml_config_file, os.path.join(os.path.dirname(__file__), 'services.yaml')) @asyncio.coroutine def async_setup_platform(p_type, p_config, disc_info=None): """Set up a tts platform.""" platform = yield from async_prepare_setup_platform( hass, config, DOMAIN, p_type) if platform is None: return try: if hasattr(platform, 'async_get_engine'): provider = yi
eld from platform.async_get_engine( hass, p_config) else: provider = yield from hass.loop.run_in_executor( None, platform.get_engine, hass, p_config) if provider is None:
_LOGGER.error("Error setting up platform %s", p_type) return tts.async_register_engine(p_type, provider, p_config) except Exception: # pylint: disable=broad-except _LOGGER.exception("Error setting up platform %s", p_type) return @asyncio.coroutine def async_say_handle(service): """Service handle for say.""" entity_ids = service.data.get(ATTR_ENTITY_ID) message = service.data.get(ATTR_MESSAGE) cache = service.data.get(ATTR_CACHE) language = service.data.get(ATTR_LANGUAGE) options = service.data.get(ATTR_OPTIONS) try: url = yield from tts.async_get_url( p_type, message, cache=cache, language=language, options=options ) except HomeAssistantError as err: _LOGGER.error("Error on init tts: %s", err) return data = { ATTR_MEDIA_CONTENT_ID: url, ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_MUSIC, } if entity_ids: data[ATTR_ENTITY_ID] = entity_ids yield from hass.services.async_call( DOMAIN_MP, SERVICE_PLAY_MEDIA, data, blocking=True) hass.services.async_register( DOMAIN, "{}_{}".format(p_type, SERVICE_SAY), async_say_handle, descriptions.get(SERVICE_SAY), schema=SCHEMA_SERVICE_SAY) setup_tasks = [async_setup_platform(p_type, p_config) for p_type, p_config in config_per_platform(config, DOMAIN)] if setup_tasks: yield from asyncio.wait(setup_tasks, loop=hass.loop) @asyncio.coroutine def async_clear_cache_handle(service): """Handle clear cache service call.""" yield from tts.async_clear_cache() hass.services.async_register( DOMAIN, SERVICE_CLEAR_CACHE, async_clear_cache_handle, descriptions.get(SERVICE_CLEAR_CACHE), schema=SCHEMA_SERVICE_CLEAR_CACHE) return True class SpeechManager(object): """Representation of a speech store.""" def __init__(self, hass): """Initialize a speech store.""" self.hass = hass self.providers = {} self.use_cache = DEFAULT_CACHE self.cache_dir = DEFAULT_CACHE_DIR self.time_memory = DEFAULT_TIME_MEMORY self.file_cache = {} self.mem_cache = {} @asyncio.coroutine def async_init_cache(self, use_cache, cache_dir, time_memory): """Init config folder and load file cache.""" self.use_cache = use_cache self.time_memory = time_memory def init_tts_cache_dir(cache_dir): """Init cache folder.""" if not os.path.isabs(cache_dir): cache_dir = self.hass.config.path(cache_dir) if not os.path.isdir(cache_dir): _LOGGER.info("Create cache dir %s.", cache_dir) os.mkdir(cache_dir) return cache_dir try: self.cache_dir = yield from self.hass.loop.run_in_executor( None, init_tts_cache_dir, cache_dir) except OSError as err: raise HomeAssistantError("Can't init cache dir {}".format(err)) def get_cache_files(): """Return a dict of given engine files.""" cache = {} folder_data = os.listdir(self.cache_dir) for file_data in folder_data: record = _RE_VOICE_FILE.match(file_data) if record: key = KEY_PATTERN.format( record.group(1), record.group(2), record.group(3), record.group(4) ) cache[key.lower()] = file_data.lower() return cache try: cache_files = yield from self.hass.loop.run_in_executor( None, get_cache_files) except OSError as err: raise HomeAssistantError("Can't read cache dir {}".format(err)) if cache_files: self.file_cache.update(cache_files) @asyncio.coroutine def async_clear_cache(self): """Read file cache and delete files.""" self.mem_cache = {} def remove_files(): """Remove files from filesystem.""" for _, filename in self.file_cache.items(): try: os.remove(os.path.join(self.cache_dir, filename)) except OSError as err:
hep-mirrors/herwig
Models/Feynrules/python/ufo2peg/check_lorentz.py
Python
gpl-3.0
38,432
0.032863
from __future__ import print_function import itertools,cmath,re from .helpers import SkipThisVertex,extractAntiSymmetricIndices from .converter import py2cpp from .lorentzparser import parse_lorentz import string,re def compare(a,b) : num=abs(a-b) den=abs(a+b) if(den == 0. and 1e-10) : return True return num/den<1e-10 def evaluate(x,model,parmsubs): import cmath return eval(x, {'cmath':cmath, 'complexconjugate':model.function_library.complexconjugate}, parmsubs) # ordering for EW VVV vertices (ordering not an issue as all same spin) def VVVordering(vertex) : pattern = "if((p1->id()==%s&&p2->id()==%s&&p3->id()==%s)"+\ "||(p1->id()==%s&&p2->id()==%s&&p3->id()==%s)||"+\ "(p1->id()==%s&&p2->id()==%s&&p3->id()==%s)) {norm(-norm());}" ordering = pattern%(vertex.particles[1].pdg_code, vertex.particles[0].pdg_code, vertex.particles[2].pdg_code, vertex.particles[0].pdg_code, vertex.particles[2].pdg_code, vertex.particles[1].pdg_code, vertex.particles[2].pdg_code, vertex.particles[1].pdg_code, vertex.particles[0].pdg_code) return ordering def tensorCouplings(vertex,value,prefactors,L,lorentztag,pos,all_couplings,order) : # split the structure into its different terms for analysis ordering="" structures = extractStructures(L) if(lorentztag == 'SST') : terms=[['P(1003,2)','P(2003,1)'], ['P(1003,1)','P(2003,2)'], ['P(-1,1)','P(-1,2)','Metric(1003,2003)'], ['Metric(1003,2003)']] signs=[1.,1.,-1.,-1.] new_couplings=[False]*len(terms) elif(lorentztag == 'FFT' ) : terms=[['P(2003,1)','Gamma(1003,2,1)'], ['P(2003,2)','Gamma(1003,2,1)'], ['P(1003,1)','Gamma(2003,2,1)'], ['P(1003,2)','Gamma(2003,2,1)'], ['P(-1,1)','Gamma(-1,2,1)','Metric(1003,2003)'], ['P(-1,2)','Gamma(-1,2,1)','Metric(1003,2003)'], ['Metric(1003,2003)']] signs=[1.,-1.,1.,-1.,-0.5,0.5,1.] new_couplings=[False]*3*len(terms) elif(lorentztag == 'VVT' ) : terms=[['P(-1,1)','P(-1,2)','Metric(1,2003)','Metric(2,1003)'], # from C term ['P(-1,1)','P(-1,2)','Metric(1,1003)','Metric(2,2003)'], # from C term ['P(-1,1)','P(-1,2)','Metric(1,2)','Metric(1003,2003)'], # from C term ['P(1,2)','P(2,1)','Metric(1003,2003)'], # from D term (sym) ['P(1,2)','P(2003,1)','Metric(2,1003)'], # 1st term ['P(1,2)','P(1003,1)','Metric(2,2003)'], # 1st swap ['P(2,1)','P(2003,2)','Metric(1,1003)'], # 2nd term ['P(2,1)','P(1003,2)','Metric(1,2003)'], # 2nd swap ['P(1003,2)','P(2003,1)','Metric(1,2)'], # 3rd term ['P(1003,1)','P(2003,2)','Metric(1
,2)'], # 3rd swap ['Metric(1,2003)','Metric(2,1003)'], # from mass term ['Metric(1,1003)','Metric(2,2003)'], # from mass term ['Metric(1,2)','Metric(1003,2003)'], # from mass term ['P(1,1)','P(2,1)','Metric(1003,2003)'], # gauge terms ['P(1,2)','P(2,2)','Metric(1003,2003)'], # gauge terms ['P(1,1)','P(2,2)','Metric(1003,2003)'], # gauge terms
['P(1003,1)','P(1,1)','Metric(2,2003)'], # gauge terms ['P(1003,2)','P(2,2)','Metric(1,2003)'], # gauge terms ['P(2003,1)','P(1,1)','Metric(2,1003)'], # gauge terms ['P(2003,2)','P(2,2)','Metric(1,1003)'], # gauge terms ] signs=[1.,1.,-1.,1.,-1.,-1.,-1.,-1.,1.,1.,1.,1.,-1.,1.,1.,0.25,-1.,-1.,-1.,-1.] new_couplings=[False]*len(terms) elif(lorentztag == 'FFVT' ) : terms = [['Gamma(2004,2,1)','Metric(3,1004)'], ['Gamma(1004,2,1)','Metric(3,2004)'], ['Gamma(3,2,1)','Metric(1004,2004)'], ['Gamma(2004,2,-1)','Metric(3,1004)'], ['Gamma(1004,2,-1)','Metric(3,2004)'], ['Gamma(3,2,-1)','Metric(1004,2004)']] signs=[1.,1.,-0.5,1.,1.,-0.5] new_couplings=[False]*3*len(terms) elif(lorentztag == 'VVVT' ) : # the F(mu nu,rho sigma lambda) terms first terms = [['P(2004,2)','Metric(1,1004)','Metric(2,3)'],['P(2004,3)','Metric(1,1004)','Metric(2,3)'], ['P(1004,2)','Metric(1,2004)','Metric(2,3)'],['P(1004,3)','Metric(1,2004)','Metric(2,3)'], ['P(2004,3)','Metric(1,3)','Metric(2,1004)'],['P(2004,1)','Metric(1,3)','Metric(2,1004)'], ['P(1004,3)','Metric(1,3)','Metric(2,2004)'],['P(1004,1)','Metric(1,3)','Metric(2,2004)'], ['P(2004,1)','Metric(1,2)','Metric(3,1004)'],['P(2004,2)','Metric(1,2)','Metric(3,1004)'], ['P(1004,1)','Metric(1,2)','Metric(3,2004)'],['P(1004,2)','Metric(1,2)','Metric(3,2004)'], ['P(3,1)','Metric(1,2004)','Metric(2,1004)'],['P(3,2)','Metric(1,2004)','Metric(2,1004)'], ['P(3,1)','Metric(1,1004)','Metric(2,2004)'],['P(3,2)','Metric(1,1004)','Metric(2,2004)'], ['P(3,1)','Metric(1,2)','Metric(1004,2004)'],['P(3,2)','Metric(1,2)','Metric(1004,2004)'], ['P(2,3)','Metric(1,2004)','Metric(3,1004)'],['P(2,1)','Metric(1,2004)','Metric(3,1004)'], ['P(2,3)','Metric(1,1004)','Metric(3,2004)'],['P(2,1)','Metric(1,1004)','Metric(3,2004)'], ['P(2,3)','Metric(1,3)','Metric(1004,2004)'],['P(2,1)','Metric(1,3)','Metric(1004,2004)'], ['P(1,2)','Metric(2,2004)','Metric(3,1004)'],['P(1,3)','Metric(2,2004)','Metric(3,1004)'], ['P(1,2)','Metric(2,1004)','Metric(3,2004)'],['P(1,3)','Metric(2,1004)','Metric(3,2004)'], ['P(1,2)','Metric(2,3)','Metric(1004,2004)'],['P(1,3)','Metric(2,3)','Metric(1004,2004)']] signs = [1.,-1.,1.,-1.,1.,-1.,1.,-1.,1.,-1.,1.,-1., 1.,-1.,1.,-1.,-1.,1.,1.,-1.,1.,-1.,-1.,1.,1.,-1.,1.,-1.,-1.,1.] new_couplings=[False]*len(terms) l = lambda c: len(pos[c]) if l(8)!=3 : ordering = VVVordering(vertex) # unknown else : raise Exception('Unknown data type "%s".' % lorentztag) iterm=0 try : for term in terms: for perm in itertools.permutations(term): label = '*'.join(perm) for istruct in range(0,len(structures)) : if label in structures[istruct] : reminder = structures[istruct].replace(label,'1.',1) loc=iterm if(reminder.find("ProjM")>=0) : reminder=re.sub("\*ProjM\(.*,.\)","",reminder) loc+=len(terms) elif(reminder.find("ProjP")>=0) : reminder=re.sub("\*ProjP\(.*,.\)","",reminder) loc+=2*len(terms) structures[istruct] = "Done" val = eval(reminder, {'cmath':cmath} )*signs[iterm] if(new_couplings[loc]) : new_couplings[loc] += val else : new_couplings[loc] = val iterm+=1 except : SkipThisVertex() # check we've handled all the terms for val in structures: if(val!="Done") : raise SkipThisVertex() # special for FFVT if(lorentztag=="FFVT") : t_couplings=new_couplings new_couplings=[False]*9 for i in range(0,9) : j = i+3*(i//3) k = i+3+3*(i//3) if( not t_couplings[j]) : new_couplings[i] = t_couplings[k] else : new_couplings[i] = t_couplings[j] # set the couplings for icoup in range(0,len(new_couplings)) : if(new_couplings[icoup]) : new_couplings[icoup] = '(%s) * (%s) *(
mentalsmash/geek-map
mentalsmash/__init__.py
Python
gpl-2.0
775
0
'''
  Copyright (C) 2012 mentalsmash.org <contact@mentalsmash.org>

  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Library General Public
  License as published by the Free Software Foundation; either
  version 2 of the License, or (at your option) any later version.

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  Library General Public License for more details.

  You should have received a copy of the GNU Library General Public
  License along with this library; if not, write to the Free Software
  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
'''
california-civic-data-coalition/django-calaccess-processed-data
calaccess_processed_elections/managers/opencivicdata/core/divisions.py
Python
mit
1,449
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Custom managers for the Division model.
"""
from __future__ import unicode_literals
from calaccess_processed.managers import BulkLoadSQLManager


class OCDAssemblyDivisionManager(BulkLoadSQLManager):
    """
    Custom manager for state assembly OCD Divisions.
    """
    def get_queryset(self):
        """
        Filters down to state assembly divisions.
        """
        qs = super(OCDAssemblyDivisionManager, self).get_queryset()
        return qs.filter(subid1='ca', subtype2='sldl')


class OCDSenateDivisionManager(BulkLoadSQLManager):
    """
    Custom manager for state senate OCD Divisions.
    """
    def get_queryset(self):
        """
        Filters down to state senate divisions.
        """
        qs = super(OCDSenateDivisionManager, self).get_queryset()
        return qs.filter(subid1='ca', subtype2='sldu')


class OCDCaliforniaDivisionManager(BulkLoadSQLManager):
    """
    Custom manager for OCD Divisions in California.
    """
    def get_queryset(self):
        """
        Filters down to divisions in California.
        """
        qs = super(OCDCaliforniaDivisionManager, self).get_queryset()
        return qs.filter(subid1='ca')

    def california(self):
        """
        Returns state of California division.
        """
        qs = super(OCDCaliforniaDivisionManager, self).get_queryset()
        return qs.get(id='ocd-division/country:us/state:ca')
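# A sketch of how these managers are typically attached; the Division model
# fields and attribute names below are assumptions, not part of this module:
#
# class Division(models.Model):
#     ...
#     objects = models.Manager()
#     assembly = OCDAssemblyDivisionManager()
#     california = OCDCaliforniaDivisionManager()
#
# Division.assembly.all()            # only 'sldl' divisions within California
# Division.california.california()   # the state-of-California division itself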
JoaquimPatriarca/senpy-for-gis
gasp/osm2lulc/grs.py
Python
gpl-3.0
17,992
0.008393
""" OpenStreetMap to Land Use/Land Cover Maps """ def osm_to_sqdb(osmXml, osmSQLITE): """ Convert OSM file to SQLITE DB """ from gasp.toshp.gdal import ogr_btw_driver return ogr_btw_driver(osmXml, osmSQLITE, supportForSpatialLite=True) def osm_project(osmSQLITE, srs_epsg): """ Reproject OSMDATA to a specific Spatial Reference System """ from gasp.gdal.proj import ogr2ogr_transform_inside_sqlite from .var import osmTableData osmtables = {} for table in osmTableData: ogr2ogr_transform_inside_sqlite( osmSQLITE, table, 4326, srs_epsg, '{}_{}'.format(table, str(srs_epsg)), sql="SELECT * FROM {}".format(osmTableData[table]) ) osmtables[table] = '{}_{}'.format(table, str(srs_epsg)) return osmtables def raster_based(osmdata, nomenclature, refRaster, lulcRst, epsg=3857, overwrite=None): """ Convert OSM Data into Land Use/Land Cover Information An raster based approach. TODO: Add detailed description """ # ************************************************************************ # # Python Modules from Reference Packages # # ************************************************************************ # import os import pandas # ************************************************************************ # # Senpy dependencies # # ************************************************************************ # from gasp.oss.ops import create_folder from gasp.grs import run_grass # Rules to be used from gasp.osm2lulc.rules.rule1 import raster_selection as selection from gasp.osm2lulc.rules.rule2 import raster_get_roads as get_roads from gasp.osm2lulc.rules.rule3_4 import raster_area as area from gasp.osm2lulc.rules.rule5 import basic_buffer from gasp.osm2lulc.rules.rule7 import assign_points_tag_to_buildings # ************************************************************************ # # Global Settings # # ************************************************************************ # from .var import PRIORITIES workspace = os.path.join(os.path.dirname(lulcRst), 'osmtolulc') # Check if workspace exists if os.path.exists(workspace): if overwrite: create_folder(workspace) else: raise ValueError('Path {} already exists'.format(workspace)) else: create_folder(workspace) # ************************************************************************ # # Convert OSM file to SQLITE DB # # ************************************************************************ # osm_db = osm_to_sqdb(osmdata, os.path.join(workspace, 'osm.sqlite')) # ************************************************************************ # # Transform SRS of OSM Data # # ************************************************************************ # osmTableData = osm_project(osm_db, epsg) # ************************************************************************ # # Start a GRASS GIS Session # # ************************************************************************ # grass_base = run_grass( workspace, grassBIN='grass74', location='grloc', srs=epsg) import grass.script as grass import grass.script.setup as gsetup gsetup.init(grass_base, workspace, 'grloc', 'PERMANENT') # ************************************************************************ # # IMPORT SOME GASP MODULES FOR GRASS GIS # # ************************************************************************ # from gasp.grsconv import grass_converter from gasp.grs.g import raster_to_region from gasp.grs.r import mosaic_raster # ************************************************************************ # # SET GRASS GIS LOCATION EXTENT # # ************************************************************************ # extRst = 
grass_converter(refRaster, 'extent_raster') raster_to_region(extRst) # ************************************************************************ # # MapResults # mergeOut = {} # ************************************************************************ # # ************************************************************************ # # 1 - Selection Rule # # ************************************************************************ # """ selOut = { cls_code : rst_name, ... } """ selOut = selection( osm_db, nomenclature, osmTableData['polygons'], workspace ) for cls in selOut: mergeOut[cls] = [selOut[cls]] # ************************************************************************ # # 2 - Get Information About Roads Location # # ************************************************************************ # """ roads = { cls_code : rst_name, ... } """ roads = get_roads( osm_db, nomenclature, osmTableData['lines'], osmTableData['polygons'], workspace ) for c
ls in roads: if cls not in mergeOut: mergeOut[cls] = [roads[cls]] else: mergeOut[cls].append(roads[cls]) # ******************
****************************************************** # # 3 - Area Upper than # # ************************************************************************ # """ auOut = { cls_code : rst_name, ... } """ auOut = area( osm_db, nomenclature, osmTableData['polygons'], workspace, UPPER=True ) for cls in auOut: if cls not in mergeOut: mergeOut[cls] = [auOut[cls]] else: mergeOut[cls].append(auOut[cls]) # ************************************************************************ # # 4 - Area Lower than # # ************************************************************************ # """ alOut = { cls_code : rst_name, ... } """ alOut = area( osm_db, nomenclature, osmTableData['polygons'], workspace, UPPER=None ) for cls in alOut: if cls not in mergeOut: mergeOut[cls] = [alOut[cls]] else: mergeOut[cls].append(alOut[cls]) # ************************************************************************ # # 5 - Get data from lines table (railway | waterway) # # ************************************************************************ # """ bfOut = { cls_code : [rst_name, ...], ... } """ bfOut = basic_buffer( osm_db, nomenclature, osmTableData['lines'], workspace ) for cls in bfOut: if cls not in mergeOut: mergeOut[cls] = bfOut[cls] else: mergeOut[cls] += bfOut[cls] # ************************************************************************ # # 7 - Assign untagged Buildings to tags # # ************************************************************************ # if nomenclature != "GLOBE_LAND_30": buildsOut = assign_points_tag_to_buildings( osm_db, nomenclature, osmTableData['points'], osmTableData['polygons'], workspace ) for cls in buildsOut: if cls not in mergeOut: mergeOut[cls] = [buildsOut[cls]] else: mergeOut[cls].append(buildsOut[cls]) # ************************************************************************ # # Produce LULC Map # # ************************************************************************ # """ Merge all results for one cls into one raster mergeOut = { cls_code : [rst_name, rst_name, ...], ... } into mergeOut = { cls_code : patched_raster, ... } """ for cls in mergeOut: if len(mergeOut[cls]) == 1: mergeOut[cls] = mergeOut[cls][0] else: mosaic_raster(mergeOut[cls], 'mosaic_{}'.format(str(cls))) mergeOut[cls] = 'mosaic_{}
mjtamlyn/archery-scoring
entries/migrations/0006_auto_20150612_2307.py
Python
bsd-3-clause
372
0
# -*- coding: utf-8 -*-
from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('entries', '0005_resultsmode_json'),
    ]

    operations = [
        migrations.AlterField(
            model_name='resultsmode',
            name='json',
            field=models.TextField(default='', blank=True),
        ),
    ]
mixturemodel-flow/tensorflow
tensorflow/contrib/boosted_trees/python/utils/losses_test.py
Python
apache-2.0
3,957
0.006823
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for trainer hooks."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import math

import numpy as np

from tensorflow.contrib.boosted_trees.python.utils import losses
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import googletest


class LossesTest(test_util.TensorFlowTestCase):

  def test_per_example_exp_loss(self):

    def _logit(p):
      return np.log(p) - np.log(1 - p)

    labels_positive = array_ops.ones([10, 1], dtypes.float32)
    weights = array_ops.ones([10, 1], dtypes.float32)
    labels_negative = array_ops.zeros([10, 1], dtypes.float32)
    predictions_probs = np.array(
        [[0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9],
         [0.99]],
        dtype=np.float32)
    prediction_logits = _logit(predictions_probs)
    eps = 0.2

    with self.test_session():
      predictions_tensor = constant_op.constant(
          prediction_logits, dtype=dtypes.float32)
      loss_for_positives, _ = losses.per_example_exp_loss(
          labels_positive, weights, predictions_tensor, eps=eps)

      loss_for_negatives, _ = losses.per_example_exp_loss(
          labels_negative, weights, predictions_tensor, eps=eps)

      pos_loss = loss_for_positives.eval()
      neg_loss = loss_for_negatives.eval()

      # For positive labels, points <= 0.3 get max loss of e.
      # For negative labels, these points have minimum loss of 1/e.
      for i in range(2):
        self.assertEqual(math.exp(1), pos_loss[i])
        self.assertEqual(math.exp(-1), neg_loss[i])

      # For positive labels, points with predictions 0.7 and larger get
      # minimum loss value of 1/e. For negative labels, these points are
      # wrongly classified and get loss e.
      for i in range(6, 10):
        self.assertEqual(math.exp(-1), pos_loss[i])
        self.assertEqual(math.exp(1), neg_loss[i])

      # Points in between 0.5-eps, 0.5+eps get loss exp(-label_m*y), where
      # y = 1/eps * x - 1/(2*eps), where x is the probability and label_m is
      # either 1 or -1 (for a label of 0).
      for i in range(2, 6):
        self.assertAlmostEqual(
            math.exp(-1.0 * (predictions_probs[i] * 1.0 / eps - 0.5 / eps)),
            pos_loss[i])
        self.assertAlmostEqual(
            math.exp(1.0 * (predictions_probs[i] * 1.0 / eps - 0.5 / eps)),
            neg_loss[i])

  def test_per_example_squared_loss(self):

    def _squared_loss(p, y):
      return np.mean(1.0 * (p - y) * (p - y))

    labels = np.array([[0.123], [224.2], [-3], [2], [.3]], dtype=np.float32)
    weights = array_ops.ones([5, 1], dtypes.float32)
    predictions = np.array(
        [[0.123], [23.2], [233], [52], [3]], dtype=np.float32)

    with self.test_session():
      loss_tensor, _ = losses.per_example_squared_loss(labels, weights,
                                                       predictions)

      loss = loss_tensor.eval()
      for i in range(5):
        self.assertAlmostEqual(
            _squared_loss(labels[i], predictions[i]), loss[i])


if __name__ == "__main__":
  googletest.main()
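# A plain-Python restatement of the piecewise loss the comments above verify;
# this mirrors the *test's* reading of per_example_exp_loss, not the library
# internals, so treat it as an illustrative sketch.
def _smoothed_exp_loss(prob, label, eps=0.2):
  """exp(-m*y) with y = x/eps - 0.5/eps clipped to [-1, 1]; m = +/-1."""
  m = 1.0 if label == 1 else -1.0
  y = min(max(prob / eps - 0.5 / eps, -1.0), 1.0)
  return math.exp(-m * y)

# _smoothed_exp_loss(0.1, 1) -> e   (confident miss, clipped to max loss)
# _smoothed_exp_loss(0.9, 1) -> 1/e (confident hit, clipped to min loss)
# _smoothed_exp_loss(0.4, 1) -> exp(0.5), since y = 0.4/0.2 - 2.5 = -0.5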
pgroudas/pants
tests/python/pants_test/tasks/test_list_goals.py
Python
apache-2.0
4,088
0.009295
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import pytest from pants.backend.core.tasks.list_goals import ListGoals from pants.backend.core.tasks.task import Task from pants.goal.goal import Goal from pants.goal.task_registrar import TaskRegistrar from pants_test.tasks.task_test_base import ConsoleTaskTestBase class ListGoalsTest(ConsoleTaskTestBase): _INSTALLED_HEADER = 'Installed goals:' _UNDOCUMENTED_HEADER = 'Undocumented goals:' _LIST_GOALS_NAME = 'goals' _LIST_GOALS_DESC = 'List all documented goals.' _LLAMA_NAME = 'llama' _LLAMA_DESC = 'With such handsome fiber, no wonder everyone loves Llamas.' _ALPACA_NAME = 'alpaca' @classmethod def task_type(cls): return ListGoals class LlamaTask(Task): pass class AlpacaTask(Task): pass def test_list_goals(self): Goal.clear() self.assert_console_output(self._INSTALLED_HEADER) TaskRegistrar(name=self._LIST_GOALS_NAME, action=ListGoals)\ .install().with_description(self._LIST_GOALS_DESC) self.assert_console_output( self._INSTALLED_HEADER, ' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), ) TaskRegistrar(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\ .install().with_description(self._LLAMA_DESC) self.assert_console_output( self._INSTALLED_HEADER, ' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), ' {0}: {1}'.format(self._LLAMA_NAME, self._LLAMA_DESC), ) TaskRegistrar(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\ .install() self.assert_console_output( self._INSTALLED_HEADER, ' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), ' {0}: {1}'.format(self._LLAMA_NAME, self._LLAMA_DESC), ) def test_list_goals_all(self): Goal.clear() TaskRegistrar(name=self._LIST_GOALS_NAME, action=ListGoals)\ .install().with_description(self._LIST_GOALS_DESC) TaskRegistrar(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\ .install().with_description(self._LLAMA_DESC) TaskRegistrar(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\ .install() self.assert_console_output( self._INSTALLED_HEADER, ' {0}: {1}'.format(self._LIST_GOALS_NAME, self._LIST_GOALS_DESC), ' {0}: {1}'.format(self._LLAMA_NAME, self._LLAMA_DESC), '', self._UNDOCUMENTED_HEADER, ' {0}'.format(self._ALPACA_NAME), options={ 'all': True } ) # TODO(John Sirois): Re-enable when fixing up ListGoals `--graph` in # https://github.com/pantsbuild/pants/issues/918 @pytest.mark.xfail def test_list_goals_graph(self): Goal.clear() TaskRegistrar(name=self._LIST_GOALS_NAME, action=ListGoals)\ .install().with_description(self._LIST_GOALS_DESC) TaskRegistrar(name=self._LLAMA_NAME, action=ListGoalsTest.LlamaTask)\ .install().with_description(self._LLAMA_DESC) TaskRegistrar(name=self._ALPACA_NAME, action=ListGoalsTest.AlpacaTask, dependencies=[self._LLAMA_NAME])\ .install() self.assert_console_output( 'digraph G {\n rankdir=LR;\n graph [compound=true];', ' subgraph cluster_goals {\n node [style=filled];\n color = blue;\n label = "goals";', ' goals_goals [label="goals"];', ' }', ' subgraph cluster_llama {\n node [style=filled];\n color = blue;\n label = "llama";', ' llama_llama [label="llama"];', ' }', ' subgraph cluster_alpaca {\n node [style=fil
led];\n color = blue;\n label = "alpaca";', ' alpaca_alpaca [label="alpaca"];', ' }',
' alpaca_alpaca -> llama_llama [ltail=cluster_alpaca lhead=cluster_llama];', '}', options={ 'graph': True } )
dkkline/CanSat14-15
unused_code/generate_static_json_data.py
Python
mit
1,797
0
import json
import argparse
import os

from listener.config import MIN_TIME, MAX_TIME, PRESSURE_AVERAGE_AMOUNT
from listener.replayer import Replayer
from listener.utilities import convert_time, average
from listener.calculate import (calculate_temp_NTC, calculate_press,
                                calculate_height, calculate_gyr)

from collections import deque
from io import StringIO

parser = argparse.ArgumentParser(prog="Replayer",
                                 description="Replay a CanSat log file for "
                                             "listener.")

parser.add_argument("input_file")

args = parser.parse_args()

input_file = os.path.abspath(args.input_file)
input_handle = open(input_file, "r")

out_file = "static.json"
out_handle = open(out_file, "w")

replayer = Replayer(MIN_TIME, MAX_TIME, input_handle, StringIO(), False, True)

full_data = replayer.start()

last_pressure_values = deque(maxlen=PRESSURE_AVERAGE_AMOUNT)

data_temperature = []
data_pressure = []
data_height = []
data_gyro = []

for datapoint in full_data:
    if not MIN_TIME <= datapoint["Time"] <= MAX_TIME:
        continue  # Skip

    pressure = calculate_press(datapoint["Press"])
    last_pressure_values.append(pressure)

    time = convert_time(datapoint["Time"] - MIN_TIME)

    data_temperature.append([time, calculate_temp_NTC(datapoint["NTC"])])
    pressure = average(last_pressure_values)
    data_pressure.append([time, pressure])
    data_height.append([time, calculate_height(pressure)])
    data_gyro.append([time, calculate_gyr(datapoint["GyrZ"])])

all_data = {
    "Temp": data_temperature,
    "Press": data_pressure,
    "Height": data_height,
    "Gyro": data_gyro
}

out_handle.write(json.dumps(all_data))

input_handle.close()
out_handle.close()
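# The rolling pressure average above relies on deque(maxlen=N) silently
# dropping the oldest sample; a self-contained illustration (not called
# by the script itself):
def _demo_rolling_average(samples=(100.0, 101.0, 103.0, 99.0), width=3):
    window = deque(maxlen=width)
    averages = []
    for sample in samples:
        window.append(sample)
        averages.append(sum(window) / len(window))
    return averages  # [100.0, 100.5, 101.33..., 101.0] for the defaults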
erdc/proteus
scripts/ppmatlab.py
Python
mit
3,523
0.023843
## Automatically adapted for numpy.oldnumeric Apr 14, 2008 by -c

from builtins import range


def writeMeshMatlabFormat(mesh, meshFileBase):
    """
    build array data structures for matlab finite element mesh
    representation and write to a file to view and play with in matlab

    in matlab can then print mesh with

      pdemesh(p, e, t)

    where

      p is the vertex or point matrix
      e is the edge matrix, and
      t is the element matrix

    points matrix is [2 x num vertices]
      format:
        row 1 = x coord, row 2 = y coord for nodes in mesh

    edge matrix is [7 x num edges]
      format:
        row 1 = start vertex number
        row 2 = end vertex number
        row 3 = start value in edge parameterization, should be 0
        row 4 = end value in edge parameterization, should be 1
        row 5 = global edge id, base 1
        row 6 = subdomain on left? always 0 for now
        row 7 = subdomain on right? always 1 for now

    element matrix is [4 x num elements]
      format:
        row 1 = vertex 1 global number
        row 2 = vertex 2 global number
        row 3 = vertex 3 global number
        row 4 = triangle subdomain number
      where 1,2,3 is a local counter clockwise numbering of vertices in
      triangle
    """
    import numpy as numpy
    matlabBase = 1
    p = numpy.zeros((2, mesh['nNodes_global']), numpy.float_)
    e = numpy.zeros((7, mesh['nElementBoundaries_global']), numpy.float_)
    t = numpy.zeros((4, mesh['nElements_global']), numpy.float_)

    # load p,e,t and write file
    mfile = open(meshFileBase + '.m', 'w')

    mfile.write('p = [ ... \n')
    for nN in range(mesh['nNodes_global']):
        p[0, nN] = mesh['nodeArray'][nN, 0]
        p[1, nN] = mesh['nodeArray'][nN, 1]
        mfile.write('%g %g \n' % tuple(p[:, nN]))
    mfile.write(']; \n')
    mfile.write("p = p\';\n")  # need transpose for matlab

    mfile.write('e = [ ... \n')
    for ebN in range(mesh['nElementBoundaries_global']):
        e[0, ebN] = mesh['elementBoundaryNodesArray'][ebN, 0] + matlabBase  # global node number of start node, base 1
        e[1, ebN] = mesh['elementBoundaryNodesArray'][ebN, 1] + matlabBase  # global node number of end node, base 1
        e[2, ebN] = 0.0  # edge param. is 0 to 1
        e[3, ebN] = 1.0
        e[4, ebN] = ebN + matlabBase  # global edge number, base 1
        e[5, ebN] = 0  # subdomain to left
        e[6, ebN] = 1  # subdomain to right
        mfile.write('%g %g %g %g %g %g %g \n' % tuple(e[:, ebN]))
    mfile.write(']; \n')
    mfile.write("e = e\';\n")  # need transpose for matlab

    # write triangles last
    mfile.write('t = [ ... \n')
    for eN in range(mesh['nElements_global']):
        t[0, eN] = mesh['elementNodesArray'][eN, 0] + matlabBase  # global node number for vertex 0
        t[1, eN] = mesh['elementNodesArray'][eN, 1] + matlabBase  # global node number for vertex 1
        t[2, eN] = mesh['elementNodesArray'][eN, 2] + matlabBase  # global node number for vertex 2
        t[3, eN] = 1  # subdomain id
        mfile.write('%g %g %g %g \n' % tuple(t[:, eN]))
    mfile.write(']; \n')
    mfile.write("t = t\';\n")  # need transpose for matlab

    mfile.close()
    return p, e, t


########################################################################
if __name__ == '__main__':
    import os, shelve
    import ppmatlab, numpy.oldnumeric as numpy

    os.listdir('./results')
    filename = './results/re_forsyth2_ss_2d_pre_forsyth2_ss_2d_c0p1_n_mesh_results.dat'
    res = shelve.open(filename)
    mesh = res['mesh']

    mmfile = 'forsyth2MeshMatlab'
    p, e, t = ppmatlab.writeMeshMatlabFormat(mesh, mmfile)
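# A worked example of the matrix layout documented above for a mesh of one
# triangle (base-1 vertex ids, a single subdomain). Uses modern numpy for
# illustration only; not called anywhere:
def _example_single_triangle():
    import numpy as np
    p = np.array([[0.0, 1.0, 0.0],      # row 1: x coordinates
                  [0.0, 0.0, 1.0]])     # row 2: y coordinates
    e = np.array([[1, 2, 3],            # start vertices (base 1)
                  [2, 3, 1],            # end vertices (base 1)
                  [0, 0, 0],            # parameterization start
                  [1, 1, 1],            # parameterization end
                  [1, 2, 3],            # global edge ids (base 1)
                  [0, 0, 0],            # subdomain to the left
                  [1, 1, 1]])           # subdomain to the right
    t = np.array([[1], [2], [3], [1]])  # one triangle in subdomain 1
    return p, e, t                      # shapes (2, 3), (7, 3), (4, 1)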
hlzz/dotfiles
graphics/VTK-7.0.0/Filters/Geometry/Testing/Python/TestGhostPoints.py
Python
bsd-3-clause
2,835
0.000705
import sys
import vtk
from vtk.test import Testing


class TestGhostPoints(Testing.vtkTest):
    def testLinear(self):
        pts = vtk.vtkPoints()
        pts.SetNumberOfPoints(4)
        pts.InsertPoint(0, (0, 0, 0))
        pts.InsertPoint(1, (1, 0, 0))
        pts.InsertPoint(2, (0.5, 1, 0))
        pts.InsertPoint(3, (0.5, 0.5, 1))

        te = vtk.vtkTetra()
        ptIds = te.GetPointIds()
        for i in range(4):
            ptIds.SetId(i, i)

        ghosts = vtk.vtkUnsignedCharArray()
        ghosts.SetName(vtk.vtkDataSetAttributes.GhostArrayName())
        ghosts.SetNumberOfTuples(4)
        ghosts.SetValue(0, 1)
        ghosts.SetValue(1, 1)
        ghosts.SetValue(2, 1)
        ghosts.SetValue(3, 0)

        grid = vtk.vtkUnstructuredGrid()
        grid.Allocate(1, 1)
        grid.InsertNextCell(te.GetCellType(), te.GetPointIds())
        grid.SetPoints(pts)
        grid.GetPointData().AddArray(ghosts)

        dss = vtk.vtkDataSetSurfaceFilter()
        dss.SetInputData(grid)
        dss.Update()

        self.assertEqual(dss.GetOutput().GetNumberOfCells(), 3)

    def testNonLinear(self):
        pts = vtk.vtkPoints()
        pts.SetNumberOfPoints(10)
        pts.InsertPoint(0, (0, 0, 0))
        pts.InsertPoint(1, (1, 0, 0))
        pts.InsertPoint(2, (0.5, 1, 0))
        pts.InsertPoint(3, (0.5, 0.5, 1))
        pts.InsertPoint(4, (0.5, 0, 0))
        pts.InsertPoint(5, (1.25, 0.5, 0))
        pts.InsertPoint(6, (0.25, 0.5, 0))
        pts.InsertPoint(7, (0.25, 0.25, 0.5))
        pts.InsertPoint(8, (0.75, 0.25, 0.5))
        pts.InsertPoint(9, (0.5, 0.75, 0.5))

        te = vtk.vtkQuadraticTetra()
        ptIds = te.GetPointIds()
        for i in range(10):
            ptIds.SetId(i, i)

        ghosts = vtk.vtkUnsignedCharArray()
        ghosts.SetName(vtk.vtkDataSetAttributes.GhostArrayName())
        ghosts.SetNumberOfTuples(10)
        ghosts.SetValue(0, 1)
        ghosts.SetValue(1, 1)
        ghosts.SetValue(2, 1)
        ghosts.SetValue(3, 0)
        ghosts.SetValue(4, 1)
        ghosts.SetValue(5, 1)
        ghosts.SetValue(6, 1)
        ghosts.SetValue(7, 0)
        ghosts.SetValue(8, 0)

        grid = vtk.vtkUnstructuredGrid()
        grid.Allocate(1, 1)
        grid.InsertNextCell(te.GetCellType(), te.GetPointIds())
        grid.SetPoints(pts)
        grid.GetPointData().AddArray(ghosts)

        ugg = vtk.vtkUnstructuredGridGeometryFilter()
        ugg.SetInputData(grid)

        dss = vtk.vtkDataSetSurfaceFilter()
        dss.SetNonlinearSubdivisionLevel(2)
        dss.SetInputConnection(ugg.GetOutputPort())
        dss.Update()

        self.assertEqual(dss.GetOutput().GetNumberOfCells(), 48)


if __name__ == "__main__":
    Testing.main([(TestGhostPoints, 'test')])
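Both tests rely on the same convention: a point whose entry in the ghost array is nonzero is a duplicate owned by another partition, and the surface filter drops any external face whose corners are all ghost points (hence 3 of the tetrahedron's 4 faces survive in the linear case). A small helper in the same spirit, assuming the standard vtk Python bindings; the helper name is hypothetical:

import vtk

def make_ghost_array(n_points, ghost_ids):
    # Hypothetical helper: build a point ghost array in the convention used
    # above (1 = duplicate/ghost point, 0 = locally owned point).
    ghosts = vtk.vtkUnsignedCharArray()
    ghosts.SetName(vtk.vtkDataSetAttributes.GhostArrayName())
    ghosts.SetNumberOfTuples(n_points)
    for i in range(n_points):
        ghosts.SetValue(i, 1 if i in ghost_ids else 0)
    return ghosts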
akiokio/centralfitestoque
src/.pycharm_helpers/python_stubs/-1807332816/gi/_gobject/_gobject.py
Python
bsd-2-clause
21,406
0.010511
# encoding: utf-8
# module gi._gobject._gobject
# from /usr/lib/python2.7/dist-packages/gi/_gobject/_gobject.so
# by generator 1.130
# no doc
# no imports

# Variables with simple values

G_MAXDOUBLE = 1.7976931348623157e+308
G_MAXFLOAT = 3.4028234663852886e+38

G_MAXINT = 2147483647
G_MAXINT16 = 32767
G_MAXINT32 = 2147483647
G_MAXINT64 = 9223372036854775807L
G_MAXINT8 = 127

G_MAXLONG = 2147483647L
G_MAXOFFSET = 9223372036854775807L
G_MAXSHORT = 32767
G_MAXSIZE = 4294967295L
G_MAXSSIZE = 2147483647L

G_MAXUINT = 4294967295L
G_MAXUINT16 = 65535
G_MAXUINT32 = 4294967295L
G_MAXUINT64 = 18446744073709551615L
G_MAXUINT8 = 255

G_MAXULONG = 4294967295L
G_MAXUSHORT = 65535

G_MINDOUBLE = 2.2250738585072014e-308
G_MINFLOAT = 1.1754943508222875e-38

G_MININT = -2147483648
G_MININT16 = -32768
G_MININT32 = -2147483648
G_MININT64 = -9223372036854775808L
G_MININT8 = -128

G_MINLONG = -2147483648L
G_MINOFFSET = -9223372036854775808L
G_MINSHORT = -32768

PARAM_CONSTRUCT = 4
PARAM_CONSTRUCT_ONLY = 8
PARAM_LAX_VALIDATION = 16
PARAM_READABLE = 1
PARAM_READWRITE = 3
PARAM_WRITABLE = 2

SIGNAL_ACTION = 32
SIGNAL_DETAILED = 16
SIGNAL_NO_HOOKS = 64
SIGNAL_NO_RECURSE = 8
SIGNAL_RUN_CLEANUP = 4
SIGNAL_RUN_FIRST = 1
SIGNAL_RUN_LAST = 2

# functions

def add_emission_hook(*args, **kwargs): # real signature unknown
    pass

def list_properties(*args, **kwargs): # real signature unknown
    pass

def new(*args, **kwargs): # real signature unknown
    pass

def remove_emission_hook(*args, **kwargs): # real signature unknown
    pass

def signal_accumulator_true_handled(*args, **kwargs): # real signature unknown
    pass

def signal_list_ids(*args, **kwargs): # real signature unknown
    pass

def signal_list_names(*args, **kwargs): # real signature unknown
    pass

def signal_lookup(*args, **kwargs): # real signature unknown
    pass

def signal_name(*args, **kwargs): # real signature unknown
    pass

def signal_new(*args, **kwargs): # real signature unknown
    pass

def signal_query(*args, **kwargs): # real signature unknown
    pass

def threads_init(*args, **kwargs): # real signature unknown
    pass

def type_children(*args, **kwargs): # real signature unknown
    pass

def type_from_name(*args, **kwargs): # real signature unknown
    pass

def type_interfaces(*args, **kwargs): # real signature unknown
    pass

def type_is_a(*args, **kwargs): # real signature unknown
    pass

def type_name(*args, **kwargs): # real signature unknown
    pass

def type_parent(*args, **kwargs): # real signature unknown
    pass

def type_register(*args, **kwargs): # real signature unknown
    pass

def _install_metaclass(*args, **kwargs): # real signature unknown
    pass

# classes

class GBoxed(object):
    # no doc
    def copy(self, *args, **kwargs): # real signature unknown
        pass

    def __eq__(self, y): # real signature unknown; restored from __doc__
        """ x.__eq__(y) <==> x==y """
        pass

    def __ge__(self, y): # real signature unknown; restored from __doc__
        """ x.__ge__(y) <==> x>=y """
        pass

    def __gt__(self, y): # real signature unknown; restored from __doc__
        """ x.__gt__(y) <==> x>y """
        pass

    def __hash__(self): # real signature unknown; restored from __doc__
        """ x.__hash__() <==> hash(x) """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __le__(self, y): # real signature unknown; restored from __doc__
        """ x.__le__(y) <==> x<=y """
        pass

    def __lt__(self, y): # real signature unknown; restored from __doc__
        """ x.__lt__(y) <==> x<y """
        pass

    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass

    def __ne__(self, y): # real signature unknown; restored from __doc__
        """ x.__ne__(y) <==> x!=y """
        pass

    def __repr__(self): # real signature unknown; restored from __doc__
        """ x.__repr__() <==> repr(x) """
        pass

    __gtype__ = None # (!) real value is ''

class GEnum(int):
    # no doc
    def __eq__(self, y): # real signature unknown; restored from __doc__
        """ x.__eq__(y) <==> x==y """
        pass

    def __ge__(self, y): # real signature unknown; restored from __doc__
        """ x.__ge__(y) <==> x>=y """
        pass

    def __gt__(self, y): # real signature unknown; restored from __doc__
        """ x.__gt__(y) <==> x>y """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __le__(self, y): # real signature unknown; restored from __doc__
        """ x.__le__(y) <==> x<=y """
        pass

    def __lt__(self, y): # real signature unknown; restored from __doc__
        """ x.__lt__(y) <==> x<y """
        pass

    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass

    def __ne__(self, y): # real signature unknown; restored from __doc__
        """ x.__ne__(y) <==> x!=y """
        pass

    def __reduce__(self, *args, **kwargs): # real signature unknown
        pass

    def __repr__(self): # real signature unknown; restored from __doc__
        """ x.__repr__() <==> repr(x) """
        pass

    def __str__(self): # real signature unknown; restored from __doc__
        """ x.__str__() <==> str(x) """
        pass

    value_name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default

    value_nick = property(lambda self: object(), lambda self, v: None, lambda self: None) # default

    __gtype__ = None # (!) real value is ''

class GFlags(int):
    # no doc
    def __add__(self, y): # real signature unknown; restored from __doc__
        """ x.__add__(y) <==> x+y """
        pass

    def __and__(self, y): # real signature unknown; restored from __doc__
        """ x.__and__(y) <==> x&y """
        pass

    def __divmod__(self, y): # real signature unknown; restored from __doc__
        """ x.__divmod__(y) <==> divmod(x, y) """
        pass

    def __div__(self, y): # real signature unknown; restored from __doc__
        """ x.__div__(y) <==> x/y """
        pass

    def __eq__(self, y): # real signature unknown; restored from __doc__
        """ x.__eq__(y) <==> x==y """
        pass

    def __ge__(self, y): # real signature unknown; restored from __doc__
        """ x.__ge__(y) <==> x>=y """
        pass

    def __gt__(self, y): # real signature unknown; restored from __doc__
        """ x.__gt__(y) <==> x>y """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    def __le__(self, y): # real signature unknown; restored from __doc__
        """ x.__le__(y) <==> x<=y """
        pass

    def __lt__(self, y): # real signature unknown; restored from __doc__
        """ x.__lt__(y) <==> x<y """
        pass

    def __mod__(self, y): # real signature unknown; restored from __doc__
        """ x.__mod__(y) <==> x%y """
        pass

    def __mul__(self, y): # real signature unknown; restored from __doc__
        """ x.__mul__(y) <==> x*y """
        pass

    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass

    def __ne__(self, y): # real signature unknown; restored from __doc__
        """ x.__ne__(y) <==> x!=y """
        pass

    def __or__(self, y): # real signature unknown; restored from __doc__
        """ x.__or__(y) <==> x|y """
        pass

    def __pow__(self, y, z=None): # real signature unknown; restored from __doc__
        """ x.__pow__(y[, z]) <==> pow(x, y[, z]) """
        pass

    def __radd__(self, y): # real signature unknown; restored from __doc__
        """ x.__radd__(y) <==> y+x """
        pass

    def __rand__(self, y): # real signature unknown; restored from __doc__
        """ x.__rand__(y) <==> y&x """
        pass

    def __rdi
kevin-intel/scikit-learn
sklearn/gaussian_process/_gpr.py
Python
bsd-3-clause
22,428
0
"""Gaussian processes regression.""" # Authors: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de> # Modified by: Pete Green <p.l.green@liverpool.ac.uk> # License: BSD 3 clause import warnings from operator import itemgetter import numpy as np from scipy.linalg import cholesky, cho_solve import scipy.optimize from ..base import BaseEstimator, RegressorMixin, clone from ..base import MultiOutputMixin from .kernels import RBF, ConstantKernel as C from ..preprocessing._data import _handle_zeros_in_scale from ..utils import check_random_state from ..utils.optimize import _check_optimize_result class GaussianProcessRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator): """Gaussian process regression (GPR). The implementation is based on Algorithm 2.1 of Gaussian Processes for Machine Learning (GPML) by Rasmussen and Williams. In addition to standard scikit-learn estimator API, GaussianProcessRegressor: * allows prediction without prior fitting (based on the GP prior) * provides an additional method `sample_y(X)`, which evaluates samples drawn from the GPR (prior or posterior) at given inputs * exposes a method `log_marginal_likelihood(theta)`, which can be used externally for other ways of selecting hyperparameters, e.g., via Markov chain Monte Carlo. Read more in the :ref:`User Guide <gaussian_process>`. .. versionadded:: 0.18 Parameters ---------- kernel : kernel instance, default=None The kernel specifying the covariance function of the GP. If None is passed, the kernel ``ConstantKernel(1.0, constant_value_bounds="fixed" * RBF(1.0, length_scale_bounds="fixed")`` is used as default. Note that the kernel hyperparameters are optimized during fitting unless the bounds are marked as "fixed". alpha : float or ndarray of shape (n_samples,), default=1e-10 Value added to the diagonal of the kernel matrix during fitting. This can prevent a potential numerical issue during fitting, by ensuring that the calculated values form a positive definite matrix. It can also be interpreted as the variance of additional Gaussian measurement noise on the training observations. Note that this is different from using a `WhiteKernel`. If an array is passed, it must have the same number of entries as the data used for fitting and is used as datapoint-dependent noise level. Allowing to specify the noise level directly as a parameter is mainly for convenience and for consistency with Ridge. optimizer : "fmin_l_bfgs_b" or callable, default="fmin_l_bfgs_b" Can either be one of the internally supported optimizers for optimizing the kernel's parameters, specified by a string, or an externally defined optimizer passed as a callable. If a callable is passed, it must have the signature:: def optimizer(obj_func, initial_theta, bounds): # * 'obj_func': the objective function to be minimized, which # takes the hyperparameters theta as a parameter and an # optional flag eval_gradient, which determines if the # gradient is returned additionally to the function value # * 'initial_theta': the initial value for theta, which can be # used by local optimizers # * 'bounds': the bounds on the values of theta .... # Returned are the best found hyperparameters theta and # the corresponding value of the target function. return theta_opt, func_min Per default, the 'L-BFGS-B' algorithm from scipy.optimize.minimize is used. If None is passed, the kernel's parameters are kept fixed. Available internal optimizers are:: 'fmin_l_bfgs_b' n_restarts_optimizer : int, default=0 The number of restarts of the optimizer for findin
g the kernel's parameters which maximize the log-marginal likelihood. The first run of the optimizer is performed from the kernel's initial parameters, the remaining ones (if any) from thetas sampled log-uniform randomly from the space of allowed theta-values. If greater than 0, all bounds must be fini
te. Note that n_restarts_optimizer == 0 implies that one run is performed. normalize_y : bool, default=False Whether the target values y are normalized, the mean and variance of the target values are set equal to 0 and 1 respectively. This is recommended for cases where zero-mean, unit-variance priors are used. Note that, in this implementation, the normalisation is reversed before the GP predictions are reported. .. versionchanged:: 0.23 copy_X_train : bool, default=True If True, a persistent copy of the training data is stored in the object. Otherwise, just a reference to the training data is stored, which might cause predictions to change if the data is modified externally. random_state : int, RandomState instance or None, default=None Determines random number generation used to initialize the centers. Pass an int for reproducible results across multiple function calls. See :term:`Glossary <random_state>`. Attributes ---------- X_train_ : array-like of shape (n_samples, n_features) or list of object Feature vectors or other representations of training data (also required for prediction). y_train_ : array-like of shape (n_samples,) or (n_samples, n_targets) Target values in training data (also required for prediction) kernel_ : kernel instance The kernel used for prediction. The structure of the kernel is the same as the one passed as parameter but with optimized hyperparameters L_ : array-like of shape (n_samples, n_samples) Lower-triangular Cholesky decomposition of the kernel in ``X_train_`` alpha_ : array-like of shape (n_samples,) Dual coefficients of training data points in kernel space log_marginal_likelihood_value_ : float The log-marginal-likelihood of ``self.kernel_.theta`` n_features_in_ : int Number of features seen during :term:`fit`. .. versionadded:: 0.24 Examples -------- >>> from sklearn.datasets import make_friedman2 >>> from sklearn.gaussian_process import GaussianProcessRegressor >>> from sklearn.gaussian_process.kernels import DotProduct, WhiteKernel >>> X, y = make_friedman2(n_samples=500, noise=0, random_state=0) >>> kernel = DotProduct() + WhiteKernel() >>> gpr = GaussianProcessRegressor(kernel=kernel, ... random_state=0).fit(X, y) >>> gpr.score(X, y) 0.3680... >>> gpr.predict(X[:2,:], return_std=True) (array([653.0..., 592.1...]), array([316.6..., 316.6...])) """ def __init__(self, kernel=None, *, alpha=1e-10, optimizer="fmin_l_bfgs_b", n_restarts_optimizer=0, normalize_y=False, copy_X_train=True, random_state=None): self.kernel = kernel self.alpha = alpha self.optimizer = optimizer self.n_restarts_optimizer = n_restarts_optimizer self.normalize_y = normalize_y self.copy_X_train = copy_X_train self.random_state = random_state def fit(self, X, y): """Fit Gaussian process regression model. Parameters ---------- X : array-like of shape (n_samples, n_features) or list of object Feature vectors or other representations of training data. y : array-like of shape (n_samples,) or (n_samples, n_targets) Target values Returns ------- self : returns an instance of self. """ if self.kernel is None: # Use an RBF kernel as default self.kernel_ = C(1.0, constant_value_bounds="fixed") \ * RBF(1.0, length_scale_bounds="fixed") else:
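The docstring above spells out the callable signature required for an external optimizer. A sketch of such a callable, assuming (as the comment block states) that obj_func returns the function value together with its gradient; the name my_optimizer is illustrative, not part of scikit-learn:

import scipy.optimize

def my_optimizer(obj_func, initial_theta, bounds):
    # obj_func is assumed to return (value, gradient), matching the
    # eval_gradient behaviour described in the docstring, so jac=True works.
    result = scipy.optimize.minimize(obj_func, initial_theta,
                                     method="L-BFGS-B", jac=True,
                                     bounds=bounds)
    return result.x, result.fun

# Usage sketch:
# gpr = GaussianProcessRegressor(kernel=my_kernel, optimizer=my_optimizer)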
azaghal/ansible
lib/ansible/modules/debconf.py
Python
gpl-3.0
5,533
0.002169
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2014, Brian Coca <briancoca+ansible@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

DOCUMENTATION = r'''
---
module: debconf
short_description: Configure a .deb package
description:
  - Configure a .deb package using debconf-set-selections.
  - Or just query existing selections.
version_added: "1.6"
notes:
  - This module requires the command line debconf tools.
  - A number of questions have to be answered (depending on the package).
    Use 'debconf-show <package>' on any Debian or derivative with the package
    installed to see questions/settings available.
  - Some distros will always record tasks involving the setting of passwords
    as changed. This is due to debconf-get-selections masking passwords.
requirements:
  - debconf
  - debconf-utils
options:
  name:
    description:
      - Name of package to configure.
    type: str
    required: true
    aliases: [ pkg ]
  question:
    description:
      - A debconf configuration setting.
    type: str
    aliases: [ selection, setting ]
  vtype:
    description:
      - The type of the value supplied.
      - C(seen) was added in Ansible 2.2.
    type: str
    choices: [ boolean, error, multiselect, note, password, seen, select, string, text, title ]
  value:
    description:
      - Value to set the configuration to.
    type: str
    aliases: [ answer ]
  unseen:
    description:
      - Do not set 'seen' flag when pre-seeding.
    type: bool
    default: no
author:
- Brian Coca (@bcoca)
'''

EXAMPLES = r'''
- name: Set default locale to fr_FR.UTF-8
  debconf:
    name: locales
    question: locales/default_environment_locale
    value: fr_FR.UTF-8
    vtype: select

- name: Set to generate locales
  debconf:
    name: locales
    question: locales/locales_to_be_generated
    value: en_US.UTF-8 UTF-8, fr_FR.UTF-8 UTF-8
    vtype: multiselect

- name: Accept oracle license
  debconf:
    name: oracle-java7-installer
    question: shared/accepted-oracle-license-v1-1
    value: 'true'
    vtype: select

- name: Specifying package you can register/return the list of questions and current values
  debconf:
    name: tzdata
'''

from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule


def get_selections(module, pkg):
    cmd = [module.get_bin_path('debconf-show', True), pkg]
    rc, out, err = module.run_command(' '.join(cmd))

    if rc != 0:
        module.fail_json(msg=err)

    selections = {}

    for line in out.splitlines():
        (key, value) = line.split(':', 1)
        selections[key.strip('*').strip()] = value.strip()

    return selections


def set_selection(module, pkg, question, vtype, value, unseen):
    setsel = module.get_bin_path('debconf-set-selections', True)
    cmd = [setsel]
    if unseen:
        cmd.append('-u')

    if vtype == 'boolean':
        if value == 'True':
            value = 'true'
        elif value == 'False':
            value = 'false'
    data = ' '.join([pkg, question, vtype, value])

    return module.run_command(cmd, data=data)


def main():
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', required=True, aliases=['pkg']),
            question=dict(type='str', aliases=['selection', 'setting']),
            vtype=dict(type='str', choices=['boolean', 'error', 'multiselect', 'note', 'password',
                                            'seen', 'select', 'string', 'text', 'title']),
            value=dict(type='str', aliases=['answer']),
            unseen=dict(type='bool'),
        ),
        required_together=(['question', 'vtype', 'value'],),
        supports_check_mode=True,
    )

    # TODO: enable passing array of options and/or debconf file from get-selections dump
    pkg = module.params["name"]
    question = module.params["question"]
    vtype = module.params["vtype"]
    value = module.params["value"]
    unseen = module.params["unseen"]

    prev = get_selections(module, pkg)

    changed = False
    msg = ""

    if question is not None:
        if vtype is None or value is None:
            module.fail_json(msg="when supplying a question you must supply a valid vtype and value")

        # if question doesn't exist, value cannot match
        if question not in prev:
            changed = True
        else:
            existing = prev[question]

            # ensure we compare booleans supplied to the way debconf sees them (true/false strings)
            if vtype == 'boolean':
                value = to_text(value).lower()
                existing = to_text(prev[question]).lower()

            if value != existing:
                changed = True

        if changed:
            if not module.check_mode:
                rc, msg, e = set_selection(module, pkg, question, vtype, value, unseen)
                if rc:
                    module.fail_json(msg=e)

            curr = {question: value}
            if question in prev:
                prev = {question: prev[question]}
            else:
                prev[question] = ''
            if module._diff:
                after = prev.copy()
                after.update(curr)
                diff_dict = {'before': prev, 'after': after}
            else:
                diff_dict = {}

            module.exit_json(changed=changed, msg=msg, current=curr, previous=prev, diff=diff_dict)

    module.exit_json(changed=changed, msg=msg, current=prev)


if __name__ == '__main__':
    main()
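Outside of Ansible, set_selection() amounts to feeding one 'pkg question vtype value' line to debconf-set-selections on stdin. A standalone sketch of the same interaction, assuming the debconf tools are installed and the process has sufficient privileges:

import subprocess

# One selection per line, space-separated, exactly as set_selection() builds it.
line = "locales locales/default_environment_locale select fr_FR.UTF-8"
subprocess.run(["debconf-set-selections"], input=line, text=True, check=True)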
adoosii/edx-platform
lms/djangoapps/discussion_api/tests/test_views.py
Python
agpl-3.0
38,334
0.001513
""" Tests for Discussion API views """ from datetime import datetime import json from urlparse import urlparse import ddt import httpretty import mock from pytz import UTC from django.core.urlresolvers import reverse from rest_framework.test import APIClient from common.test.utils import disable_signal from discussion_api import api from discussion_api.tests.utils import ( CommentsServiceMockMixin, make_minimal_cs_comment, make_minimal_cs_thread, ) from student.tests.factories import CourseEnrollmentFactory, UserFactory from util.testing import UrlResetMixin from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory class DiscussionAPIViewTestMixin(CommentsServiceMockMixin, UrlResetMixin): """ Mixin for common code in tests of Discussion API views. This includes creation of common structures (e.g. a course, user, and enrollment), logging in the test client, utility functions, and a test case for unauthenticated requests. Subclasses must set self.url in their setUp methods. """ client_class = APIClient @mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) def setUp(self): super(DiscussionAPIViewTestMixin, self).setUp() self.maxDiff = None # pylint: disable=invalid-name self.course = CourseFactory.create( org="x", course="y", run="z", start=datetime.now(UTC), discussion_topics={"Test Topic": {"id": "test_topic"}} ) self.password = "password" self.user = UserFactory.create(password=self.password) CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id) self.client.login(username=self.user.username, password=self.password) def assert_response_correct(self, response, expected_status, expected_content): """ Assert that the response has the given status code and parsed content """ self.assertEqual(response.status_code, expected_status) parsed_content = json.loads(response.content) self.assertEqual(parsed_content, expected_content) def test_not_authenticated(self): self.client.logout() response = self.client.get(self.url) self.assert_response_correct( response, 401, {"developer_message": "Authentication credentials were not provided."} ) @mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) class CourseViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CourseView""" def setUp(self): super(CourseViewTest, self).setUp() self.url = reverse("discussion_course", kwargs={"course_id": unicode(self.course.id)}) def test_404(self): response = self.client.get( reverse("course_topics", kwargs={"cou
rse_id": "non/existent/course"}) ) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_get_success
(self): response = self.client.get(self.url) self.assert_response_correct( response, 200, { "id": unicode(self.course.id), "blackouts": [], "thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz", "following_thread_list_url": ( "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&following=True" ), "topics_url": "http://testserver/api/discussion/v1/course_topics/x/y/z", } ) @mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) class CourseTopicsViewTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for CourseTopicsView""" def setUp(self): super(CourseTopicsViewTest, self).setUp() self.url = reverse("course_topics", kwargs={"course_id": unicode(self.course.id)}) def test_404(self): response = self.client.get( reverse("course_topics", kwargs={"course_id": "non/existent/course"}) ) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_get_success(self): response = self.client.get(self.url) self.assert_response_correct( response, 200, { "courseware_topics": [], "non_courseware_topics": [{ "id": "test_topic", "name": "Test Topic", "children": [], "thread_list_url": "http://testserver/api/discussion/v1/threads/?course_id=x%2Fy%2Fz&topic_id=test_topic", }], } ) @ddt.ddt @httpretty.activate @mock.patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True}) class ThreadViewSetListTest(DiscussionAPIViewTestMixin, ModuleStoreTestCase): """Tests for ThreadViewSet list""" def setUp(self): super(ThreadViewSetListTest, self).setUp() self.author = UserFactory.create() self.url = reverse("thread-list") def test_course_id_missing(self): response = self.client.get(self.url) self.assert_response_correct( response, 400, {"field_errors": {"course_id": {"developer_message": "This field is required."}}} ) def test_404(self): response = self.client.get(self.url, {"course_id": unicode("non/existent/course")}) self.assert_response_correct( response, 404, {"developer_message": "Not found."} ) def test_basic(self): self.register_get_user_response(self.user, upvoted_ids=["test_thread"]) source_threads = [{ "type": "thread", "id": "test_thread", "course_id": unicode(self.course.id), "commentable_id": "test_topic", "group_id": None, "user_id": str(self.author.id), "username": self.author.username, "anonymous": False, "anonymous_to_peers": False, "created_at": "2015-04-28T00:00:00Z", "updated_at": "2015-04-28T11:11:11Z", "thread_type": "discussion", "title": "Test Title", "body": "Test body", "pinned": False, "closed": False, "abuse_flaggers": [], "votes": {"up_count": 4}, "comments_count": 5, "unread_comments_count": 3, "read": False, "endorsed": False }] expected_threads = [{ "id": "test_thread", "course_id": unicode(self.course.id), "topic_id": "test_topic", "group_id": None, "group_name": None, "author": self.author.username, "author_label": None, "created_at": "2015-04-28T00:00:00Z", "updated_at": "2015-04-28T11:11:11Z", "type": "discussion", "title": "Test Title", "raw_body": "Test body", "rendered_body": "<p>Test body</p>", "pinned": False, "closed": False, "following": False, "abuse_flagged": False, "voted": True, "vote_count": 4, "comment_count": 5, "unread_comment_count": 3, "comment_list_url": "http://testserver/api/discussion/v1/comments/?thread_id=test_thread", "endorsed_comment_list_url": None, "non_endorsed_comment_list_url": None, "editable_fields": ["abuse_flagged", "following", "voted"], "read": False, "has_endorsed": False, }] 
self.register_get_threads_response(source_threads, page=1, num_pages=2) response = self.client.get(self.url, {"course_id": unicode(self.course.id)}) s