Dataset schema (per-column type and value range):

  repo_name   stringlengths   5 to 100
  path        stringlengths   4 to 231
  language    stringclasses   1 value
  license     stringclasses   15 values
  size        int64           6 to 947k
  score       float64         0 to 0.34
  prefix      stringlengths   0 to 8.16k
  middle      stringlengths   3 to 512
  suffix      stringlengths   0 to 8.17k
lmazuel/azure-sdk-for-python
azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/job/models/job_information.py
Python
mit
6,107
0.000655
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .job_information_basic import JobInformationBasic


class JobInformation(JobInformationBasic):
    """The extended Data Lake Analytics job information properties returned
    when retrieving a specific job.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar job_id: the job's unique identifier (a GUID).
    :vartype job_id: str
    :param name: the friendly name of the job.
    :type name: str
    :param type: the job type of the current job (Hive, USql, or Scope (for
     internal use only)). Possible values include: 'USql', 'Hive', 'Scope'
    :type type: str or ~azure.mgmt.datalake.analytics.job.models.JobType
    :ivar submitter: the user or account that submitted the job.
    :vartype submitter: str
    :param degree_of_parallelism: the degree of parallelism used for this
     job. This must be greater than 0; if set to less than 0 it will default
     to 1. Default value: 1.
    :type degree_of_parallelism: int
    :param priority: the priority value for the current job. Lower numbers
     have a higher priority. By default, a job has a priority of 1000. This
     must be greater than 0.
    :type priority: int
    :ivar submit_time: the time the job was submitted to the service.
    :vartype submit_time: datetime
    :ivar start_time: the start time of the job.
    :vartype start_time: datetime
    :ivar end_time: the completion time of the job.
    :vartype end_time: datetime
    :ivar state: the job state. When the job is in the Ended state, refer to
     Result and ErrorMessage for details. Possible values include:
     'Accepted', 'Compiling', 'Ended', 'New', 'Queued', 'Running',
     'Scheduling', 'Starting', 'Paused', 'WaitingForCapacity'
    :vartype state: str or ~azure.mgmt.datalake.analytics.job.models.JobState
    :ivar result: the result of job execution or the current result of the
     running job. Possible values include: 'None', 'Succeeded', 'Cancelled',
     'Failed'
    :vartype result: str or
     ~azure.mgmt.datalake.analytics.job.models.JobResult
    :ivar log_folder: the log folder path to use in the following format:
     adl://<accountName>.azuredatalakestore.net/system/jobservice/jobs/Usql/2016/03/13/17/18/5fe51957-93bc-4de0-8ddc-c5a4753b068b/logs/.
    :vartype log_folder: str
    :param log_file_patterns: the list of log file name patterns to find in
     the logFolder. '*' is the only matching character allowed. Example
     format: jobExecution*.log or *mylog*.txt
    :type log_file_patterns: list[str]
    :param related: the recurring job relationship information properties.
    :type related:
     ~azure.mgmt.datalake.analytics.job.models.JobRelationshipProperties
    :param tags: the key-value pairs used to add additional metadata to the
     job information. (Only for use internally with Scope job type.)
    :type tags: dict[str, str]
    :ivar error_message: the error message details for the job, if the job
     failed.
    :vartype error_message:
     list[~azure.mgmt.datalake.analytics.job.models.JobErrorDetails]
    :ivar state_audit_records: the job state audit records, indicating when
     various operations have been performed on this job.
    :vartype state_audit_records:
     list[~azure.mgmt.datalake.analytics.job.models.JobStateAuditRecord]
    :param properties: the job specific properties.
    :type properties: ~azure.mgmt.datalake.analytics.job.models.JobProperties
    """

    _validation = {
        'job_id': {'readonly': True},
        'name': {'required': True},
        'type': {'required': True},
        'submitter': {'readonly': True},
        'submit_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'state': {'readonly': True},
        'result': {'readonly': True},
        'log_folder': {'readonly': True},
        'error_message': {'readonly': True},
        'state_audit_records': {'readonly': True},
        'properties': {'required': True},
    }

    _attribute_map = {
        'job_id': {'key': 'jobId', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'JobType'},
        'submitter': {'key': 'submitter', 'type': 'str'},
        'degree_of_parallelism': {'key': 'degreeOfParallelism', 'type': 'int'},
        'priority': {'key': 'priority', 'type': 'int'},
        'submit_time': {'key': 'submitTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'state': {'key': 'state', 'type': 'JobState'},
        'result': {'key': 'result', 'type': 'JobResult'},
        'log_folder': {'key': 'logFolder', 'type': 'str'},
        'log_file_patterns': {'key': 'logFilePatterns', 'type': '[str]'},
        'related': {'key': 'related', 'type': 'JobRelationshipProperties'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'error_message': {'key': 'errorMessage', 'type': '[JobErrorDetails]'},
        'state_audit_records': {'key': 'stateAuditRecords', 'type': '[JobStateAuditRecord]'},
        'properties': {'key': 'properties', 'type': 'JobProperties'},
    }

    def __init__(self, name, type, properties, degree_of_parallelism=1,
                 priority=None, log_file_patterns=None, related=None,
                 tags=None):
        super(JobInformation, self).__init__(
            name=name, type=type,
            degree_of_parallelism=degree_of_parallelism, priority=priority,
            log_file_patterns=log_file_patterns, related=related, tags=tags)
        self.error_message = None
        self.state_audit_records = None
        self.properties = properties
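
A minimal construction sketch for the model above. This is a hedged example: JobType and USqlJobProperties are assumed sibling models in the same package, and the job name and script payload are illustrative, not values from this file.

# Hypothetical usage: build a JobInformation for a U-SQL job submission.
from azure.mgmt.datalake.analytics.job.models import (
    JobInformation, JobType, USqlJobProperties)

job = JobInformation(
    name='my-usql-job',                  # required friendly name
    type=JobType.usql,                   # the 'USql' job type
    properties=USqlJobProperties(script='@a = SELECT * FROM ...;'),
    degree_of_parallelism=2,
    priority=500,
)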
NiclasEriksen/importANT
pypf.py
Python
mit
3,410
0
def neighbors(node, all_nodes):
    dirs = [[0, 1], [1, 0], [-1, 0], [0, -1]]
    ddirs = [[1, 1], [1, -1], [-1, 1], [-1, -1]]
    result = set()
    # cdef bool x
    for dir in dirs:
        nx, ny = node[0] + dir[0], node[1] + dir[1]
        if nx < 0 or ny < 0:
            continue  # negative indices would silently wrap around in Python
        try:
            all_nodes[nx][ny]
        except IndexError:
            pass
        else:
            result.add((nx, ny))
    for dir in ddirs:
        nx, ny = node[0] + dir[0], node[1] + dir[1]
        if nx < 0 or ny < 0:
            continue  # same wraparound guard for the diagonal directions
        try:
            all_nodes[nx][ny]
        except IndexError:
            pass
        else:
            # only allow a diagonal step if both adjacent orthogonal
            # neighbors are walkable (no corner cutting)
            x, y = False, False
            for r in result:
                if nx - 1 == r[0] and ny == r[1]:
                    x = True
                elif nx + 1 == r[0] and ny == r[1]:
                    x = True
                if ny - 1 == r[1] and nx == r[0]:
                    y = True
                elif ny + 1 == r[1] and nx == r[0]:
                    y = True
            if y and x:
                result.add((nx, ny))
    return result


def get_score(c, node, goal, heightmap):
    score = c.score
    if c.node[0] != node[0] and c.node[1] != node[1]:
        score += 14  # diagonal move
    else:
        score += 10  # orthogonal move
    gx = abs(goal[0] - c.node[0])
    gy = abs(goal[1] - c.node[1])
    score += (gx + gy) * 5  # Manhattan-distance heuristic toward the goal
    penalty = heightmap[c.node[0]][c.node[1]] * 1
    # print(score, "penalty:", penalty)
    score -= penalty
    return score


class Candidate:
    def __init__(self, node, lastnode=None):
        self.node = node
        self.score = 0
        self.visited = False
        self.lastnode = lastnode


def get_path(all_nodes, node, goal, heightmap):
    open_list = []
    closed_list = []
    path_list = []
    final_list = []
    start = Candidate(node, None)
    current = Candidate(node, start)
    count, current.count = 0, 0
    while current.node != goal:
        candidates = []
        for n in neighbors(current.node, all_nodes):
            c = Candidate(n, current)
            candidates.append(c)
        for c in candidates:
            closed = False
            for cc in closed_list:
                if c.node == cc.node:
                    closed = True
            for co in open_list:
                if co.node == c.node:
                    closed = True
            if not closed:
                c.count = count
                count += 1
                c.score = get_score(c, current.node, goal, heightmap)
                open_list.append(c)
        open_list = sorted(
            open_list,
            key=lambda x: x.count,
            reverse=False
        )
        if len(open_list) > 0:
            # count += 1
            next_c = open_list[0]
            closed_list.append(next_c)
            current = next_c
            open_list.remove(next_c)
        else:
            print("Goal not found. Node {0} broke it.".format(node))
            break
    nextnode = current  # goal
    path_list = [nextnode.node]
    while nextnode.node != start.node:
        nextnode = nextnode.lastnode
        path_list.append(nextnode.node)
    for c in reversed(path_list):
        final_list.append(c)
    if len(final_list) > 0:
        print("Pathfinding successful!")
        print("Steps: {0}".format(len(final_list)))
        return final_list, True
    else:
        print("ERROR: Pathfinding went wrong, returning to start.")
        final_list = [start]
        return final_list, False
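
A quick smoke test of get_path on a tiny grid. This is a hedged sketch; the grid, heightmap, and endpoints are made-up inputs, not data from this repository.

# Hypothetical driver: a 5x5 walkable grid with a flat heightmap.
size = 5
all_nodes = [[(x, y) for y in range(size)] for x in range(size)]
heightmap = [[0] * size for _ in range(size)]

path, ok = get_path(all_nodes, (0, 0), (4, 4), heightmap)
print(ok, path)  # True and the list of (x, y) steps from (0, 0) to (4, 4)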
demisto/content
Packs/BitSight/Integrations/BitSightForSecurityPerformanceManagement/BitSightForSecurityPerformanceManagement_test.py
Python
mit
2,406
0.003325
import demistomock as demisto
from CommonServerPython import BaseClient
import BitSightForSecurityPerformanceManagement as bitsight
from datetime import datetime


def test_get_companies_guid_command(mocker):
    # Positive Scenario
    client = bitsight.Client(base_url='https://test.com')
    res = {"my_company": {"guid": "123"},
           "companies": [{"name": "abc", "shortname": "abc", "guid": "123"}]}
    mocker.patch.object(BaseClient, '_http_request', return_value=res)
    _, outputs, _ = bitsight.get_companies_guid_command(client)
    assert outputs[0].get('guid') == '123'


def test_get_company_details_command(mocker):
    inp_args = {'guid': '123'}
    client = bitsight.Client(base_url='https://test.com')
    res = {"name": "abc"}
    mocker.patch.object(BaseClient, '_http_request', return_value=res)
    _, outputs, _ = bitsight.get_company_details_command(client, inp_args)
    assert outputs.get('name') == 'abc'


def test_get_company_findings_command(mocker):
    inp_args = {'guid': '123', 'first_seen': '2021-01-01', 'last_seen': '2021-01-02'}
    client = bitsight.Client(base_url='https://test.com')
    res = {"results": [{"severity": "severe"}]}
    mocker.patch.object(BaseClient, '_http_request', return_value=res)
    _, outputs, _ = bitsight.get_company_findings_command(client, inp_args)
    assert outputs[0].get('severity') == 'severe'


def test_fetch_incidents(mocker):
    inp_args = {'guid': '123', 'findings_min_severity': 'severe',
                'findings_grade': 'WARN', 'findings_asset_category': 'high',
                'risk_vector': 'breaches,dkim'}
    client = bitsight.Client(base_url='https://test.com')
    mocker.patch.object(demisto, 'params', return_value=inp_args)
    res = {"results": [{"severity": "severe", "first_seen": "2021-02-01",
                        "temporary_id": "temp1"}]}
    mocker.patch.object(BaseClient, '_http_request', return_value=res)
    last_run, events = bitsight.fetch_incidents(client=client,
                                                last_run={'time': '2020-12-01T01:01:01Z'},
                                                params=inp_args)
    curr_date = datetime.now().strftime('%Y-%m-%d')
    assert curr_date in last_run['time']
    assert events == [{'name': 'BitSight Finding - temp1',
                       'occurred': '2021-02-01T00:00:00Z',
                       'rawJSON': '{"severity": "severe", "first_seen": "2021-02-01", "temporary_id": "temp1"}'}]
cloudtools/awacs
awacs/mechanicalturk.py
Python
bsd-2-clause
4,190
0.000716
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.

from .aws import Action as BaseAction
from .aws import BaseARN

service_name = "Amazon Mechanical Turk"
prefix = "mechanicalturk"


class Action(BaseAction):
    def __init__(self, action: str = None) -> None:
        super().__init__(prefix, action)


class ARN(BaseARN):
    def __init__(self, resource: str = "", region: str = "", account: str = "") -> None:
        super().__init__(
            service=prefix, resource=resource, region=region, account=account
        )


AcceptQualificationRequest = Action("AcceptQualificationRequest")
ApproveAssignment = Action("ApproveAssignment")
ApproveRejectedAssignment = Action("ApproveRejectedAssignment")
AssignQualification = Action("AssignQualification")
AssociateQualificationWithWorker = Action("AssociateQualificationWithWorker")
BlockWorker = Action("BlockWorker")
ChangeHITTypeOfHIT = Action("ChangeHITTypeOfHIT")
CreateAdditionalAssignmentsForHIT = Action("CreateAdditionalAssignmentsForHIT")
CreateHIT = Action("CreateHIT")
CreateHITType = Action("CreateHITType")
CreateHITWithHITType = Action("CreateHITWithHITType")
CreateQualificationType = Action("CreateQualificationType")
CreateWorkerBlock = Action("CreateWorkerBlock")
DeleteHIT = Action("DeleteHIT")
DeleteQualificationType = Action("DeleteQualificationType")
DeleteWorkerBlock = Action("DeleteWorkerBlock")
DisableHIT = Action("DisableHIT")
DisassociateQualificationFromWorker = Action("DisassociateQualificationFromWorker")
DisposeHIT = Action("DisposeHIT")
DisposeQualificationType = Action("DisposeQualificationType")
ExtendHIT = Action("ExtendHIT")
ForceExpireHIT = Action("ForceExpireHIT")
GetAccountBalance = Action("GetAccountBalance")
GetAssignment = Action("GetAssignment")
GetAssignmentsForHIT = Action("GetAssignmentsForHIT")
GetBlockedWorkers = Action("GetBlockedWorkers")
GetBonusPayments = Action("GetBonusPayments")
GetFileUploadURL = Action("GetFileUploadURL")
GetHIT = Action("GetHIT")
GetHITsForQualificationType = Action("GetHITsForQualificationType")
GetQualificationRequests = Action("GetQualificationRequests")
GetQualificationScore = Action("GetQualificationScore")
GetQualificationType = Action("GetQualificationType")
GetQualificationsForQualificationType = Action("GetQualificationsForQualificationType")
GetRequesterStatistic = Action("GetRequesterStatistic")
GetRequesterWorkerStatistic = Action("GetRequesterWorkerStatistic")
GetReviewResultsForHIT = Action("GetReviewResultsForHIT")
GetReviewableHITs = Action("GetReviewableHITs")
GrantBonus = Action("GrantBonus")
GrantQualification = Action("GrantQualification")
ListAssignmentsForHIT = Action("ListAssignmentsForHIT")
ListBonusPayments = Action("ListBonusPayments")
ListHITs = Action("ListHITs")
ListHITsForQualificationType = Action("ListHITsForQualificationType")
ListQualificationRequests = Action("ListQualificationRequests")
ListQualificationTypes = Action("ListQualificationTypes")
ListReviewPolicyResultsForHIT = Action("ListReviewPolicyResultsForHIT")
ListReviewableHITs = Action("ListReviewableHITs")
ListWorkerBlocks = Action("ListWorkerBlocks")
ListWorkersWithQualificationType = Action("ListWorkersWithQualificationType")
NotifyWorkers = Action("NotifyWorkers")
RegisterHITType = Action("RegisterHITType")
RejectAssignment = Action("RejectAssignment")
RejectQualificationRequest = Action("RejectQualificationRequest")
RevokeQualification = Action("RevokeQualification")
SearchHITs = Action("SearchHITs")
SearchQualificationTypes = Action("SearchQualificationTypes")
SendBonus = Action("SendBonus")
SendTestEventNotification = Action("SendTestEventNotification")
SetHITAsReviewing = Action("SetHITAsReviewing")
SetHITTypeNotification = Action("SetHITTypeNotification")
UnblockWorker = Action("UnblockWorker")
UpdateExpirationForHIT = Action("UpdateExpirationForHIT")
UpdateHITReviewStatus = Action("UpdateHITReviewStatus")
UpdateHITTypeOfHIT = Action("UpdateHITTypeOfHIT")
UpdateNotificationSettings = Action("UpdateNotificationSettings")
UpdateQualificationScore = Action("UpdateQualificationScore")
UpdateQualificationType = Action("UpdateQualificationType")
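
These Action constants are typically consumed when building IAM policy documents. A hedged sketch using awacs' generic policy classes (assumed to be importable from awacs.aws as in the library's usual examples):

# Hypothetical policy sketch: allow creating and reading HITs.
from awacs.aws import Allow, Policy, Statement
from awacs import mechanicalturk

policy = Policy(
    Statement=[
        Statement(
            Effect=Allow,
            Action=[mechanicalturk.CreateHIT, mechanicalturk.GetHIT],
            Resource=["*"],
        )
    ],
)
print(policy.to_json())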
Gitweijie/first_project
networking_cisco/db/migration/models/head.py
Python
apache-2.0
1,228
0
# Copyright 2015 Cisco Systems, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.db.device_manager import (  # noqa
    hd_models)
from networking_cisco.plugins.cisco.db.l3 import (  # noqa
    ha_db)
from networking_cisco.plugins.cisco.db.l3 import (  # noqa
    l3_models)
from networking_cisco.plugins.ml2.drivers.cisco.n1kv import (  # noqa
    n1kv_models)
from networking_cisco.plugins.ml2.drivers.cisco.nexus import (  # noqa
    nexus_models_v2)
from networking_cisco.plugins.ml2.drivers.cisco.ucsm import (  # noqa
    ucsm_model)


def get_metadata():
    return bc.model_base.BASEV2.metadata
AvadootNachankar/gstudio
gnowsys-ndf/gnowsys_ndf/ndf/management/commands/data_entry.py
Python
agpl-3.0
45,442
0.004577
''' -- imports from python libraries -- '''
import os
import csv
import json
import ast
import time
import datetime

''' imports from installed packages '''
from django.core.management.base import BaseCommand, CommandError
from mongokit import IS

try:
    from bson import ObjectId
except ImportError:  # old pymongo
    from pymongo.objectid import ObjectId

''' imports from application folders/files '''
from gnowsys_ndf.ndf.models import DATA_TYPE_CHOICES
from gnowsys_ndf.ndf.models import node_collection, triple_collection
from gnowsys_ndf.ndf.models import Node
from gnowsys_ndf.ndf.models import GSystemType, AttributeType, RelationType
from gnowsys_ndf.ndf.models import GSystem, GAttribute, GRelation
from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, create_college_group_and_setup_data
from gnowsys_ndf.ndf.views.methods import get_student_enrollment_code

####################################################################################################################
# TODO:
# 1) Name of attributes/relation in property_order field needs to be replaced with their respective ObjectIds
# 2) regex query needs to be modified because in current situation it's not considering names with space
#    - searching for terms till it finds first space

SCHEMA_ROOT = os.path.join(os.path.dirname(__file__), "schema_files")

log_list = []  # To hold intermediate errors
log_list.append("\n######### Script run on : " + time.strftime("%c") + " #########\n############################################################\n")

is_json_file_exists = False
gsystem_type_node = None
gsystem_type_id = None
gsystem_type_name = ""

home_grp = node_collection.one({'_type': "Group", 'name': "home"})
group_id = home_grp._id
user_id = 1

mis_group = node_collection.one({
    '_type': "Group",
    '$or': [{
        'name': {'$regex': u"MIS_admin", '$options': 'i'}
    }, {
        'altnames': {'$regex': u"MIS_admin", '$options': 'i'}
    }],
    'group_type': "PRIVATE"
}, {
    'created_by': 1
})

if mis_group is not None:
    group_id = mis_group._id
    user_id = mis_group.created_by  # User who created the above private group

college_gst = node_collection.one({"_type": "GSystemType", "name": "College"})
college_dict = {}
college_name_dict = {}
attr_type_dict = {}
rel_type_dict = {}

create_student_enrollment_code = False
create_private_college_group = False
node_repeated = False


class Command(BaseCommand):
    help = "Based on "

    def handle(self, *args, **options):
        try:
            for file_name in args:
                file_path = os.path.join(SCHEMA_ROOT, file_name)

                global gsystem_type_node
                global gsystem_type_id
                global gsystem_type_name
                gsystem_type_node = None
                gsystem_type_id = None
                gsystem_type_name = ""

                if os.path.exists(file_path):
                    gsystem_type_name = os.path.basename(file_path)
                    gsystem_type_name = os.path.splitext(gsystem_type_name)[0]
                    gsystem_type_name = gsystem_type_name.replace("_", " ")

                    if gsystem_type_name == u"Student":
                        global create_student_enrollment_code
                        create_student_enrollment_code = True
                    elif gsystem_type_name == u"College":
                        global create_private_college_group
                        create_private_college_group = True

                    gsystem_type_node = node_collection.one({
                        "_type": "GSystemType",
                        "$or": [{
                            "name": {"$regex": "^" + gsystem_type_name + "$", '$options': 'i'}
                        }, {
                            "altnames": {"$regex": "^" + gsystem_type_name + "$", '$options': 'i'}
                        }]
                    })

                    if gsystem_type_node:
                        gsystem_type_id = gsystem_type_node._id
                    else:
                        error_message = "\n GSystemTypeError: This GSystemType (" + gsystem_type_name + ") doesn't exist for creating its own GSystem !!!"
                        log_list.append(error_message)
                        raise Exception(error_message)

                file_extension = os.path.splitext(file_name)[1]

                if "csv" in file_extension:
                    # Process csv file and convert it to json format at first
                    total_rows = 0
                    info_message = "\n CSVType: Following file (" + file_path + ") found!!!"
                    log_list.append(info_message)

                    try:
                        csv_file_path = file_path
                        json_file_name = file_name.rstrip("csv") + "json"
                        json_file_path = os.path.join(SCHEMA_ROOT, json_file_name)

                        json_file_content = ""
                        with open(csv_file_path, 'rb') as csv_file:
                            csv_file_content = csv.DictReader(csv_file, delimiter=",")
                            json_file_content = []
                            for row in csv_file_content:
                                total_rows += 1
                                json_file_content.append(row)

                        info_message = "\n- File '" + file_name + "' contains : " + str(total_rows) + " entries/rows (excluding top-header/column-names)."
                        print info_message
                        log_list.append(str(info_message))

                        with open(json_file_path, 'w') as json_file:
                            json.dump(json_file_content, json_file, indent=4, sort_keys=False)

                        if os.path.exists(json_file_path):
                            file_path = json_file_path
                            is_json_file_exists = True
                            info_message = "\n JSONType: Following file (" + json_file_path + ") created successfully.\n"
                            log_list.append(info_message)

                    except Exception as e:
                        error_message = "\n CSV-JSONError: " + str(e)
                        log_list.append(error_message)
                    # End of csv-json conversion

                elif "json" in file_extension:
                    is_json_file_exists = True
                else:
                    error_message = "\n FileTypeError: Please choose either 'csv' or 'json' format supported files!!!\n"
                    log_list.append(error_message)
                    raise Exception(error_message)

                if is_json_file_exists:
                    # Process json file and create required GSystems, GRelations, and GAttributes
                    info_message = "\n Task initiated: Processing json-file...\n"
                    log_list.append(info_message)
                    t0 = time.time()
                    parse_data_create_gsystem(file_path, file_name)
                    t1 = time.time()
                    time_diff = t1 - t0
                    # print time_diff
                    total_time_minute = round((time_diff / 60), 2) if time_diff else 0
                    total_time_hour = round((time_diff / (60 * 60)), 2) if time_diff else 0
                    # End of processing json file

                    info_message = "\n------- Task finished: Successfully processed json-file -------\n"
                    info_message += "- Total time taken for the processing: \n\n\t" + str(total_time_minute) + " MINUTES\n\t=== OR ===\n\t" + str(total_time_hour) + " HOURS\n"
                    print info_message
                    log_list.append(str(info_message))
                else:
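
Invocation goes through Django's management framework, with each positional argument naming a schema file under the command's schema_files directory. A hedged sketch; the file name below is an illustrative assumption:

# Hypothetical invocation from a Django shell or script:
from django.core.management import call_command

call_command('data_entry', 'College.csv')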
CoderDojoSG/todo
todo1/application.py
Python
apache-2.0
1,472
0.007473
from flask import Flask
from flask import make_response
from flask import request
from flask import render_template
from flask import redirect
from flask import url_for

import logging
from logging.handlers import RotatingFileHandler

app = Flask(__name__)


@app.route('/')
def index():
    app.logger.info('index')
    username = request.cookies.get('username')
    if username is None:
        return redirect(url_for('login'))
    else:
        return render_template('index.html', username=username)


@app.route('/login', methods=['GET', 'POST'])
def login():
    app.logger.info('login')
    if request.method == 'POST':
        if validate_credentials(request.form['username'], request.form['password']):
            resp = make_response(redirect(url_for('index')))
            resp.set_cookie('username', request.form['username'])
            return resp
        else:
            return render_template('login.html', error='Invalid username or password')
    else:
        return render_template('login.html')


@app.route('/logout')
def logout():
    app.logger.info('logout')
    resp = make_response(redirect(url_for('index')))
    resp.set_cookie('username', '', expires=0)
    return resp


def validate_credentials(username, password):
    return username == password


if __name__ == '__main__':
    handler = RotatingFileHandler('todo.log', maxBytes=10000, backupCount=1)
    handler.setLevel(logging.INFO)
    app.logger.addHandler(handler)
    app.run()
jonathan-s/happy
happy/error.py
Python
apache-2.0
2,606
0.005372
class EmptyResult(object):
    '''
    Null Object pattern to prevent Null reference errors
    when there is no result
    '''
    def __init__(self):
        self.status = 0
        self.body = ''
        self.msg = ''
        self.reason = ''

    def __nonzero__(self):
        return False


class HapiError(ValueError):
    """Any problems get thrown as HapiError exceptions with the
    relevant info inside"""

    as_str_template = u'''
---- request ----
{method} {host}{url}, [timeout={timeout}]
---- body ----
{body}
---- headers ----
{headers}
---- result ----
{result_status}
---- body -----
{result_body}
---- headers -----
{result_headers}
---- reason ----
{result_reason}
---- trigger error ----
{error}
'''

    def __init__(self, result, request, err=None):
        super(HapiError, self).__init__(result and result.reason or "Unknown Reason")
        if result == None:
            self.result = EmptyResult()
        else:
            self.result = result
        if request == None:
            request = {}
        self.request = request
        self.err = err

    def __str__(self):
        return self.__unicode__().encode('ascii', 'replace')

    def __unicode__(self):
        params = {}
        request_keys = ('method', 'host', 'url', 'data', 'headers', 'timeout', 'body')
        result_attrs = ('status', 'reason', 'msg', 'body', 'headers')
        params['error'] = self.err
        for key in request_keys:
            params[key] = self.request.get(key)
        for attr in result_attrs:
            params['result_%s' % attr] = getattr(self.result, attr, '')
        params = self._dict_vals_to_unicode(params)
        return self.as_str_template.format(**params)

    def _dict_vals_to_unicode(self, data):
        unicode_data = {}
        for key, val in data.items():
            if not isinstance(val, basestring):
                unicode_data[key] = unicode(val)
            elif not isinstance(val, unicode):
                unicode_data[key] = unicode(val, 'utf8', 'ignore')
            else:
                unicode_data[key] = val
        return unicode_data


# Create more specific error cases, to make filtering errors easier
class HapiBadRequest(HapiError):
    '''Error wrapper for most 40X results and 501 results'''


class HapiNotFound(HapiError):
    '''Error wrapper for 404 and 410 results'''


class HapiTimeout(HapiError):
    '''Wrapper for socket timeouts, sslerror, and 504'''


class HapiUnauthorized(HapiError):
    '''Wrapper for 401 Unauthorized errors'''


class HapiServerError(HapiError):
    '''Wrapper for most 500 errors'''
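
A short sketch of how the subclass hierarchy enables filtering at call sites. This is a hedged example: api_call is a hypothetical stand-in for whatever raises these errors.

# Hypothetical call site: react differently per error class (Python 2 codebase).
def fetch(api_call):
    try:
        return api_call()
    except HapiNotFound:
        return None       # 404/410: treat the resource as missing
    except HapiTimeout:
        raise             # let upstream retry logic decide
    except HapiError as e:
        print unicode(e)  # generic fallback with the full request/result dump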
DarkSand/Sasila
sasila/system_normal/processor/first_processor.py
Python
apache-2.0
1,026
0.000975
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys

from bs4 import BeautifulSoup as bs

from sasila.system_normal.spider.spider_core import SpiderCore
from sasila.system_normal.pipeline.console_pipeline import ConsolePipeline
from sasila.system_normal.processor.base_processor import BaseProcessor
from sasila.system_normal.downloader.http.spider_request import Request

if sys.version_info < (3, 0):
    reload(sys)
    sys.setdefaultencoding('utf-8')


class FirstProcessor(BaseProcessor):
    spider_id = 'test'
    spider_name = 'test'
    allowed_domains = ['mzitu.com']
    start_requests = [Request(url="http://www.mzitu.com/")]

    def process(self, response):
        soup = bs(response.m_response.content, 'lxml')
        a_list = soup.select("a")
        for a in a_list:
            if "href" in a.attrs:
                url = response.nice_join(a["href"])
                yield {'url': url}

# if __name__ == '__main__':
#     spider = SpiderCore(FirstProcessor()).set_pipeline(ConsolePipeline()).start()
wrightjb/bolt-planar
transform.py
Python
bsd-3-clause
12,182
0.002545
#############################################################################
# Copyright (c) 2010 by Casey Duncan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name(s) of the copyright holders nor the names of its
#   contributors may be used to endorse or promote products derived from this
#   software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AS IS AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#############################################################################
from __future__ import division

import math

import planar
from planar.util import cached_property, assert_unorderable, cos_sin_deg


class Affine(tuple):
    """Two dimensional affine transform for linear mapping from 2D
    coordinates to other 2D coordinates. Parallel lines are preserved by
    these transforms. Affine transforms can perform any combination of
    translations, scales/flips, shears, and rotations. Class methods are
    provided to conveniently compose transforms from these operations.

    Internally the transform is stored as a 3x3 transformation matrix. The
    transform may be constructed directly by specifying the first two rows of
    matrix values as 6 floats. Since the matrix is an affine transform, the
    last row is always ``(0, 0, 1)``.

    :param members: 6 floats for the first two matrix rows.
    :type members: float
    """

    def __new__(self, *members):
        if len(members) == 6:
            mat3x3 = [x * 1.0 for x in members] + [0.0, 0.0, 1.0]
            return tuple.__new__(Affine, mat3x3)
        else:
            raise TypeError(
                "Expected 6 number args, got %s" % len(members))

    @classmethod
    def identity(cls):
        """Return the identity transform.

        :rtype: Affine
        """
        return identity

    @classmethod
    def translation(cls, offset):
        """Create a translation transform from an offset vector.

        :param offset: Translation offset.
        :type offset: :class:`~planar.Vec2`
        :rtype: Affine
        """
        ox, oy = offset
        return tuple.__new__(cls,
            (1.0, 0.0, ox,
             0.0, 1.0, oy,
             0.0, 0.0, 1.0))

    @classmethod
    def scale(cls, scaling):
        """Create a scaling transform from a scalar or vector.

        :param scaling: The scaling factor. A scalar value will scale in
            both dimensions equally. A vector scaling value scales the
            dimensions independently.
        :type scaling: float or :class:`~planar.Vec2`
        :rtype: Affine
        """
        try:
            sx = sy = float(scaling)
        except TypeError:
            sx, sy = scaling
        return tuple.__new__(cls,
            (sx, 0.0, 0.0,
             0.0, sy, 0.0,
             0.0, 0.0, 1.0))

    @classmethod
    def shear(cls, x_angle=0, y_angle=0):
        """Create a shear transform along one or both axes.

        :param x_angle: Angle in degrees to shear along the x-axis.
        :type x_angle: float
        :param y_angle: Angle in degrees to shear along the y-axis.
        :type y_angle: float
        :rtype: Affine
        """
        sx = math.tan(math.radians(x_angle))
        sy = math.tan(math.radians(y_angle))
        return tuple.__new__(cls,
            (1.0, sy, 0.0,
             sx, 1.0, 0.0,
             0.0, 0.0, 1.0))

    @classmethod
    def rotation(cls, angle, pivot=None):
        """Create a rotation transform at the specified angle, optionally
        about the specified pivot point.

        :param angle: Rotation angle in degrees
        :type angle: float
        :param pivot: Point to rotate about, if omitted the rotation is
            about the origin.
        :type pivot: :class:`~planar.Vec2`
        :rtype: Affine
        """
        ca, sa = cos_sin_deg(angle)
        if pivot is None:
            return tuple.__new__(cls,
                (ca, sa, 0.0,
                 -sa, ca, 0.0,
                 0.0, 0.0, 1.0))
        else:
            px, py = pivot
            return tuple.__new__(cls,
                (ca, sa, px - px*ca + py*sa,
                 -sa, ca, py - px*sa - py*ca,
                 0.0, 0.0, 1.0))

    def __str__(self):
        """Concise string representation."""
        return ("|% .2f,% .2f,% .2f|\n"
                "|% .2f,% .2f,% .2f|\n"
                "|% .2f,% .2f,% .2f|") % self

    def __repr__(self):
        """Precise string representation."""
        return ("Affine(%r, %r, %r,\n"
                "       %r, %r, %r)") % self[:6]

    @cached_property
    def determinant(self):
        """The determinant of the transform matrix. This value is equal to
        the area scaling factor when the transform is applied to a shape.
        """
        a, b, c, d, e, f, g, h, i = self
        return a*e - b*d

    @cached_property
    def is_identity(self):
        """True if this transform equals the identity matrix, within rounding
        limits.
        """
        return self is identity or self.almost_equals(identity)

    @cached_property
    def is_rectilinear(self):
        """True if the transform is rectilinear, i.e., whether a shape would
        remain axis-aligned, within rounding limits, after applying the
        transform.
        """
        a, b, c, d, e, f, g, h, i = self
        return ((abs(a) < planar.EPSILON and abs(e) < planar.EPSILON)
                or (abs(d) < planar.EPSILON and abs(b) < planar.EPSILON))

    @cached_property
    def is_conformal(self):
        """True if the transform is conformal, i.e., if angles between points
        are preserved after applying the transform, within rounding limits.
        This implies that the transform has no effective shear.
        """
        a, b, c, d, e, f, g, h, i = self
        return abs(a*b + d*e) < planar.EPSILON

    @cached_property
    def is_orthonormal(self):
        """True if the transform is orthonormal, which means that the
        transform represents a rigid motion, which has no effective scaling or
        shear. Mathematically, this means that the axis vectors of the
        transform matrix are perpendicular and unit-length. Applying an
        orthonormal transform to a shape always results in a congruent shape.
        """
        a, b, c, d, e, f, g, h, i = self
        return (self.is_conformal
                and abs(1.0 - (a*a + d*d)) < planar.EPSILON
                and abs(1.0 - (b*b + e*e)) < planar.EPSILON)

    @cached_property
    def is_degenerate(self):
        """True if this transform is degenerate, which means that it will
        collapse a shape to an effective area of zero. Degenerate transforms
        cannot be inverted.
        """
        return abs(self.determinant) < planar.EPSILON

    @property
    def column_vectors(self):
        """The values of the transform as three 2D column vectors"""
        a, b, c, d, e, f, _, _, _ = self
        return planar.Vec2(a, d), planar.Vec2(b, e), planar.Vec2(c, f)

    def almost_equals(self, other):
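
A small composition sketch for the constructors above. Note this excerpt is truncated before the operator overloads, so the `*` composition and the Vec2 import are assumptions about the rest of the planar package:

# Hypothetical usage of the Affine class methods shown above.
from planar import Vec2

move = Affine.translation(Vec2(10, 0))
spin = Affine.rotation(90)   # degrees, about the origin
both = move * spin           # composed transform (assumed Affine.__mul__)
print(both.is_degenerate)    # False: rotation and translation preserve area
print(both.column_vectors)   # the three 2D column vectors of the matrix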
ARDivekar/SearchDistribute
other/Legacy/sqliteDefaults.py
Python
mit
9,396
0.038314
# Author: Abhishek Divekar, Jan 2016. Licence: Creative Commons.
import os
import sqlite3
import datetime


def get_conn(db_file_name):  # makes a new file if it does not exist
    BASE_DIR = os.path.dirname(os.path.abspath(__file__))  # directory in which this file is stored
    db_path = os.path.join(BASE_DIR, db_file_name)
    with sqlite3.connect(db_path, detect_types=sqlite3.PARSE_DECLTYPES) as conn:
        # source for "detect_types=sqlite3.PARSE_DECLTYPES":
        # http://stackoverflow.com/questions/4272908/sqlite-date-storage-and-conversion
        print "\t\tOpened connection successfully"
        return conn
    return None


class Table:
    def __init__(self, input_attributes, input_table):
        self.table = input_table
        self.attributes = input_attributes

    def __len__(self):
        return len(self.table)

    def __getitem__(self, i):
        '''
        Works for 2D or 3D or any-D. Works because for a[i][j][k], a[i]
        returns a tuple for the ith row. Let row=a[i]; then a[i][j][k]
        becomes row[j][k], and indexing recurses to get the column entry.
        '''
        # print type(self)
        if type(i) == int:
            return self.table[i]
        elif type(i) == str:
            # assume they are searching by column, i.e. table['col_name'];
            # this allows access by column and then row
            ind = self.attributes.index(i)
            col = []
            for row_no in range(0, len(self.table) - 1):
                col.append(self.table[row_no][ind])
            return tuple(col)


def build_where_clause(where_params_list, where_values_list):
    if where_params_list != None and where_values_list != None:
        where_clause = " WHERE "
        where_clause += " %s='%s' " % (str(where_params_list[0]), str(where_values_list[0]))
        for i in range(1, len(where_values_list)):
            where_clause += " AND %s='%s' " % (str(where_params_list[i]), str(where_values_list[i]))
    else:
        where_clause = ""
    return where_clause


def build_select_query(tablename, select_params_list, where_params_list=None, where_values_list=None):
    select_query = "SELECT "
    select_query += " %s" % select_params_list[0]
    for i in range(1, len(select_params_list)):
        select_query += ", %s" % select_params_list[i]
    select_query += " FROM %s " % tablename
    select_query += build_where_clause(where_params_list=where_params_list, where_values_list=where_values_list)
    select_query += ";"
    return select_query


def build_update_query(tablename, update_params_list, update_values_list, where_params_list=None, where_values_list=None):
    update_query = "UPDATE " + tablename + " SET "
    update_query += " %s='%s' " % (str(update_params_list[0]), str(update_values_list[0]))
    for i in range(1, len(update_values_list)):
        update_query += ", %s='%s' " % (str(update_params_list[i]), str(update_values_list[i]))
    update_query += build_where_clause(where_params_list=where_params_list, where_values_list=where_values_list)
    update_query += ";"
    return update_query


def build_insert_query(tablename, insert_params_list, tuple_values_list):
    insert_query = "INSERT INTO %s(" % tablename + "%s" % insert_params_list[0]
    # print insert_query
    for param in insert_params_list:
        if insert_params_list[0] != param:
            insert_query += ", %s" % param
    insert_query += ") VALUES "
    # print insert_query
    insert_query += "\n('%s'" % tuple_values_list[0][0]
    for j in range(1, len(tuple_values_list[0])):
        insert_query += " ,'%s'" % tuple_values_list[0][j]
    insert_query += ")"
    for i in range(1, len(tuple_values_list)):
        insert_query += ",\n('%s'" % tuple_values_list[i][0]
        for j in range(1, len(tuple_values_list[i])):
            insert_query += " ,'%s'" % tuple_values_list[i][j]
        insert_query += ")"  # close each row tuple (missing in the flattened original)
    insert_query += ";"
    # print insert_query
    return insert_query


def build_date(d, m, y):
    return datetime.date(y, m, d)


def build_date2(day, month, year):
    return datetime.date(year, month, day)


""" <---------------THE CORRECT WAY TO HANDLE DATES IN SQLITE3 with sqliteDefaults------------------>

#Create a random table
conn.execute('''Create table if not exists person(
    ID INTEGER PRIMARY KEY,
    Name TEXT,
    DOB DATE
);
''')
conn.commit()

#Insert values into the table in one of the accepted formats
sqliteDefaults.insert_table_sqlite(conn, 'person', ('ID', 'Name', 'DOB'),
    [
        (1, 'Bob',   sqliteDefaults.build_date(07,10,1999)),
        (2, 'John',  sqliteDefaults.build_date(y=2005,m=8,d=21)),
        (3, 'Stacy', sqliteDefaults.build_date2(month=6,day=25,year=2003)),
        (4, 'Emma',  datetime.date(2001, 10, 27))
    ]
)
#Source: http://stackoverflow.com/questions/4272908/sqlite-date-storage-and-conversion

table = sqliteDefaults.verified_select_sqlite(conn, "select * from person order by DOB desc;")
for row in table:
    print row
#OUTPUT:
#(2, u'John', datetime.date(2005, 8, 21))
#(3, u'Stacy', datetime.date(2003, 6, 25))
#(4, u'Emma', datetime.date(2001, 10, 27))
#(1, u'Bob', datetime.date(1999, 10, 7))

print table[2][2].day
#OUTPUT:
# 27

#We can now compare the values as we do normal datetime objects: with > and <, etc
i = 1; j = 2
if table[i][2] < table[j][2]:
    print "%s is older than %s" % (table[i][1], table[j][1])
elif table[j][2] < table[i][2]:
    print "%s is older than %s" % (table[j][1], table[i][1])
#OUTPUT:
# Emma is older than Stacy
"""


def insert_table_sqlite(conn, tablename, insert_params_list, tuple_values_list, commit=True):
    insert_query = build_insert_query(tablename=tablename, insert_params_list=insert_params_list, tuple_values_list=tuple_values_list)
    # print insert_query
    cursor = conn.cursor()
    cursor.execute(insert_query)
    if commit:
        conn.commit()
    # database_in_use(conn)


def insert_table_sqlite2(conn, tablename, parameters_tuple=(), tuple_values_list=[], commit=True, print_query=False):
    if tuple_values_list == []:
        print("\n\tSQLiteDefaults: insert_table_sqlite() ERROR: tuple_value_list cannot be empty")
        return
    query = ""
    if parameters_tuple == ():
        query = "INSERT INTO %s VALUES " % (tablename)
    else:
        query = "INSERT INTO %s %s VALUES" % (tablename, parameters_tuple)
    # else:
    #     print("\n\tSQLiteDefaults: insert_table_sqlite() ERROR: parameters_tuple must be a tuple")
    query = query + "(?" + (",?" * (len(parameters_tuple) - 1)) + ")"
    # source: https://docs.python.org/2/library/sqlite3.html
    if print_query:
        print query
    conn.executemany(query, tuple_values_list)
    if commit:
        conn.commit()


def verified_select_sqlite(conn, select_query, fetch="all", printing=True):
    '''This function verifies that the entered query is a valid select query
    (to prevent SQL injection). If it is, it executes it and gets the table
    object. It returns None if the table is empty, and prints an ERROR.
    If the table is non-empty, it returns the table object.'''
    if 'select' in select_query.lower():
        temp = select_query.strip()
        if not ';' in temp:
            temp += ';'
        # print temp
        if temp.index(';') == (len(temp) - 1):
            cursor = conn.cursor()
            cursor.execute(temp)
            attributes = []
            for i in cursor.description:
                attributes.append(i[0])
            result_table = ()
            if fetch.lower() == "all":
                result_table = cursor.fetchall()
            elif fetch.lower() == "one":
                result_table = cursor.fetchone()
            else:
                if printing:
                    print "verified_select() ERROR: Improper value '%s' passed to argument 'fetch'" % fetch
                return None
            if result_table is ():
                if printing:
                    print 'verified_select() ERROR: Empty table'
                return None
            return Table(input_table=result_table, input_attributes=attributes)
        else:
            if printing:
                print 'verified_select() ERROR: Only one query can be fired at a time'
    else:
        if printing:
            print 'verified_select() ERROR: Only select queries can be executed'


def print_table(conn, select_query):
    table = verified_select_sqlite(conn, select_query, printing=False)
    if table is not None:
        print '\n\n----------------------------------------------------------------'
        for row in table:
            print '\n'
            for i in range(0, len(row)):
                print row[i], "\t\t",
        print '\n\n----------------------------------------------------------------\n'


def list_all_tables(db_file_name):
    conn = get_conn(db_file_name)
lmazuel/azure-sdk-for-python
azure-mgmt-logic/azure/mgmt/logic/models/business_identity.py
Python
mit
1,162
0
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class BusinessIdentity(Model):
    """The integration account partner's business identity.

    :param qualifier: The business identity qualifier e.g. as2identity, ZZ,
     ZZZ, 31, 32
    :type qualifier: str
    :param value: The user defined business identity value.
    :type value: str
    """

    _validation = {
        'qualifier': {'required': True},
        'value': {'required': True},
    }

    _attribute_map = {
        'qualifier': {'key': 'qualifier', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, qualifier, value):
        self.qualifier = qualifier
        self.value = value
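
A minimal instantiation sketch: both fields are required by _validation, and the qualifier/value pair below is illustrative.

# Hypothetical usage of the model defined above.
identity = BusinessIdentity(qualifier='ZZ', value='contoso-partner-01')
assert identity.qualifier == 'ZZ'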
woutdenolf/spectrocrunch
scraps/cod.py
Python
mit
6,727
0.000743
# -*- coding: utf-8 -*-

from peewee import *
import urllib
import tempfile
import os
from contextlib import contextmanager
from sshtunnel import SSHTunnelForwarder
import traceback

db = MySQLDatabase(
    "cod", host="127.0.0.1", user="cod_reader", port=3308, connect_timeout=10000
)

# Get
#   ssh wout@axil1.ua.ac.be -L 3307:www.crystallography.net:3306 -N &
#   python -m pwiz cod -e mysql -u cod_reader -H 127.0.0.1 -p 3307
#
#   mysql -ucod_reader -h 127.0.0.1 -P 3307
#   SELECT DATABASE();
#   USE cod;
#   SHOW TABLES;
#   DESCRIBE data;


class BaseModel(Model):
    class Meta:
        database = db


class Data(BaseModel):
    rfsqd = FloatField(db_column="RFsqd", null=True)
    ri = FloatField(db_column="RI", null=True)
    rall = FloatField(db_column="Rall", null=True)
    robs = FloatField(db_column="Robs", null=True)
    rref = FloatField(db_column="Rref", null=True)
    z = IntegerField(db_column="Z", index=True, null=True)
    zprime = FloatField(db_column="Zprime", index=True, null=True)
    a = FloatField(index=True, null=True)
    acce_code = CharField(index=True, null=True)
    alpha = FloatField(index=True, null=True)
    authors = TextField(null=True)
    b = FloatField(index=True, null=True)
    beta = FloatField(index=True, null=True)
    c = FloatField(index=True, null=True)
    calcformula = CharField(index=True, null=True)
    cellformula = CharField(null=True)
    cellpressure = FloatField(null=True)
    celltemp = FloatField(null=True)
    chemname = CharField(index=True, null=True)
    commonname = CharField(index=True, null=True)
    compoundsource = CharField(null=True)
    date = DateField(index=True, null=True)
    diffrpressure = FloatField(null=True)
    diffrtemp = FloatField(null=True)
    doi = CharField(index=True, null=True)
    duplicateof = IntegerField(null=True)
    file = PrimaryKeyField()
    firstpage = CharField(null=True)
    flags = CharField(null=True)
    formula = CharField(index=True, null=True)
    gamma = FloatField(index=True, null=True)
    gofall = FloatField(null=True)
    gofgt = FloatField(null=True)
    gofobs = FloatField(null=True)
    issue = CharField(null=True)
    journal = CharField(index=True, null=True)
    lastpage = CharField(null=True)
    method = CharField(index=True, null=True)
    mineral = CharField(index=True, null=True)
    nel = CharField(index=True, null=True)
    onhold = DateField(null=True)
    optimal = IntegerField(null=True)
    pressurehist = CharField(null=True)
    radsymbol = CharField(db_column="radSymbol", null=True)
    radtype = CharField(db_column="radType", null=True)
    radiation = CharField(null=True)
    sg = CharField(index=True, null=True)
    sghall = CharField(db_column="sgHall", index=True, null=True)
    siga = FloatField(null=True)
    sigalpha = FloatField(null=True)
    sigb = FloatField(null=True)
    sigbeta = FloatField(null=True)
    sigc = FloatField(null=True)
    sigcellpressure = FloatField(null=True)
    sigcelltemp = FloatField(null=True)
    sigdiffrpressure = FloatField(null=True)
    sigdiffrtemp = FloatField(null=True)
    siggamma = FloatField(null=True)
    sigvol = FloatField(null=True)
    status = CharField(null=True)
    svnrevision = IntegerField(index=True, null=True)
    text = TextField(index=True)
    thermalhist = CharField(null=True)
    time = TimeField(index=True, null=True)
    title = TextField(null=True)
    vol = FloatField(index=True, null=True)
    volume = IntegerField(null=True)
    wrall = FloatField(db_column="wRall", null=True)
    wrobs = FloatField(db_column="wRobs", null=True)
    wrref = FloatField(db_column="wRref", null=True)
    wavelength = FloatField(null=True)
    year = IntegerField(null=True)

    class Meta:
        db_table = "data"
        indexes = ((("mineral", "chemname", "commonname"), False),)

    def __str__(self):
        ret = "{} ({})\n".format(self.mineral, self.commonname)
        ret += "{} ({})\n".format(self.formula, self.chemname)
        ret += "{} ({} {} {} {} {} {})\n".format(
            self.sg, self.a, self.b, self.c, self.alpha, self.beta, self.gamma
        )
        ret += "P = {} kPa, T = {} K\n".format(self.diffrpressure, self.diffrtemp)
        ret += "P = {} kPa, T = {} K\n".format(self.cellpressure, self.celltemp)
        ret += "{} ({})\n".format(self.authors, self.year)
        ret += "https://doi.org/{}\n".format(self.doi)
        return ret

    @staticmethod
    def sap(p):
        # standard ambient pressure check
        if p is None:
            return True
        a = 0.9  # atm
        b = 1.1
        a *= 101.325  # kPa
        b *= 101.325
        return p >= a and p <= b

    @staticmethod
    def sat(t):
        # standard ambient temperature check
        if t is None:
            return True
        a = 15  # celsius
        b = 30
        a += 273.15  # kelvin
        b += 273.15
        return t >= a and t <= b

    def satp(self):
        return (
            self.sap(self.diffrpressure)
            and self.sap(self.cellpressure)
            and self.sat(self.diffrtemp)
            and self.sat(self.celltemp)
        )

    @property
    def filename(self):
        return os.path.join("{}.cif".format(self.file))

    @property
    def path(self):
        return os.path.join(tempfile.gettempdir(), "spectrocrunch", "cif")

    @property
    def resourcename(self):
        return os.path.join(self.path, self.filename)

    @property
    def url(self):
        return "http://www.crystallography.net/cod/{}.cif".format(self.file)

    def download(self):
        filename = self.resourcename
        if not os.path.isfile(filename):
            path = self.path
            if not os.path.exists(path):
                os.makedirs(path)
            ciffile = urllib.URLopener()
            ciffile.retrieve(self.url, filename)

    @classmethod
    def namequery(cls, name):
        return (
            cls.select()
            .where(
                # NB: Python's "or" short-circuits here; peewee composes OR
                # conditions with "|", so this effectively matches on mineral only.
                cls.mineral == name or cls.commonname == name or cls.chemname == name
            )
            .order_by(cls.year.desc())
        )


@contextmanager
def codtunnel():
    server = SSHTunnelForwarder(
        ssh_address_or_host=("axil1.ua.ac.be", 22),
        ssh_username="wout",
        ssh_pkey="/users/denolf/.ssh/id_rsa",
        remote_bind_address=("www.crystallography.net", 3306),
        local_bind_address=("127.0.0.1", 3308),
    )
    try:
        server.start()
        yield
    except:
        print traceback.format_exc()
    server.stop()


if __name__ == "__main__":
    with codtunnel():
        query = Data.namequery("copper acetate")
        # for entry in query:
        #     print entry
        for entry in query:
            if entry.satp():
                print entry
                entry.download()
                break
TheLampshady/pascompiler
tests/test_variables.py
Python
apache-2.0
537
0.001862
from tests.base import TestBase
from pascal.program import Program


class TestVariables(TestBase):

    def test_pass_valid_var(self):
        file_name = "tests/mock_pas/all_var.pas"
        pascal_program = Program(file_name)
        pascal_program.run()
        self.assertEqual(len(pascal_program.symbol_table), 7)
        self.assertEqual(pascal_program.symbol_address, 23)

    def test_pass_assign(self):
        file_name = "tests/mock_pas/variables.pas"
        pascal_program = Program(file_name)
        pascal_program.run()
plotly/plotly.py
packages/python/plotly/plotly/validators/cone/_showlegend.py
Python
mit
404
0
import _plotly_utils.basevalidators


class ShowlegendValidator(_plotly_utils.basevalidators.BooleanValidator):
    def __init__(self, plotly_name="showlegend", parent_name="cone", **kwargs):
        super(ShowlegendValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "style"),
            **kwargs
        )
Lucterios2/contacts
lucterios/mailing/docs/fr/conf.py
Python
gpl-3.0
9,330
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Lucterios mailing documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 22 17:11:37 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import shlex

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'Lucterios courier'
copyright = '2016, sd-libre'
author = 'sd-libre'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0.2.15122316'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'fr'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'haiku'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}

# If false, no module index is generated.
html_domain_indices = True

# If false, no index is generated.
html_use_index = True

# If true, the index is split into individual pages for each letter.
html_split_index = False

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
# html_search_language = 'fr'

# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'Lucteriosmailingdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',

    # Latex figure (float) alignment
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Lucteriosmailing.tex', 'Lucterios mailing Documentation',
     'sd-libre', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False

# If true, show page references after internal links.
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
# latex_appendices = []

# If false, no module index is generated.
# latex_domain_indices =
blockbomb/plover
plover/test_orthography.py
Python
gpl-2.0
4,487
0.002452
# Copyright (c) 2013 Hesky Fisher # See LICENSE.txt for details. from orthography import add_suffix import unittest class OrthographyTestCase(unittest.TestCase): def test_add_suffix(self): cases = ( ('artistic', 'ly', 'artistically'), ('cosmetic', 'ly', 'cosmetically'), ('establish', 's', 'establishes'), ('speech', 's', 'speeches'), ('approach', 's', 'approaches'), ('beach', 's', 'beaches'), ('arch', 's', 'arches'), ('larch', 's', 'larches'), ('march', 's', 'marches'), ('search', 's', 'searches'), ('starch', 's', 'starches'), ('stomach', 's', 'stomachs'), ('monarch', 's', 'monarchs'), ('patriarch', 's', 'patriarchs'), ('oligarch', 's', 'oligarchs'), ('cherry', 's', 'cherries'), ('day', 's', 'days'), ('penny', 's', 'p
ennies'), ('pharmacy', 'ist', 'pharmacist'), ('melody', 'ist', 'melodist'), ('pacify', 'ist', 'pacifist'), ('geology', 'ist', 'geolo
gist'), ('metallurgy', 'ist', 'metallurgist'), ('anarchy', 'ist', 'anarchist'), ('monopoly', 'ist', 'monopolist'), ('alchemy', 'ist', 'alchemist'), ('botany', 'ist', 'botanist'), ('therapy', 'ist', 'therapist'), ('theory', 'ist', 'theorist'), ('psychiatry', 'ist', 'psychiatrist'), ('lobby', 'ist', 'lobbyist'), ('hobby', 'ist', 'hobbyist'), ('copy', 'ist', 'copyist'), ('beauty', 'ful', 'beautiful'), ('weary', 'ness', 'weariness'), ('weary', 'some', 'wearisome'), ('lonely', 'ness', 'loneliness'), ('narrate', 'ing', 'narrating'), ('narrate', 'or', 'narrator'), ('generalize', 'ability', 'generalizability'), ('reproduce', 'able', 'reproducible'), ('grade', 'ations', 'gradations'), ('urine', 'ary', 'urinary'), ('achieve', 'able', 'achievable'), ('polarize', 'ation', 'polarization'), ('done', 'or', 'donor'), ('analyze', 'ed', 'analyzed'), ('narrate', 'ing', 'narrating'), ('believe', 'able', 'believable'), ('animate', 'ors', 'animators'), ('discontinue', 'ation', 'discontinuation'), ('innovate', 'ive', 'innovative'), ('future', 'ists', 'futurists'), ('illustrate', 'or', 'illustrator'), ('emerge', 'ent', 'emergent'), ('equip', 'ed', 'equipped'), ('defer', 'ed', 'deferred'), ('defer', 'er', 'deferrer'), ('defer', 'ing', 'deferring'), ('pigment', 'ed', 'pigmented'), ('refer', 'ed', 'referred'), ('fix', 'ed', 'fixed'), ('alter', 'ed', 'altered'), ('interpret', 'ing', 'interpreting'), ('wonder', 'ing', 'wondering'), ('target', 'ing', 'targeting'), ('limit', 'er', 'limiter'), ('maneuver', 'ing', 'maneuvering'), ('monitor', 'ing', 'monitoring'), ('color', 'ing', 'coloring'), ('inhibit', 'ing', 'inhibiting'), ('master', 'ed', 'mastered'), ('target', 'ing', 'targeting'), ('fix', 'ed', 'fixed'), ('scrap', 'y', 'scrappy'), ('trip', 's', 'trips'), ('equip', 's', 'equips'), ('bat', 'en', 'batten'), ('smite', 'en', 'smitten'), ('got', 'en', 'gotten'), ('bite', 'en', 'bitten'), ('write', 'en', 'written'), ('flax', 'en', 'flaxen'), ('wax', 'en', 'waxen'), ('fast', 'est', 'fastest'), ('white', 'er', 'whiter'), ('crap', 'y', 'crappy'), ('lad', 'er', 'ladder'), ) failed = [] for word, suffix, expected in cases: if add_suffix(word, suffix) != expected: failed.append((word, suffix, expected)) for word, suffix, expected in failed: print 'add_suffix(%s, %s) is %s not %s' % (word, suffix, add_suffix(word, suffix),expected) self.assertEqual(len(failed), 0) if __name__ == '__main__': unittest.main()
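The cases above exercise English orthography rules: consonant doubling ("equip" + "ed" = "equipped"), silent-e handling ("narrate" + "ing" = "narrating"), and consonant-y pluralization ("cherry" + "s" = "cherries"). As a hedged illustration of just the last rule — a toy sketch, not Plover's actual `add_suffix` implementation — consider:

```python
import re

def add_s(word):
    # Toy version of one rule the tests above cover: a consonant
    # followed by "y" pluralizes to "ies"; a vowel + "y" just takes "s".
    if re.search(r'[^aeiou]y$', word):
        return word[:-1] + 'ies'
    return word + 's'

assert add_s('cherry') == 'cherries'  # matches the test case above
assert add_s('day') == 'days'
```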
weigq/pytorch-pose
example/main.py
Python
gpl-3.0
12,764
0.014964
''' chg1: first change to multi-person pose estimation ''' from __future__ import print_function, absolute_import import argparse import time import matplotlib.pyplot as plt import os import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torchvision.datasets as datasets import torchvision.transforms as transforms from pose import Bar from pose.utils.logger import Logger from pose.utils.evaluation import ac
curacy, AverageMeter, final_preds from pose.utils.misc import save_checkpoint, save_pred, LRDecay from pose.utils.osutils import mkdir_p, isfile, isdir, join fro
m pose.utils.imutils import batch_with_heatmap from pose.utils.transforms import fliplr, flip_back import pose.models as models import pose.datasets as datasets model_names = sorted(name for name in models.__dict__ if name.islower() and not name.startswith("__") and callable(models.__dict__[name])) # to calaulate acc idx = [1,2,3,4,5,6,11,12,15,16] best_acc = 0 def main(args): global best_acc # create checkpoint dir if not isdir(args.checkpoint): mkdir_p(args.checkpoint) # create model print("==> creating model '{}'".format(args.arch)) model = models.__dict__[args.arch](num_classes=16) # multi-GPU model = torch.nn.DataParallel(model).cuda() # the total number of parameters print(' Total params size: %.2fM' % (sum(para.numel() for para in model.parameters())/1000000.0)) # define criterion and optimizer criterion = torch.nn.MSELoss(size_average=True).cuda() optimizer = torch.optim.RMSprop(model.parameters(), lr = args.lr, momentum = args.momentum, weight_decay = args.weight_decay) # optionally resume from a checkpoint # -------- title = 'mpii-' + args.arch if args.resume: if isfile(args.resume): print("=> loading checkpoint '{}'".format(args.resume)) checkpoint = torch.load(args.resume) args.start_epoch = checkpoint['epoch'] best_acc = checkpoint['best_acc'] model.load_state_dict(checkpoint['state_dict']) optimizer.load_state_dict(checkpoint['optimizer']) print("=> loaded checkpoint '{}' (epoch {})" .format(args.resume, checkpoint['epoch'])) logger = Logger(join(args.checkpoint, 'log.txt'), title=title, resume=True) else: print("=> no checkpoint found at '{}'".format(args.resume)) # -------- else: # open the log file logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title) # set names of log file logger.set_names(['train-loss', 'val-loss', 'val-acc']) # using the fastest algorithm cudnn.benchmark = True # Data loading code train_loader = torch.utils.data.DataLoader( dataset = datasets.Mpii('data/mpii/mpii_annotations.json', args.dataPath), batch_size = args.train_batch, shuffle = True, num_workers = args.workers, pin_memory=True) val_loader = torch.utils.data.DataLoader( dataset = datasets.Mpii('data/mpii/mpii_annotations.json', args.dataPath, train=False), batch_size = args.test_batch, shuffle = False, num_workers = args.workers, pin_memory=True) if args.evaluate: print('\nEvaluation only') loss, acc, predictions = validate(val_loader, model, criterion, args.debug, args.flip) save_pred(predictions, checkpoint=args.checkpoint) return for epoch in range(args.start_epoch, args.Epochs): # lr decay lr = LRDecay(optimizer, epoch, args.lr) print('\nEpoch: %d | lr: %.8f' % (epoch, lr)) # train for one epoch train_loss = train(train_loader, model, criterion, optimizer, epoch - 1, args.debug) # evaluate on validation set valid_loss, valid_acc, predictions = validate(val_loader, model, criterion, args.debug, args.flip) # append logger file logger.append([train_loss, valid_loss, valid_acc]) # remember best acc and save checkpoint is_best = valid_acc > best_acc best_acc = max(valid_acc, best_acc) save_checkpoint({ 'epoch': epoch, 'arch': args.arch, 'state_dict': model.state_dict(), 'best_acc': best_acc, 'optimizer' : optimizer.state_dict(), }, predictions, is_best, checkpoint = args.checkpoint) logger.close() logger.plot() plt.savefig(os.path.join(args.checkpoint, 'log.eps')) def train(train_loader, model, criterion, optimizer, epoch, debug=False): batch_time = AverageMeter() data_time = AverageMeter() losses = AverageMeter() # switch to train mode model.train() end = time.time() gt_win, 
pred_win = None, None bar = Bar('Processing', max=len(train_loader)) print("the length of train_loader: {}".format(len(train_loader))) for i, (inputs, target) in enumerate(train_loader): # measure data loading time data_time.update(time.time() - end) inputs = inputs.cuda() target = target.cuda(async=True) input_var = torch.autograd.Variable(inputs) target_var = torch.autograd.Variable(target) # compute output output = model(input_var) # Calculate intermediate loss loss = criterion(output[0], target_var) for j in range(1, len(output)): loss += criterion(output[j], target_var) if debug: # visualize groundtruth and predictions gt_batch_img = batch_with_heatmap(inputs, target) pred_batch_img = batch_with_heatmap(inputs, output[-1].data) if not gt_win or not pred_win: ax1 = plt.subplot(121) ax1.title.set_text('Groundtruth') gt_win = plt.imshow(gt_batch_img) ax2 = plt.subplot(122) ax2.title.set_text('Prediction') pred_win = plt.imshow(pred_batch_img) else: gt_win.set_data(gt_batch_img) pred_win.set_data(pred_batch_img) plt.pause(.05) plt.draw() # measure accuracy and record loss losses.update(loss.data[0], inputs.size(0)) # compute gradient and do SGD step optimizer.zero_grad() loss.backward() optimizer.step() # measure elapsed time batch_time.update(time.time() - end) end = time.time() # plot progress bar.suffix = '({batch}/{size}) Data: {data:.6f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.8f}'.format( batch=i + 1, size=len(train_loader), data=data_time.val, bt=batch_time.val, total=bar.elapsed_td, eta=bar.eta_td, loss=losses.avg, ) bar.next() bar.finish() return losses.avg def validate(val_loader, model, criterion, debug=False, flip=True): batch_time = AverageMeter() losses = AverageMeter() acces = AverageMeter() # predictions predictions = torch.Tensor(val_loader.dataset.__len__(), 16, 2) # switch to evaluate mode model.eval() gt_win, pred_win = None, None end = time.time() bar = Bar('Processing', max=len(val_loader)) print("length of output:{}".format(len(val_loader))) for i, (inputs, target, meta) in enumerate(val_loader): target = target.cuda(async=True) input_var = torch.autograd.Variable(inputs.cuda(), volatile=True) target_var = torch.autograd.Variable(target, volatile=True) # compute output output = model(input_var) # score_map: 16*64*64 score_map = output[-1].data.cpu() if flip: flip_input_var = torch.autograd.Variable( torch.from_numpy(fliplr(inputs.clone().numpy())).float().cuda(), volatile=True
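The record is cut off just after the flipped input variable is built, so the file's real continuation is not shown. The lines below are only a sketch of the conventional flip-test averaging that the imports (`fliplr`, `flip_back`) and the truncated `validate()` suggest — an assumption, not the repository's code:

```python
# Not the file's real continuation -- just the usual flip-augmentation
# pattern that the truncated validate() appears to be setting up.
flip_output = model(flip_input_var)
flip_score_map = flip_output[-1].data.cpu()
flip_score_map = flip_back(flip_score_map)       # mirror heatmaps back
score_map = (score_map + flip_score_map) / 2.0   # average both passes
```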
emilybache/texttest-runner
src/main/python/storytext/bin/migrate_uimap.py
Python
mit
1,119
0.004468
#!/usr/bin/env python from ConfigParser import ConfigParser from ordereddict import O
rderedDict import sys def make_parser(): parser = ConfigParser(dict_type=OrderedDict) parser.optionxform = str return parser def transform(sectionName): sectionName = sectionName.replace(",Dialog=", ", Dialog=") if sectionName.startswith("View="): if
sectionName.endswith("Viewer"): return "Type=Viewer, " + sectionName.split(", ")[0] else: parts = sectionName.split(",") parts.reverse() if len(parts) == 1: parts.insert(0, "Type=View") return ", ".join(parts) else: return sectionName if __name__ == "__main__": fileName = sys.argv[1] parser = make_parser() parser.read([ fileName ]) newParser = make_parser() for section in parser.sections(): newSection = transform(section) newParser.add_section(newSection) for option, value in parser.items(section): newParser.set(newSection, option, value) newParser.write(open(fileName + ".tmp", "w"))
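To make `transform()`'s three branches concrete, here are illustrative inputs and outputs (the section names are invented, not taken from a real UI map file):

```python
# Illustrative behavior of transform(); section names are made up.
assert transform("Menu=File") == "Menu=File"                         # untouched
assert transform("View=MyView") == "Type=View, View=MyView"          # type added
assert transform("View=LogViewer") == "Type=Viewer, View=LogViewer"  # viewer case
assert transform("View=MyView,Name=x") == "Name=x, View=MyView"      # reversed
```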
bjodah/aqchem
chempy/_solution.py
Python
bsd-2-clause
5,994
0.001835
# -*- coding: utf-8 -*- """ "Sandbox" module for exploring API useful for digital labbooks. Examples -------- >>> from chempy.units import to_unitless, default_units as u >>> s1 = Solution(0.1*u.dm3, {'CH3OH': 0.1 * u.molar}) >>> s2 = Solution(0.3*u.dm3, {'CH3OH': 0.4 * u.molar, 'Na+': 2e-3*u.molar, 'Cl-': 2e-3*u.molar}) >>> s3 = s1 + s2 >>> abs(to_unitless(s3.volume - 4e-4 * u.m**3, u.dm3)) < 1e-15 True >>> s3.concentrations.isclose({'CH3OH': 0.325*u.molar, 'Na+': 1.5e-3*u.molar, 'Cl-': 1.5e-3*u.molar}) True >>> s4 = s3.dissolve({'CH3OH': 1*u.gram}) >>> abs(s4.concentrations['CH3OH'] - (0.325 + 1/(12.011 + 4*1.008 + 15.999)/.4)*u.molar) < 1e-4 True """ import copy from .chemistry import Substance from .units import ( get_derived_unit, html_of_unit, is_unitless, SI_base_registry, to_unitless, rescale, default_units as u, ) from .util.arithmeticdict import ArithmeticDict, _imul, _itruediv from .printing import as_per_substance_html_table class QuantityDict(ArithmeticDict): def __init__(self, units, *args, **kwargs): self.units = units super(QuantityDict, self).__init__(lambda: 0 * self.units, *args, **kwargs) self._check() @classmethod def of_quantity(cls, quantity_name, *args, **kwargs): instance = cls( get_derived_unit(SI_base_registry, quantity_name), *args, **kwargs ) instance.quantity_name = quantity_name return instance def rescale(self, new_units): return self.__class__( new_units, {k: rescale(v, new_units) for k, v in self.items()} ) def _repr_html_(self): if hasattr(self, "quantity_name"): header = self.quantity_name.capitalize() + " / " else: header = "" header += html_of_unit(self.units) tab = as_per_substance_html_table(to_unitless(self, self.units), header=header) return tab._repr_html_() def _check(self): for k, v in self.items(): if not is_unitless(v / self.units): raise ValueError( "entry for %s (%s) is not compatible with %s" % (k, v, self.units) ) def __setitem__(self, key, value): if not is_unitless(value / self.units): raise ValueError( "entry for %s (%s) is not compatible with %s" % (key, value, self.units) ) super(QuantityDict, self).__setitem__(key, value) def copy(self): return self.__class__(self.units, copy.deepcopy(list(self.items()))) def __repr__(self): return "{}({}, {})".format( self.__class__.__name__, repr(self.units), dict(self) ) def __mul__(self, other): d = dict(copy.deepcopy(list(self.items()))) _imul(d, other) return self.__class__(self.units * getattr(other, "units", 1), d) def __truediv__(self, other): d = dict(copy.deepcopy(list(self.items()))) _itruediv(d, other) return self.__class__(self.units / getattr(other, "units", 1), d) def __floordiv__(self, other): a = self.copy() if getattr(other, "units", 1) != 1: raise ValueError("Floor division with quantities not defined") a //= other return a def __rtruediv__(self, other): """ other / self """ return self.__class__( getattr(other, "units", 1) / self.units, {k: other / v for k, v in self.items()}, ) def __rfloordiv__(self, other): """ other // self """ return self.__class__( getattr(other, "units", 1) / self.units, {k: other // v for k, v in self.items()}, ) class AutoRegisteringSubstanceDict(object): def __init__(self, factory=Substance.from_formula): self.factory = factory self._store = {} def __getitem__(self, key): if key not in self._store: self._store[key] = self.factory(key) return self._store[key] class Solution(object): def __init__(self, volume, concentrations, substances=None, solvent=None): if not is_unitless(volume / u.dm3): raise ValueError("volume need to have a unit (e.g. 
dm3)") self.volume = volume self.concentrations = QuantityDict(u.molar, concentrations) if substances is None: substances = AutoRegisteringSubstanceDict() self.substances = substances self.solvent = solvent def __eq__(self, other): if not isinstance(other, Solution)
: return NotImplemented return all( [ getattr(self, k) == getattr(other, k) for k in "volume concentrations substances solvent".split() ] ) def __add__(self, other): if self.solvent != other.solvent: raise NotImplementedError( "Mixed solvent should be represented as concentrations" ) tot_amount = ( self.concentrations * self.volume + other.c
oncentrations * other.volume ) tot_vol = self.volume + other.volume return Solution(tot_vol, tot_amount / tot_vol, self.substances, self.solvent) def dissolve(self, masses): contrib = QuantityDict( u.molar, { k: v / self.substances[k].molar_mass() / self.volume for k, v in masses.items() }, ) return Solution( self.volume, self.concentrations + contrib, self.substances, self.solvent ) def withdraw(self, volume): if volume > self.volume: raise ValueError( "Cannot withdraw a volume greater than the solution volume" ) if volume < volume * 0: raise ValueError("Cannot withdraw a negative volume") self.volume -= volume return Solution(volume, self.concentrations, self.substances, self.solvent)
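Beyond the `dissolve()` example in the module docstring, `withdraw()` mutates the source solution in place and returns the aliquot. A minimal hedged sketch (quantities must carry units; the import path is taken from this record's file path):

```python
# Hedged usage sketch of Solution.withdraw; values are illustrative.
from chempy._solution import Solution
from chempy.units import default_units as u

stock = Solution(0.5 * u.dm3, {'Na+': 0.1 * u.molar, 'Cl-': 0.1 * u.molar})
aliquot = stock.withdraw(0.1 * u.dm3)
# stock.volume is now 0.4 dm3; both solutions keep the same concentrations.
```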
kburts/django-playlist
django_playlist/auth/forms.py
Python
mit
954
0.008386
from django import forms from django.contrib.auth.models import User from django.forms.models import ModelForm from crispy_forms.helper import FormHelper from crispy_forms.layout import Submit from .models import UserProfile class UserForm(ModelForm): password = forms.CharField(widget=forms.PasswordInput()) #email = forms.EmailField(max_length=100, required=False) class Meta: mod
el = User #fields = ('username', 'email', 'password') ## I really don't nee
d your email and you're safer not sharing it with me fields = ('username', 'password') helper = FormHelper() helper.form_method = 'POST' helper.add_input(Submit('post', 'post', css_class='btn-primary')) class LoginForm(forms.ModelForm): class Meta: model = User fields = ('username', 'password') class UserProfileForm(ModelForm): class Meta: model = UserProfile fields = ('website', 'picture')
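One thing worth noting about `UserForm`: `password` is a plain `CharField`, so whatever view consumes the form must hash it before saving. A hypothetical view-side sketch (the view itself is not part of this record):

```python
# Hypothetical usage; the surrounding view code is assumed, not shown above.
form = UserForm(data={'username': 'alice', 'password': 's3cret'})
if form.is_valid():
    user = form.save(commit=False)
    user.set_password(form.cleaned_data['password'])  # hash, never store raw
    user.save()
```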
sre/rubber
src/latex_modules/ltxtable.py
Python
gpl-2.0
484
0.022727
# This file is part of Rubber and thus covered by the GPL # (c) Sebastian Reichel, 2012 """ Dependency analysis for package 'ltxtable' in
Rubber. """ def setup (doc
ument, context): global doc doc = document doc.hook_macro('LTXtable', 'aa', hook_ltxtable) def hook_ltxtable (loc, width, file): # If the file name looks like it contains a control sequence or a macro # argument, forget about this \LTXtable. if file.find('\\') < 0 and file.find('#') < 0: doc.add_source(file)
GbalsaC/bitnamiP
venv/lib/python2.7/site-packages/boto/ec2/cloudwatch/dimension.py
Python
agpl-3.0
1,532
0
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARR
ANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # class Dimension(dict): def startElement(self, name, attrs, connection): pass def endElement(self, name, value, connection): if name == 'Name': self._name = value elif name == 'Value': if self._name in self: self[self._name].append(value) else: self[self._name] = [value] else: setattr(self, name, value)
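`Dimension` is a `dict` subclass fed by boto's SAX parser: each `Name` element stashes a key, and the following `Value` element appends to that key's list. An illustrative call sequence (the `connection` argument is unused by these methods, so `None` suffices):

```python
# Illustrative: how repeated <Name>/<Value> pairs accumulate.
d = Dimension()
d.endElement('Name', 'InstanceId', None)   # remembers the key
d.endElement('Value', 'i-111', None)       # starts the list
d.endElement('Name', 'InstanceId', None)
d.endElement('Value', 'i-222', None)       # appends to it
assert d == {'InstanceId': ['i-111', 'i-222']}
```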
HewlettPackard/oneview-ansible
library/oneview_sas_logical_jbod_attachment_facts.py
Python
apache-2.0
3,330
0.002402
#!/usr/bin/python # -*- coding: utf-8 -*- ### # Copyright (2016-2017) Hewlett Packard Enterprise Development LP # # Licensed under the Apache License, Version 2.0 (the "License"); # You may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ### ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['stableinterface'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: oneview_sas_logical_jbod_attachment_facts short_description: Retrieve facts about one or more of the OneView SAS Logical JBOD Attachments. version_added: "2.3" description: - Retrieve facts about one or more of the SAS Logical JBOD Attachments from OneView. requirements: - "python >= 2.7.9" - "hpeOneView >= 3.0" author: "Abilio Parada (@abiliogp)" options: name: description: - Name of SAS Logical JBOD Attachment. required: false notes: - This resource is only available on HPE Synergy extends_documentation_fragment: - oneview - oneview.factsparams ''' EXAMPLES = ''' - name: Gather facts about all SAS Logical JBOD Attachment oneview_sas_logical_jbod_attachment_facts: config: "{{ config_path }}" - debug: var=sas_logical_jbod_attachments - name: Gather paginated, filtered and sorted facts about SAS Logical JBOD Attachment oneview_sas_logical_jbod_attachment_facts: config: "{{ config }}" params: start: 0 count: 2 sort: 'name:descending' filter: "state=Deployed" - debug: var=sas_logical_jbod_attachments - name: Gather facts about a SAS Logical JBOD Attachment by name oneview_sas_logical_jbod_attachment_facts: config: "{{ config_path }}" name: "logical-enclosure-SAS-Logical-Interconnect-Group-BDD-1-SLJA-1" - debug: var=sas_logical_jbod_attachments ''' RETURN = ''' sas_logical_jbod_attachments: description: Has all the OneView facts about the SAS Logical JBOD Attachment. returned: Always, but can be null. type: dict ''' from ansible.module_utils
.oneview import OneViewModuleBase class SasLogicalJbodAttachmentFactsModule(OneViewModuleBase): def __init__(self): argument_spec = dict( name=dict(required=False, type='str'), params=dict(required=False, type='dict'), )
super(SasLogicalJbodAttachmentFactsModule, self).__init__(additional_arg_spec=argument_spec) def execute_module(self): if self.module.params['name']: name = self.module.params['name'] resources = self.oneview_client.sas_logical_jbod_attachments.get_by('name', name) else: resources = self.oneview_client.sas_logical_jbod_attachments.get_all(**self.facts_params) return dict(changed=False, ansible_facts=dict(sas_logical_jbod_attachments=resources)) def main(): SasLogicalJbodAttachmentFactsModule().run() if __name__ == '__main__': main()
ettrig/NIPAP
pynipap/pynipap.py
Python
mit
37,369
0.003131
""" pynipap - a Python NIPAP client library ======================================= pynipap is a Python client library for the NIPAP IP address planning system. It is structured as a simple ORM. There are three ORM-classes: * :class:`VRF` * :class:`Pool` * :class:`Prefix` Each of these maps to the NIPAP objects with the same name. See the main NIPAP API documentation for an overview of the different object types and what they are used for. There are also a few supporting classes: * :class:`AuthOptions` - Authentication options. And a bunch of exceptions: * :class:`NipapError` * :class:`NipapNonExistentError` * :class:`NipapInputError` * :class:`NipapMissingInputError` * :class:`NipapExtraneousInputError` * :class:`NipapNoSuchOperatorError` * :class:`NipapValueError` * :class:`NipapDuplicateError` * :class:`NipapAuthError` * :class:`NipapAuthenticationError` * :class:`NipapAuthorizationError` General usage ------------- pynipap has been designed to be simple to use. Preparations ^^^^^^^^^^^^ Make sure that pynipap is accessible in your `sys.path`, you can test it by starting a python shell and running:: import pynipap If that works, you are good to go! To simplify your code slightly, you can import the individual classes into your main namespace:: import pynipap from pynipap import VRF, Pool, Prefix Before you can access NIPAP you need to specify the URL to the NIPAP XML-RPC service and the authentication options to use for your connection. NIPAP has a authentication system which is somewhat involved, see the main NIPAP documentation. The URL, including the user credentials, is set in the pynipap module variable `xmlrpc_uri` as so:: pynipap.xmlrpc_uri = "http://user:pass@127.0.0.1:9002" The minimum authentication options which we need to set is the `authoritative_source` option, which specifies what system is accessing NIPAP. This is logged for each query which alters the NIPAP database and attached to each prefix which is created or edited. Well-behaved clients are required to honor this and verify that the user really want to alter the prefix, when trying to edit a prefix which last was edited by another system. The :class:`AuthOptions` class is a class with a shared state, similar to a singleton class; that is, when a first instance is created each consecutive instances will be copies of the first one. In this way the authentication options can be accessed from all of the pynipap classes. :: a = AuthOptions({ 'authoritative_source': 'my_fancy_nipap_client' }) After this, we are good to go! Accessing data ^^^^^^^^^^^^^^ To fetch data from NIPAP, a set of static methods (@classmethod) has been defined in each of the ORM classes. They are: * :func:`get` - Get a single object from its ID. * :func:`list` - List objects matching a simple criteria. * :func:`search` - Perform a full-blown search. * :func:`smart_search` - Perform a magic search from a string. Each of these functions return either an instance of the requested class (:py:class:`VRF`, :class:`Pool`, :class:`Prefix`) or a list of instances. The :func:`search` and :func:`smart_search` functions also embeds the lists in dicts which contain search meta data. The easiest way to get data out of NIPAP is to use the :func:`get`-method, given that you know the ID of the object you want to fetch:: # Fetch VRF with ID 1 and print its name vrf = VRF.get(1) print(vrf.name) To list all objects each object has a :func:`list`-function. 
:: # list all pools pools = Pool.list() # print the name of the pools for p in pools: print(p.name) Each of the list functions can also take a `spec`-dict as a second argument. With the spec you can perform a simple search operation by specifying object attribute values. :: # List pools with a default type of 'assignment' pools = Pool.list({ 'default_type': 'assignment' }) Performing searches ^^^^^^^^^^^^^^^^^^^ Commin' up, commin' up. Saving changes ^^^^^^^^^^^^^^ Changes made to objects are not automatically saved. To save the changes, simply run the object's :func:`save`-method:: vrf.name = "Spam spam spam" vrf.save() Error handling -------------- As is customary in Python applications, an error results in an exception being thrown. All pynipap exceptions extend the main exception :class:`NipapError`. A goal with the pynipap library has been to make the XML-RPC-channel to the backend as transparent as possible, so the XML-RPC Faults which the NIPAP server returns in case of errors are converted and re-thrown as new exceptions which also they extend :class:`NipapError`, for example the NipapDuplicateError which is thrown when a duplicate key error occurs in NIPAP. Classes ------- """ import sys import logging if sys.version_info[0] < 3: import xmlrpclib int = long else: import xmlrpc.client as xmlrpclib __version__ = "0.28.4" __author__ = "Kristian Larsson, Lukas Garberg" __author_email__= "kll@tele2.net, lukas@spritelink.net" __copyright__ = "Copyright 2011, Kristian Larsson, Lukas Garberg" __license__ = "MIT" __status__ = "Development" __url__ = "http://SpriteLink.github.com/NIPAP" # This variable holds the URI to the nipap XML-RPC service which will be used. # It must be set before the Pynipap can be used! xmlrpc_uri = None # Caching of objects is enabled per default but can be disabled for certain # scenarios. Since we don't have any cache expiration time it can be useful to # disable for long running applications. CACHE = True class AuthOptions: """ A global-ish authentication option container. Note that this essentially is a global variable. If you handle multiple queries from different users, you need to make sure that the AuthOptions-instances are set to the current user's. """ __shared_state = {} options = None def __init__(self, options = None): """ Create a shared option container. The argument 'options' must be a dict containing authentication options. """ self.__dict__ = self.__shared_state if len(self.__shared_state) == 0 and options is None: raise NipapMissingInputError("authentication options not set") if options is not None: self.options = options class XMLRPCConnection: """ Handles a shared XML-RPC connection. """ __shared_state = {} connection = None _logger = None def __init__(self): """ Create XML-RPC connection. The connection will be created to the URL set in the module variable `xmlrpc_uri`. The instanciation will fail unless this variable is set. """ if xmlrpc_uri is None: raise NipapError('XM
L-RPC URI not specified')
# creating new instance self.connection = xmlrpclib.ServerProxy(xmlrpc_uri, allow_none=True, use_datetime=True) self._logger = logging.getLogger(self.__class__.__name__) class Pynipap: """ A base class for the pynipap model classes. All Pynipap classes which maps to data in NIPAP (:py:class:VRF, :py:class:Pool, :py:class:Prefix) extends this class. """ _logger = None """ Logging instance for this object. """ id = None """ Internal database ID of object. """ def __eq__(self, other): """ Perform test for equality. """ # Only possible if we have ID numbers set if self.id is None or other.id is None: return False return self.id == other.id def __init__(self, id=None): """ Creates logger and XML-RPC-connection. """ self
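The module docstring's setup steps, strung together into one sketch; the URI, credentials, and `authoritative_source` value are the docstring's own placeholders, not real ones:

```python
import pynipap
from pynipap import VRF, Pool, AuthOptions

# Placeholder URI/credentials, copied from the docstring above.
pynipap.xmlrpc_uri = "http://user:pass@127.0.0.1:9002"
AuthOptions({'authoritative_source': 'my_fancy_nipap_client'})

vrf = VRF.get(1)                                   # fetch by ID
pools = Pool.list({'default_type': 'assignment'})  # simple spec search
print(vrf.name, len(pools))
```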
estaban/pyload
module/plugins/hoster/FilepostCom.py
Python
gpl-3.0
6,081
0.00296
# -*- coding: utf-8 -*- """ This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MER
CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this progra
m; if not, see <http://www.gnu.org/licenses/>. changelog: 0.27 - 2012-08-12 - hgg fix "global name 'js_answer' is not defined" bug fix captcha bug #1 (failed on non-english "captcha wrong" errors) """ import re from time import time from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo from module.plugins.internal.CaptchaService import ReCaptcha from module.common.json_layer import json_loads class FilepostCom(SimpleHoster): __name__ = "FilepostCom" __type__ = "hoster" __pattern__ = r'https?://(?:www\.)?(?:filepost\.com/files|fp.io)/([^/]+).*' __version__ = "0.28" __description__ = """Filepost.com hoster plugin""" __author_name__ = "zoidberg" __author_mail__ = "zoidberg@mujmail.cz" FILE_INFO_PATTERN = r'<input type="text" id="url" value=\'<a href[^>]*>(?P<N>[^>]+?) - (?P<S>[0-9\.]+ [kKMG]i?B)</a>\' class="inp_text"/>' OFFLINE_PATTERN = r'class="error_msg_title"> Invalid or Deleted File. </div>|<div class="file_info file_info_deleted">' PREMIUM_ONLY_PATTERN = r'members only. Please upgrade to premium|a premium membership is required to download this file' RECAPTCHA_KEY_PATTERN = r"Captcha.init\({\s*key:\s*'([^']+)'" FLP_TOKEN_PATTERN = r"set_store_options\({token: '([^']+)'" def handleFree(self): # Find token and captcha key file_id = re.match(self.__pattern__, self.pyfile.url).group(1) m = re.search(self.FLP_TOKEN_PATTERN, self.html) if m is None: self.parseError("Token") flp_token = m.group(1) m = re.search(self.RECAPTCHA_KEY_PATTERN, self.html) if m is None: self.parseError("Captcha key") captcha_key = m.group(1) # Get wait time get_dict = {'SID': self.req.cj.getCookie('SID'), 'JsHttpRequest': str(int(time() * 10000)) + '-xml'} post_dict = {'action': 'set_download', 'token': flp_token, 'code': file_id} wait_time = int(self.getJsonResponse(get_dict, post_dict, 'wait_time')) if wait_time > 0: self.wait(wait_time) post_dict = {"token": flp_token, "code": file_id, "file_pass": ''} if 'var is_pass_exists = true;' in self.html: # Solve password for file_pass in self.getPassword().splitlines(): get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml' post_dict['file_pass'] = file_pass self.logInfo("Password protected link, trying " + file_pass) download_url = self.getJsonResponse(get_dict, post_dict, 'link') if download_url: break else: self.fail("No or incorrect password") else: # Solve recaptcha recaptcha = ReCaptcha(self) for i in xrange(5): get_dict['JsHttpRequest'] = str(int(time() * 10000)) + '-xml' if i: post_dict['recaptcha_challenge_field'], post_dict['recaptcha_response_field'] = recaptcha.challenge( captcha_key) self.logDebug(u"RECAPTCHA: %s : %s : %s" % ( captcha_key, post_dict['recaptcha_challenge_field'], post_dict['recaptcha_response_field'])) download_url = self.getJsonResponse(get_dict, post_dict, 'link') if download_url: if i: self.correctCaptcha() break elif i: self.invalidCaptcha() else: self.fail("Invalid captcha") # Download self.download(download_url) def getJsonResponse(self, get_dict, post_dict, field): json_response = json_loads(self.load('https://filepost.com/files/get/', get=get_dict, post=post_dict)) self.logDebug(json_response) if not 'js' in json_response: self.parseError('JSON %s 1' % field) # i changed js_answer to json_response['js'] since js_answer is nowhere set. # i don't know the JSON-HTTP specs in detail, but the previous author # accessed json_response['js']['error'] as well as js_answer['error']. # see the two lines commented out with "# ~?". 
if 'error' in json_response['js']: if json_response['js']['error'] == 'download_delay': self.retry(wait_time=json_response['js']['params']['next_download']) # ~? self.retry(wait_time=js_answer['params']['next_download']) elif 'Wrong file password' in json_response['js']['error']: return None elif 'You entered a wrong CAPTCHA code' in json_response['js']['error']: return None elif 'CAPTCHA Code nicht korrekt' in json_response['js']['error']: return None elif 'CAPTCHA' in json_response['js']['error']: self.logDebug('error response is unknown, but mentions CAPTCHA -> return None') return None else: self.fail(json_response['js']['error']) # ~? self.fail(js_answer['error']) if not 'answer' in json_response['js'] or not field in json_response['js']['answer']: self.parseError('JSON %s 2' % field) return json_response['js']['answer'][field] getInfo = create_getInfo(FilepostCom)
PALab/PLACE
place/plugins/quanta_ray/qray_driver.py
Python
lgpl-3.0
21,278
0.001222
"""Driver module for Newport's Spectra-Physics Quanta-Ray INDI, PRO, and LAB Series Nd:YAG lasers. NOTE: the watchdog parameter is important! The laser will turn off if it does not receive a command within the watchdog time period. Therefore, it is advised to use a command like QRstatus().get_status() at regular intervals to query the status of the laser during operation. @author: Jami L Johnson September 5, 2014 """ import serial class QuantaRay: """QuantaRay class""" def __init__(self, portINDI='/dev/ttyUSB0', baudINDI=9600): """Define serial port for INDI""" self.indi = serial.Serial( port=portINDI, baudrate=baudINDI, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_TWO, bytesize=serial.EIGHTBITS ) def open_connection(self): """ Open serial connection to INDI""" self.indi.close() self.indi.open() indi_open = self.indi.isOpen() if indi_open is False: raise RuntimeError('unable to connect to INDI') def close_connection(self): """Close connection to INDI""" self.indi.close() def get_id(self): """Get ID""" self.indi.write('*IDN?\r'.encode()) return self.indi.readline().decode() def help(self): """Prints serial command options (operational commands)""" self.indi.write('HELP\r'.encode()) for _ in range(1, 6): print(self.indi.readline().decode()) def turn_on(self): """Turns Quanta-Ray INDI on""" self.indi.write('ON\r'.encode()) def turn_off(self): """Turns Quanta-Ray INDI off""" self.indi.write('OFF\r'.encode()) def set_lamp(self, lamp_set='FIX', lamp_pulse=''): """Select lamp trigger source lamp_set: FIX = set lamp trigger to Fixed EXT = set lamp trigger to External Source VAR = set lamp trigger to Variable INH = inhibit lamp trigger lamp_pulse = set rate of lamp (pulses/second) """ if lamp_pulse != '': self.indi.write(('LAMP '+ str(lamp_set) + ' ' + str(lamp_pulse) + '\r').encode()) else: self.indi.write(('LAMP '+ str(lamp_set) + '\r').encode()) def get_lamp(self): """ Returns the lamp Variable Rate trigger setting """ self.indi.write('LAMP VAR?\r'.encode()) return self.indi.readline().decode() def set(self, cmd='NORM'): """Set mode, type, or timing of Q-switch cmd: LONG = long pulse mode EXT = external mode NORM = normal mode SING = single shot FIR = fire Q-switch once REP = repetitive shots """ self.indi.write(('QSW ' + str(cmd) + '\r').encode()) def single_shot(self): """Set single shot""" self.set('SING') def normal_mode(self): """Set normal mode""" self.set('NORM') def repeat_mode(self, watchdog_timeout): """Set repetitive shots and ensures watchdog is turned on (not disabled) :param watchdog_timeout: seconds before laser safety shutoff :type watchdog_timeout: int #:raises ValueError: if watchdog is requested to be 0 (disabled) """ if watchdog_timeout == 0: dummy = input('QuantaRay INDI Laser watchdog is 0 s. This will ' + 'disable watchdog and the laser will continue to run ' + 'after the experiment has finished. Continue? [ y / n ]:') if dummy == 'n': raise ValueError('Disabling watchdog when using repeat mode is not advised') self.set_watchdog(watchdog_timeout) self.set('REP') def get(self): """Queries and returns the Q-switch settings.""" self.indi.write('QSW?\r'.encode()) return self.indi.readline().decode() def set_adv(self, delay): """Set advanced sync delay""" self.indi.write(('ADV ' + str(delay) + '\r').encode()) def get_adv(self): """Queries and returns the Q-switch Advanced Sync settings""" self.indi.write('QSW ADV? 
\r'.encode()) return self.indi.readline().decode() def set_delay(self, delay): """Sets delay for Q-switch delay""" self.indi.write(('QSW DEL ' + str(delay) + '\r').encode()) def get_delay(self): """Queries and returns the Q-switch delay setting""" self.indi.write('QSW DEL? \r'.encode()) return self.indi.readline().decode() def set_echo(self, mode=0): """Set echo mode of INDI. mode: 0 = show prompts 1 = laser echoes characters as received 2 = shows error messages 3 = output line feed for every command (even those that don't normally generate a response) 4 = terminate responses with <cr><lf>, rather than just <lf> 5 = use XON/XOFF handshaking for data sent to laser (not for data sent from the laser) """ self.indi.write(('ECH ' + str(mode) + '\r').enc
ode()) def set_watchdog(self, time=10): """Set range of watchdog. If the laser does not receive comm
unication from the control computer within the specifiedc time, it turns off. If disabled, the default time is zero. Time must be between 0 and 110 seconds. """ if time < 0 or time > 110: raise ValueError('Invalid watchdog time. Choose value between 0 and 110 seconds.') self.indi.write(('WATC ' + str(time) + '\r').encode()) def set_baud(self, baud_indi=9600): """Sets baudrate of laser. At power-up, baudrate is always 9600.""" self.indi.write(('BAUD ' + str(baud_indi) + '\r').encode()) def get_amp_setting(self): """Queries amplifier PFN command setting in percent""" self.indi.write('READ:APFN?\r'.encode()) return self.indi.readline().decode() def get_amp_power(self): """Queries amplifier PFN monitor in percent (what PFN power supply is actually doing)""" self.indi.write('READ:AMON?\r'.encode()) return self.indi.readline().decode() def get_osc_setting(self): """Queries oscillator PFN command setting in percent""" self.indi.write('READ:OPFN?\r'.encode()) return self.indi.readline().decode() def get_osc_power(self): """Queries oscillator PFN monitor in percent (what PFN power supply is actually doing)""" self.indi.write('READ:OMON?\r'.encode()) return self.indi.readline().decode() def get_qsw_adv(self): """Queries and returns the current Q-Switch Advanced Sync setting""" self.indi.write('READ:QSWADV?\r'.encode()) return self.indi.readline().decode() def get_shots(self): """Queries and returns the number of shots""" self.indi.write('SHOT?\r'.encode()) return self.indi.readline().decode() def get_trig_rate(self): """Queries and returns the lamp trigger rate (unless lamp trigger source is external""" self.indi.write('READ:VAR?\r'.encode()) return self.indi.readline().decode() def set_osc_power(self, percent=0): """set the Oscillator PFN voltage as a percentage of factory full scale""" self.indi.write(('OPFN ' + str(percent) + '\r').encode()) def set_amp_power(self, percent=0): """set the PFN Amplifier voltage as a percentage of factory full scale""" self.indi.write(('APFN ' + str(percent) + '\r').encode()) def get_status(self): """Returns the laser status. Result is a list with entries of the form: [bit, error], where "bit" is the bit of the status byte, and "error" is a text description of the error. """ self.indi.write('*STB?\r'.encode()) stb_value = bin(int(self.indi.readline().decode())) stb_value = stb_value[2:] # remove 0b at beginning #print 'stb_value: ', stb_value # prints binary status byte value error_list = list() if stb_value[len(stb_value)-1] == '1': bit = '0' error = 'Laser em
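The module docstring stresses the watchdog: the laser shuts itself off if it receives no command within the watchdog period, so status should be polled regularly. A minimal hedged usage sketch built only from the methods shown above (port name, power level, and timing are assumptions):

```python
# Hedged sketch; the port, percentage, and timeout are illustrative only.
laser = QuantaRay(portINDI='/dev/ttyUSB0')
laser.open_connection()
laser.set_watchdog(10)        # laser shuts off after 10 s of silence
laser.turn_on()
laser.set_osc_power(50)       # oscillator PFN at 50% of full scale
for bit, message in laser.get_status():   # regular polling feeds the watchdog
    print(bit, message)
laser.turn_off()
laser.close_connection()
```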
Yukarumya/Yukarum-Redfoxes
testing/marionette/harness/marionette_harness/tests/unit/test_position.py
Python
mpl-2.0
664
0
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from marionette_driver.by import By from marionette_harness import MarionetteTestCase class TestPosition(MarionetteTestCase): def test_should_get_element_position_back(self): test_url = self.marionette.absolute_url('rectangles.html') self.marionette.navigate(test_url)
r2 = self.marionette.find_element(By.ID, "r2") location = r2.re
ct self.assertEqual(11, location['x']) self.assertEqual(10, location['y'])
JohnVinyard/zounds
zounds/learn/test_meanstd.py
Python
mit
1,301
0
import unittest2 from zounds.util import simple_in_memory_settings from .preprocess import MeanStdNormalization, PreprocessingPipeline import featureflow as ff import numpy as np class MeanStdTests(unittest2.TestCase): def _forward_backward(self, sha
pe): @simple_in_memory_settings class Model(ff.BaseModel): meanstd = ff.PickleFeature( MeanStdNormalization, store=False) pipeline = ff.PickleFeature(
PreprocessingPipeline, needs=(meanstd,), store=True) training = np.random.random_sample((100,) + shape) _id = Model.process(meanstd=training) model = Model(_id) data_shape = (10,) + shape data = np.random.random_sample(data_shape) result = model.pipeline.transform(data) self.assertEqual(data_shape, result.data.shape) inverted = result.inverse_transform() self.assertEqual(inverted.shape, data.shape) np.testing.assert_allclose(inverted, data) def test_can_process_1d(self): self._forward_backward((9,)) def test_can_process_2d(self): self._forward_backward((3, 4)) def test_can_process_3d(self): self._forward_backward((5, 4, 7))
smrmkt/online_learning_algorithms
exec_cw.py
Python
bsd-3-clause
1,188
0.000842
#!/usr/bin/env python #-*-coding:utf-8-*- from evaluator import Evaluator from loader import Loader import matplotlib.pyplot as plt from confidence_weighted import ConfidenceWeighted def graph_plot(plt_obj, show=False): plt_obj.ylim(0, 1) plt_obj.xlabel("Number of trials") plt_obj.ylabel("Accuracy") plt_obj.legend(["CW", "CW1", "CW2"], loc="lower right") if show is True: plt_obj.show() else: plt_obj.figure() if __name__ == '__main__': # construct passi
ve-aggressive model cw = list() cw.append(ConfidenceWeighted(123)) cw.append(ConfidenceWeighted(123, 0.30)) cw.append(ConfidenceWeighted(123, 0.50)) # training phase loader = Loader('a1a', 123, 30956, 1605) y_vec, feats_vec = loader.load_train()

for i in range(len(cw)): evaluator = Evaluator(cw[i], y_vec, feats_vec) evaluator.update() plt.plot(evaluator.accuracy) graph_plot(plt) # test phase y_vec, feats_vec = loader.load_test() for i in range(len(cw)): evaluator = Evaluator(cw[i], y_vec, feats_vec) evaluator.predict() plt.plot(evaluator.accuracy) graph_plot(plt, show=True)
rika/dynamic-provisioning
statistics.py
Python
gpl-3.0
3,837
0.011207
#!/usr/bin/env python # coding: utf-8 import os import csv from schedule_entry import EntryStatus from machine import MachineStatus def dump_stat(path, data, headers): with open(path, 'w') as out: csv_out = csv.writer(out) csv_out.writerow(headers) for row in data: csv_out.writerow(row) class Statistics(): def __init__(self): self.numbers = [] self.scheds = [] self.entries = [] self.durations = [] def snapshot(self, timestamp, entries, machines): # Number of jobs in scheduled/execution njs = len([e for e in entries if e.status == EntryStatus.scheduled]) nje = len([e for e in entries if e.status == EntryStatus.executing]) # Number of machines allocating/running nma = len([m for m in machines if m.status == MachineStatus.allocating]) nmr = len([m for m in machines if m.status == MachineStatus.running]) self.numbers.append((timestamp, njs, nje, nma, nmr)) def schedshot(self, provisioner): self.scheds.append((provisioner.timestamp, provisioner.budget, provisioner.cost_pred, provisioner.wf_end)) def jobs(self, entries): d = {} for e in entries: if e.host != None: host_id = e.host.id condor_slot = e.host.condor_slot else: host_id = condor_slot = None if e.job != None: wf_id = e.job.wf_id dag_job_id = e.job.dag_job_id else: wf_id = dag_job_id = None
for event in e.log.keys(): if e.log[event]: self.entries.append((host_id, condor_slot, wf_id, dag_job_id, e.condor_id, event, e.log[event])) if dag_job_id and 'EXECUTE' in e.log.keys() and 'JOB_TERMINATED' in e.log.keys() and 'SUBMIT' in e.log.keys(): parts = dag_job_id.split('_') if len(parts) == 2: jt = parts[0] else: jt = '_'.join(parts[:2])
d[jt] = [ (d[jt][0] if jt in d.keys() else 0) +1, (d[jt][1] if jt in d.keys() else 0) +(e.log['JOB_TERMINATED'] - e.log['EXECUTE']).total_seconds(), (d[jt][2] if jt in d.keys() else 0) +(e.log['EXECUTE'] - e.log['SUBMIT']).total_seconds(), (d[jt][3] if jt in d.keys() else 0) +(e.log['JOB_TERMINATED'] - e.log['SUBMIT']).total_seconds(), ] for jt in d.keys(): self.durations.append((jt, d[jt][1]*1.0 / d[jt][0], d[jt][2]*1.0 / d[jt][0], d[jt][3]*1.0 / d[jt][0], d[jt][0])) def dump(self): home = os.path.expanduser('~') directory = os.path.join(home, '.dynamic_provisioning') if not os.path.exists(directory): os.makedirs(directory) print 'Writing statistics in ' + str(directory) path = os.path.join(directory, 'numbers.csv') headers = ['timestamp','n_jobs_s','n_jobs_e','n_machines_a','n_machines_r'] dump_stat(path, self.numbers, headers) path = os.path.join(directory, 'budget.csv') headers = ['timestamp', 'budget', 'cost_prediction', 'wf_end'] dump_stat(path, self.scheds, headers) path = os.path.join(directory, 'jobs.csv') headers = ['host', 'slot', 'workflow', 'dag_job_id','condor_id', 'event', 'timestamp'] dump_stat(path, self.entries, headers) path = os.path.join(directory, 'durations.csv') headers = ['job', 'execute_time', 'queue_time', 'total_time', 'n'] dump_stat(path, self.durations, headers)
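`dump_stat` is the one generic piece here: it writes a header row, then one CSV row per tuple. For instance (path and values invented, but the column schema matches the `numbers.csv` written by `dump()`):

```python
# Invented path/values, matching the 'numbers.csv' schema used in dump().
dump_stat('/tmp/numbers.csv',
          [(0, 2, 1, 0, 3), (60, 1, 2, 1, 2)],
          ['timestamp', 'n_jobs_s', 'n_jobs_e', 'n_machines_a', 'n_machines_r'])
```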
IBMStreams/streamsx.topology
test/python/topology/py36_types.py
Python
apache-2.0
6,604
0.011508
# coding=utf-8 # Licensed Materials - Property of IBM # Copyright IBM Corp. 2019 # Separated test code with Python 3.6 syntax. import typing import decimal from streamsx.spl.types import int64 class NTS(typing.NamedTuple): x: int msg: str class NamedTupleBytesSchema(typing.NamedTuple): idx: str msg: bytes flag: bool oidx: typing.Optional[str] = None omsg: typing.Optional[bytes] = None oflag: typing.Optional[bool] = None class NamedTupleNumbersSchema2(typing.NamedTuple): i64: int f64: float d128: decimal.Decimal c64: complex si64: typing.Set[int] oi64: typing.Optional[int] = None of64: typing.Optional[float] = None od128: typing.Optional[decimal.Decimal] = None oc64: typing.Optional[complex] = None omi64li64: typing.Optional[typing.Mapping[int,typing.List[int]]] = None class NamedTupleNumbersSchema(typing.NamedTuple): i64: int f64: float d128: decimal.Decimal c64: complex oi64: typing.Optional[int] = None of64: typing.Optional[float] = None od128: typing.Optional[decimal.Decimal] = None oc64: typing.Optional[complex] = None omi64li64: typing.Optional[typing.Mapping[int,typing.List[int]]] = None #tuple<float64 start_time, float64 end_time, float64 confidence> class SpottedSchema(typing.NamedTuple): start_time: float end_time: float confidence: float class NamedTupleSetOfListofTupleSchema(typing.NamedTuple): slt: typing.Set[typing.List[SpottedSchema]] #tuple<map<rstring, tuple<float64 start_time, float64 end_time, float64 confidence>> keywords_spotted> class NamedTupleMapWithTupleSchema(typing.NamedTuple): keywords_spotted: typing.Mapping[str,SpottedSchema] class NamedTupleMapWithListTupleSchema(typing.NamedTuple): keywords_spotted: typing.Mapping[str,typing.List[SpottedSchema]] class NamedTupleListOfTupleSchema(typing.NamedTuple): spotted: typing.List[SpottedSchema] #tuple<rstring str, tuple<float64 start_time, float64 end_time, float64 confidence> spotted> class NamedTupleNestedTupleSchema(typing.NamedTuple): key: str spotted: SpottedSchema #tuple<int64 i64, list<tuple<rstring str, tuple<float64 start_time, float64 end_time, float64 confidence> spotted>> spottedList> class NamedTupleListOfNestedTupleSchema(typing.NamedTuple): i64: int spottedList: typing.List[NamedTupleNestedTupleSchema] #tuple<rstring s1, tuple<int64 i64, list<tuple<rstring key, tuple<float64 start_time, float64 end_time, float64 confidence> spotted>> spottedList> tupleWList> class NamedTupleNestedList2Schema(typing.NamedTuple): s1: str tupleWList: NamedTupleListOfNestedTupleSchema #tuple<rstring s2, tuple<rstring s1, tuple<int64 i64, list<tuple<rstring key, tuple<float64 start_time, float64 end_time, float64 confidence> spotted>> spottedList> tupleWList> tupleWList2> class NamedTupleNestedList3Schema(typing.NamedTuple): s2: str tupleWList2: NamedTupleNestedList2Schema #tuple<int64 i64, map<rstring, tuple<rstring str, tuple<float64 start_time, float64 end_time, float64 confidence> spotted>> spotted> class NamedTupleMapOfNestedTupleSchema(typing.NamedTuple): i64: int spottedMap: typing.Mapping[str,NamedTupleNestedTupleSchema] #tuple<rstring s1, tuple<int64 i64, map<rstring, tuple<rstring key, tuple<float64 start_time, float64 end_time, float64 confidence> spotted>> spottedMap> tupleWMap> class NamedTupleNestedMap2Schema(typing.NamedTuple): s1: str tupleWMap: NamedTupleMapOfNestedTupleSchema #tuple<rstring s2, tuple<rstring s1, tuple<int64 i64, map<rstring, tuple<rstring key, tuple<float64 start_time, float64 end_time, float64 confidence> spotted>> spottedMap> tupleWMap> t
upleWMap2> class NamedTupleNestedMap3Schema(typing.NamedTuple): s2: str tupleWMap2: NamedTupleNestedMap2Schema class TestSchema(typing.NamedTuple): flag: bool i64: int class ContactsSchema(typing.NamedTuple): mail: str phone: str nested_tuple: TestSchema class AddressSchema(typing.NamedTuple): street: str city: str contacts: ContactsSchema class PersonSchema(typing.NamedTuple):
name: str age: int address: AddressSchema #tuple<int64 x_coord, int64 y_coord> class Point2DSchema(typing.NamedTuple): x_coord: int y_coord: int #tuple<int64 x_coord, int64 y_coord, int64 z_coord> class Point3DSchema(typing.NamedTuple): x_coord: int y_coord: int z_coord: int #tuple<tuple<int64 x_coord, int64 y_coord> center, int64 radius> class CircleSchema(typing.NamedTuple): center: Point2DSchema radius: float #tuple<float64 radius, boolean has_rings> class CircleRadiusSchema(typing.NamedTuple): radius: float has_rings: bool #tuple<tuple<int64 x_coord, int64 y_coord, int64 z_coord> center, int64 radius , int64 radius2> class DonutSchema(typing.NamedTuple): center: Point3DSchema radius: int radius2: int rings: typing.List[CircleRadiusSchema] #tuple<tuple<tuple<int64 x_coord, int64 y_coord> center, radius int64> circle, # tuple<tuple<int64 x_coord, int64 y_coord, int64 z_coord> center, int64 radius , int64 radius2> torus> class TripleNestedTupleAmbiguousAttrName(typing.NamedTuple): circle: CircleSchema # contains 'center' as tuple attribute torus: DonutSchema # contains also 'center' as a different tuple type attribute, contains 'rings' attribute rings: typing.List[CircleSchema] # rings with nested (anonymous C++ type) #tuple<int64 int1, map<string, tuple<int64 x_coord, int64 y_coord>> map1> class TupleWithMapToTupleAttr1(typing.NamedTuple): int1: int map1: typing.Mapping[str,Point2DSchema] #tuple<int64 int2, map<string, tuple<int64 int1, map<rstring, tuple<int64 x_coord, int64 y_coord>> map1>> map2> # This schema contains map attributes at different nesting levels with different attribute names and different Value types class TupleWithMapToTupleWithMap(typing.NamedTuple): int2: int map2: typing.Mapping[str,TupleWithMapToTupleAttr1] #tuple<int64 int1, map<string, tuple<int64 int1, map<rstring, tuple<int64 x_coord, int64 y_coord>> map1>> map1> # This schema contains map attributes at different nesting levels with equal map attribute name (map1), but different Value types class TupleWithMapToTupleWithMapAmbigousMapNames(typing.NamedTuple): int1: int map1: typing.Mapping[str,TupleWithMapToTupleAttr1] #tuple<int64 int1, map<string, tuple<int64 x_coord, int64 y_coord, int64 z_coord>> map1> #class TupleWithMapToTupleAttr2(typing.NamedTuple): # int1: int # map1: typing.Mapping[str,Point3DSchema]
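Each `NamedTuple` above mirrors the SPL tuple type quoted in the comment preceding it. Constructing one nested value shows the shape (the field values are arbitrary):

```python
# Building nested values to show the SPL <-> NamedTuple correspondence.
spotted = SpottedSchema(start_time=0.5, end_time=1.25, confidence=0.9)
nested = NamedTupleNestedTupleSchema(key='hello', spotted=spotted)
as_map = NamedTupleMapWithTupleSchema(keywords_spotted={'hello': spotted})
print(nested.spotted.confidence, as_map.keywords_spotted['hello'].end_time)
```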
306777HC/libforensics
code/lf/win/shell/link/__init__.py
Python
lgpl-3.0
1,769
0.000565
# Copyright 2010 Michael Murr # # This file is part of LibForensics. # # LibForensics is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # LibForensics is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with LibForensics. If not, see <http://www.gnu.org/licenses/>. """Windows shell link files""" from lf.win.shell.link.objects import ( ShellLink, FileAttributes, LinkFlags, ShellLinkHeader, StringData, LinkInfo, VolumeID, CNRL, ExtraDataBlock, ConsoleProps, ConsoleFEProps, DarwinProps, ExpandableStringsDataBlock, EnvironmentProps, IconEnvironmentProps, KnownFolderProps, PropertyStoreProps, ShimProps, SpecialFolderProps, DomainRelativeObjId, TrackerProps, VistaAndAboveIDListProps, TerminalBlock, ExtraDataBlockFactory, StringDataSet ) __docformat__ = "restructuredtext en" __all__ = [ "ShellLink", "FileAttributes", "LinkFlags", "ShellLinkHeader", "StringData", "LinkInfo", "VolumeID", "CNRL", "ExtraDataBlock", "ConsoleProps", "ConsoleFEProps", "DarwinProps", "ExpandableStringsData
Block", "EnvironmentProps", "IconEnvironmentProps", "KnownFolderProps", "PropertyStoreProps", "ShimProps", "SpecialF
olderProps", "DomainRelativeObjId", "TrackerProps", "VistaAndAboveIDListProps", "TerminalBlock", "ExtraDataBlockFactory", "StringDataSet" ]
Trust-Code/trust-addons
trust_second_unit_of_measure/models/mrp_bom.py
Python
agpl-3.0
3,943
0
# -*- encoding: utf-8 -*- ############################################################################### # # # Copyright (C) 2015 Trustcode - www.trustcode.com.br # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################### from openerp import models, api, fields class mrp_bom(models.Model): _inherit = 'mrp.bom' def _bom_explode(self, cr, uid, bom, product, factor, properties=None, level=0, routing_id=False, previous_products=None, master_bom=None, context=None): res = super(mrp_bom, self)._bom_explode( cr, uid, bom, product, factor, properties=properties, level=level, routing_i
d=routing_id, previous_products=previous_products, master_bom=master_bom, context=context
) results = res[0] # product_lines results2 = res[1] # workcenter_lines indice = 0 for bom_line_id in bom.bom_line_ids: line = results[indice] line['largura'] = bom_line_id.largura line['comprimento'] = bom_line_id.comprimento line['unidades'] = bom_line_id.unidades indice += 1 return results, results2 class mrp_bom_line(models.Model): _inherit = 'mrp.bom.line' largura = fields.Float(string="Largura", digits=(16, 6)) comprimento = fields.Float(string="Comprimento", digits=(16, 6)) unidades = fields.Float(string="Unidades", digits=(16, 6)) @api.onchange('largura', 'comprimento', 'unidades') def compute_quantity(self): self.product_qty = (self.largura or 1) * \ (self.comprimento or 1) * (self.unidades or 1) class mrp_production_product_line(models.Model): _inherit = 'mrp.production.product.line' largura = fields.Float(string="Largura", digits=(16, 6)) comprimento = fields.Float(string="Comprimento", digits=(16, 6)) unidades = fields.Float(string="Unidades", digits=(16, 6)) class stock_move(models.Model): _inherit = 'stock.move' largura = fields.Float(string="Largura", digits=(16, 6)) comprimento = fields.Float(string="Comprimento", digits=(16, 6)) unidades = fields.Float(string="Unidades", digits=(16, 6)) class mrp_production(models.Model): _inherit = 'mrp.production' def _make_production_consume_line(self, cr, uid, line, context=None): move_id = super(mrp_production, self)\ ._make_production_consume_line( cr, uid, line, context=context) self.pool['stock.move'].write(cr, uid, move_id, {'unidades': line.unidades, 'comprimento': line.comprimento, 'largura': line.largura}) return move_id
kaeawc/django-auth-example
test/account.py
Python
mit
1,507
0.000664
# -*- coding: utf-8 -*- import uuid import random import string from test import DjangoTestCase class Account(object): def __init__(self, email=None, password=None): self.email = email self.password = password @staticmethod def create_email(): return u"some.one+%s@example.com" % uuid.uuid4().hex @staticmethod def create_password(length=20): return u"".join([random.choice(string.digits) for _ in range(length)]) class AccountTestCase(DjangoTestCase): def signup(self, email=None, password=None, password_confirmation=None): data = {} if email is not None: data[u"email"] = email if password is not None:
data[u"password"] = password if password_confirmation is not None: data[u"password_confirmation"] = password_confirmation response = self.http_post(u"/signup", data) return Account(email=email, password=password), response def login(self, email=None, password=None): data = {} if email is not None: data[u"email"] = email if password is not None: data[u"password"] = password
return self.http_post(u"/login", data) def logout(self, email=None, password=None): data = {} if email is not None: data[u"email"] = email if password is not None: data[u"password"] = password return self.http_post(u"/logout", data)
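A hedged sketch of how these helpers compose in a concrete test; the DjangoTestCase base and its http_post come from the surrounding project, while the 200 status-code assertions are illustrative assumptions about this app's views, not something the file guarantees:

class SignupLoginTestCase(AccountTestCase):

    def test_signup_then_login(self):
        email = Account.create_email()
        password = Account.create_password()
        # Sign up with matching password confirmation.
        account, response = self.signup(
            email=email, password=password, password_confirmation=password)
        self.assertEqual(response.status_code, 200)  # assumed success code
        # Log in with the same credentials.
        response = self.login(email=account.email, password=account.password)
        self.assertEqual(response.status_code, 200)  # assumed success code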
GbalsaC/bitnamiP
venv/lib/python2.7/site-packages/testtools/tests/test_content_type.py
Python
agpl-3.0
2,543
0.001573
# Copyright (c) 2008, 2012 testtools developers. See LICENSE for details. from testtools import TestCase from testtools.matchers import Equals, MatchesException, Raises from testtools.content_type import ( ContentTy
pe, JSON, UTF8_TEXT, ) class TestContentType(TestCase): def test___init___None_errors(self): raises_value_error = Raises(MatchesException(ValueError)) self.assertThat(lambda:ContentType(None, None), raises_value_error) self.assertThat(lambda:ContentType(None, "traceback"), raises_value_error) self.assertThat(lambda:ContentType("text", None), raises_value_error) def test___init___sets_ivars(self): con
tent_type = ContentType("foo", "bar") self.assertEqual("foo", content_type.type) self.assertEqual("bar", content_type.subtype) self.assertEqual({}, content_type.parameters) def test___init___with_parameters(self): content_type = ContentType("foo", "bar", {"quux": "thing"}) self.assertEqual({"quux": "thing"}, content_type.parameters) def test___eq__(self): content_type1 = ContentType("foo", "bar", {"quux": "thing"}) content_type2 = ContentType("foo", "bar", {"quux": "thing"}) content_type3 = ContentType("foo", "bar", {"quux": "thing2"}) self.assertTrue(content_type1.__eq__(content_type2)) self.assertFalse(content_type1.__eq__(content_type3)) def test_basic_repr(self): content_type = ContentType('text', 'plain') self.assertThat(repr(content_type), Equals('text/plain')) def test_extended_repr(self): content_type = ContentType( 'text', 'plain', {'foo': 'bar', 'baz': 'qux'}) self.assertThat( repr(content_type), Equals('text/plain; baz="qux"; foo="bar"')) class TestBuiltinContentTypes(TestCase): def test_plain_text(self): # The UTF8_TEXT content type represents UTF-8 encoded text/plain. self.assertThat(UTF8_TEXT.type, Equals('text')) self.assertThat(UTF8_TEXT.subtype, Equals('plain')) self.assertThat(UTF8_TEXT.parameters, Equals({'charset': 'utf8'})) def test_json_content(self): # The JSON content type represents implicitly UTF-8 application/json. self.assertThat(JSON.type, Equals('application')) self.assertThat(JSON.subtype, Equals('json')) self.assertThat(JSON.parameters, Equals({})) def test_suite(): from unittest import TestLoader return TestLoader().loadTestsFromName(__name__)
astrofrog/astropy-helpers
astropy_helpers/utils.py
Python
bsd-3-clause
6,917
0.000434
# Licensed under a 3-clause BSD style license - see LICENSE.rst import contextlib import imp import os import sys import inspect # Python 3.3's importlib caches filesystem reads for faster imports in the # general case. But sometimes it's necessary to manually invalidate those # caches so that the import system can pick up new generated files. See # https://github.com/astropy/astropy/issues/820 if sys.version_info[:2] >= (3, 3): from importlib import invalidate_caches else: invalidate_caches = lambda: None class _DummyFile(object): """A noop writeable object.""" def write(self, s): pass def flush(self): pass @contextlib.contextmanager def silence(): """A context manager that silences sys.stdout and sys.stderr.""" old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = _DummyFile() sys.stderr = _DummyFile() exception_occurred = False try: yield except: exception_occurred = True # Go ahead and clean up so that exception handling can work normally sys.stdout = old_stdout sys.stderr = old_stderr raise if not exception_occurred: sys.stdout = old_stdout sys.stderr = old_stderr if sys.platform == 'win32': import ctypes def _has_hidden_attribute(filepath): """ Returns True if the given filepath has the hidden attribute on MS-Windows. Based on a post here: http://stackoverflow.com/questions/284115/cross-platform-hidden-file-detection """ if isinstance(filepath, bytes): filepath = filepath.decode(sys.getfilesystemencoding()) try: attrs = ctypes.windll.kernel32.GetFileAttributesW(filepath) assert attrs != -1 result = bool(attrs & 2) except (AttributeError, AssertionError): result = False return result else: def _has_hidden_attribute(filepath): return False def is_path_hidden(filepath): """ Determines if a given file or directory is hidden. Parameters ---------- filepath : str The path to a file or directory Returns ------- hidden : bool Returns `True` if the file is hidden """ name = os.path.basename(os.path.abspath(filepath)) if isinstance(name, bytes): is_dotted = name.startswith(b'.') else: is_dotted = name.startswith('.') return is_dotted or _has_hidden_attribute(filepath) def walk_skip_hidden(top, onerror=None, followlinks=False): """ A wrapper for `os.walk` that skips hidden files and directories. This function does not have the parameter `topdown` from `os.walk`: the directories must always be recursed top-down when using this function. See also -------- os.walk : For a description of the parameters """ for root, dirs, files in os.walk( top, topdown=True, onerror=onerror, followlinks=followlinks): # These lists must be updated in-place so os.walk will skip # hidden directories dirs[:] = [d for d in dirs if not is_path_hidden(d)] files[:] = [f for f in files if not is_path_hidden(f)] yield root, dirs, files def write_if_different(filename, data): """Write `data` to `filename`, if the content of the file is different. Parameters ---------- filename : str The file name to be written to. data : bytes The data to be written to `filename`. """ assert isinstance(data, bytes) if os.path.exists(filename): with open(filename, 'rb') as fd: original_data = fd.read() else: original_data = None if original_data != data: with open(filename, 'wb') as fd: fd.write(data) def import_file(filename): """ Imports a module from a single file as if it doesn't belong to a particular package. """ # Specifying a traditional dot-separated fully qualified name here # results in a number of "Parent module 'astropy' not found while # handling absolute import" warnings. 
Using the same name, the # namespaces of the modules get merged together. So, this # generates an underscore-separated name which is more likely to # be unique, and it doesn't really matter because the name isn't # used directly here anyway. with open(filename, 'U') as fd: name = '_'.join( os.path.relpath(os.path.splitext(filename)[0]).split(os.sep)[1:]) return imp.load_module(name,
fd, filename, ('.py', 'U', 1)) def find_mod_objs(modname, onlylocals=False): """ Returns all the public attributes of a module referenced by name. .. note:: The returned list does *not* include subpackages or modules of `modname`, nor does it include private attributes (those that begin with '_' or are not in `__all__`). Parameters ---------- modname : str The name of the module to search. onlylocals : bool If True, o
nly attributes that are either members of `modname` OR one of its modules or subpackages will be included. Returns ------- localnames : list of str A list of the names of the attributes as they are named in the module `modname`. fqnames : list of str A list of the fully qualified names of the attributes (e.g., ``astropy.utils.misc.find_mod_objs``). For attributes that are simple variables, this is based on the local name, but for functions or classes it can be different if they are actually defined elsewhere and just referenced in `modname`. objs : list of objects A list of the actual attributes themselves (in the same order as the other arguments) """ __import__(modname) mod = sys.modules[modname] if hasattr(mod, '__all__'): pkgitems = [(k, mod.__dict__[k]) for k in mod.__all__] else: pkgitems = [(k, mod.__dict__[k]) for k in dir(mod) if k[0] != '_'] # filter out modules and pull the names and objs out ismodule = inspect.ismodule localnames = [k for k, v in pkgitems if not ismodule(v)] objs = [v for k, v in pkgitems if not ismodule(v)] # fully qualified names can be determined from the object's module fqnames = [] for obj, lnm in zip(objs, localnames): if hasattr(obj, '__module__') and hasattr(obj, '__name__'): fqnames.append(obj.__module__ + '.' + obj.__name__) else: fqnames.append(modname + '.' + lnm) if onlylocals: valids = [fqn.startswith(modname) for fqn in fqnames] localnames = [e for i, e in enumerate(localnames) if valids[i]] fqnames = [e for i, e in enumerate(fqnames) if valids[i]] objs = [e for i, e in enumerate(objs) if valids[i]] return localnames, fqnames, objs
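A small usage sketch for write_if_different as defined above (assuming this module's names are in scope): repeated writes of identical bytes leave the generated file untouched, which is the point of the helper for build tooling. The file name is invented.

import os
import tempfile

target = os.path.join(tempfile.mkdtemp(), 'generated_version.py')
write_if_different(target, b'VERSION = "1.0"\n')   # first write creates the file
before = os.path.getmtime(target)
write_if_different(target, b'VERSION = "1.0"\n')   # identical payload: no rewrite
assert os.path.getmtime(target) == before
write_if_different(target, b'VERSION = "1.1"\n')   # changed payload: rewritten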
ellipticaldoor/dfiid
project/dfiid/settings/base.py
Python
gpl-2.0
2,415
0.015735
""" Django settings for dfiid project. For more information on this file, see https://docs.djangoproject.com/en/1.8/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.8/ref/settings/ """ import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) from django.core.exceptions import ImproperlyConfigured def get_env(setting): """ Get the environment setting or return exception """ try: return os.environ[setting] except KeyError: error_msg = 'Set the %s env variable' % setting raise ImproperlyConfigured(error_msg) SECRET_KEY = get_env('SECRET_KEY') DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = ['*'] INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.sitemaps', 'nocaptcha_recaptcha', 'core', 'user', 'content', 'notify', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'dfiid.urls' WSGI_APPLICATION = 'dfiid.wsgi.application' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': get_env('DB_NAME'),
'USER': get_env('DB_USER'), 'PASSWORD': get_env('DB_PASSWORD'), 'HOST': get_env('DB_HOST'), 'PORT': get_env('DB_PORT'), } } LANGUAGE_CODE = get_env('LANGUAGE') TIME_ZONE = 'Atlantic/Canary' USE_I18N = True USE_L10N = True USE_TZ = True STATIC_URL = '/s/' STATICFILES_DIRS = ( os.path.
join(BASE_DIR, 'static'), ) STATIC_ROOT = os.path.join(BASE_DIR, 's') MEDIA_URL = '/m/' MEDIA_ROOT = os.path.join(BASE_DIR, 'm') TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')] AUTH_USER_MODEL = 'user.User' LOGIN_URL = '/login' LOGIN_REDIRECT_URL = '/' STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) NORECAPTCHA_SITE_KEY = get_env('NORECAPTCHA_SITE_KEY') NORECAPTCHA_SECRET_KEY = get_env('NORECAPTCHA_SECRET_KEY')
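A minimal check of the get_env contract defined at the top of this settings module, assuming Django is installed: missing variables fail fast with ImproperlyConfigured instead of surfacing later as a bare KeyError. The variable names below are hypothetical.

import os

os.environ['DEMO_SETTING'] = 'value'
assert get_env('DEMO_SETTING') == 'value'
try:
    get_env('MISSING_SETTING')
except ImproperlyConfigured as err:
    print(err)  # "Set the MISSING_SETTING env variable"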
minimalparts/Tutorials
FruitFly/MEN.py
Python
mit
989
0.034378
#Evaluate semantic space against MEN dataset import sys import u
tils from scipy import stats import numpy as np from math import sqrt #Note: this is scipy's spearman, without tie adjustment def spearman(x,y): return stats.spearmanr(x, y)[0] def readMEN(annotation_file): pairs=[] humans=[] f=open(annotation_file,'r') for l in f: l=l.rstrip('\n') items=l.split() pairs.append((items[0],items[1])) humans.append(float(items[2])) f.close() return pairs, humans def compute_men_spearman(dm_dict, annotation_file):
pairs, humans=readMEN(annotation_file) system_actual=[] human_actual=[] count=0 for i in range(len(pairs)): human=humans[i] a,b=pairs[i] if a in dm_dict and b in dm_dict: cos=utils.cosine_similarity(dm_dict[a],dm_dict[b]) system_actual.append(cos) human_actual.append(human) count+=1 sp = spearman(human_actual,system_actual) return sp,count
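A toy end-to-end run of compute_men_spearman (hypothetical data; assumes utils.cosine_similarity takes two vectors, as the call above implies). The annotation file follows the "word1 word2 score" format that readMEN parses:

import numpy as np

dm_dict = {
    'cat': np.array([1.0, 0.1]),
    'dog': np.array([0.9, 0.2]),
    'car': np.array([0.1, 1.0]),
}
with open('/tmp/men_toy.txt', 'w') as f:
    f.write('cat dog 45.0\ncat car 5.0\ndog car 8.0\n')

rho, n_pairs = compute_men_spearman(dm_dict, '/tmp/men_toy.txt')
print(rho, n_pairs)  # 1.0 3 -- cosine ranks agree with the human ranks here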
icarito/sugar
src/jarabe/journal/volumestoolbar.py
Python
gpl-3.0
13,211
0
# Copyright (C) 2007, 2011, One Laptop Per Child # Copyright (C) 2014, Ignacio Rodriguez # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import logging import os import statvfs from gettext import gettext as _ from gi.repository import GObject from gi.repository import Gio from gi.repository import GLib from gi.repository import Gtk from gi.repository import Gdk import cPickle import xapian import json import tempfile import shutil from sugar3.graphics.radiotoolbutton import RadioToolButton from sugar3.graphics.palette import Palette from sugar3.graphics import style from sugar3 import env from sugar3 import profile from jarabe.journal import model from jarabe.journal.misc import get_mount_icon_name from jarabe.journal.misc import get_mount_color from jarabe.view.palettes import VolumePalette _JOURNAL_0_METADATA_DIR = '.olpc.store' def _get_id(document): """Get the ID for the document in the xapian database.""" tl = document.termlist() try: term = tl.skip_to('Q').term if len(term) == 0 or term[0] != 'Q': return None return term[1:] except StopIteration: return None def _convert_entries(root): """Convert entries written by the datastore version 0. The metadata and the preview will be written using the new scheme for writing Journal entries to removable storage devices. - entries that do not have an associated file are not converted. 
- if an entry has no title we set it to Untitled and rename the file accordingly, taking care of creating a unique filename """ try: database = xapian.Database(os.path.join(root, _JOURNAL_0_METADATA_DIR, 'index')) except xapian.DatabaseError: logging.exception('Convert DS-0 Journal entries: error reading db: %s', os.path.join(root, _JOURNAL_0_METADATA_DIR, 'index')) return metadata_dir_path = os.path.join(root, model.JOURNAL_METADATA_DIR) if not os.path.exists(metadata_dir_path): try: os.mkdir(metadata_dir_path) except EnvironmentError: logging.error('Convert DS-0 Journal entries: ' 'error creating the Journal metadata directory.') return for posting_item in database.postlist(''): try: document = database.get_document(posting_item.docid) except xapian.DocNotFoundError, e: logging.debug('Convert DS-0 Journal entries: error getting ' 'document %s: %s', posting_item.docid, e) continue _convert_entry(root, document) def _convert_entry(root, document): try: metadata_loaded = cPickle.loads(document.get_data()) except cPickle.PickleError, e: logging.debug('Convert DS-0 Journal entries: ' 'error converting metadata: %s', e) return if not ('activity_id' in metadata_loaded and 'mime_type' in metadata_loaded and 'title' in metadata_loaded): return metadata = {} uid = _get_id(document) if uid is None: return for key, value in metadata_loaded.items(): metadata[str(key)] = str(value[0]) if 'uid' not in metadata: metadata['uid'] = uid filename = metadata.pop('filename', None) if not filename: return if not os.path.exists(os.path.join(root, filename)): return if not metadata.get('title'): metadata['title'] = _('Untitled') fn = model.get_file_name(metadata['title'], metadata['mime_type']) new_filename = model.get_unique_file_name(root, fn) os.rename(os.path.join(root, filename), os.path.join(root, new_filename)) filename = new_filename preview_path = os.path.join(root, _JOURNAL_0_METADATA_DIR, 'preview', uid) if os.path.exists(preview_path): preview_fname = filename + '.preview' new_preview_path = os.path.join(root, model.JOURNAL_METADATA_DIR, preview_fname) if not os.path.exists(new_preview_path): shutil.copy(preview_path, new_preview_path) metadata_fname = filename + '.metadata' metadata_path = os.path.join(root, model.JOURNAL_METADATA_DIR, metadata_fname) if not os.path.exists(metadata_path): (fh, fn) = tempfile.mkstemp(dir=root) os.write(fh, json.dumps(metadata)) os.close(fh) os.rename(fn, metadata_path) logging.debug('Convert DS-0 Journal entries: entry converted: ' 'file=%s metadata=%s', os.path.join(root, filename), metadata) class VolumesToolbar(Gtk.Toolbar): __gtype_name__ = 'VolumesToolbar' __gsignals__ = { 'volume-changed': (GObject.SignalFlags.RUN_FIRST, None, ([str])), 'volume-error': (GObject.SignalFlags.RUN_FIRST, None, ([str, str])), } def __init__(self): Gtk.Toolbar.__init__(self) self._mount_added_hid = None self._mount_removed_hid = None button = JournalButton() button.connect('toggled', self._button_toggled_cb) self.insert(button, 0) button.show() self._volume_buttons = [button] self.connect('destroy', self.__destroy_cb) GLib.idle_add(self._set_up_volumes) def __destroy_cb(self, widget): volume_monitor = Gio.VolumeMonitor.get() volume_monitor.disconnect(self._mount_added_hid) volume_monitor.disconnect(self._mount_removed_hid) def _set_up_volumes(self): self._set_u
p_documents_button() volume_monitor = Gio.VolumeMonitor.get() self._mount_added_hid = volume_monitor.connect('mount-added', self.__mount_added_cb) self._mount_removed_hid = volume_monitor.connect( 'mount-removed', self.__mount_removed_cb) for mount in volume_monitor.get_mounts(): self._add_button(mount) def _set_up_do
cuments_button(self): documents_path = model.get_documents_path() if documents_path is not None: button = DocumentsButton(documents_path) button.props.group = self._volume_buttons[0] button.set_palette(Palette(_('Documents'))) button.connect('toggled', self._button_toggled_cb) button.show() position = self.get_item_index(self._volume_buttons[-1]) + 1 self.insert(button, position) self._volume_buttons.append(button) self.show() def __mount_added_cb(self, volume_monitor, mount): self._add_button(mount) def __mount_removed_cb(self, volume_monitor, mount): self._remove_button(mount) def _add_button(self, mount): logging.debug('VolumeToolbar._add_button: %r', mount.get_name()) if os.path.exists(os.path.join(mount.get_root().get_path(), _JOURNAL_0_METADATA_DIR)): logging.debug('Convert DS-0 Journal entries: starting conversion') GLib.idle_add(_convert_entries, mount.get_root().get_path()) button = VolumeButton(mount) button.props.group = self._volume_buttons[0] button.connect('toggled', self._button_toggled_cb) button.c
timm/timmnix
pypy3-v5.5.0-linux64/lib-python/3/lib2to3/pgen2/tokenize.py
Python
mit
19,320
0.001967
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation. # All rights reserved. """Tokenization help for Python programs. generate_tokens(readline) is a generator that breaks a stream of text into Python tokens. It accepts a readline-like method which is called repeatedly to get the next line of input (or "" for EOF). It generates 5-tuples with these members: the token type (see token.py) the token (a string) the starting (row, column) indices of the token (a 2-tuple of ints) the ending (row, column) indices of the token (a 2-tuple of ints) the original line (string) It is designed to match the working of the Python tokenizer exactly, except that it produces COMMENT tokens for comments and gives type OP for all operators Older entry points tokenize_loop(readline, tokeneater) tokenize(readline, tokeneater=printtoken) are the same, except instead of generating tokens, tokeneater is a callback function to which the 5 fields described above are passed as 5 arguments, each time a new token is found.""" __author__ = 'Ka-Ping Yee <ping@lfw.org>' __credits__ = \ 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro' import string, re from codecs import BOM_UTF8, lookup from lib2to3.pgen2.token import * from . import token __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize", "generate_tokens", "untokenize"] del token try: bytes except NameError: # Support bytes type in Python <= 2.5, so 2to3 turns itself into # valid Python 3 code. bytes = str def group(*choices): return '(' + '|'.join(choices) + ')' def any(*choices): return group(*choices) + '*' def maybe(*choices): return group(*choices) + '?' Whitespace = r'[ \f\t]*' Comment = r'#[^\r\n]*' Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment) Name = r'[a-zA-Z_]\w*' Binnumber = r'0[bB][01]*' Hexnumber = r'0[xX][\da-fA-F]*[lL]?' Octnumber = r'0[oO]?[0-7]*[lL]?' Decnumber = r'[1-9]\d*[lL]?' Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber) Exponent = r'[eE][-+]?\d+' Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent) Expfloat = r'\d+' + Exponent Floatnumber = group(Pointfloat, Expfloat) Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]') Number = group(Imagnumber, Floatnumber, Intnumber) # Tail end of ' string. Single = r"[^'\\]*(?:\\.[^'\\]*)*'" # Tail end of " string. Double = r'[^"\\]*(?:\\.[^"\\]*)*"' # Tail end of ''' string. Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" # Tail end of """ string. Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""') # Single-line ' or " string. String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') # Because of leftmost-then-longest match semantics, be sure to put the # longest operators first (e.g., if = came before ==, == would get # recognized as two instances of =). Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", r"//=?", r"->", r"[+\-*/%&|^=<>]=?", r"~") Bracket = '[][(){}]' Special = group(r'\r?\n', r'[:;.,`@]') Funny = group(Operator, Bracket, Special) PlainToken = group(Number, Funny, String, Name) Token = Ignore + PlainToken # First (or only) line of ' or " string. 
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + group("'", r'\\\r?\n'), r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + group('"', r'\\\r?\n')) PseudoExtras = group(r'\\\r?\n', Comment, Triple) PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) tokenprog, pseudoprog, single3prog, double3prog = list(map( re.compile, (Token, PseudoToken, Single3, Double3))) endprogs = {"'": re.compile(Single), '"': re.compile(Double), "'''": single3prog, '"""': double3prog, "r'''": single3prog, 'r"""': double3prog, "u'''": single3prog, 'u"""': double3prog, "b'''": single3prog, 'b"""': double3prog, "ur'''": single3prog, 'ur"""': double3prog, "br'''": single3prog, 'br"""': double3prog, "R'''": single3prog, 'R"""': double3prog, "U'''": single3prog, 'U"""': double3prog, "B'''": single3prog, 'B"""': double3prog, "uR'''": single3prog, 'uR"""': double3prog, "Ur'''": single3prog, 'Ur"""': double3prog, "UR'''": single3prog, 'UR"""': double3prog, "bR'''": single3prog, 'bR"""': double3prog, "Br'''": single3prog, 'Br"""': double3prog, "BR'''": single3prog, 'BR"""': double3prog, 'r': None, 'R': None, 'u': None, 'U': None, 'b': None, 'B': None} triple_quoted = {} for t in ("'''", '"""', "r'''", 'r"""', "R'''", 'R"""', "u'''", 'u"""', "U'''", 'U"""', "b'''", 'b"""', "B'''", 'B"""', "ur'''", 'ur"""', "Ur'''", 'Ur"""', "uR'''", 'uR"""', "UR'''", 'UR"""', "br'''", 'br"""', "Br'''", 'Br"""', "bR'''", 'bR"""', "BR'''", 'BR"""',): triple_quoted[t] = t single_quoted = {} for t in ("'", '"', "r'", 'r"', "R'", 'R"', "u'", 'u"', "U'", 'U"', "b'", 'b"', "B'", 'B"', "ur'", 'ur"', "Ur'", 'Ur"', "uR'", 'uR"', "UR'", 'UR"', "br'", 'br"', "Br'", 'Br"', "bR'", 'bR"', "BR'", 'BR"', ): single_quoted[t] = t tabsize = 8 class TokenError(Exception): pass class StopTokenizing(Exception): pass def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line): # for testing (srow, scol) = xxx_todo_changeme (erow, ecol) = xxx_todo_changeme1 print("%d,%d-%d,%d:\t%s\t%s" % \ (srow, scol, erow, ecol, tok_name[type], repr(token))) def tokenize(readline, tokeneater=printtoken): """ The tokenize() function accepts two parameters: one representing the input stream, and one providing an output mechanism for tokenize(). The first parameter, readline, must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. The second parameter, tokeneater, must also be a callable object. It is called once for each token, with five arguments, corresponding to the tuples generated by generate_tokens(). """ try: tokenize_loop(readline, tokeneater) except StopTokenizing: pass # backwards compatible interface def tokenize_loop(readline, tokeneater): for token_info in generate_tokens(readline): tokeneater(*token_info) class Untokenizer: def __init__(self): self.tokens = [] self.prev_row = 1 self.prev_col = 0 def add_whitespace(self, start): row, col = start assert row <= self.prev_row col_offset = col - self.prev_col if col_offset: self.tokens.append(" " * col_offset) def untokenize(self, iterable): for t in iterable: if len(t) == 2: self.compat(t, iterable) break tok_type, token, start, end, line = t self.add_whitespace(start) self.tokens.append(token) self.prev_row, self.pre
v_col = end if tok_type in (NEWLINE, NL): self.prev_row += 1 self.prev_col = 0 return "".join(self.tokens) def compat(self, token, iterable): startline = False indents = [] toks_append = self.tokens.append
toknum, tokval = token if toknum in (NAME, NUMBER): tokval += ' ' if toknum in (NEWLINE, NL): startline = True for tok in iterable: toknum, tokval = tok[:2] if toknum in (NAME, NUMBER): tokval += ' ' if toknum == INDENT: indents.append(tokval) continue elif toknum == DEDENT: indents.pop() continue elif toknum in (NEWLINE, NL):
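The docstring's contract in action: a sketch feeding generate_tokens a readline-style callable (this module's generate_tokens and tok_name are assumed to be in scope). Each yielded item is (type, string, (srow, scol), (erow, ecol), line):

import io

source = "x = 1 + 2\n"
for tok_type, tok_str, start, end, line in generate_tokens(io.StringIO(source).readline):
    # e.g. NAME 'x' (1, 0) (1, 1), then OP '=' (1, 2) (1, 3), NUMBER '1', ...
    print(tok_name[tok_type], repr(tok_str), start, end)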
hellsgate1001/graphs
hack_plot/migrations/0006_sshhackip_located.py
Python
mit
444
0
# -*- coding: u
tf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('hack_plot', '0005_auto_20150505_1940'), ] operations = [ migrations.AddField( model_name='sshhackip', name='located', field=models.BooleanField(default=Fa
lse), preserve_default=True, ), ]
doughyde/fitbit-cal-sync
fitbit-cal-sync.py
Python
gpl-2.0
149
0.006711
from trackers.fitbi
t_tracker import FitbitTracker __aut
hor__ = 'doughyde' # FitBit connection f = FitbitTracker() f.authenticate() f.get_devices()
lakshmi-kannan/st2
st2tests/st2tests/action_aliases.py
Python
apache-2.0
4,628
0.003025
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from st2common.content.loader import ContentPackLoader from st2common.exceptions.content import ParseException from st2common.bootstrap.aliasesregistrar import AliasesRegistrar from st2common.models.utils.action_alias_utils import extract_parameters_for_action_alias_db from st2common.models.utils.action_alias_utils import extract_parameters from st2tests.pack_resource import BasePackResourceTestCase __all__ = [ 'BaseActionAliasTestCase' ] class BaseActionAliasTestCase(BasePackResourceTestCase): """ Base class for testing action aliases. """ action_alias_name = None action_alias_db = None def setUp(self): super(BaseActionAliasTestCase, self).setUp() if not self.action_alias_name: raise ValueError('"action_alias_name" class attribute needs to be provided') self.action_alias_db = self._get_action_alias_db_by_name(name=self.action_alias_name) def assertCommandMatchesExactlyOneFormatString(self, format_strings, command): """ Assert that the provided command matches exactly one format string from the provided list. """ matched_format_strings = [] for format_string in format_strings: try: extract_parameters(format_str=format_string, param_stream=command) except ParseException: continue matched_format_strings.append(format_string) if len(matched_format_strings) == 0: msg = ('Command "%s" didn\'t match any of the provided format strings' % (command)) raise Assertion
Error(msg) elif len(matched_format_strings) > 1: msg = ('Command "%s" matched multiple format strings: %s' % (command, ', '.join(matched_format_strings))) raise AssertionError(msg) def assertExtractedParametersMatch(self, f
ormat_string, command, parameters): """ Assert that the provided command matches the format string. In addition to that, also assert that the parameters which have been extracted from the user input (command) also match the provided parameters. """ extracted_params = extract_parameters_for_action_alias_db( action_alias_db=self.action_alias_db, format_str=format_string, param_stream=command) if extracted_params != parameters: msg = ('Extracted parameters from command string "%s" against format string "%s"' ' didn\'t match the provided parameters: ' % (command, format_string)) # Note: We intercept the exception so we can include a diff for the dictionaries try: self.assertEqual(extracted_params, parameters) except AssertionError as e: msg += str(e) raise AssertionError(msg) def _get_action_alias_db_by_name(self, name): """ Retrieve ActionAlias DB object for the provided alias name. """ base_pack_path = self._get_base_pack_path() _, pack = os.path.split(base_pack_path) pack_loader = ContentPackLoader() registrar = AliasesRegistrar(use_pack_cache=False) aliases_path = pack_loader.get_content_from_pack(pack_dir=base_pack_path, content_type='aliases') aliases = registrar._get_aliases_from_pack(aliases_dir=aliases_path) for alias_path in aliases: action_alias_db = registrar._get_action_alias_db(pack=pack, action_alias=alias_path) if action_alias_db.name == name: return action_alias_db raise ValueError('Alias with name "%s" not found' % (name))
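A hypothetical concrete subclass showing how the two assertions above are meant to be used; the "pack_greet" alias and its format strings are invented for illustration:

class GreetActionAliasTestCase(BaseActionAliasTestCase):
    action_alias_name = 'pack_greet'  # hypothetical alias registered by the pack

    def test_command_matches_exactly_one_format_string(self):
        format_strings = ['greet {{name}}', 'wave at {{name}}']
        self.assertCommandMatchesExactlyOneFormatString(
            format_strings=format_strings, command='greet Alice')

    def test_extracted_parameters_match(self):
        self.assertExtractedParametersMatch(
            format_string='greet {{name}}',
            command='greet Alice',
            parameters={'name': 'Alice'})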
kevinkepp/look-at-this
run_trainer.py
Python
mit
121
0.008264
fro
m sft.runner.Trainer import Trainer import sft.config.exp if __name__ == "__main__": Trainer().r
un(sft.config.exp)
putrasattvika/ssidstat
ssidstat/common/models/ssid_traffic_history.py
Python
apache-2.0
3,044
0.031866
import time import sqlite3 from base_model import BaseModel from datetime import datetime from contextlib import contextmanager class SSIDTrafficHistory(BaseModel): def __init__(self, dbfile, table_name, time_limit): super(SSIDTrafficHistory, self).__init__(dbfile, table_name) self.time_limit = time_limit def init_db(self): with self.db_cursor() as c: c.execute(''' CREATE TABLE IF NOT EXISTS {} ( timestamp integer, adapter text, ssid text, rx integer, tx integer, PRIMARY KEY (timestamp, adapter, ssid) ) '''.format(self.table_name)) def truncate_time(self, timestamp): raise NotImplementedError def query(self, adapter, ssid, timestamp=None): if not timestamp: timestamp = time.time() with self.db_cursor(commit=False) as c: query = ''' SELECT timestamp, adapter, ssid, rx, tx FROM {} WHERE adapter=? AND ssid=? AND timestamp=?; '''.format(self.table_name) c.execute(query, (adapter, ssid, self.truncate_time(timestamp))) result = c.fetchone() if result is None: result = (self.truncate_time(timestamp), adapter, ssid, 0, 0) return { 'timestamp': self.truncate_time(timestamp), 'adapter': adapter, 'ssid': ssid, 'rx': result[3], 'tx': result[4] } def query_all(self, start_time=None, end_time=None, timestamp=None): if not timestamp: timestamp = time.time() if not end_time: end_time = timestamp if not start_time: start_time = self.truncate_time(end_time) with self.db_cursor(commit=False) as c: query = ''' SELECT timestamp, adapter, ssid, sum(rx), sum(tx) FROM {} WHERE timestamp >= ? AND timestamp <= ? GROUP BY adapter, ssid ORDER BY adapter, ssid; '''.format(self.table_name) c.execute(query, (start_time, end_time)) results = c.fetchall() query_result = {} for r in results: ts, adapter, ssid, rx, tx = r if adapter not in query_result: query_result[adapter] = [] query_result[adapter].append({ 'timestamp': ts, 'adapter': adapter, 'ssid': ssid, 'rx': rx, 'tx': tx }) return query_result def update(self, adapter, ssid, rx, tx, timestamp=None): if not timestamp: timestamp = time.time() with self.db_cursor() as c: query = ''' INSERT OR REPLACE INT
O {} (timestamp, adapter, ssid, rx, tx) VALUES ( ?, ?, ?, ?, ? ); '''.format(self.table_name) c.execute(query, (self.trunc
ate_time(timestamp), adapter, ssid, rx, tx)) def add(self, adapter, ssid, delta_rx, delta_tx, timestamp=None): if not timestamp: timestamp = time.time() prev = self.query(adapter, ssid, timestamp=timestamp) self.update( adapter, ssid, prev['rx']+delta_rx, prev['tx']+delta_tx, timestamp=timestamp ) self.clear(timestamp=timestamp) def clear(self, timestamp=None): if not timestamp: timestamp = time.time() with self.db_cursor() as c: query = ''' DELETE FROM {} WHERE timestamp < ?; '''.format(self.table_name) c.execute(query, (timestamp - self.time_limit, ))
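The base class leaves truncate_time to subclasses, so every stored row is keyed by a bucketed timestamp. A hypothetical concrete model bucketing into hour-sized bins (the table name, retention window, and class name are illustrative choices):

class HourlySSIDTrafficHistory(SSIDTrafficHistory):
    def __init__(self, dbfile):
        # Keep one week of hourly buckets.
        super(HourlySSIDTrafficHistory, self).__init__(
            dbfile, 'ssid_traffic_hourly', time_limit=7 * 24 * 3600)

    def truncate_time(self, timestamp):
        # Round down to the start of the hour.
        return int(timestamp) - (int(timestamp) % 3600)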
sp4x/osnf
osnf/connectors.py
Python
apache-2.0
1,423
0.026704
'
'' Created on 28/set/2014 @author: Vincenzo Pirrone <pirrone.v@gmail.com> ''' import serial, time class Connector: def readline(self):
pass def writeline(self, line): pass def close(self): pass class FakeSerial(Connector): def __init__(self, port): print 'opening fake serial on %s' % port def readline(self): time.sleep(2) return 'TIME:%d' % int(time.time()) def writeline(self, line): print 'FAKE SERIAL: ' + line def close(self): print 'closing fake serial' class Serial(Connector, serial.Serial): def __init__(self, port=None, baudrate=9600, bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, timeout=None, xonxoff=False, rtscts=False, writeTimeout=None, dsrdtr=False, interCharTimeout=None): serial.Serial.__init__(self, port=port, baudrate=baudrate, bytesize=bytesize, parity=parity, stopbits=stopbits, timeout=timeout, xonxoff=xonxoff, rtscts=rtscts, writeTimeout=writeTimeout, dsrdtr=dsrdtr, interCharTimeout=interCharTimeout) def readline(self): return serial.Serial.readline(self) def writeline(self, line): serial.Serial.write(self, line + '\n') def close(self): serial.Serial.close(self)
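Illustrative wiring for the two connectors above (Python 2, matching this module): fall back to the fake implementation when no hardware is attached. The '/dev/ttyUSB0' port is an assumption:

def open_connector(port='/dev/ttyUSB0', use_fake=False):
    if use_fake:
        return FakeSerial(port)
    return Serial(port=port, baudrate=9600, timeout=5)

conn = open_connector(use_fake=True)
print conn.readline()      # FakeSerial emits 'TIME:<epoch>' roughly every 2s
conn.writeline('PING')
conn.close()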
edx/course-discovery
course_discovery/apps/course_metadata/migrations/0097_degree_lead_capture_image.py
Python
agpl-3.0
772
0.001295
# Generated by Django 1.11.15 on 2018-08-08 18:28 import django.db.models.deletion import django_extensions.db.fields import stdimage.models from course_discovery.apps.course_metadata.u
tils import UploadToFieldN
amePath from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('course_metadata', '0096_degree_lead_capture_list_name'), ] operations = [ migrations.AddField( model_name='degree', name='lead_capture_image', field=stdimage.models.StdImageField(blank=True, help_text='Please provide an image file for the lead capture banner.', null=True, upload_to=UploadToFieldNamePath('uuid', path='media/degree_marketing/lead_capture_images/')), ), ]
femmerling/backyard
builder/installer_tools.py
Python
mit
1,906
0.028856
import os.path from subprocess import call class InstallerTool
s(object): @s
taticmethod def update_environment(file_path,environment_path): update_file = open(file_path, 'r') original_lines = update_file.readlines() original_lines[0] = environment_path+'\n' update_file.close() update_file = open(file_path, 'w') for lines in original_lines: update_file.write(lines) update_file.close() @staticmethod def fix_migrate(base_directory): print "\nFixing the migrate bug \n" buggy_path = os.path.join(base_directory, 'env/lib/python2.7/site-packages/migrate/versioning/schema.py') buggy_file = open(buggy_path,'r') original_lines = buggy_file.readlines() original_lines[9] = "from sqlalchemy import exc as sa_exceptions\n" buggy_file.close() update_file = open(buggy_path,'w') for lines in original_lines: update_file.write(lines) update_file.close() @staticmethod def refresh_environment(framework_config): InstallerTools.update_environment(framework_config.yard_path,framework_config.environment_path) InstallerTools.update_environment(framework_config.blow_path,framework_config.environment_path) InstallerTools.update_environment(framework_config.try_path,framework_config.environment_path) @staticmethod def change_permissions(framework_config): call(['chmod', 'a+x', framework_config.yard_path]) call(['chmod', 'a+x', framework_config.blow_path]) call(['chmod', 'a+x', framework_config.try_path]) @staticmethod def create_db_directory(base_directory): if not os.path.exists(os.path.join(base_directory, 'storage/')): os.makedirs(os.path.join(base_directory, 'storage/')) @staticmethod def create_virtual_environment(framework_config): call(['python', framework_config.v_path, framework_config.environment_name]) InstallerTools.refresh_environment(framework_config) InstallerTools.change_permissions(framework_config)
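For orientation, a hypothetical framework_config object: every attribute name below is one the static methods above actually read, but the concrete values are invented (the yard/blow/try scripts must already exist, and environment_path is written as their first line, i.e. presumably a shebang):

class FrameworkConfig(object):
    environment_name = 'env'                        # virtualenv directory name
    environment_path = '#!/path/to/project/env/bin/python'
    v_path = 'virtualenv.py'                        # bundled virtualenv script
    yard_path = 'yard'
    blow_path = 'blow'
    try_path = 'try'

config = FrameworkConfig()
InstallerTools.create_virtual_environment(config)   # builds env, rewrites first lines
InstallerTools.create_db_directory('/path/to/project')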
lgarren/spack
var/spack/repos/builtin/packages/r-protgenerics/package.py
Python
lgpl-2.1
1,699
0.001177
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should hav
e received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple
Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class RProtgenerics(RPackage): """S4 generic functions needed by Bioconductor proteomics packages.""" homepage = "https://bioconductor.org/packages/ProtGenerics/" url = "https://git.bioconductor.org/packages/ProtGenerics" list_url = homepage version('1.8.0', git='https://git.bioconductor.org/packages/ProtGenerics', commit='b2b3bb0938e20f58fca905f6870de7dbc9dfd7a3') depends_on('r@3.4.0:3.4.9', when='@1.8.0')
HoussemCharf/FunUtils
linked_lists/1_finding_middle_element_in_a_linked_list.py
Python
mit
335
0.01194
# Input: # 2 # 5 # 1 2
3 4 5 # 6 # 2 4 6 7 5 1 # # Output: # 3 # 7 def findMid(head): if head is None:
return -1 fast, slow = head, head while fast.next is not None and fast.next.next is not None: fast = fast.next.next slow = slow.next if fast.next is not None: return slow.next return slow
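The function assumes nodes expose a next pointer; here is a minimal scaffold (not part of the original) that reproduces the worked example from the comments above:

class Node(object):
    def __init__(self, data):
        self.data = data
        self.next = None

def build(values):
    # Build a singly linked list from a Python list and return its head.
    head = Node(values[0])
    tail = head
    for value in values[1:]:
        tail.next = Node(value)
        tail = tail.next
    return head

print(findMid(build([1, 2, 3, 4, 5])).data)     # 3
print(findMid(build([2, 4, 6, 7, 5, 1])).data)  # 7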
Eagles2F/sync-engine
tests/events/test_inviting.py
Python
agpl-3.0
2,071
0
from tests.util.base import event def test_invite_generation(event, default_account): from inbox.events.ical import generate_icalendar_invite event.sequence_number = 1 event.participants = [{'email': 'helena@nylas.com'}, {'email': 'myles@nylas.com'}] cal = generate_icalendar_invite(event) assert cal['method'] == 'REQUEST' for component in cal.walk(): if component.name == "VEVENT": assert component.get('summary') == event.title assert int(component.get('sequence')) == event.sequence_number assert component.get('location') == event.location attendees = component.get('attendee', []) # the iCalendar python module doesn't return a list when # there's only one attendee. Go figure. if not isinstance(attendees, list): attendees = [attendees] for attendee in attendees: email = unicode(attendee) # strip mailto: if it exists if emai
l.lower().startswith('mailto:'): email = email[7:] assert email in ['h
elena@nylas.com', 'myles@nylas.com'] def test_message_generation(event, default_account): from inbox.events.ical import generate_invite_message event.title = 'A long walk on the beach' event.participants = [{'email': 'helena@nylas.com'}] msg = generate_invite_message('empty', event, default_account) # Check that we have an email with an HTML part, a plain text part, a # text/calendar with METHOD=REQUEST and an attachment. count = 0 for mimepart in msg.walk(with_self=msg.content_type.is_singlepart()): format_type = mimepart.content_type.format_type subtype = mimepart.content_type.subtype if (format_type, subtype) in [('text', 'plain'), ('text', 'html'), ('text', 'calendar; method=request'), ('application', 'ics')]: count += 1 assert count == 3
briancurtin/python-openstacksdk
openstack/network/v2/service_profile.py
Python
apache-2.0
1,571
0
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.network import network_service from openstack import resource2 as resource class ServiceProfile(resource.Resource): resource_key = 'service_profile' resources_key = 'service_profiles' base_path = '/service_profiles' service = network_service.NetworkService() # capabilities allow_create = True allow_g
et = True allow_update = True allow_delete = True allow_list = True _query_mapping = resource.QueryParameters( 'description', 'driver', is_enabled='enabled', project_id='tenant_id' ) # P
roperties #: Description of the service flavor profile. description = resource.Body('description') #: Provider driver for the service flavor profile driver = resource.Body('driver') #: Sets enabled flag is_enabled = resource.Body('enabled', type=bool) #: Metainformation of the service flavor profile meta_info = resource.Body('metainfo') #: The owner project ID project_id = resource.Body('tenant_id')
cvandeplas/plaso
plaso/formatters/mac_securityd.py
Python
apache-2.0
1,215
0.005761
#!/usr/bin/python # -*- coding
: utf-8 -*- # # Copyright 2014 The Plaso Project Authors. # Please see the AUTHORS file for details on individual authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:
//www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Formatter for ASL securityd log file.""" from plaso.formatters import interface class MacSecuritydLogFormatter(interface.ConditionalEventFormatter): """Formatter for ASL Securityd file.""" DATA_TYPE = 'mac:asl:securityd:line' FORMAT_STRING_PIECES = [ u'Sender: {sender}', u'({sender_pid})', u'Level: {level}', u'Facility: {facility}', u'Text: {message}'] FORMAT_STRING_SHORT_PIECES = [u'Text: {message}'] SOURCE_LONG = 'Mac ASL Securityd Log' SOURCE_SHORT = 'LOG'
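A rough illustration of what the long format produces; the real assembly happens in interface.ConditionalEventFormatter, which drops any piece whose attribute is missing from the event, and the event values below are invented:

event_values = {
    u'sender': u'securityd', u'sender_pid': 123, u'level': u'Error',
    u'facility': u'serverxpc', u'message': u'Failed to connect'}
pieces = [u'Sender: {sender}', u'({sender_pid})', u'Level: {level}',
          u'Facility: {facility}', u'Text: {message}']
print(u' '.join(piece.format(**event_values) for piece in pieces))
# Sender: securityd (123) Level: Error Facility: serverxpc Text: Failed to connect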
frappe/frappe
frappe/core/doctype/user_document_type/user_document_type.py
Python
mit
212
0.009434
# -*- coding: utf-8 -*- # Copyright (c) 2021, Frappe Technologies and contributors # License: MIT. See LI
CENSE # import frappe from frappe.mo
del.document import Document class UserDocumentType(Document): pass
SkyTruth/skytruth-automation-hub
gae/geofeed_api.py
Python
mit
5,854
0.007345
from protorpc import messages from protorpc import message_types from protorpc import remote from google.appengine.api import taskqueue import json import endpoints import urllib2 import logging, os import settings import inspect from seqid import SeqidIssuer, seqid2str from endpointshelper import EndpointsHelper from logger import Logger from geofeed import GeoFeed package = 'GeoFeedAPI' """GeoFeed API """ class SeqidResponse(messages.Message): """Response message for geofeed.seqid method""" status = messages.StringField(1) series = messages.StringField(2) seqid_datetime = messages.StringField(3) seqid_int = messages.IntegerField(4) class TestRequest(messages.Message): """request message for taskqueue.test""" message = messages.StringField(1) class TestRespon
se(messages.Message): """response message for taskqueue.test""" status = messages.StringField(1) message = messa
ges.StringField(2) info = messages.StringField(3) class FeedItem(messages.Message): topic = messages.StringField(1, required=True) key = messages.StringField(2, required=True) url = messages.StringField(3) latitude = messages.FloatField(4) longitude = messages.FloatField(5) content = messages.StringField(6) published = messages.StringField(7) class PublishResponse(messages.Message): """response message for geofeed.publish""" status = messages.StringField(1) class ListRequest(messages.Message): """message for retrieving a list of feed items""" topic = messages.StringField(1, required=True) class ListResponse(messages.Message): """response message for geofeed.list""" status = messages.StringField(1) items = messages.MessageField(FeedItem, 2, repeated=True) class GetRequest(messages.Message): """message for retrieving a single feed items""" topic = messages.StringField(1, required=True) key = messages.StringField(2, required=True) class GetResponse(messages.Message): """response message for geofeed.get""" status = messages.StringField(1) item = messages.MessageField(FeedItem, 2) @endpoints.api(name='geofeed', version='v1.0', allowed_client_ids=['314157906781-5k944tnd2e4hvcf0nrc4dl93kgdaqnam.apps.googleusercontent.com']) #@hub_api.api_class(resource_name='geofeed') class GeoFeedApi(remote.Service): """GeoFeed API """ SEQUENCE_RESOURCE = endpoints.ResourceContainer( message_types.VoidMessage, series=messages.StringField(1)) @endpoints.method(SEQUENCE_RESOURCE, SeqidResponse, path='seqid/{series}', http_method='GET', name='seqid') def seqid(self, request): """Get a new seqid from the specified series """ response = SeqidResponse(status='OK') try: EndpointsHelper.authenticate() issuer = SeqidIssuer(series=request.series) seqid = issuer.issueSeqids()[0] response.series = issuer.series response.seqid_int = seqid response.seqid_datetime = seqid2str (seqid) except Exception, err: response.status=str(err) return response @endpoints.method(FeedItem, PublishResponse, path='publish', http_method='POST', name='publish') def publish(self, request): """Publish a new item to a feed. 
""" response = PublishResponse(status='OK') try: EndpointsHelper.authenticate() GeoFeed.publish(**EndpointsHelper.message2dict(request)) except Exception, err: response.status=str(err) return response @endpoints.method(ListRequest, ListResponse, path='list', http_method='POST', name='list') def list(self, request): """Retrieve a list of recent items in a feed """ response = ListResponse(status='OK') try: EndpointsHelper.authenticate() response.items = [FeedItem(**item) for item in GeoFeed.list(topic=request.topic)] except Exception, err: response.status=str(err) return response @endpoints.method(GetRequest, GetResponse, path='get', http_method='POST', name='get') def get(self, request): """Retrieve a specified feed item """ response = GetResponse(status='OK') try: EndpointsHelper.authenticate() item = GeoFeed.get(request.topic, request.key) if item: response.item = FeedItem(**item) else: response.status='NOT FOUND' except Exception, err: response.status=str(err) return response @endpoints.method(TestRequest, TestResponse, path='test', http_method='POST', name='test') def test(self, request): """Test method for debugging conncection and auth issues This method will return to the caller whatever string is supplied in the 'message' field The info field in the response contains some debug information """ response = TestResponse(message=request.message, status='OK') response.info = "USER: %s" % endpoints.get_current_user() try: EndpointsHelper.authenticate() Logger.log (op='test') except Exception, err: response.status=str(err) return response #app = endpoints.api_server([hub_api]) #app = endpoints.api_server([GeoFeedApi])
Slezhuk/ansible
lib/ansible/module_utils/_text.py
Python
gpl-3.0
12,325
0.00211
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may
assign their own license # to the complete work. # # Copyright (c), Toshio Kuratomi <a.badger@gmail.com>, 2016 # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in
binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # """ .. warning:: This module_util is currently an internal implementation. We want to evaluate this code for stability and API suitability before making backwards compatibility guarantees. The API may change between releases. Do not use this unless you are willing to port your module code. """ import codecs from ansible.module_utils.six import PY3, text_type, binary_type try: codecs.lookup_error('surrogateescape') HAS_SURROGATEESCAPE = True except LookupError: HAS_SURROGATEESCAPE = False _COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_escape', 'surrogate_or_strict', 'surrogate_then_replace')) def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): """Make sure that a string is a byte string :arg obj: An object to make sure is a byte string. In most cases this will be either a text string or a byte string. However, with ``nonstring='simplerepr'``, this can be used as a traceback-free version of ``str(obj)``. :kwarg encoding: The encoding to use to transform from a text string to a byte string. Defaults to using 'utf-8'. :kwarg errors: The error handler to use if the text string is not encodable using the specified encoding. Any valid `codecs error handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_ may be specified. There are three additional error strategies specifically aimed at helping people to port code. The first two are: :surrogate_or_strict: Will use ``surrogateescape`` if it is a valid handler, otherwise it will use ``strict`` :surrogate_or_replace: Will use ``surrogateescape`` if it is a valid handler, otherwise it will use ``replace``. Because ``surrogateescape`` was added in Python3 this usually means that Python3 will use ``surrogateescape`` and Python2 will use the fallback error handler. Note that the code checks for ``surrogateescape`` when the module is imported. If you have a backport of ``surrogateescape`` for Python2, be sure to register the error handler prior to importing this module. The last error handler is: :surrogate_then_replace: Will use ``surrogateescape`` if it is a valid handler. If encoding with ``surrogateescape`` would traceback, surrogates are first replaced with replacement characters and then the string is encoded using ``replace`` (which replaces the rest of the nonencodable bytes). If ``surrogateescape`` is not present it will simply use ``replace``. (Added in Ansible 2.3) This strategy is designed to never traceback when it attempts to encode a string.
The default until Ansible-2.2 was ``surrogate_or_replace`` From Ansible-2.3 onwards, the default is ``surrogate_then_replace``. :kwarg nonstring: The strategy to use if a nonstring is specified in ``obj``. Default is 'simplerepr'. Valid values are: :simplerepr: The default. This takes the ``str`` of the object and then returns the bytes version of that string. :empty: Return an empty byte string :passthru: Return the object passed in :strict: Raise a :exc:`TypeError` :returns: Typically this returns a byte string. If a nonstring object is passed in this may be a different type depending on the strategy specified by nonstring. This will never return a text string. .. note:: If passed a byte string, this function does not check that the string is valid in the specified encoding. If it's important that the byte string is in the specified encoding do:: encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8') .. version_changed:: 2.3 Added the ``surrogate_then_replace`` error handler and made it the default error handler. """ if isinstance(obj, binary_type): return obj # We're given a text string # If it has surrogates, we know because it will decode original_errors = errors if errors in _COMPOSED_ERROR_HANDLERS: if HAS_SURROGATEESCAPE: errors = 'surrogateescape' elif errors == 'surrogate_or_strict': errors = 'strict' else: errors = 'replace' if isinstance(obj, text_type): try: # Try this first as it's the fastest return obj.encode(encoding, errors) except UnicodeEncodeError: if original_errors in (None, 'surrogate_then_replace'): # Slow but works return_string = obj.encode('utf-8', 'surrogateescape') return_string = return_string.decode('utf-8', 'replace') return return_string.encode(encoding, 'replace') raise # Note: We do these last even though we have to call to_bytes again on the # value because we're optimizing the common case if nonstring == 'simplerepr': try: value = str(obj) except UnicodeError: try: value = repr(obj) except UnicodeError: # Giving up return to_bytes('') elif nonstring == 'passthru': return obj elif nonstring == 'empty': # python2.4 doesn't have b'' return to_bytes('') elif nonstring == 'strict': raise TypeError('obj must be a string type') else: raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring) return to_bytes(value, encoding, errors) def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'): """Make sure that a string is a text string :arg obj: An object to make sure is a text string. In most cases this will be either a text string or a byte string. However, with ``nonstring='simplerepr'``, this can be used as a traceback-free version of ``str(obj)``. :kwarg encoding: The encoding to use to transform from a byte string to a text string. Defaults to using 'utf-8'. :kwarg errors: The error handler to use if the byte string is not decodable using the specified encoding. Any valid `codecs error handler <https://docs.python.org/2/l
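Worked examples of the contract described in the docstrings above, using this module's own to_bytes/to_text (the sample strings are invented):

assert to_bytes(u'caf\xe9') == b'caf\xc3\xa9'          # text -> UTF-8 bytes
assert to_bytes(b'already-bytes') == b'already-bytes'  # byte strings pass through
assert to_text(b'caf\xc3\xa9') == u'caf\xe9'           # bytes -> text
assert to_bytes(42) == b'42'                           # nonstring='simplerepr'
# Round-tripping a latin-1 byte string through text, as the docstring suggests:
assert to_bytes(to_text(b'caf\xe9', 'latin-1'), 'utf-8') == b'caf\xc3\xa9'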
joansalasoler/auale
src/auale/book/opening_book.py
Python
gpl-3.0
3,781
0.000794
# -*- coding: utf-8 -*- # Aualé oware graphic user interface. # Copyright (C) 2014-2020 Joan Sala Soler <contact@joansala.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import math import random import struct from game import Match from game import Oware from uci import Strength from .constants import COEFFICIENTS class OpeningBook(object): """Opening book implementation""" __MARGIN = 42 def __init__(self, path): self._scores = [] self._header = dict() self._min_score = self.__MARGIN self._load_opening_book(path) def set_strength(self, strength): """Sets the playing strength of the book""" margin = self.__MARGIN factor = 1 - strength.strength_factor self._min_score = margin + (.25 * margin * factor) ** 2 def pick_best_move(self, match): """Choose a best move from the book""" moves = self.find_best_moves(match) choice = random.choice(moves) if moves else None return choice def find_best_moves(self, match): """Obtain the best moves from the book""" moves = list() game = match.get_game() turn = match.get_turn() scores = self._get_move_scores(match) max_sc
ore = max(scores) if scores else -math.inf min_score = max(max_score - self._min_score, -self._min_score) offset = 0 if turn == game.SOUTH else 6 for move, score in enumerate(scores, offset): if score >= min_score or score >= max_score: moves.append(move) return moves def _get_move_scores(self, match): """Scores for th
e given match position""" code = self._compute_hash_code(match) scores = self._scores.get(code, []) return scores def _load_opening_book(self, path): """Loads an opening book from a file""" with open(path, 'rb') as file: self._header = self._read_header(file) self._scores = self._read_scores(file) def _read_header(self, file): """Reads the header fields from an open file""" header = dict() signature = file.readline() while True: field = file.readline() if not field or field == b'\x00\n': break values = field.decode('utf-8').split(':', 1) header.setdefault(*values) return header def _read_scores(self, file): """Reads position scores from an open file""" scores = dict() while True: entry = file.read(20) if not entry: break code, *values = struct.unpack('>q6h', entry) scores.setdefault(code, values) return scores def _compute_hash_code(self, match): """Hash code for the current match position""" game = match.get_game() turn = match.get_turn() board = match.get_board() code = 0x80000000000 if turn == game.SOUTH else 0x00 seeds = board[13] for house in range(12, -1, -1): if seeds >= 48: break code += COEFFICIENTS[seeds][house] seeds += board[house] return code
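The `_read_scores` method above fixes the on-disk entry layout: a big-endian signed 64-bit position hash followed by six signed 16-bit move scores, 20 bytes per entry. A minimal sketch of that layout using only `struct` (the hash and score values below are made up for illustration):

import struct

# Pack one book entry the way _read_scores expects to read it:
# '>q6h' = big-endian int64 hash + six int16 scores = 20 bytes.
entry = struct.pack('>q6h', 0x80000000000, 120, -35, 0, 64, -999, 12)
assert len(entry) == 20
code, *scores = struct.unpack('>q6h', entry)
assert code == 0x80000000000
assert scores == [120, -35, 0, 64, -999, 12]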
tensorflow/federated
tensorflow_federated/python/core/backends/mapreduce/__init__.py
Python
apache-2.0
12,271
0.000733
# Copyright 2019, The TensorFlow Federated Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Libraries for interacting with MapReduce-like backends. This package contains libraries for using TFF in backend systems that offer MapReduce-like capabilities, i.e., systems that can perform parallel processing on a set of clients, and then aggregate the results of such processing on the server. Systems of this type do not support the full expressiveness of TFF, but they are common enough in practice to warrant a dedicated set of libraries, and many examples of TFF computations, including those constructed by `tff.learning`, can be compiled by TFF into a form that can be deployed on such systems. This package defines a data structure `MapReduceForm`, parameterized by TensorFlow functions, which expresses the logic of a single MapReduce-style round (plus initialization) and serves as a target for TFF's compiler pipeline. `MapReduceForm` serves as the conceptual core of this package, and represents a manner of specifying a round of federated computation quite distinct from TFF's usual `computation.proto`. However, as `MapReduceForm` can express only a strict subset of the logic expressible via `computation.proto`, we discuss the mapping between the two here. Instead of `computation.proto` directly, we standardize on `tff.templates.IterativeProcess` as the basis for targeting the canonical mapreduce representation, as this type of processing is most common in federated learning scenarios, where different rounds typically involve different subsets of a potentially very large number of participating clients. The iterative aspect of the computation allows for it to not only model processes that evolve over time, but also ones that might involve a very large client population in which not all participants (clients, data shards, etc.) may be present at the same time, and the iterative approach may instead be dictated by data availability or scalability considerations. Related to the above, the fact that in practical scenarios the set of clients involved in a federated computation will (often) vary from round to round, the server state is necessary to connect subsequent rounds into a single contiguous logical sequence. Conceptually, `next`, the iterator part of an iterative process, is modeled in the same way as any stateful computation in TFF. I.e., one that takes the server state as the first component of the input, and returns updated server state as the first component of the output. If there is no need for server state, the input/output state should be modeled as an empty tuple. In addition to updating state, `next` additionally takes client-side data as input, and can produce results on server side in addition to state intended to be passed to the next round. As is the case for the server state, if this is undesired it should be modeled as an empty tuple. 
The type signature of `next`, in the concise TFF type notation (as defined in TFF's `computation.proto`), is as follows: ```python (<S@SERVER,{D}@CLIENTS> -> <S@SERVER,X@SERVER>) ``` The above type signature involves the following abstract types: * `S` is the type of the state that is passed at the server between rounds of processing. For example, in the context of federated training, the server state would typically include the weights of the model being trained. The weights would be updated in each round as the model is trained on more and more of the clients' data, and hence the server state would evolve as well. Note: This is also the type of the output of the `initialize` that produces the server state to feed into the first round. * `D` represents the type of per-client units of data that serve as the input to the computation. Often, this would be a sequence type, i.e., a dataset in TensorFlow's parlance, although strictly speaking this does not have to always be the case. * `X` represents the type of server-side outputs generated by the server after each round. One can think of the process based on this representation as being equivalent to the following pseudocode loop: ```python client_data = ... server_state = initialize() while True: server_state, server_outputs = next(server_state, client_data) ``` The logic of `next` in `MapReduceForm` is factored into seven variable components `prepare`, `work`, `zero`, `accumulate`, `merge`, `report`, and `update` (in addition to `initialize` that produces the server state component for the initial round and `bitwidth` that specifies runtime parameters for `federated_secure_sum_bitwidth`). The pseudocode below uses common syntactic shortcuts (such as implicit zipping) for brevity. For a concise representation of the logic embedded in the discussion below, specifying the manner in which an instance `mrf` of `MapReduceForm` maps to a single federated round, see the definitions of `init_computation` and `next_computation` in `form_utils.get_iterative_process_for_map_reduce_form`. ```python @tff.federated_computation def next(server_state, client_data): # The server prepares an input to be broadcast to all clients that controls # what will happen in this round. client_input = ( tff.federated_broadcast(tff.federated_map(prepare, server_state))) # The clients all independently do local work and produce updates, plus the # optional client-side outputs. client_updates = tff.federated_map(work, [client_data, client_input]) # `client_updates` is a two-tuple, whose first index should be aggregated # with TFF's `federated_aggregate` and whose second index should be passed # to TFF's `federated_secure_sum_bitwidth`. The updates are aggregated # across the system into a single global update at the server. simple_agg = (
tff.federated_aggregate(client_updates[0], zero(), accumulate, merge, report)) secure_agg = tff.secure_sum(client_updates[1], bitwidth()) global
_update = [simple_agg, secure_agg] # Finally, the server produces a new state as well as server-side output to # emit from this round. new_server_state, server_output = ( tff.federated_map(update, [server_state, global_update])) # The updated server state, server- and client-side outputs are returned as # results of this round. return new_server_state, server_output ``` The above characterization of `next` forms the relationship between `MapReduceForm` and `tff.templates.IterativeProcess`. It depends on the seven pieces of pure TensorFlow logic defined as follows. Please also consult the documentation for related federated operators for more detail (particularly the `tff.federated_aggregate()`, as several of the components below correspond directly to the parameters of that operator). * `prepare` represents the preparatory steps taken by the server to generate inputs that will be broadcast to the clients and that, together with the client data, will drive the client-side work in this round. It takes the initial state of the server, and produces the input for use by the clients. Its type signature is `(S -> C)`. * `work` represents the totality of client-side processing, again all as a single section of TensorFlow code. It takes a tuple of client data and client input that was broadcasted by the server, and returns a two-tuple containing the client update to be aggregated (across all the clients). The first index of this two-tuple will be passed to an aggregation parameterized by the blocks of TensorFlow below (`zero`, `accumulate`, `merge`, and `report`), and the second index will be passed to `federated_secure_sum_bitwidth`. Its type signature is `(<D,C> -> <U
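The pseudocode in the docstring above maps one federated round onto the prepare/work/zero/accumulate/merge/report/update factoring. A plain-Python analogue can make the data flow concrete; this runs no TFF code, and the toy functions below are illustrative rather than the library's API:

# Plain-Python analogue of one MapReduce-style round.
def run_round(server_state, client_datasets,
              prepare, work, zero, accumulate, merge, report, update):
    broadcast = prepare(server_state)                        # server -> clients
    updates = [work(d, broadcast) for d in client_datasets]  # per-client work
    # Mirror federated_aggregate: accumulate per shard, then merge shards.
    partials = [accumulate(zero(), u) for u in updates]
    agg = partials[0]
    for p in partials[1:]:
        agg = merge(agg, p)
    global_update = report(agg)
    return update(server_state, global_update)               # (new_state, output)

# Toy instantiation: average one number per client.
new_state, output = run_round(
    server_state=0.0, client_datasets=[1.0, 2.0, 3.0],
    prepare=lambda s: s, work=lambda d, b: d,
    zero=lambda: (0.0, 0),
    accumulate=lambda acc, u: (acc[0] + u, acc[1] + 1),
    merge=lambda a, b: (a[0] + b[0], a[1] + b[1]),
    report=lambda acc: acc[0] / acc[1],
    update=lambda s, g: (g, g))
assert output == 2.0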
QuantumElephant/horton
horton/io/test/test_wfn.py
Python
gpl-3.0
19,258
0.002181
# -*- coding: utf-8 -*- # HORTON: Helpful Open-source Research TOol for N-fermion systems. # Copyright (C) 2011-2017 The HORTON Development Team # # This file is part of HORTON. # # HORTON is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 3 # of the License, or (at your option) any later version. # # HORTON is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/> # # -- import numpy as np from horton import * # pylint: disable=wildcard-import,unused-wildcard-import from horton.io.test.common import compute_mulliken_charges, compute_hf_energy def test_load_wfn_low_he_s(): fn_wfn = context.get_fn('test/he_s_orbital.wfn') title, numbers, coordinates, centers, type_assignment, exponents, \ mo_count, occ_num, mo_energy, coefficients, energy = load_wfn_low(fn_wfn) assert title == 'He atom - decontracted 6-31G basis set' assert numbers.shape == (1,) assert numbers == [2] assert coordinates.shape == (1, 3) assert (coordinates == [0.00, 0.00, 0.00]).all() assert centers.shape == (4,) assert (centers == [0, 0, 0, 0]).all() assert type_assignment.shape == (4,) assert (type_assignment == [1, 1, 1, 1]).all() assert exponents.shape == (4,) assert (exponents == [0.3842163E+02, 0.5778030E+01, 0.1241774E+01, 0.2979640E+00]).all() assert mo_count.shape == (1,) assert mo_count == [1] assert occ_num.shape == (1,) assert occ_num == [2.0] assert mo_energy.shape == (1,) assert mo_energy == [-0.914127] assert coefficients.shape == (4, 1) expected = np.array([0.26139500E+00, 0.41084277E+00, 0.39372947E+00, 0.14762025E+00]) assert (coefficients == expected.reshape(4, 1)).all() assert abs(energy - (-2.855160426155)) < 1.e-5 def test_load_wfn_low_h2o(): fn_wfn = context.get_fn('test/h2o_sto3g.wfn') title, numbers, coordinates, centers, type_assignment, exponents, \ mo_count, occ_num, mo_energy, coefficients, energy = load_wfn_low(fn_wfn) assert title == 'H2O Optimization' assert numbers.shape == (3,) assert (numbers == np.array([8, 1, 1])).all() assert coordinates.shape == (3, 3) assert (coordinates[0] == [-4.44734101, 3.39697999, 0.00000000]).all() assert (coordinates[1] == [-2.58401495, 3.55136194, 0.00000000]).all() assert (coordinates[2] == [-4.92380519, 5.20496220, 0.00000000]).all() assert centers.shape == (21,) assert (centers[:15] == np.zeros(15, int)).all() assert (centers[15:] == np.array([1, 1, 1, 2, 2, 2])).all() assert type_assignment.shape == (21,) assert (type_assignment[:6] == np.ones(6)).all() assert (type_assignment[6:15] == np.array([2, 2, 2, 3, 3, 3, 4, 4, 4])).all() assert (type_assignment[15:] == np.ones(6)).all() assert exponents.shape == (21,) assert (exponents[:3] == [0.1307093E+03, 0.2380887E+02, 0.6443608E+01]).all() assert (exponents[5:8] == [0.3803890E+00, 0.5033151E+01, 0.1169596E+01]).all() assert (exponents[13:16] == [0.1169596E+01, 0.3803890E+00, 0.3425251E+01]).all() assert exponents[-1] == 0.1688554E+00 assert mo_count.shape == (5,) assert (mo_count == [1, 2, 3, 4, 5]).all() assert occ_num.shape == (5,) assert np.sum(occ_num) == 10.0 assert (occ_num == [2.0, 2.0, 2.0, 2.0, 2.0]).all() assert mo_energy.shape == (5,) assert (mo_energy == 
np.sort(mo_energy)).all() assert (mo_energy[:3] == [-20.251576, -1.257549, -0.593857]).all() assert (mo_energy[3:] == [-0.459729, -0.392617]).all() assert coefficients.shape == (21, 5) expected = [0.42273517E+01, -0.99395832E+00, 0.19183487E-11, 0.44235381E+00, -0.57941668E-14] assert (coefficients[0] == expected).all() assert coefficients[6, 2] == 0.83831599E+00 assert coefficients[10, 3] == 0.65034846E+00 assert coefficients[17, 1] == 0.12988055E-01 assert coefficients[-1, 0] == -0.46610858E-03 assert coefficients[-1, -1] == -0.33277355E-15 assert abs(energy - (-74.965901217080)) < 1.e-6 def test_get_permutation_orbital(): assert (get_permutation_orbital(np.array([1, 1, 1])) == [0, 1, 2]).all() assert (get_permutation_orbital(np.array([1, 1, 2, 3, 4])) == [0, 1, 2, 3, 4]).all() assert (get_permutation_orbital(np.array([2, 3, 4])) == [0, 1, 2]).all() assert (get_permutation_orbital(np.array([2, 2, 3, 3, 4, 4])) == [0, 2, 4, 1, 3, 5]).all() assign = np.array([1, 1, 2, 2, 3, 3, 4, 4, 1]) expect = [0, 1, 2, 4, 6, 3, 5, 7, 8] assert (get_permutation_orbital(assign) == expect).all() assign = np.array([1, 5, 6, 7, 8, 9, 10, 1]) expect = [0, 1, 2, 3, 4, 5, 6, 7] assert (get_permutation_orbital(assign) == expect).all() assign = np.array([5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10]) expect = [0, 2, 4, 6, 8, 10, 1, 3, 5, 7, 9, 11] assert (get_permutation_orbital(assign) == expect).all() assign = np.array([1, 2, 2, 3, 3, 4, 4, 5, 6, 7, 8, 9, 10]) expect = [0, 1, 3, 5, 2, 4, 6, 7, 8, 9, 10, 11, 12] assert (get_permutation_orbital(assign) == expect).all() # f orbitals assign = np.array([11, 12, 13, 17, 14, 15, 18, 19, 16, 20]) assert (get_permutation_orbital(assign) == range(10)).all() # g orbitals assign = np.array([23, 29, 32, 27, 22, 28, 35, 34, 26, 31, 33, 30, 25, 24, 21]) assert (get_permutation_orbital(assign) == range(15)).all() # g orbitals assign = np.array([23, 29, 32, 27, 22, 28, 35, 34, 26, 31, 33, 30, 25, 24, 21]) assert (get_permutation_orbital(assign) == range(15)).all() # h orbitals assert (get_permutation_orbital(np.arange(36, 57)) == range(21)).all() assign = np.array([1, 1, 11, 12, 13, 17, 14, 15, 18, 19, 16, 20]) assert (get_permutation_orbital(assign) == range(12)).all() assign = np.array([2, 3, 4, 11, 12, 13, 17, 14, 15, 18, 19, 16, 20, 1, 1]) assert (get_permutation_orbital(assign) == range(15)).all() def test_get_permutation_basis(): assert (get_permutation_basis(np.array([1, 1, 1])) == [0, 1, 2]).all() assert (get_permutation_basis(np.array([2, 2, 3, 3, 4, 4])) == [0, 2, 4, 1, 3, 5]).all() assert (get_permutation_basis(np.array([1, 2, 3, 4, 1])) == [0, 1, 2, 3, 4]).all() assert (get_permutation_basis(np.array([5, 6, 7, 8, 9, 10])) == [0, 3, 4, 1, 5, 2]).all() assign = np.repeat([5, 6, 7, 8, 9, 10], 2) expect = [0, 6, 8, 2, 10, 4, 1, 7, 9, 3, 11, 5] assert (get_permutation_basis(assign) == expect).all() assert (get_permutation_basis(np.arange(1, 11)) == [0, 1, 2, 3, 4, 7, 8, 5, 9, 6]).all() assign = np.array([1, 5, 6, 7, 8, 9, 10, 1]) expect = [0, 1, 4, 5, 2, 6, 3, 7] assert (get_permutation_basis(assign) == expect).all() assign = np.array([11, 12, 13, 17, 14, 15, 18, 19, 16, 20]) expect = [0, 4, 5, 3, 9, 6, 1, 8, 7, 2] assert (get_permutation_basis(assign) == expect).all() assign = np.array([1, 11, 12, 13, 17, 14, 15, 18, 19, 16, 20, 1]) expect = [0, 1, 5, 6, 4, 10, 7, 2, 9, 8, 3, 11] assert (get_permutation_basis(assign) == expect).all() assign = np.array([1, 11, 12, 13, 17, 14, 15, 18, 19, 16, 20, 2, 2, 3, 3, 4, 4]) expect = [0, 1, 5, 6, 4, 10, 7, 2, 9, 8, 3, 11, 13, 15, 12, 14, 
16] assert (get_permutation_basis(assign) == expect).all() assign = [1, 11, 12, 13, 17, 14, 15, 18, 19, 16, 20, 2, 3, 4, 5, 6, 7, 8, 9, 10] expect = np.array([0, 1, 5, 6, 4, 10, 7, 2, 9, 8, 3, 11, 12, 13, 14, 17, 18, 15, 19, 16]) assert (get_permutation_basis(np.array(assign)) == expect).all() assert (get_permutation_ba
sis(np.arange(36, 57)) == np.arange(21)[::-1])
.all() assign = [23, 29, 32, 27, 22, 28, 35, 34, 26, 31, 33, 30, 25, 24, 21] expect = [14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0] assert (get_permutation_basis(np.array(assign)) == expect).all() assert (get_per
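The tests above only check the index arrays themselves; how such a permutation is consumed may be easier to see with plain numpy fancy indexing (a generic illustration, not HORTON code):

import numpy as np

# A permutation like those returned by get_permutation_basis is just an
# index array; fancy indexing applies it to reorder coefficients.
perm = np.array([0, 2, 4, 1, 3, 5])
coeffs = np.array([10., 11., 20., 21., 30., 31.])
assert (coeffs[perm] == [10., 20., 30., 11., 21., 31.]).all()
# For a (n_basis, n_orbitals) coefficient matrix, the rows are permuted:
mat = np.stack([coeffs, coeffs + 0.5], axis=1)   # shape (6, 2)
assert (mat[perm][:, 0] == coeffs[perm]).all()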
heplesser/nest-simulator
pynest/examples/clopath_synapse_spike_pairing.py
Python
gpl-2.0
6,051
0.003636
# -*- coding: utf-8 -*- # # clopath_synapse_spike_pairing.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. """ Clopath Rule: Spike pairing experiment -------------------------------------- This script simulates one ``aeif_psc_delta_clopath`` neuron that is connected with a Clopath connection [1]_. The synapse receives pairs of a pre- and a postsynaptic spikes that are separated by either 10 ms (pre before post) or -10 ms (post before pre). The change of the synaptic weight is measured after five of such pairs. This experiment is repeated five times with different rates of the sequence of the spike pairs: 10Hz, 20Hz, 30Hz, 40Hz, and 50Hz. References ~~~~~~~~~~ .. [1] Clopath C, Büsing L, Vasilaki E, Gerstner W (2010). Connectivity reflects coding: a model of voltage-based STDP with homeostasis. Nature Neuroscience 13:3, 344--352 """ import numpy as np import matplotlib.pyplot as plt import nest ############################################################################## # First we specify the neuron parameters. To enable voltage dependent # prefactor ``A_LTD(u_bar_bar)`` add ``A_LTD_const: False`` to the dictionary. nrn_params = {'V_m': -70.6, 'E_L': -70.6, 'C_m': 281.0, 'theta_minus': -70.6, 'theta_plus': -45.3, 'A_LTD': 14.0e-5, 'A_LTP': 8.0e-5, 'tau_u_bar_minus': 10.0, 'tau_u_bar_plus': 7.0, 'delay_u_bars': 4.0, 'a': 4.0, 'b': 0.0805, 'V_reset': -70.6 + 21.0, 'V_clamp': 33.0, 't_clamp': 2.0, 't_ref': 0.0, } ############################################################################## # Hardcoded spike times of presynaptic spike generator spike_times_pre = [ # Presynaptic spike before the postsynaptic [ 20.0, 120.0, 220.0, 320.0, 420.0], # noqa [ 20.0, 70.0, 120.0, 170.0, 220.0], # noqa [ 20.0, 53.3, 86.7, 120.0, 153.3], # noqa [ 20.0, 45.0, 70.0, 95.0, 120.0], # noqa [ 20.0, 40.0, 60.0, 80.0, 100.0], # noqa # Presynaptic spike after the postsynaptic [120.0, 220.0, 320.0, 420.0, 520.0, 620.0], # noqa [ 70.0, 120.0, 170.0, 220.0, 270.0, 320.0], # noqa [ 53.3, 86.6, 120.0, 153.3, 186.6, 220.0], # noqa [ 45.0, 70.0, 95.0, 120.0, 145.0, 170.0], # noqa [ 40.0, 60.0, 80.0, 100.0, 120.0, 140.0]] # noqa ############################################################################## # Hardcoded spike times of postsynaptic spike generator spike_times_post = [ [ 10.0, 110.0, 210.0, 310.0, 410.0], # noqa [ 10.0, 60.0, 110.0, 160.0, 210.0], # noqa [ 10.0, 43.3, 76.7, 110.0, 143.3], # noqa [ 10.0, 35.0, 60.0, 85.0, 110.0], # noqa [ 10.0, 30.0, 50.0, 70.0, 90.0], # noqa [130.0, 230.0, 330.0, 430.0, 530.0, 630.0], # noqa [ 80.0, 130.0, 180.0, 230.0, 280.0, 330.0], # noqa [ 63.3, 96.6, 130.0, 163.3, 196.6, 230.0], # noqa [ 55.0, 80.0, 105.0
, 130.0, 155.0, 180.0], # noqa [ 50.0, 70.0, 90.0, 110.0, 130.0, 150.0]] # noqa init_w = 0.5 syn_weights = [] resolution =
0.1 ############################################################################## # Loop over pairs of spike trains for s_t_pre, s_t_post in zip(spike_times_pre, spike_times_post): nest.ResetKernel() nest.resolution = resolution # Create one neuron nrn = nest.Create("aeif_psc_delta_clopath", 1, nrn_params) # We need a parrot neuron since spike generators can only # be connected with static connections prrt_nrn = nest.Create("parrot_neuron", 1) # Create and connect spike generators spike_gen_pre = nest.Create("spike_generator", {"spike_times": s_t_pre}) nest.Connect(spike_gen_pre, prrt_nrn, syn_spec={"delay": resolution}) spike_gen_post = nest.Create("spike_generator", {"spike_times": s_t_post}) nest.Connect(spike_gen_post, nrn, syn_spec={"delay": resolution, "weight": 80.0}) # Create weight recorder wr = nest.Create('weight_recorder') # Create Clopath connection with weight recorder nest.CopyModel("clopath_synapse", "clopath_synapse_rec", {"weight_recorder": wr}) syn_dict = {"synapse_model": "clopath_synapse_rec", "weight": init_w, "delay": resolution} nest.Connect(prrt_nrn, nrn, syn_spec=syn_dict) # Simulation simulation_time = (10.0 + max(s_t_pre[-1], s_t_post[-1])) nest.Simulate(simulation_time) # Extract and save synaptic weights weights = wr.get("events", "weights") syn_weights.append(weights[-1]) syn_weights = np.array(syn_weights) # scaling of the weights so that they are comparable to [1] syn_weights = 100.0 * 15.0 * (syn_weights - init_w) / init_w + 100.0 # Plot results fig, ax = plt.subplots(1, sharex=False) ax.plot([10., 20., 30., 40., 50.], syn_weights[5:], color='b', lw=2.5, ls='-', label="pre-post pairing") ax.plot([10., 20., 30., 40., 50.], syn_weights[:5], color='g', lw=2.5, ls='-', label="post-pre pairing") ax.set_ylabel("normalized weight change") ax.set_xlabel("rho (Hz)") ax.legend() ax.set_title("synaptic weight") plt.show()
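To make the final rescaling line concrete, a short numeric check (standalone arithmetic, not NEST code): with init_w = 0.5 the formula maps an unchanged weight to 100 % and a ±2 % raw change to 130 % / 70 %, matching the normalized scale of [1]:

import numpy as np

init_w = 0.5
raw = np.array([0.50, 0.51, 0.49])    # unchanged, +2 %, -2 % raw weight
scaled = 100.0 * 15.0 * (raw - init_w) / init_w + 100.0
assert np.allclose(scaled, [100.0, 130.0, 70.0])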
adamwiggins/cocos2d
test/test_schedule.py
Python
bsd-3-clause
1,034
0.024178
# This code is so you can run the samples without installing the package import sys import os sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) # import cocos from cocos.director import director from cocos.sprite import Sprite import
pyglet import random class TestLayer(cocos.layer.Layer): def __init__(self): super( TestLayer, self ).__init__() x,y = director.get_window_size() self.sprite = Sprite('grossini.png') self.sprite.position = x/2, y/2 self.add( self.sprite ) self.schedule( self.change_x ) self.schedule_interval( self.change_y, 1 ) def change_x(self, dt): self
.sprite.x = random.random()*director.get_window_size()[0] def change_y(self, dt): self.sprite.y = random.random()*director.get_window_size()[1] if __name__ == "__main__": director.init() test_layer = TestLayer () main_scene = cocos.scene.Scene (test_layer) director.run (main_scene)
sebastianwebber/pgconfig-api
common/util.py
Python
bsd-2-clause
5,642
0.001241
import tornado.web import json from tornado_cors import CorsMixin from common import ParameterFormat, EnumEncoder class DefaultRequestHandler(CorsMixin, tornado.web.RequestHandler): CORS_ORIGIN = '*' def initialize(self): self.default_format = self.get_argument("format", "json", True) self.show_about = self.get_argument("show_about", True, True) self.pg_version = self.get_argument("pg_version", 9.6, True) self.version = "2.0 beta" def write_about_stuff(self, format_type="alter_system"): default_comment = "--" if format_type == "conf": default_comment = "#" self.write("{} Generated by PGConfig {}\n".format(default_comment, self.version)) self.write("{} http://pgconfig.org\n\n".format(default_comment * 2)) def write_comment(self, format_type, comment): default_comment = "--" if format_type == "conf": default_comment = "#" if comment != "NONE": self.write("\n{} {}\n".format(default_comment, comment)) def write_config(self, output_data): if self.show_about is True: self.write_about_stuff("conf") for category in output_data: self.write("# {}\n".format(category["description"])) for parameter in category["parameters"]: config_value = parameter.get("config_value", "NI") value_format = parameter.get("format", ParameterFormat.NONE) if value_format in (ParameterFormat.String, ParameterFormat.Time): config_value = "'{}'".format(config_value) parameter_comment = parameter.get("comment", "NONE")
if parameter_comment != "NONE": self.write_comment("conf", parameter_comment) self.write("{} = {}\n".format(parameter["name"], config_value)) self.write("\n") def write_alter_system(self, output_data):
if float(self.pg_version) <= 9.3: self.write("-- The ALTER SYSTEM format is only supported on version 9.4 and higher. Use 'conf' format instead.") else: if self.show_about is True: self.write_about_stuff() for category in output_data: self.write("-- {}\n".format(category["description"])) for parameter in category["parameters"]: config_value = parameter.get("config_value", "NI") parameter_comment = parameter.get("comment", "NONE") self.write_comment("alter_system", parameter_comment) self.write("ALTER SYSTEM SET {} TO '{}';\n".format(parameter[ "name"], config_value)) self.write("\n") def write_plain(self, message=list()): if len(message) == 1: self.write(message[0]) else: for line in message: self.write(line + '\n') def write_bash(self, message=list()): bash_script = """ #!/bin/bash """ self.write(bash_script) if len(message) == 1: self.write('SQL_QUERY="{}"\n'.format(message[0])) self.write('psql -c "${SQL_QUERY}"\n') else: for line in message: self.write('SQL_QUERY="{}"\n'.format(line)) self.write('psql -c "${SQL_QUERY}"\n\n') def write_json_api(self, message): self.set_header('Content-Type', 'application/vnd.api+json') _document = {} _document["data"] = message _meta = {} _meta["copyright"] = "PGConfig API" _meta["version"] = self.version _meta["arguments"] = self.request.arguments _document["meta"] = _meta _document["jsonapi"] = {"version": "1.0"} full_url = self.request.protocol + "://" + self.request.host + self.request.uri _document["links"] = {"self": full_url} self.write( json.dumps( _document, sort_keys=True, separators=(',', ': '), cls=EnumEncoder)) def write_json(self, message=list()): self.set_header('Content-Type', 'application/json') if len(message) == 1: self.write("{ \"output\": \"" + message[0] + "\"}") else: new_output = "{ \"output\": [" first_line = True for line in message: if not first_line: new_output += "," else: first_line = False new_output += "\"{}\"".format(line) new_output += "] } " self.write(new_output) def return_output(self, message=list()): # default_format=self.get_argument("format", "json", True) # convert string input into a list (to solve an issue with multiline strings) process_data = [] if not isinstance(message, list): process_data.insert(0, message) else: process_data = message if self.default_format == "json": self.write_json_api(process_data) elif self.default_format == "bash": self.write_bash(process_data) elif self.default_format == "conf": self.write_config(process_data) elif self.default_format == "alter_system": self.write_alter_system(process_data) else: self.write_plain(process_data) class GeneratorRequestHandler(DefaultRequestHandler): pass
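A small sketch of the category/parameter shape these writers consume and the text write_alter_system produces for it (the sample parameter values are illustrative; the handler itself additionally needs a Tornado request context):

# Mirror write_alter_system's formatting outside of a Tornado handler.
output_data = [{
    "description": "memory_configuration",
    "parameters": [
        {"name": "shared_buffers", "config_value": "4GB"},
        {"name": "work_mem", "config_value": "64MB"},
    ],
}]
lines = []
for category in output_data:
    lines.append("-- {}".format(category["description"]))
    for parameter in category["parameters"]:
        lines.append("ALTER SYSTEM SET {} TO '{}';".format(
            parameter["name"], parameter["config_value"]))
print("\n".join(lines))
# -- memory_configuration
# ALTER SYSTEM SET shared_buffers TO '4GB';
# ALTER SYSTEM SET work_mem TO '64MB';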
lzuba-tgm/A05_SimplyGame
Ui_MainWindow.py
Python
gpl-3.0
13,842
0.003396
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'D:\github repos\Python\A05_SimplyGame\Binaries\MyView.ui' # # Created: Tue Oct 25 22:22:12 2016 # by: pyside-uic 0.2.15 running on PySide 1.2.2 # # WARNING! All changes made in this file will be lost! from PySide import QtCore, QtGui class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName("MainWindow") MainWindow.resize(808, 600) self.centralwidget = QtGui.QWidget(MainWindow) self.centralwidget.setObjectName("centralwidget") self.gridLayoutWidget = QtGui.QWidget(self.centralwidget) self.gridLayoutWidget.setGeometry(QtCore.QRect(240, 110, 561, 281)) self.gridLayoutWidget.setObjectName("gridLayoutWidget") self.gridLayout = QtGui.QGridLayout(self.gridLayoutWidget) self.gridLayout.setContentsMargins(0, 0, 0, 0) self.gridLayout.setHorizontalSpacing(7) self.gridLayout.setVerticalSpacing(9) self.gridLayout.setObjectName("gridLayout") self.pushButton_5 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_5.setObjectName("pushButton_5") self.gridLayout.addWidget(self.pushButton_5, 0, 4, 1, 1) self.pushButton_1 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_1.setObjectName("pushButton_1") self.gridLayout.addWidget(self.pushButton_1, 0, 0, 1, 1) self.pushButton_9 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_9.setObjectName("pushButton_9") self.gridLayout.addWidget(self.pushButton_9, 1, 3, 1, 1) self.pushButton_6 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_6.setObjectName("pushButton_6") self.gridLayout.addWidget(self.pushButton_6, 1, 0, 1, 1) self.pushButton_10 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_10.setObjectName("pushButton_10") self.gridLayout.addWidget(self.pushButton_10, 1, 4, 1, 1) self.pushButton_15 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_15.setObjectName("pushButton_15") self.gridLayout.addWidget(self.pushButton_15, 2, 4, 1, 1) self.pushButton_4 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_4.setObjectName("pushButton_4") self.gridLayout.addWidget(self.pushButton_4, 0, 3, 1, 1) self.pushButton_11 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_11.setObjectName("pushButton_11") self.gridLayout.addWidget(self.pushButton_11, 2, 0, 1, 1) self.pushButton_12 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_12.setObjectName("pushButton_12") self.gridLayout.addWidget(self.pushButton_12, 2, 1, 1, 1) self.pushButton_7 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_7.setObjectName("pushButton_7") self.gridLayout.addWidget(self.pushButton_7, 1, 1, 1, 1) self.pushButton_3 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_3.setObjectName("pushButton_3") self.gridLayout.addWidget(self.pushButton_3, 0, 2, 1, 1) self.pushButton_13 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_13.setObjectName("pushButton_13") self.gridLayout.addWidget(self.pushButton_13, 2, 2, 1, 1) self.pushButton_8 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_8.setObjectName("pushButton_8") self.gridLayout.addWidget(self.pushButton_8, 1, 2, 1, 1) self.pushButton_14 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_14.setObjectName("pushButton_14") self.gridLayout.addWidget(self.pushButton_14, 2, 3, 1, 1) self.pushButton_2 = QtGui.QPushButton(self.gridLayoutWidget) self.pushButton_2.setObjectName("pushButton_2") self.gridLayout.addWidget(self.pushButton_2, 0, 1, 1, 1) self.formLayoutWidget = QtGui.QWidget(self.centralwidget) 
self.formLayoutWidget.setGeometry(QtCore.QRect(50, 70, 191, 481)) self.formLayoutWidget.setObjectName("formLayoutWidget") self.formLayout = QtGui.QFormLayout(self.formLayoutWidget) self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout.setContentsMargins(0, 0, 0, 0) self.formLayout.setObjectName("formLayout") self.label = QtGui.QLabel(self.formLayoutWidget) self.label.setObjectName("label") self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label) self.label_2 = QtGui.QLabel(self.formLayoutWidget) self.label_2.setObjectName("label_2") self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_2) self.label_3 = QtGui.QLabel(self.formLayoutWidget) self.label_3.setObjectName("label_3") self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_3) self.label_4 = QtGui.QLabel(self.formLayoutWidget) self.label_4.setObjectName("label_4") self.formLayout.setWidget(9, QtGui.QFormLayout.LabelRole, self.label_4) self.label_5 = QtGui.QLabel(self.formLayoutWidget) self.label_5.setObjectName("label_5") self.formLayout.setWidget(12, QtGui.QFormLayout.LabelRole, self.label_5) spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(1, QtGui.QFormLayout.LabelRole, spacerItem) spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(5, QtGui.QFormLayout.LabelRole, spacerItem1) spacerItem2 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(8, QtGui.QFormLayout.LabelRole, spacerItem2) spacerItem3 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(11, QtGui.QFormLayout.LabelRole, spacerItem3) spacerItem4 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(2, QtGui.QFormLayout.LabelRole, spacerItem4) spacerItem5 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(4, QtGui.QFormLayout.LabelRole, spacerItem5) spacerItem6 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(7, QtGui.QFormLayout.LabelRole, spacerItem6) spacerItem7 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.formLayout.setItem(10, QtGui.QFormLayout.LabelRole, spacerItem7) self.label_6 = QtGui.QLabel(self.formLayoutWidget) self.label_6.setObjectName("label_6") self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.label_6) self.label_7 = QtGui.QLabel(self.formLayoutWidget) self.label_7.setObjectName("label_7") self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.label_7) self.label_8 = QtGui.QLabel(self.formLayoutWidget) self.label_8.setObjectName("label_8") self.formLayout.setWidget(6, QtGui.QFormLayout.FieldRole, self.label_8) self.label_9 = QtGui.QLabel(self.formLayoutWidget) self.label_9.setObjectName("label_9") self.formLayout.setWidget(9, QtGui.QFormLayout.FieldRole, self.label_9) self.label_10 = Q
tGui.QLabel(self.formLayo
utWidget) self.label_10.setObjectName("label_10") self.formLayout.setWidget(12, QtGui.QFormLayout.FieldRole, self.label_10) self.gridLayoutWidget_2 = QtGui.QWidget(self.centralwidget) self.gridLayoutWidget_2.setGeometry(QtCore.QRect(240, 390, 561, 161)) self.gridLayoutWidget_2.setObjectName("gridLayoutWidget_2") self.gridLayout_2 = QtGui.QGridLayout(self.gridLayoutWidget_2) self.gridLayout_2.setContentsMargins(0, 0, 0, 0) self.gridLayou
jucimarjr/IPC_2017-1
lista08/lista08_lista02_questao16.py
Python
apache-2.0
799
0
# ---------------------------------------------------------------------------------------------------------------------- # Introdução a Programação de Computadores - IPC # Universidade do Estado do Amazonas - UEA # Prof. Jucimar Jr # Edson de Lima Barros 1715310043 # Ti
ago Ferreira Aranha 1715310047 # Vitor Simôes Azevedo 1715310025 # Roberta de Oliveira da Cruz 0825070169 # Uriel Brito Barros 1515120558 # # 16. Write a procedure that receives, by parameter, # two vectors of 10 integer elements and
 that computes and returns, # also by parameter, the intersection vector of the first two. from lista08.ipc import vetor vetor1 = vetor.cria_vetor(10) vetor2 = vetor.cria_vetor(10) vetor_interseccao = vetor.vetor_interseccao(vetor1, vetor2) print(vetor_interseccao)
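The lista08.ipc.vetor helpers are not included in this record. A minimal sketch of what vetor_interseccao could look like, assuming the intersection keeps first-seen order and drops duplicates (the assignment does not pin this down):

def vetor_interseccao(vetor1, vetor2):
    """Return the elements common to both vectors, in first-seen order."""
    interseccao = []
    for elemento in vetor1:
        if elemento in vetor2 and elemento not in interseccao:
            interseccao.append(elemento)
    return interseccao

assert vetor_interseccao([1, 2, 3, 2], [2, 3, 4]) == [2, 3]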
FoamyGuy/mcpi_with_espruino
examples/example2_led/example2_led.py
Python
unlicense
1,489
0.003358
import mcpi.minecraft as minecraft import mcpi.block as Block import seria
l import time # The location where redstone torch needs to spawn. a0 = (-112, 0, 62) # <- YOU MUST SET THIS VALUE (x,y,z) """ Helper method: get_pin(pin) Returns whether the minecraft pin is turned on or off (based on redstone torch type) Block(76, 1) -> Redstone Torch ON Block(75, 1) -> Redstone Torch OFF """ def get_pin(pin): block = mc.getBlockWithData(pin) print(block) if block.id == 76: return 1
elif block.id == 75: return 0 else: return -1 if __name__ == "__main__": # My Espruino was COM23, and I had to use value 22 here. port = 22 old_val = 0 ser = serial.Serial(port, timeout=1) # open first serial port print(ser.portstr) # check which port was really used # Create mc object. mc = minecraft.Minecraft.create() # Main loop try: while True: # Read the minecraft pin cur_val = get_pin(a0) if cur_val != old_val: # write the result to the LED1 on Espruino if int(cur_val): # turn LED on ser.write("digitalWrite(LED1, 1)\n") else: # turn LED off ser.write("digitalWrite(LED1, 0)\n") old_val = cur_val time.sleep(.5) # small sleep except KeyboardInterrupt: print("stopped") ser.close()
flgiordano/netcash
+/google-cloud-sdk/lib/googlecloudsdk/gcloud_main.py
Python
bsd-3-clause
7,025
0.00911
#!/usr/bin/env python # # Copyright 2013 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """gcloud command line tool.""" import time START_TIME = time.time() # pylint:disable=g-bad-import-order # pylint:disable=g-import-not-at-top, We want to get the start time first. import os import signal import sys from googlecloudsdk.calliope import backend from googlecloudsdk.calliope import base from googlecloudsdk.calliope import cli from googlecloudsdk.core import config from googlecloudsdk.core import log from googlecloudsdk.core import metrics from googlecloudsdk.core import properties from googlecloudsdk.core.updater import local_state from googlecloudsdk.core.updater import update_manager from googlecloudsdk.core.util import platforms import surface # Disable stack traces when people kill a command. def CTRLCHandler(unused_signal, unused_frame): """Custom SIGINT handler. Signal handler that doesn't print the stack trace when a command is killed by keyboard interrupt. """ try: log.err.Print('\n\nCommand killed by keyboard interrupt\n') except NameError: sys.stderr.write('\n\nCommand killed by keyboard interrupt\n') # Kill ourselves with SIGINT so our parent can detect that we exited because # of a signal. SIG_DFL disables further KeyboardInterrupt exceptions. signal.signal(signal.SIGINT, signal.SIG_DFL) os.kill(os.getpid(), signal.SIGINT) # Just in case the kill failed ... sys.exit(1) signal.signal(signal.SIGINT, CTRLCHandler) # Enable normal UNIX handling of SIGPIPE to play nice with grep -q, head, etc. # See https://mail.python.org/pipermail/python-list/2004-June/273297.html and # http://utcc.utoronto.ca/~cks/space/blog/python/SignalExceptionSurprise # for more details. if hasattr(signal, 'SIGPIPE'): signal.signal(signal.SIGPIPE, signal.SIG_DFL) def _DoStartupChecks(): if not platforms.PythonVersion().IsCompatible(): sys.exit(1) _DoStartupChecks() if not config.Paths().sdk_root: # Don't do update checks if there is no install root. properties.VALUES.component_manager.disable_update_check.Set(True) def UpdateCheck(command_path, **unused_kwargs): try: update_manager.UpdateManager.PerformUpdateCheck(command_path=command_path) # pylint:disable=broad-except, We never want this to escape, ever. Only # messages printed should reach the user. except Exception: log.debug('Failed to perform update check.', exc_info=True) def CreateCLI(surfaces): """Generates the gcloud CLI from 'surface' folder with extra surfaces. Args: surfaces: list(tuple(dot_path, dir_path)), extra commands or subsurfaces to add, where dot_path is calliope command path and dir_path path to command group or command. Returns: calliope cli object.
""" def VersionFunc(): generated_cli.Execute(['version']) pkg_root = os.path.dirname(os.path.dirname(surface.__file__)) loader = cli.CLILoader( name='gcloud', command_root_directory=os.path.join(pkg_root, 'surface'), allow_non_existing_modules=True, version_func=VersionFunc) loader.AddReleaseTrack(base.ReleaseTrack.ALPHA, os.path.join(pkg_root, 'surface', 'alpha'), component='alpha') loader.AddReleaseTrack(base.ReleaseTrack.BETA, os.path.join(pkg_root, 'surface', 'beta'), component='beta') for dot_path, dir_path in surfaces: loader.AddModule(dot_path, dir_path, component=None) # Check for updates on shutdown but not for any of the updater commands. loader.RegisterPostRunHook(UpdateCheck, exclude_commands=r'gcloud\.components\..*') generated_cli = loader.Generate() return generated_cli def _PrintSuggestedAction(err, err_string): """Print the best action for the user to take, given the error.""" if (isinstance(err, backend.CommandLoadFailure) and type(err.root_exception) is ImportError): # This usually indicates installation corruption. # We do want to suggest `gcloud components reinstall` here (ex. as opposed # to the similar message in gcloud.py), because there's a good chance it'll # work (rather than a manual reinstall). # Don't suggest `gcloud feedback`, because this is probably an # installation problem. log.error( ('gcloud failed to load ({0}): {1}\n\n'
'This usually indicates corruption in your gcloud installation or ' 'problems with your Python interpreter.
\n\n' 'Please verify that the following is the path to a working Python 2.7 ' 'executable:\n' ' {2}\n' 'If it is not, please set the CLOUDSDK_PYTHON environment variable to ' 'point to a working Python 2.7 executable.\n\n' 'If you are still experiencing problems, please run the following ' 'command to reinstall:\n' ' $ gcloud components reinstall\n\n' 'If that command fails, please reinstall the Cloud SDK using the ' 'instructions here:\n' ' https://cloud.google.com/sdk/' ).format(err.command, err_string, sys.executable)) else: log.error('gcloud crashed ({0}): {1}'.format( getattr(err, 'error_name', type(err).__name__), err_string)) log.err.Print('\nIf you would like to report this issue, please run the ' 'following command:') log.err.Print(' gcloud feedback') def main(gcloud_cli=None): metrics.Started(START_TIME) # TODO(user): Put a real version number here metrics.Executions( 'gcloud', local_state.InstallationState.VersionForInstalledComponent('core')) if gcloud_cli is None: gcloud_cli = CreateCLI([]) try: gcloud_cli.Execute() except Exception as err: # pylint:disable=broad-except # We want this to be parsable by `gcloud feedback`, so we print the # stacktrace with a nice recognizable string log.file_only_logger.exception('BEGIN CRASH STACKTRACE') _PrintSuggestedAction(err, gcloud_cli.SafeExceptionToString(err)) if properties.VALUES.core.print_unhandled_tracebacks.GetBool(): # We want to see the traceback as normally handled by Python raise else: # This is the case for most non-Cloud SDK developers. They shouldn't see # the full stack trace, but just the nice "gcloud crashed" message. sys.exit(1) if __name__ == '__main__': try: main() except KeyboardInterrupt: CTRLCHandler(None, None)
shobute/go-slack
config.py
Python
isc
1,009
0.001982
DEBUG = False USERNAME = 'hikaru' CHANNEL = 'random' VOCAB = { 'RANDOM': ['random', ':troll:', ':trollface:'], 'PASS': ['pass', 'skip'], 'RESIGN': ['resign', 'give up'], 'VOTE': ['vote', 'move',
'play'], 'VOTES': ['votes', 'moves', 'voted', 'chance'], 'CAPTURES': ['captures'], 'SHOW': ['show', 'board'], 'YES': ['yes', 'yeah', 'ya', 'y', 'ja', 'please', 'ok', 'yep'], 'NO': ['no', 'nope', 'n', 'nee', "don
't", 'cancel'], } RESPONSES = { 'RESIGN_CONFIRMATION': [ 'Are you sure you want to resign?', 'Sure?', ], 'RESIGN_CANCELLED': [ 'Ok.', 'Resignation cancelled.', ], 'UNKNOWN': [ "I don't know.", 'What do you mean?', "That doesn't make any sense.", "I'm just a bot.", ], } # How often to play moves. See `man crontab` for format information. if DEBUG: CRON = '*/2 * * * *' # Every two minutes. else: CRON = '0 9-18 * * 1-5' # Hourly between 9:00 and 18:00 on weekdays.
alexgorban/models
official/transformer/v2/transformer_test.py
Python
apache-2.0
2,619
0.001909
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Test Transformer model.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from official.nlp.transformer import model_params from official.transformer.v2 import transformer class TransformerV2Test(tf.test.TestCase):
def setUp(self): self.params = params = model_params.TINY_PARAMS params["batch_size"] = params["default_batch_size"] = 16 params["use_synthetic_data"] = True params["hidden_size"] = 12 params["num_hidden_layers"] = 2 params["filter_size"] = 14 params["num_heads"] = 2 params["vocab_size"
] = 41 params["extra_decode_length"] = 2 params["beam_size"] = 3 params["dtype"] = tf.float32 def test_create_model_train(self): model = transformer.create_model(self.params, True) inputs, outputs = model.inputs, model.outputs self.assertEqual(len(inputs), 2) self.assertEqual(len(outputs), 1) self.assertEqual(inputs[0].shape.as_list(), [None, None]) self.assertEqual(inputs[0].dtype, tf.int64) self.assertEqual(inputs[1].shape.as_list(), [None, None]) self.assertEqual(inputs[1].dtype, tf.int64) self.assertEqual(outputs[0].shape.as_list(), [None, None, 41]) self.assertEqual(outputs[0].dtype, tf.float32) def test_create_model_not_train(self): model = transformer.create_model(self.params, False) inputs, outputs = model.inputs, model.outputs self.assertEqual(len(inputs), 1) self.assertEqual(len(outputs), 2) self.assertEqual(inputs[0].shape.as_list(), [None, None]) self.assertEqual(inputs[0].dtype, tf.int64) self.assertEqual(outputs[0].shape.as_list(), [None, None]) self.assertEqual(outputs[0].dtype, tf.int32) self.assertEqual(outputs[1].shape.as_list(), [None]) self.assertEqual(outputs[1].dtype, tf.float32) if __name__ == "__main__": tf.compat.v1.enable_v2_behavior() tf.test.main()
neuropycon/ephypype
ephypype/interfaces/__init__.py
Python
bsd-3-clause
70
0.028571
from . import mne #
noqa from .mne.s
pectral import TFRmorlet # noqa
grnet/synnefo
snf-cyclades-app/synnefo/logic/management/commands/backend-update-status.py
Python
gpl-3.0
1,489
0
# Copyright (C) 2010-2017 GRNET S.A. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from snf_django.management.commands import SynnefoCommand from synnefo.db.models import Backend from synnefo.logic import backend as backend_mod from synnefo.db import transaction HELP_MSG = """Query Ganeti backends and update the status of backend in DB. This command updates: * the list of the enabled disk-templates * the available resources (disk, memory, CPUs) """ class Command(SynnefoCommand): help = HELP_MSG @transaction.atomic def handle(self, **options): for backend in Backend.objects.select_for_update()\ .filter(offline=False): backend_mod.u
pdate_backend_disk_templates(backend) backend_mod.update_backend_resources(backend) self.stdout.write("Successful
ly updated backend '%s'\n" % backend)
niosus/EasyClangComplete
plugin/utils/search_scope.py
Python
mit
2,889
0
"""Defines all search scopes used in this project.""" from os import path ROOT_PATH = path.abspath('/') class TreeSearchScope: """Encapsulation of a search scope to search up the tree.""" def __init__(self, from_folder=ROOT_PATH, to_folder=ROOT_PATH): """Initialize the search scope.""" self.from_folder = from_folder self.to_folder = to_folder @property def from_folder(self): """Get the starting folder.""" return self._from_folder @from_folder.setter def from_folder(self, folder): """Set the last folder in search.""" self._from_folder = folder self._current_folder = self._from_folder @property def to_folder(self): """Get the end of search folder.""" return self._to_folder @to_folder.setter def to_folder(self, folder): """Set the last folder in search.""" self._to_folder = folder self._one_past_last = path.dirname(self._to_folder)
def __bool__(self): """Check
if the search scope is empty.""" return self.from_folder != ROOT_PATH def __iter__(self): """Make this an iterator.""" self._current_folder = self._from_folder return self def __next__(self): """Get next folder to search in.""" current_folder = self._current_folder self._current_folder = path.dirname(self._current_folder) scope_end_reached = current_folder == self._one_past_last root_reached = current_folder == self._current_folder if root_reached or scope_end_reached: raise StopIteration else: return current_folder def __repr__(self): """Return search scope as a printable string.""" return 'SearchScope: from_folder: {}, to_folder: {}'.format( self._from_folder, self._to_folder) class ListSearchScope: """Encapsulation of a search scope to search in a list.""" def __init__(self, paths=[]): """Initialize the search scope.""" self.folders = paths @property def folders(self): """Get the starting folder.""" return self._folders @folders.setter def folders(self, paths): """Set the folders.""" self._folders = [f for f in paths if path.isdir(f)] self._iter = iter(self._folders) def __bool__(self): """Check if the search scope is not empty.""" return len(self._folders) > 0 def __iter__(self): """Make this an iterator.""" self._iter = iter(self._folders) return self._iter def __next__(self): """Get next folder to search in.""" return next(self._iter) def __repr__(self): """Return search scope as a printable string.""" return 'SearchScope: folders: {}'.format(self._folders)
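A short usage sketch of TreeSearchScope as defined above; it performs pure path arithmetic, so none of the (illustrative, POSIX-style) folders need to exist on disk:

from os import path

leaf = path.join(path.sep, 'home', 'user', 'project', 'src')
top = path.join(path.sep, 'home', 'user', 'project')
scope = TreeSearchScope(from_folder=leaf, to_folder=top)
# Iteration yields every folder from from_folder up to and
# including to_folder, walking toward the filesystem root.
assert list(scope) == [leaf, top]
assert bool(scope)   # non-empty: from_folder is not the root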
cjauvin/python_algorithms
tests/basic/test_stack.py
Python
bsd-3-clause
1,843
0.000543
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_stack ---------------------------------- Tests for `python_algorithms.stack` module. """ import unittest from python_algorithms.basic.stack import Stack class TestStack(unittest.TestCase): def setUp(self): self.empty_stack = Stack() self.stack = Stack() self.seq = [0, 2, 4, 6, 8] for x in self.seq: self.stack.push(x) def test_push_to_empty_stack(self): self.empty_stack.push(0) self.assertEqual(self.empty_stack.peek(), 0) def test_push_to_stack(self): self.stack.push(10) self.assertEqual(self.stack.peek(), 10) def test_pop_from_empty_stack(self): self.assertRaises(IndexError, self.empty_stack.pop) def test_pop_from_stack(self): self.assertEqual(self.stack.pop(), self.seq[-1]) def test_size_of_empty_stack(self): self.assertEqual(self.empty_stack.size, 0) def test_size_of_stack(self): self.assertEqual(self.stack.size, len(self.seq)) def test_peek_at_empty_stack(self): self.assertRaises(IndexError, self.empty_stack.peek) def test_peek_at_stack(self): self.assertEqual(self.stack.peek(), self.seq[-1]) def test_iterate_empty_stack(self): for curr in self.empty_stack:
self.assertEqual(False, True) def test_iterate_stack(self): iter_seq = [] for curr in self.stack: iter_seq.append(curr) iter_seq.reverse() self.assertEqual(iter_seq, self.seq) def test_str_empty_stack(self): self.assertEqual(str(self.empty_stack), "") def test_str_stack(self): self.asse
rtEqual(str(self.stack), " ".join([str(x) for x in self.seq])) def tearDown(self): pass if __name__ == '__main__': unittest.main()
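The Stack class under test is imported from python_algorithms.basic.stack and is not part of this record; a minimal list-backed sketch consistent with the behaviour the tests exercise (push/pop/peek, a size property, top-down iteration, and a space-separated str) might look like this:

class Stack(object):
    """Minimal LIFO stack sketch matching the tests above."""

    def __init__(self):
        self._items = []

    def push(self, item):
        self._items.append(item)

    def pop(self):
        return self._items.pop()      # IndexError when empty, as tested

    def peek(self):
        return self._items[-1]        # IndexError when empty, as tested

    @property
    def size(self):
        return len(self._items)

    def __iter__(self):               # top of the stack first
        return iter(reversed(self._items))

    def __str__(self):                # bottom-to-top, space separated
        return " ".join(str(x) for x in self._items)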
claudep/pootle
tests/search/units.py
Python
gpl-3.0
10,939
0
# -*- coding: utf-8 -*- # # Copyright (C) Pootle contributors. # # This file is a part of the Pootle project. It is distributed under the GPL3 # or later license. See the LICENSE file for a copy of the license and the # AUTHORS file for copyright and authorship information. import pytest from pootle.core.delegate import search_backend from pootle.core.plugin import getter from pootle_pr
oject.models import Project from pootle_statistics
.models import Submission, SubmissionTypes from pootle_store.getters import get_search_backend from pootle_store.constants import FUZZY, TRANSLATED, UNTRANSLATED from pootle_store.models import Suggestion, Unit from pootle_store.unit.filters import ( FilterNotFound, UnitChecksFilter, UnitContributionFilter, UnitSearchFilter, UnitStateFilter, UnitTextSearch) from pootle_store.unit.search import DBSearchBackend def _expected_text_search_words(text, exact): if exact: return [text] return [t.strip() for t in text.split(" ") if t.strip()] def _expected_text_search_results(qs, words, search_fields): def _search_field(k): subresult = qs.all() for word in words: subresult = subresult.filter( **{("%s__icontains" % k): word}) return subresult result = qs.none() for k in search_fields: result = result | _search_field(k) return list(result.order_by("pk")) def _expected_text_search_fields(sfields): search_fields = set() for field in sfields: if field in UnitTextSearch.search_mappings: search_fields.update(UnitTextSearch.search_mappings[field]) else: search_fields.add(field) return search_fields def _test_units_checks_filter(qs, check_type, check_data): result = UnitChecksFilter(qs, **{check_type: check_data}).filter("checks") for item in result: assert item in qs assert result.count() == result.distinct().count() if check_type == "checks": for item in result: assert any( qc in item.qualitycheck_set.values_list("name", flat=True) for qc in check_data) assert( list(result) == list( qs.filter( qualitycheck__false_positive=False, qualitycheck__name__in=check_data).distinct())) else: for item in result: item.qualitycheck_set.values_list("category", flat=True) assert( list(result) == list( qs.filter( qualitycheck__false_positive=False, qualitycheck__category=check_data).distinct())) def _test_units_contribution_filter(qs, user, unit_filter): result = UnitContributionFilter(qs, user=user).filter(unit_filter) for item in result: assert item in qs assert result.count() == result.distinct().count() user_subs_overwritten = [ "my_submissions_overwritten", "user_submissions_overwritten"] if unit_filter == "suggestions": assert ( result.count() == qs.filter( suggestion__state__name="pending").distinct().count()) return elif not user: assert result.count() == 0 return elif unit_filter in ["my_suggestions", "user_suggestions"]: expected = qs.filter( suggestion__state__name="pending", suggestion__user=user).distinct() elif unit_filter == "user_suggestions_accepted": expected = qs.filter( suggestion__state__name="accepted", suggestion__user=user).distinct() elif unit_filter == "user_suggestions_rejected": expected = qs.filter( suggestion__state__name="rejected", suggestion__user=user).distinct() elif unit_filter in ["my_submissions", "user_submissions"]: expected = qs.filter(submitted_by=user) elif unit_filter in user_subs_overwritten: # lets calc this long hand # first submissions that have been added with no suggestion user_edit_subs = Submission.objects.filter( type__in=SubmissionTypes.EDIT_TYPES).filter( suggestion__isnull=True).filter( submitter=user).values_list("unit_id", flat=True) # next the suggestions that are accepted and the user is this user user_suggestions = Suggestion.objects.filter( state__name="accepted", user=user).values_list("unit_id", flat=True) expected = qs.filter( id__in=( set(user_edit_subs) | set(user_suggestions))).exclude(submitted_by=user) assert ( list(expected.order_by("pk")) == list(result.order_by("pk"))) def _test_unit_text_search(qs, text, sfields, exact, empty=True): unit_search = 
UnitTextSearch(qs) result = unit_search.search(text, sfields, exact).order_by("pk") words = unit_search.get_words(text, exact) fields = unit_search.get_search_fields(sfields) # ensure result meets our expectation assert ( list(result) == _expected_text_search_results(qs, words, fields)) # ensure that there are no dupes in result qs assert list(result) == list(result.distinct()) if not empty: assert result.count() for item in result: # item is in original qs assert item in qs for word in words: searchword_found = False for field in fields: if word.lower() in getattr(item, field).lower(): # one of the items attrs matches search searchword_found = True break assert searchword_found def _test_units_state_filter(qs, unit_filter): result = UnitStateFilter(qs).filter(unit_filter) for item in result: assert item in qs assert result.count() == result.distinct().count() if unit_filter == "all": assert list(result) == list(qs) return elif unit_filter == "translated": states = [TRANSLATED] elif unit_filter == "untranslated": states = [UNTRANSLATED] elif unit_filter == "fuzzy": states = [FUZZY] elif unit_filter == "incomplete": states = [UNTRANSLATED, FUZZY] assert all( state in states for state in result.values_list("state", flat=True)) assert ( qs.filter(state__in=states).count() == result.count()) @pytest.mark.django_db def test_get_units_text_search(units_text_searches): search = units_text_searches sfields = search["sfields"] fields = _expected_text_search_fields(sfields) words = _expected_text_search_words(search['text'], search["exact"]) # ensure the fields parser works correctly assert ( UnitTextSearch(Unit.objects.all()).get_search_fields(sfields) == fields) # ensure the text tokeniser works correctly assert ( UnitTextSearch(Unit.objects.all()).get_words( search['text'], search["exact"]) == words) assert isinstance(words, list) # run the all units test first and check its not empty if it shouldnt be _test_unit_text_search( Unit.objects.all(), search["text"], search["sfields"], search["exact"], search["empty"]) for qs in [Unit.objects.none(), Unit.objects.live()]: # run tests against different qs _test_unit_text_search( qs, search["text"], search["sfields"], search["exact"]) @pytest.mark.django_db def test_units_contribution_filter_none(units_contributor_searches): unit_filter = units_contributor_searches user = None qs = Unit.objects.all() if not hasattr(UnitContributionFilter, "filter_%s" % unit_filter): with pytest.raises(FilterNotFound): UnitContributionFilter(qs, user=user).filter(unit_filter) return test_qs = [ qs, qs.none(), qs.filter( store__translation_project__project=Project.objects.first())] for _qs in test_qs: _test_units_contribution_filter(_qs, us
polyaxon/polyaxon
platform/polycommon/tests/test_options/test_feature.py
Python
apache-2.0
1,739
0
#!/usr/bin/python # # Copyright 2018-2021 Polyaxon, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific lang
uage governing permissions and # limitations under the License. from unittest import TestCase from django.conf import settings from polycommon.options.ex
ceptions import OptionException from polycommon.options.feature import Feature from polycommon.options.option import NAMESPACE_DB_OPTION_MARKER, OptionStores class DummyFeature(Feature): pass class TestFeature(TestCase): def test_feature_default_store(self): assert DummyFeature.store == OptionStores(settings.STORE_OPTION) def test_feature_marker(self): assert DummyFeature.get_marker() == NAMESPACE_DB_OPTION_MARKER def test_parse_key_wrong_namespace(self): DummyFeature.key = "FOO" with self.assertRaises(OptionException): DummyFeature.parse_key() DummyFeature.key = "FOO:BAR" with self.assertRaises(OptionException): DummyFeature.parse_key() def test_parse_key_without_namespace(self): DummyFeature.key = "FEATURES:FOO" assert DummyFeature.parse_key() == (None, "FOO") def test_parse_key_with_namespace(self): DummyFeature.key = "FEATURES:FOO:BAR" assert DummyFeature.parse_key() == ("FOO", "BAR")
cheungpat/sqlalchemy-utils
tests/functions/test_make_order_by_deterministic.py
Python
bsd-3-clause
3,482
0
import sqlalchemy as sa from sqlalchemy_utils.functions.sort_query import make_order_by_deterministic from tests import assert_contains, TestCase class TestMakeOrderByDeterministic(TestCase): def create_models(self): class User(self.Base): __tablename__ = 'user' id = sa.Column(sa.Integer, primary_key=True) name = sa.Column(sa.Unicode) email = sa.Column(sa.Unicode, unique=True) email_lower = sa.orm.column_property( sa.func.lower(name) ) class Article(self.Base): __tablename__ = 'article' id = sa.Column(sa.Integer, primary_key=True) author_id = sa.Column(sa.Integer, sa.ForeignKey('user.id')) author = sa.orm.relationship(User) User.article_count = sa.orm.column_property( sa.select([sa.func.count()], from_obj=Article)
.where(Article.author_id == User.id) .label('article_count') ) self.User = User self.Articl
e = Article def test_column_property(self): query = self.session.query(self.User).order_by(self.User.email_lower) query = make_order_by_deterministic(query) assert_contains('lower("user".name), "user".id ASC', query) def test_unique_column(self): query = self.session.query(self.User).order_by(self.User.email) query = make_order_by_deterministic(query) assert str(query).endswith('ORDER BY "user".email') def test_non_unique_column(self): query = self.session.query(self.User).order_by(self.User.name) query = make_order_by_deterministic(query) assert_contains('ORDER BY "user".name, "user".id ASC', query) def test_descending_order_by(self): query = self.session.query(self.User).order_by( sa.desc(self.User.name) ) query = make_order_by_deterministic(query) assert_contains('ORDER BY "user".name DESC, "user".id DESC', query) def test_ascending_order_by(self): query = self.session.query(self.User).order_by( sa.asc(self.User.name) ) query = make_order_by_deterministic(query) assert_contains('ORDER BY "user".name ASC, "user".id ASC', query) def test_string_order_by(self): query = self.session.query(self.User).order_by('name') query = make_order_by_deterministic(query) assert_contains('ORDER BY name, "user".id ASC', query) def test_annotated_label(self): query = self.session.query(self.User).order_by(self.User.article_count) query = make_order_by_deterministic(query) assert_contains('article_count, "user".id ASC', query) def test_annotated_label_with_descending_order(self): query = self.session.query(self.User).order_by( sa.desc(self.User.article_count) ) query = make_order_by_deterministic(query) assert_contains('ORDER BY article_count DESC, "user".id DESC', query) def test_query_without_order_by(self): query = self.session.query(self.User) query = make_order_by_deterministic(query) assert 'ORDER BY "user".id' in str(query) def test_alias(self): alias = sa.orm.aliased(self.User.__table__) query = self.session.query(alias).order_by(alias.c.name) query = make_order_by_deterministic(query) assert str(query).endswith('ORDER BY user_1.name, "user".id ASC')
comoga/gooddata-python
tests/examples/employee.py
Python
bsd-3-clause
6,397
0.00766
from gooddataclient.dataset import Dataset from gooddataclient.columns import ConnectionPoint, Label, Reference class Employee(Dataset): employee = ConnectionPoint(title='Employee', folder='Employee') firstname = Label(title='First Name', reference='employee', folder='Employee') lastname = Label(title='Last Name', reference='employee', folder='Employee') department = Reference(title='Department', reference='department', schemaReference='Department', folder='Employee') class Meta(Dataset.Meta): column_order = ('employee', 'firstname', 'lastname', 'department') def data(self): return [{'employee': 'e1', 'lastname': 'Nowmer', 'department': 'd1', 'firstname': 'Sheri'}, {'employee': 'e2', 'lastname': 'Whelply', 'department': 'd1', 'firstname': 'Derrick'}, {'employee': 'e6', 'lastname': 'Damstra', 'department': 'd2', 'firstname': 'Roberta'}, {'employee': 'e7', 'lastname': 'Kanagaki', 'department': 'd3', 'firstname': 'Rebecca'}, {'employee': 'e8', 'lastname': 'Brunner', 'department': 'd11', 'firstname': 'Kim'}, {'employee': 'e9', 'lastname': 'Blumberg', 'department': 'd11', 'firstname': 'Brenda'}, {'employee': 'e10', 'lastname': 'Stanz', 'department': 'd5', 'firstname': 'Darren'}, {'employee': 'e11', 'lastname': 'Murraiin', 'department': 'd11', 'firstname': 'Jonathan'}, {'employee': 'e12', 'lastname': 'Creek', 'department': 'd11', 'firstname': 'Jewel'}, {'employee': 'e13', 'lastname': 'Medina', 'department': 'd11', 'firstname': 'Peggy'}, {'employee': 'e14', 'lastname': 'Rutledge', 'department': 'd11', 'firstname': 'Bryan'}, {'employee': 'e15', 'lastname': 'Cavestany', 'department': 'd11', 'firstname': 'Walter'}, {'employee': 'e16', 'lastname': 'Planck', 'department': 'd11', 'firstname': 'Peggy'}, {'employee': 'e17', 'lastname': 'Marshall', 'department': 'd11', 'firstname': 'Brenda'}, {'employee': 'e18', 'lastname': 'Wolter', 'department': 'd11', 'firstname': 'Daniel'}, {'employee': 'e19', 'lastname': 'Collins', 'department': 'd11', 'firstname': 'Dianne'} ] maql = """ # THIS IS MAQL SCRIPT THAT GENERATES PROJECT LOGICAL MODEL. # SEE THE MAQL DOCUMENTATION AT http://developer.gooddata.com/api/maql-ddl.html FOR MORE DETAILS # CREATE DATASET. DATASET GROUPS ALL FOLLOWING LOGICAL MODEL ELEMENTS TOGETHER. CREATE DATASET {dataset.employee} VISUAL(TITLE "Employee"); # CREATE THE FOLDERS THAT GROUP ATTRIBUTES AND FACTS CREATE FOLDER {dim.employee} VISUAL(TITLE "Employee") TYPE ATTRIBUTE; # CREATE ATTRIBUTES. # ATTRIBUTES ARE CATEGORIES THAT ARE USED FOR SLICING AND DICING THE NUMBERS (FACTS) CREATE ATTRIBUTE {attr.employee.employee} VISUAL(TITLE "Employee", FOLDER {dim.employee}) AS KEYS {f_employee.id} FULLSET; ALTER DATASET {dataset.employee} ADD {attr.employee.employee}; # CREATE FACTS # FACTS ARE NUMBERS THAT ARE AGGREGATED BY ATTRIBUTES. 
# CREATE DATE FACTS # DATES ARE REPRESENTED AS FACTS # DATES ARE ALSO CONNECTED TO THE DATE DIMENSIONS # CREATE REFERENCES # REFERENCES CONNECT THE DATASET TO OTHER DATASETS # CONNECT THE REFERENCE TO THE APPROPRIATE DIMENSION ALTER ATTRIBUTE {attr.department.department} ADD KEYS {f_employee.department_id}; # ADD LABELS TO ATTRIBUTES ALTER ATTRIBUTE {attr.employee.employee} ADD LABELS {label.employee.employee.firstname} VISUAL(TITLE "First Name") AS {f_employee.nm_firstname}; ALTER ATTRIBUTE {attr.employee.employee} DEFAULT LABEL {label.employee.employee.firstname}; # ADD LABELS TO ATTRIBUTES ALTER ATTRIBUTE {attr.employee.employee} ADD LABELS {label.employee.employee.lastname} VISUAL(TITLE "Last Name") AS {f_employee.nm_lastname}; ALTER ATTRIBUTE {attr.employee.employee} ADD LABELS {label.employee.employee} VISUAL(TITLE "Employee") AS {f_employee.nm_employee}; # SYNCHRONIZE THE STORAGE AND DATA LOADING INTERFACES WITH THE NEW LOGICAL MODEL SYNCHRONIZE {dataset.employee}; """ schema_xml = ''' <schema> <name>Employee</name> <columns> <column>
<name>employee</name> <title>Employee</title> <ldmType>CONNECTION_POINT</ldmType> <folder>Employee</folder> </column> <column> <name>firstname</name> <title>First Name</title> <ldmType>LABEL</ldmType> <reference>employee</reference> <folder>Employee</folder> </column> <column> <name>lastname</name> <
title>Last Name</title> <ldmType>LABEL</ldmType> <reference>employee</reference> <folder>Employee</folder> </column> <column> <name>department</name> <title>Department</title> <ldmType>REFERENCE</ldmType> <reference>department</reference> <schemaReference>Department</schemaReference> <folder>Employee</folder> </column> </columns> </schema> ''' data_csv = '''"employee","firstname","lastname","department" "e1","Sheri","Nowmer","d1" "e2","Derrick","Whelply","d1" "e6","Roberta","Damstra","d2" "e7","Rebecca","Kanagaki","d3" "e8","Kim","Brunner","d11" "e9","Brenda","Blumberg","d11" "e10","Darren","Stanz","d5" "e11","Jonathan","Murraiin","d11" "e12","Jewel","Creek","d11" "e13","Peggy","Medina","d11" "e14","Bryan","Rutledge","d11" "e15","Walter","Cavestany","d11" "e16","Peggy","Planck","d11" "e17","Brenda","Marshall","d11" "e18","Daniel","Wolter","d11" "e19","Dianne","Collins","d11" ''' sli_manifest = {"dataSetSLIManifest": { "parts": [ { "columnName": "employee", "mode": "FULL", "populates": ["label.employee.employee"], "referenceKey": 1 }, { "columnName": "firstname", "mode": "FULL", "populates": ["label.employee.employee.firstname"] }, { "columnName": "lastname", "mode": "FULL", "populates": ["label.employee.employee.lastname"] }, { "columnName": "department", "mode": "FULL", "populates": ["label.department.department"], "referenceKey": 1 } ], "file": "data.csv", "dataSet": "dataset.employee", "csvParams": { "quoteChar": "\"", "escapeChar": "\"", "separatorChar": ",", "endOfLine": "\n" } }}
keras-team/keras
keras/feature_column/dense_features_v2.py
Python
apache-2.0
6,123
0.00343
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A layer that produces a dense `Tensor` based on given `feature_columns`.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow.compat.v2 as tf from keras.feature_column import base_feature_layer as kfc from keras.feature_column import dense_features from keras.utils import tf_contextlib from tensorflow.python.util.tf_export import keras_export @keras_export('keras.layers.DenseFeatures', v1=[]) class DenseFeatures(dense_features.DenseFeatures): """A layer that produces a dense `Tensor` based on given `feature_columns`. Generally a single example in training data is described with FeatureColumns. At the first layer of the model, this column oriented data should be converted to a single `Tensor`. This layer can be called multiple times with different features. This is the V2 version of this layer that uses name_scopes to create variables instead of variable_scopes. But this approach currently lacks support for partitioned variables. In that case, use the V1 version instead. Example: ```python price = tf.feature_column.numeric_column('price') keywords_embedded = tf.feature_column.embedding_column( tf.feature_column.categorical_column_with_hash_bucket("keywords", 10000), dimensions=16) columns = [price, keywords_embedded, ...] feature_layer = tf.keras.layers.DenseFeatures(columns) features = tf.io.parse_example( ..., features=tf.feature_column.make_parse_example_spec(columns)) dense_tensor = feature_layer(features) for units in [128, 64, 32]: dense_tensor = tf.keras.layers.Dense(units, activation='relu')(dense_tensor) prediction = tf.keras.layers.Dense(1)(dense_tensor) ``` """ def __init__(self, feature_columns, trainable=True, name=None, **kwargs): """Creates a DenseFeatures object. Args: feature_columns: An iterable containing the FeatureColumns to use as inputs to your model. All items should be instances of classes derived from `DenseColumn` such as `numeric_column`, `embedding_column`, `bucketized_column`, `indicator_column`. If you have categorical features, you can wrap them with an `embedding_column` or `indicator_column`. trainable: Boolean, whether the layer's variables will be updated via gradient descent during training. name: Name to give to the DenseFeatures. **kwargs: Keyword arguments to construct a layer. Raises: ValueError: if an item in `feature_columns` is not a `DenseColumn`. """ super(DenseFeatures, self).__init__( feature_columns=feature_columns, trainable=trainable, name=name, **kwargs) self._state_manager = _StateManagerImplV2(self, self.trainable) def build(self, _): for column in self._feature_columns: with tf.name_scope(column.name): column.create_state(self._state_manager) # We would like to call Laye
r.build and not _DenseFeaturesHelper.build. # pylint: disable=protected-access super(kfc._BaseFeaturesLayer, self).build(None) # pylint: disable=bad-super-call class _StateManagerImplV2(tf.__internal__.feature_column.StateManager): # pylint: disable=pr
otected-access """Manages the state of DenseFeatures.""" def create_variable(self, feature_column, name, shape, dtype=None, trainable=True, use_resource=True, initializer=None): if name in self._cols_to_vars_map[feature_column]: raise ValueError('Variable already exists.') # We explicitly track these variables since `name` is not guaranteed to be # unique and disable manual tracking that the add_weight call does. with no_manual_dependency_tracking_scope(self._layer): var = self._layer.add_weight( name=name, shape=shape, dtype=dtype, initializer=initializer, trainable=self._trainable and trainable, use_resource=use_resource) if isinstance(var, tf.__internal__.tracking.Trackable): self._layer._track_trackable(var, feature_column.name + '/' + name) # pylint: disable=protected-access self._cols_to_vars_map[feature_column][name] = var return var @tf_contextlib.contextmanager def no_manual_dependency_tracking_scope(obj): """A context that disables manual dependency tracking for the given `obj`. Sometimes library methods might track objects on their own and we might want to disable that and do the tracking on our own. One can then use this context manager to disable the tracking the library method does and do your own tracking. For example: class TestLayer(tf.keras.Layer): def build(): with no_manual_dependency_tracking_scope(self): var = self.add_weight("name1") # Creates a var and doesn't track it self._track_trackable("name2", var) # We track variable with name `name2` Args: obj: A trackable object. Yields: a scope in which the object doesn't track dependencies manually. """ # pylint: disable=protected-access previous_value = getattr(obj, '_manual_tracking', True) obj._manual_tracking = False try: yield finally: obj._manual_tracking = previous_value
autoprotocol/autoprotocol-utilities
autoprotocol_utilities/bio_calculators.py
Python
bsd-3-clause
16,561
0
from autoprotocol.unit import Unit def dna_mass_to_mole(length, mass, ds=True): """ For the DNA Length and mass given, return the mole amount of DNA Example Usage: .. code-block:: python from autoprotocol_utilities import dna_mass_to_mole from autoprotocol.unit import Unit dna_length = 100 dna_mass = Unit(33, 'ng') dna_mass_to_mole(dna_length, dna_mass) Returns: .. code-block:: python Unit(0.5, 'picomole') Parameters ---------- length: int Length of DNA in bp mass: str, Unit Weight of DNA in prefix-g ds: bool, optional True for dsDNA, False for ssDNA Returns ------- pmole_dna: Unit Mole amount of DNA in pmol Raises ------ ValueError If inputs are not of specified types """ if isinstance(mass, str): mass = Unit.fromstring(mass) if not isinstance(mass, Unit) or str(mass.dimensionality) != "[mass]": raise ValueError("Mass of DNA must be of type Unit in prefix-gram") if not isinstance(length, int): raise ValueError( "Length of DNA is of type %s, must be of type " "integer" % type(length)) if not isinstance(ds, bool): raise ValueError( "ds is of type %s, must be of type bool: True for dsDNA, " "False for ssDNA" % type(ds)) dna_pg = mass.to("pg") if ds: dna_pmol = dna_pg / (Unit(660, "pg/pmol") * length) else: dna_pmol = dna_pg / (Unit(330, "pg/pmol") * length) return dna_pmol def dna_mole_to_mass(length, mole, ds=True): """ For the DNA Length and mole amount given, return the mass of DNA Example Usage: .. code-block:: python from autoprotocol_utilities import dna_mole_to_mass from autoprotocol.unit import Unit dna_length = 5000 dna_mole = "10:pmol" dna_mole_to_mass(dna_length, dna_mole) Returns: .. code-block:: python Unit(33.0, 'microgram') Parameters ---------- length: int Length of DNA in bp mole: str, Unit Mole amount of DNA in prefix-mol ds: bool, optional True for dsDNA, False for ssDNA Returns ------- dna_ug: Unit Weight of DNA in ug Raises ------ ValueError If inputs are not of specified types """ if isinstance(mole, str): mole = Unit.fromstring(mole) if not isinstance(mole, Unit) or str(mole.dimensionality) != "[substance]": raise ValueError( "Mole amount of DNA must be of type Unit in prefix-mol") if not isinstance(length, int): raise ValueError( "Length of DNA is of type %s, must be of type " "integer" % type(length)) if not isinstance(ds, bool): raise ValueError( "ds is of type %s, must be of type bool: True for dsDNA, " "False for ssDNA" % type(ds)) dna_pmol = mole.to("pmol") if ds: dna_ug = ( Unit(660, "pg/pmol") * dna_pmol * Unit(10**(-6), "ug/pg") * length) else: dna_ug = ( Unit(330, "pg/pmol") * dna_pmol * Unit(10**(-6), "ug/pg") * length) return dna_ug def molar_to_mass_conc(length, molar, ds=True): """ For the DNA molarity given, return the mass concentration of DNA Example Usage: .. code-block:: python from autoprotocol_utilities import molar_to_mass_conc from autoprotocol_utilities import dna_mole_to_mass from autoprotocol.unit import Unit dna_length = 5000 dna_molarity = Unit(10, 'uM') molar_to_mass_c
onc(dna_length, dna_molarity) Returns: .. code-block:: python Unit(33000.0, 'nanogram / microliter') Parameters ---------- length: int Length of DNA in bp molar: str, Unit Molarity of DNA in prefix-M ds: bool, optional True for ds
DNA, False for ssDNA Returns ------- mass_conc: Unit Mass concentration of DNA in ng/uL Raises ------ ValueError If inputs are not of specified types """ if not isinstance(length, int): raise ValueError( "Length of DNA is of type %s, must be of type " "integer" % type(length)) if isinstance(molar, str): molar = Unit.fromstring(molar) if not (isinstance(molar, Unit) and str(molar.dimensionality) == '[substance] / [length] ** 3'): raise ValueError( "Molar concentration of DNA must be of type string or Unit") if not isinstance(ds, bool): raise ValueError( "ds is of type %s, must be of type bool: True for dsDNA, " "False for ssDNA" % type(ds)) dna_umole = Unit((molar / Unit(1, "M")).magnitude, "umol") dna_ug = dna_mole_to_mass(length, dna_umole, ds) mass_conc = Unit(dna_ug.magnitude * 1000, "ng/uL") return mass_conc def mass_conc_to_molar(length, mass_conc, ds=True): """ For the DNA mass concentration given, return the molarity of DNA Example Usage: .. code-block:: python from autoprotocol_utilities import mass_conc_to_molar from autoprotocol_utilities import dna_mass_to_mole from autoprotocol.unit import Unit dna_length = 5000 dna_mass_conc = Unit(33, 'ng/uL') mass_conc_to_molar(dna_length, dna_mass_conc) Returns: .. code-block:: python Unit(0.01, 'micromolar') Parameters ---------- length: int Length of DNA in bp mass_conc: str, Unit Mass concentration of DNA ds: bool, optional True for dsDNA, False for ssDNA Returns ------- molar: Unit Molarity of DNA in uM Raises ------ ValueError If inputs are not of specified types """ if not isinstance(length, int): raise ValueError( "Length of DNA is of type %s, must be of type " "integer" % type(length)) if isinstance(mass_conc, str): mass_conc = Unit.fromstring(mass_conc) if not isinstance(mass_conc, Unit) or \ str(mass_conc.dimensionality) != '[mass] / [length] ** 3': raise ValueError("Mass concentration of DNA must be of type Unit") if not isinstance(ds, bool): raise ValueError( "ds is of type %s, must be of type bool: True for dsDNA, " "False for ssDNA" % type(ds)) dna_ng = Unit((mass_conc / Unit(1, "ng/uL")).magnitude, "ng") dna_pmol = dna_mass_to_mole(length, dna_ng, ds) dna_molar = Unit(round(dna_pmol.magnitude, 9), "uM") return dna_molar def ligation_insert_ng(plasmid_size, plasmid_mass, insert_size, molar_ratio=1): """ For the plasmid size, plasmid amount, insert size, and molar ratio given, return the mass of insert needed for ligation Different from ligation_insert_volume: no insert concentration is given -> returns mass of insert needed Example Usage: .. code-block:: python from autoprotocol_utilities import ligation_insert_ng from autoprotocol.unit import Unit plasmid_size = 3000 plasmid_mass = Unit(100, 'ng') insert_size = 48 ligation_insert_ng(plasmid_size, plasmid_mass, insert_size) Returns: .. code-block:: python Unit(1.6, 'nanogram') Parameters ---------- plasmid_size : int Length of plasmid in bp. insert_size: int Length of insert in bp plasmid_mass : str, Unit Mass of plasmid in prefix-g molar_ratio : int, float, string, optional Ligation molar ratio of insert : vector. By default it is 1 : 1. Generally ligations are tested at 1:3, 1:1, and 3:1 Returns ------- insert_amount: Unit Amount of insert solution needed in ng Raises ------ ValueError If wells are not of type list, WellGroup or Container """ # Check input types if not isinstance(plasmid_size, int): raise ValueError("Plasmid_size: must be an integer") if not
alphagov/notifications-api
migrations/versions/0356_add_webautn_auth_type.py
Python
mit
1,380
0.003623
""" Revision ID: 0356_add_webautn_auth_type Revises: 0355_add_webauthn_table Create Date: 2021-05-13 12:42:45.190269 """ from alembic import op revision = '0356_add_webautn_auth_type' down_revision = '0355_add_webauthn_table' def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.execute("INSERT INTO auth_type VALUES ('webauthn_auth')") op.drop_constraint('ck_users_mobile_or_email_auth', 'users', type_=None, schema=None) op.execute(""" ALTER TABLE users ADD CONSTRAINT "ck_user_has_mobile_or_other_auth" CHECK (auth_type in ('email_auth', 'webauthn_auth') or mobile_number is not null) NOT VALID """) # ### end Alembic commands ### def dow
ngrade(): # ### commands auto generated by Alembic - please adjust! ### op.execute("UPDATE users SET auth_type = 'sms_auth' WHERE auth_type = 'webauthn_auth'") op.execute("UPDATE invited_users SET auth_type = 'sms_auth' WHERE auth_type = 'webauthn_auth'") op.drop_constraint('ck_user_has_mobile_or_other_auth', 'users', type_=None, schema=None) op.execute(""" ALTER TABLE users ADD CONST
RAINT "ck_users_mobile_or_email_auth" CHECK (auth_type = 'email_auth' or mobile_number is not null) NOT VALID """) op.execute("DELETE FROM auth_type WHERE name = 'webauthn_auth'") # ### end Alembic commands ###
fpoli/python-astexport
astexport/cli.py
Python
mit
1,148
0
import fileinput import argparse from astexport import __version__, __prog_name__ from astexport.parse import parse from astexport.export import export_json def create_parser(): parser = argparse.ArgumentParser( prog=__prog_name__, description="Python source code in, JSON AST out. (v{})".format( __version__ ) ) parser.add_argument( "-i", "--input", default="-", help="file to read from or '-' to use standard input (default)" ) parser.add_argument( "-p", "--pretty", action="store_true", help="print indented JSON" ) parser.add_argument( "-v", "
--version", action="store_true", help="print version and exit" ) return parser def main(): """Read source from stdin, parse and export the AST as JSON""" parser = create_
parser() args = parser.parse_args() if args.version: print("{} version {}".format(__prog_name__, __version__)) return source = "".join(fileinput.input(args.input)) tree = parse(source) json = export_json(tree, args.pretty) print(json)
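Editorial aside: a hedged usage sketch for the CLI above. The -i, -p, and -v flags come straight from create_parser(); the console-script name "astexport" is an assumption rather than something this file confirms.

# echo "x = 1" | astexport --pretty          # read from stdin, print indented JSON
# astexport -i program.py > program.ast.json # read from a file instead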
MatiasSM/fcb
fcb/utils/trickle.py
Python
lgpl-3.0
1,657
0.001811
from copy import deepcopy from distutils.spawn import find_executable class Settings(object): _upload_limit = 0 def __init__(self, settings=None):
if settings: self._upload_limit = settings.up_kbytes_sec @property def upload_limit(self): """ Returns the value as required by the t
rickle command (i.e. in KBytes) """ return self._upload_limit def upload_limit_in_kbytes(self, upload_limit): self._upload_limit = upload_limit if upload_limit is not None else 0 def to_argument_list(self): """ converts the setting in a list as required by the trickle command """ return ["-u", self._upload_limit] if self._upload_limit != 0 else [] class TrickleBwShaper(object): _trickle_cmd = "trickle" """ Helper class to handle trickle (http://linux.die.net/man/1/trickle) usage """ def __init__(self, settings): self._settings = deepcopy(settings) self._trickle_cmd = find_executable("trickle") if self._trickle_cmd is None: raise RuntimeError("Couldn't find 'trickle' program") def wrap_call(self, call_cmd): """ "wraps" the call_cmd so it can be executed by subprocess.call (and related flavors) as "args" argument :param call_cmd: original args like argument (string or sequence) :return: a sequence with the original command "executed" under trickle """ if isinstance(call_cmd, basestring): # FIXME python 3 unsafe call_cmd = [call_cmd] return [self._trickle_cmd, "-s"] + self._settings.to_argument_list() + list(call_cmd)
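Editorial aside: a hedged usage sketch for TrickleBwShaper (assumes the trickle binary is on PATH; the constructor raises RuntimeError otherwise). Note that to_argument_list() keeps the limit as an int while subprocess wants string argv entries, hence the str() pass:

import subprocess

settings = Settings()
settings.upload_limit_in_kbytes(100)   # cap uploads at 100 KB/s
shaper = TrickleBwShaper(settings)
wrapped = shaper.wrap_call(["scp", "backup.tar", "host:/backups/"])
subprocess.call([str(part) for part in wrapped])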
quantopian/pyfolio
pyfolio/tests/test_nbs.py
Python
apache-2.0
666
0
#!/usr/bin/env python """ simple example script for running notebooks and reporting exceptions. U
sage: `checkipnb.py foo.ipynb [bar.ipynb [...]]` Each cell is submitted to the kernel, and checked for errors. """ import os import glob from runipy.notebook_runner import NotebookRunner from pyfolio.utils import pyfolio_root from pyfolio.ipycompat import read as read_notebook def test_nbs(): path = os.path.join(pyfolio
_root(), 'examples', '*.ipynb') for ipynb in glob.glob(path): with open(ipynb) as f: nb = read_notebook(f, 'json') nb_runner = NotebookRunner(nb) nb_runner.run_notebook(skip_exceptions=False)
rob-earwaker/rail
test_rail.py
Python
mit
23,712
0
import sys import traceback import unittest import unittest.mock import rail class TestIdentity(unittest.TestCase): def test_returns_input_value(self): value = unittest.mock.Mock() self.assertEqual(value, rail.identity(value)) class TestNot(unittest.TestCase): def test_returns_inverse_for_bool(self): self.assertEquals(True, rail.not_(False)) self.assertEquals(False, rail.not_(True)) def test_returns_inverse_for_truthy(self): self.assertEquals(True, rail.not_([])) self.assertEquals(False, rail.not_([0])) class TestRaise(unittest.TestCase): def test_raises_exception(self): with self.assertRaises(ValueError) as context: rail.raise_(ValueError('exception')) self.assertEqual('exception', str(context.exception)) def test_preserves_traceback_when_reraising_without_exception(self): def func(exception): raise exception try: try: func(ValueError('exception')) except ValueError: expected_exc_info = sys.exc_info() rail.raise_() except ValueError: actual_exc_info = sys.exc_info() self.assertEqual(expected_exc_info[0], actual_exc_info[0]) self.assertEqual(expected_exc_info[1], actual_exc_info[1]) expected_tb = traceback.format_tb(expected_exc_info[2]) actual_tb = traceback.format_tb(actual_exc_info[2]) self.assertEqual(expected_tb, actual_tb[-len(expected_tb):]) def test_preserves_traceback_when_reraising_with_exception(self): def func(exception): raise exception try: try: func(ValueError('exception')) except ValueError as exception: expected_exc_info = sys.exc_info() rail.raise_(exception) except ValueError: actual_exc_info = sys.exc_info() self.assertEqual(expected_exc_info[0], actual_exc_info[0]) self.assertEqual(expected_exc_info[1], actual_exc_info[1]) expected_tb = traceback.format_tb(expected_exc_info[2]) actual_tb = traceback.format_tb(actual_exc_info[2]) self.assertEqual(expected_tb, actual_tb[-len(expected_tb):]) class TestTry(unittest.TestCase): def test_no_exception_raised(self): input = unittest.mock.Mock() expected_value = unittest.mock.Mock() func = unittest.mock.Mock(return_value=expected_value) handle = unittest.mock.Mock() self.assertEqual(expected_value, rail.try_(func, handle)(input)) func.assert_called_once_with(input) handle.assert_not_called() def test_exception_raised(self): input = unittest.mock.Mock() exception = ValueError('value') func = unittest.mock.Mock(side_effect=lambda _: rail.raise_(exception)) output = unittest.mock.Mock() handle = unittest.mock.Mock(return_value=output) self.assertEqual(output, rail.try_(func, handle)(input)) func.assert_called_once_with(input) handle.assert_called_once_with(exception) class TestMatch(unittest.TestCase): def test_no_match_statements_provided(self): value = unittest.mock.Mock() with self.assertRaises(rail.UnmatchedValueError) as context: rail.match()(value) self.assertEqual(value, context.exception.value) def test_value_unmatched_by_all_match_statements(self): value = unittest.mock.Mock() with self.assertRaises(rail.UnmatchedValueError) as context: match = rail.match( (lambda _: False, lambda _: unittest.mock.Mock()), (lambda _: False, lambda _: unittest.mock.Mock()), (lambda _: False, lambda _: unittest.mock.Mock()) ) match(value) self.assertEqual(value, context.exception.value) def test_value_matches_single_match_statement(self): expected_value = unittest.mock.Mock() match = rail.match( (lambda _: False, lambda _: unittest.mock.Mock()), (lambda _: True, lambda _: expected_value), (lambda _: False, lambda _: unittest.mock.Mock()) ) self.assertEqual(expected_value, match(unittest.mock.Mock())) def 
test_value_matches_multiple_match_statements(self): expected_value = unittest.mock.Mock() match = rail.match( (lambda _: False, lambda _: unittest.mock.Mock()), (lambda _: True, lambda _: expected_value), (lambda _: True, lambda _: unittest.mock.Mock()) ) self.assertEqual(expected_value, match(unittest.mock.Mock())) class TestMatchType(unittest.TestCase): def test_no_match_statements_provided(self): value = unittest.mock.Mock() with self.assertRaises(rail.UnmatchedValueError) as context: rail.match_type()(value) self.assertEqual(value, context.exception.value) def test_value_unmatched_by_all_match_statements(self): value = unittest.mock.Mock() with self.assertRaises(rail.UnmatchedValueError) as context: match = rail.match_type( (str, lambda _: unittest.mock.Mock()), (float, lambda _: unittest.mock.Mock()), (Exception, lambda _: unittest.mock.Mock()) ) match(value) self.assertEqual(value, context.exception.value) def test_value_matches_single_match_statement(self): expected_value = unittest.mock.Mock() match = rail.match_type( (int, lambda _: unittest.mock.Mock()), (unittest.mock.Mock, lambda _: expected_value), (dict, lambda _: unittest.mock.Mock()) ) self.assertEqual(expected_value, match(unittest.mock.Mock())) def test_value_matches_multiple_match_statements(self): expected_value = unittest.mock.Mock() match = rail.match_type( (bool, lambda _: unittest.mock.Mock()), (unittest.mock.Mock, lambda _: expected_value), (unittest.mock.Mock, lambda _: unittest
.mock.Mock()) ) self.assertEqual(expected_value, match(unittest.mock.Mock())) def test_value_subclass_of_match_type(self): expected_value = unittest.mock.Mock() match = rail.match_type( (bool, lambda _: unittest.mock.Mock()), (object, lambda _: expected_value), (unittest.mock.Mock, lambda _: unittest.mock.Mock()) ) self.assertEqual(expected_value, match(unittest.mock.Mock())) clas
s TestMatchLength(unittest.TestCase): def test_no_match_statements_provided(self): value = unittest.mock.Mock() with self.assertRaises(rail.UnmatchedValueError) as context: rail.match_length()(value) self.assertEqual(value, context.exception.value) def test_value_unmatched_by_all_match_statements(self): value = unittest.mock.Mock() value.__len__ = unittest.mock.Mock(return_value=2) with self.assertRaises(rail.UnmatchedValueError) as context: match = rail.match_length( (rail.eq(8), lambda _: unittest.mock.Mock()), (rail.gt(3), lambda _: unittest.mock.Mock()) ) match(value) self.assertEqual(value, context.exception.value) def test_value_matches_single_match_statement(self): expected_value = unittest.mock.Mock() match = rail.match_length( (rail.lt(0), lambda _: unittest.mock.Mock()), (rail.eq(0), lambda _: expected_value), (rail.gt(0), lambda _: unittest.mock.Mock()) ) value = unittest.mock.Mock() value.__len__ = unittest.mock.Mock(return_value=0) self.assertEqual(expected_value, match(value)) def test_value_matches_multiple_match_statements(self): expected_value = unittest.mock.Mock() match = rail.match_length( (rail.lt(0), lambda _: unittest.mock.Mock()), (rail.ge(0), lambda _: expected_value), (rail.eq(0), lambda _: unittest.mock.Mock()) )
ratnania/pyccel
tests/warnings/codegen/is.py
Python
mit
72
0
a
d = 1. # this statement will be ignored at the codegen x = ad is Non
e
krasnoperov/django-formalizr
formalizr/tests/tests.py
Python
bsd-3-clause
4,412
0.002947
import json from django.utils import unittest from django.test.client import RequestFactory from formalizr.tests.views import SimpleFormView, SimpleCreateView, SimpleUpdateView from formalizr.tests.models import SimpleModel class AjaxFormViewTest(unittest.TestCase): view_class = SimpleFormView VALUE = 1 def setUp(self): self.factory = RequestFactory() SimpleModel.objects.all().delete() def testRequest(self): """ Posts valid form in normal way """ data = {"value": AjaxFormViewTest.VALUE} request = self.factory.post('/', data) response = self.view_class.as_view()(request) self.assertEqual(302, response.status_code) self.assertEqual(self.view_class.success_url, response["location"]) def testNotValid(self): """ Posts not valid form in normal way """ data = {} request = self.factory.post('/', data) response = self.view_class.as_view()(request) self.assertEqual(200, response.status_code) self.assertIn("value", response.context_data["form"].errors) def testAjaxRequest(self): """ Posts valid form through AJAX request. Response with redirect must be in JSON. """ data = {"value": AjaxFormViewTest.VALUE} request = self.factory.post('/', data, HTTP_X_REQUESTED_WITH='XMLHttpRequest') response = self.view_class.as_view()(request) self.assertEqual(200, response.status_code) self.assertEqual('application/json', response['content-type'].split(';')[0]) resp = json.loads(response.content) self.assertEqual("redirect", resp["status"]) self.assertEqual(self.view_class.success_url, resp["location"]) return resp def testAjaxNotValid(self): """ Posts not valid form through AJAX request. Response with errors must be in JSON. """ data = {} request = self.factory.post('/', data, HTTP_X_REQUESTED_WITH='XMLHttpRequest') response = self.view_class.as_view()(request) self.assertEqual(400, response.status_code) self.assertEqual('application/json', response['
content-type'].split(';')[0]) resp = json.loads(response.content) self.assertEqual("error", resp["status"]) self.assertIn("value", resp["errors"]) return resp def testAjaxResultRequest(self): """ Posts valid form through AJAX request. Response with result must be in JSON. """ data = {"value": AjaxFor
mViewTest.VALUE, "_return": "result"} request = self.factory.post('/', data, HTTP_X_REQUESTED_WITH='XMLHttpRequest') response = self.view_class.as_view()(request) self.assertEqual(200, response.status_code) self.assertEqual('application/json', response['content-type'].split(';')[0]) resp = json.loads(response.content) self.assertEqual("success", resp["status"]) return resp class AjaxCreateViewTest(AjaxFormViewTest): view_class = SimpleCreateView def testRequest(self): self.assertEqual(SimpleModel.objects.filter(value=AjaxFormViewTest.VALUE).count(), 0) super(AjaxCreateViewTest, self).testRequest() self.assertEqual(SimpleModel.objects.filter(value=AjaxFormViewTest.VALUE).count(), 1) def testAjaxRequest(self): self.assertEqual(SimpleModel.objects.filter(value=AjaxFormViewTest.VALUE).count(), 0) super(AjaxCreateViewTest, self).testAjaxRequest() self.assertEqual(SimpleModel.objects.filter(value=AjaxFormViewTest.VALUE).count(), 1) def testAjaxResultRequest(self): self.assertEqual(SimpleModel.objects.filter(value=AjaxFormViewTest.VALUE).count(), 0) resp = super(AjaxCreateViewTest, self).testAjaxResultRequest() self.assertEqual(SimpleModel.objects.filter(value=AjaxFormViewTest.VALUE).count(), 1) self.assertIn("pk", resp["object"]) obj = SimpleModel.objects.get(pk=resp["object"]["pk"]) self.assertEqual(AjaxFormViewTest.VALUE, obj.value) class AjaxUpdateViewTest(AjaxCreateViewTest): view_class = SimpleUpdateView def setUp(self): super(AjaxUpdateViewTest, self).setUp() SimpleModel.objects.filter(value=SimpleUpdateView.VALUE).delete() SimpleModel(value=SimpleUpdateView.VALUE).save()
HyechurnJang/acidipy
tools/ansible/acibuilder.py
Python
apache-2.0
1,055
0.010427
#!/usr/bin/python # -*- coding: utf-8 -*- ''' Created on 2016. 10. 11. @author: "comfact" ''' import yaml from ansible.module_utils.basic import * from acidipy import deployACI DOCUMENTATION = ''' --- module: acibuilder version_added: historical short_description: acidipy ansible module. description:
- This is Acidipy Ansible Module named AciBuilder options: {} author: hyjang@cisco.com ''' EXAMPLES = ''' # Test 'webservers'
status ansible webservers -m ping ''' def main(): module = AnsibleModule( argument_spec = dict( Controller=dict(required=True), Option=dict(required=True), Tenant=dict(required=True) ), supports_check_mode = True ) ctrl = yaml.load(module.params['Controller']) opts = yaml.load(module.params['Option']) tnts = yaml.load(module.params['Tenant']) desc = {'Controller' : ctrl, 'Option' : opts, 'Tenant' : tnts} result = deployACI(desc) module.exit_json(**result) main()
CACTUS-Mission/TRAPSat
TRAPSat_cFS/cfs/cfe/tools/cFS-GroundSystem/Subsystems/cmdGui/GenericCommandDialog.py
Python
mit
49,087
0.004339
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'GenericCommandDialog.ui' # # Created: Wed Mar 25 17:15:12 2015 # by: PyQt4 UI code generator 4.11.3 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_GenericCommandDialog(object): def setupUi(self, GenericCommandDialog): GenericCommandDialog.setObjectName(_fromUtf8("GenericCommandDialog")) GenericCommandDialog.resize(549, 504) self.verticalLayout_6 = QtGui.QVBoxLayout(GenericCommandDialog) self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6")) self.verticalLayout_4 = QtGui.QVBoxLayout() self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4")) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.verticalLayout = QtGui.QVBoxLayout() self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.subSystemCommandPageLabel = QtGui.QLabel(GenericCommandDialog) self.subSystemCommandPageLabel.setAlignment(QtCore.Qt.AlignCenter) self.subSystemCommandPageLabel.setObjectName(_fromUtf8("subSystemCommandPageLabel")) self.verticalLayout.addWidget(self.subSystemCommandPageLabel) self.subSystemTextBrowser = QtGui.QTextBrowser(GenericCommandDialog) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.subSystemTextBrowser.sizePolicy().hasHeightForWidth()) self.subSystemTextBrowser.setSizePolicy(sizePolicy) self.subSystemTextBrowser.setMinimumSize(QtCore.QSize(159, 31)) self.subSystemTextBrowser.setMaximumSize(QtCore.QSize(300, 31)) self.subSystemTextBrowser.setObjectName(_fromUtf8("subSystemTextBrowser")) self.verticalLayout.addWidget(self.subSystemTextBrowser) self.horizontalLayout.addLayout(self.verticalLayout) self.verticalLayout_2 = QtGui.QVBoxLayout() self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2")) self.packetIdLabel = QtGui.QLabel(GenericCommandDialog) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.packetIdLabel.sizePolicy().hasHeightForWidth()) self.packetIdLabel.setSizePolicy(sizePolicy) self.packetIdLabel.setMinimumSize(QtCore.QSize(0, 13)) self.packetIdLabel.setMaximumSize(QtCore.QSize(16777193, 13)) self.packetIdLabel.setObjectName(_fromUtf8("packetIdLabel")) self.verticalLayout_2.addWidget(self.packetIdLabel) self.packetId = QtGui.QLCDNumber(GenericCommandDialog) self.packetId.setObjectName(_fromUtf8("packetId")) self.verticalLayout_2.addWidget(self.packetId) self.horizontalLayout.addLayout(self.verticalLayout_2) self.verticalLayout_3 = QtGui.QVBoxLayout() self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3")) self.label_5 = QtGui.QLabel(GenericCommandDialog) self.label_5.setAlignment(QtCore.Qt.AlignCenter) self.label_5.setObjectName(_fromUtf8("label_5")) self.verticalLayout_3.addWidget(self.label_5) self.commandAddressLineEdit = QtGui.QLineEdit(GenericCommandDialog) sizePolicy = 
QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.commandAddressLineEdit.sizePolicy().hasHeightForWidth()) self.commandAddressLineEdit.setSizePolicy(sizePolicy) self.commandAddressLineEdit.setMinimumSize(QtCore.QSize(135, 31)) self.commandAddressLineEdit.setMaximumSize(QtCore.QSize(135, 31)) self.commandAddressLineEdit.setObjectName(_fromUtf8("commandAddressLineEdit")) self.verticalLayout_3.addWidget(self.commandAddressLineEdit) self.horizontalLayout.addLayout(self.verticalLayout_3) spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem) self.buttonBox = QtGui.QDialogButtonBox(GenericCommandDialog) self.buttonBox.setOrientation(QtCore.Qt.Horizontal) self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close) self.buttonBox.setObjectName(_fromUtf8("buttonBox")) self.horizontalLayout.addWidget(self.buttonBox) self.verticalLayout_4.addLayout(self.horizontalLayout) self.label = QtGui.QLabel(GenericCommandDialog) self.label.setAlignment(QtCore.Qt.AlignCenter) self.label.setObjectName(_fromUtf8("label")) self.verticalLayout_4.addWidget(self.label) self.verticalLayout_6.addLayout(self.verticalLayout_4) self.scrollArea = QtGui.QScrollArea(GenericCommandDialog) self.scrollArea.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn) self.scrollArea.setWidgetResizable(True) self.scrollArea.setObjectName(_fromUtf8("scrollArea")) self.scrollAreaWidgetContents = QtGui.QWidget() self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 508, 1000)) self.scrollAreaWidgetContents.setMinimumSize(QtCore.QSize(0, 1000)) self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents")) self.verticalLayout_5 = QtGui.QVBoxLayout(self.scrollAreaWidgetContents) self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5")) self.horizontalLayout_2 = QtGui.QHBoxLayout() self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2")) self.itemLabelTextBrowser_1 = QtGui.QTextBrowser(self.scrollAreaWidgetContents) sizePolicy
= QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.itemLabelTextBrowser_1.sizePolicy().hasHeightForWidth()) self.itemLabelTextBrowser_1.setSizePolicy(sizePolicy) self.itemLabelTextBrowser_1.setMinimumSize(QtCore.QSize(391, 31)) self.itemLabelTextBrowser_1.setMaximumSize(QtCore.QSize(1234, 31)) self.itemLabelTextBrowser_1.setObjectN
ame(_fromUtf8("itemLabelTextBrowser_1")) self.horizontalLayout_2.addWidget(self.itemLabelTextBrowser_1) self.SendButton_1 = QtGui.QPushButton(self.scrollAreaWidgetContents) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.SendButton_1.sizePolicy().hasHeightForWidth()) self.SendButton_1.setSizePolicy(sizePolicy) self.SendButton_1.setMinimumSize(QtCore.QSize(73, 32)) self.SendButton_1.setMaximumSize(QtCore.QSize(73, 32)) self.SendButton_1.setObjectName(_fromUtf8("SendButton_1")) self.horizontalLayout_2.addWidget(self.SendButton_1) self.verticalLayout_5.addLayout(self.horizontalLayout_2) self.horizontalLayout_3 = QtGui.QHBoxLayout() self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3")) self.itemLabelTextBrowser_2 = QtGui.QTextBrowser(self.scrollAreaWidgetContents) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(
pombredanne/pants
tests/python/pants_test/build_graph/test_subproject_integration.py
Python
apache-2.0
3,193
0.006577
# coding=utf-8 # Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from contextlib import contextmanager from textwrap import dedent from pants.util.dirutil import safe_file_dump, safe_rmtree from pants_test.pants_run_integration_test import PantsRunIntegrationTest, ensure_engine SUBPROJ_SPEC = 'testprojects/src/python/subproject_test/' SUBPROJ_ROOT = 'testprojects/src/python/subproject_test/subproject' BUILD_FILES = { 'testprojects/src/python/subproject_test/BUILD': """ python_library( dependencies = ['//testprojects/src/python/subproject_test/subproject/src/python:hel
pers'], ) """, 'test
projects/src/python/subproject_test/subproject/BUILD': """ target( name = 'local', dependencies = [ ':relative', '//:absolute', ], ) target( name = 'relative', ) target( name = 'absolute', ) """, 'testprojects/src/python/subproject_test/subproject/src/python/BUILD': """ python_library( name = 'helpers', dependencies = ['//src/python/helpershelpers'], ) """, 'testprojects/src/python/subproject_test/subproject/src/python/helpershelpers/BUILD': """ python_library( name = 'helpershelpers', ) """ } """ Test layout ----------- testprojects/ src/ python/ subproject_test/ BUILD subproject/ src/ python/ BUILD/ helpershelpers/ BUILD/ """ @contextmanager def harness(): try: for name, content in BUILD_FILES.items(): safe_file_dump(name, dedent(content)) yield finally: safe_rmtree(SUBPROJ_SPEC) class SubprojectIntegrationTest(PantsRunIntegrationTest): @ensure_engine def test_subproject_without_flag(self): """ Assert that when getting the dependencies of a project which relies on a subproject which relies on its own internal library, a failure occurs without the --subproject-roots option """ with harness(): pants_args = ['dependencies', SUBPROJ_SPEC] self.assert_failure(self.run_pants(pants_args)) @ensure_engine def test_subproject_with_flag(self): """ Assert that when getting the dependencies of a project which relies on a subproject which relies on its own internal library, all things go well when that subproject is declared as a subproject """ with harness(): # Has dependencies below the subproject. pants_args = ['--subproject-roots={}'.format(SUBPROJ_ROOT), 'dependencies', SUBPROJ_SPEC] self.assert_success(self.run_pants(pants_args)) # A relative path at the root of the subproject. pants_args = ['--subproject-roots={}'.format(SUBPROJ_ROOT), 'dependencies', '{}:local'.format(SUBPROJ_ROOT)] self.assert_success(self.run_pants(pants_args))
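Editorial aside: the equivalent manual invocations behind the two tests above (paths taken verbatim from the test constants; the ./pants wrapper name is an assumption):

# fails: the subproject's own absolute references cannot resolve
#   ./pants dependencies testprojects/src/python/subproject_test/
# succeeds once the subproject root is declared:
#   ./pants --subproject-roots=testprojects/src/python/subproject_test/subproject dependencies testprojects/src/python/subproject_test/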
henrynj/PMLMC
v0.0/config.py
Python
gpl-3.0
2,156
0.008813
#!/usr/bin/env python ################################################## # Parallel MLMC: Config class # # # # Jun Nie # # Last modification: 19-09-2017 # ################################################## import sys, os import numpy as np class Config: """ config class which is used for fvm solver, mlmc & parallelization TODO: add reading of config parameters from file. """ def __init__(self, config_file): # === fvm solver parameters self.DIM = 2 self.ORDER = 1 self.case = 'vayu_burgers' # 'vayu_ls89', 'su2_ls89' self.mesh_ncoarsest = 8+1 self.mesh_nfinest = 128+1 self.mesh_filename = '/home/jun/vayu/TestMatrix/Burgers.Test/mesh/' + \ 'cartesian_tube_0009x0009x2.BlockMesh' # === mlmc parameters self.eps = 0. self.alpha = 0. self.beta = 0. self.gamma = 0. self.L = 2 # highest level self.ML = 8 # number of samples on finest level self.M = 2 # refinement factor self.SAMPLES_FACTOR = 1 self.mlmc_convergence_test = True self.READ_NUMBER_OF_SAMPLES_FROM_FILE = False self.USE_OPTIMAL_NUMBER_OF_SAMPLES = False self.USE_EQUIDISTRIBUTED_NUMBER_OF_SAMPLES = True self.COMPUTE_IN_DIFFERENCE = True # === qoi self.STATS = 'MEAN_VAR' # === parallelization parameters self.multi = 'mpi' # 'mpi' for parallel, 'si
ngle' for serial self.MULTIN = 1 # number of processes for fvm solver, 1 or multiples of 2 self.MULTIM = 4 # number of samplers (processor group) self.MULTI_CORES = 0 # === update self.update(config_file) def update(self, config_file): ''' read config file and update parameters''' pass
if __name__ == '__main__': pass
zepheira/amara
lib/xslt/expressions/avt.py
Python
apache-2.0
1,540
0.001299
######################################################################## # amara/xslt/expressions/avt.py """ Implementation of XSLT attribute value templates """ from amara.xpath import datatypes from amara.xpath.expressions import expression from amara.xslt import XsltError from amara.xslt.e
xpressions import _avt _parse_avt = _avt.parser().parse class avt_expression(expression): __slots__ = ('_format', '_args') def __init__(self, value):
try: # parts is a list of unicode and/or parsed XPath parts = _parse_avt(value) except _avt.error, error: raise XsltError(XsltError.AVT_SYNTAX) self._args = args = [] for pos, part in enumerate(parts): if isinstance(part, unicode): if '%' in part: parts[pos] = part.replace('%', '%%') else: parts[pos] = u'%s' args.append(part) self._format = u''.join(parts) if not self._args: # use empty format args to force '%%' replacement self._format = datatypes.string(self._format % ()) return def evaluate_as_string(self, context): if not self._args: return self._format result = self._format % tuple(arg.evaluate_as_string(context) for arg in self._args) return datatypes.string(result) evaluate = evaluate_as_string def __str__(self): return '{' + self._format % tuple(self._args) + '}'
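Editorial aside: the class above compiles an attribute value template into a single %-format string, escaping literal percent signs and standing in %s for each XPath part. A minimal illustration of that escaping trick with plain strings in place of parsed parts (values here are hypothetical):

literal = u"100% of "                      # literal AVT text containing '%'
fmt = literal.replace('%', '%%') + u'%s'   # -> u'100%% of %s'
assert fmt % (u'3',) == u'100% of 3'       # '%%' survives as a literal '%'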
earthchie/PokemonGo-Bot
setup.py
Python
mit
355
0
#!/usr/bin/env python from distutils.cor
e import setup from pip.req import parse_requirements install_reqs = parse_requirements("requirements.txt", session=False) reqs = [str(ir.req) for ir in install_reqs] setup(name='pgoapi', version='1.0', url='https://github.com/tejado/pgoapi', packages=['pgoapi'],
install_requires=reqs)
sawdog/OraclePyDoc
oraclepydoc/oracleobjects/oraclejavasource.py
Python
gpl-2.0
261
0.007663
from oracleplsqlsource i
mport OraclePLSQLSource class OracleJavaSource(OraclePLSQLSource): def __in
it__(self, name, source): self.name = name #debug_message("debug: generating java source ") OraclePLSQLSource.__init__(self,source)
tscohen/chainer
cupy/math/misc.py
Python
mit
4,772
0
from cupy import elementwise _id = 'out0 = in0' # TODO(okuta): Implement convolve _clip = elementwise.create_ufunc( 'cupy_clip', ('???->?', 'bbb->b', 'BBB->B', 'hhh->h', 'HHH->H', 'iii->i', 'III->I', 'lll->l', 'LLL->L', 'qqq->q', 'QQQ->Q', 'eee->e', 'fff->f', 'ddd->d'), 'out0 = min(in2, max(in1, in0))') def clip(a, a_min, a_max, out=None): '''Clips the values of an array to a given interval. This is equivalent to ``maximum(minimum(a, a_max), a_min)``, while this function is more efficient. Args: a (cupy.ndarray): The source array. a_min (scalar or cupy.ndarray): The left side of the interval. a_max (scalar or cupy.ndarray): The right side of the interval. out (cupy.ndarray): Output array. Returns: cupy.ndarray: Clipped array. .. seealso:: :func:`numpy.clip` ''' return _clip(a, a_min, a_max, out=out) sqrt = elementwise.create_ufunc( 'cupy_sqrt', # I think this order is a bug of NumPy, though we select this "buggy" # behavior for compatibility with NumPy. ('f->f', 'd->d', 'e->e'), 'out0 = sqrt(in0)', doc='''Elementwise positive square-root function. .. note:: This ufunc outputs float32 arrays for float16 arrays input by default as well as NumPy 1.9. If you want to override this behavior, specify the dtype argument explicitly, or use ``cupy.math.misc.sqrt_fixed`` instead. .. seealso:: :data:`numpy.sqrt` ''') # Fixed version of sqrt sqrt_fixed = elementwise.create_ufunc( 'cupy_sqrt', ('e->e', 'f->f', 'd->d'), 'out0 = sqrt(in0)') square = elementwise.create_ufunc( 'cupy_square', ('b->b', 'B->B', 'h->h', 'H->H', 'i->i', 'I->I', 'l->l', 'L->L', 'q->q', 'Q->Q', 'e->e', 'f->f', 'd->d'), 'out0 = in0 * in0', doc='''Elementwise square function. .. seealso:: :data:`numpy.square` ''') absolute = elementwise.create_ufunc( 'cupy_absolute', (('?->?', _id), 'b->b', ('B->B', _id), 'h->h', ('H->H', _id), 'i->i', ('I->I', _id), 'l->l', ('L->L', _id), 'q->q', ('Q->Q', _id), ('e->e', 'out0 = fabsf(in0)'), ('f->f', 'out0 = fabsf(in0)'), ('d->d', 'out0 = fabs(in0)')), 'out0 = in0 > 0 ? in0 : -in0', doc='''Elementwise absolute value function.
.. seealso:: :data:`numpy.absolute` ''') # TODO(beam2d): Implement it # fabs _unsigned_sign = 'out0 = in0 > 0' sign = elementwise.create_ufunc( 'cupy_sign', ('b->b', ('B->B', _unsigned_sign), 'h->h', ('H->H', _unsigned_sign), 'i->i', ('I->I', _unsigned_sign), 'l->l', ('L->L', _unsigned_sign), 'q->q', ('Q->Q', _unsigned_sign), 'e->e', 'f->f', 'd->d'), 'out0 = (in0 > 0) - (in0 < 0)'
, doc='''Elementwise sign function. It returns -1, 0, or 1 depending on the sign of the input. .. seealso:: :data:`numpy.sign` ''') _float_maximum = \ 'out0 = isnan(in0) ? in0 : isnan(in1) ? in1 : max(in0, in1)' maximum = elementwise.create_ufunc( 'cupy_maximum', ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l', 'LL->L', 'qq->q', 'QQ->Q', ('ee->e', _float_maximum), ('ff->f', _float_maximum), ('dd->d', _float_maximum)), 'out0 = max(in0, in1)', doc='''Takes the maximum of two arrays elementwise. If NaN appears, it returns the NaN. .. seealso:: :data:`numpy.maximum` ''') _float_minimum = \ 'out0 = isnan(in0) ? in0 : isnan(in1) ? in1 : min(in0, in1)' minimum = elementwise.create_ufunc( 'cupy_minimum', ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l', 'LL->L', 'qq->q', 'QQ->Q', ('ee->e', _float_minimum), ('ff->f', _float_minimum), ('dd->d', _float_minimum)), 'out0 = min(in0, in1)', doc='''Takes the minimum of two arrays elementwise. If NaN appears, it returns the NaN. .. seealso:: :data:`numpy.minimum` ''') fmax = elementwise.create_ufunc( 'cupy_fmax', ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l', 'LL->L', 'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'), 'out0 = max(in0, in1)', doc='''Takes the maximum of two arrays elementwise. If NaN appears, it returns the other operand. .. seealso:: :data:`numpy.fmax` ''') fmin = elementwise.create_ufunc( 'cupy_fmin', ('??->?', 'bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l', 'LL->L', 'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'), 'out0 = min(in0, in1)', doc='''Takes the minimum of two arrays elementwise. If NaN appears, it returns the other operand. .. seealso:: :data:`numpy.fmin` ''') # TODO(okuta): Implement nan_to_num # TODO(okuta): Implement real_if_close # TODO(okuta): Implement interp
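The NaN handling that separates maximum/minimum from fmax/fmin mirrors NumPy, so the contract is easy to check on the host side (a quick illustration, not part of the cupy source):

import numpy as np

a = np.array([1.0, np.nan])
b = np.array([2.0, 3.0])
print(np.maximum(a, b))  # [  2.  nan] -- NaN propagates
print(np.fmax(a, b))     # [ 2.  3.]   -- NaN ignored, other operand returned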
pyroscope/auvyon
src/auvyon/config.py
Python
gpl-2.0
230
0
""" Configuratio
n values. """ # Command paths (you can change these to e.g. absolute paths in calling code) CMD_FLAC = "flac" CMD_FFM
PEG = "ffmpeg" CMD_IM_MONTAGE = "montage" CMD_IM_MOGRIFY = "mogrify" CMD_IM_CONVERT = "convert"
xunilrj/sandbox
courses/coursera-sandiego-algorithms/data-structures/assignment002/make_heap/build_heap.py
Python
apache-2.0
1,100
0.010909
# python3 class HeapBuilder: def __init__(self): self._swaps = [] self._data = [] def ReadData(self): n = int(input()) self._data = [int(s) for s in input().split()] assert n == len(self._data) def Wri
teResponse(self): print(len(self._swaps)) for swap in self._swaps: print(swap[0], swap[1]) def GenerateSwaps(self): # The following naive implementation just sorts # the given sequence using selection sort algorithm # and saves the resulting sequence of swaps. # This turns the given array into a heap, # but in the worst case gives a quadratic number of swaps. # # TODO: replace by a more efficient implementation for i in range(len(self._data)):
for j in range(i + 1, len(self._data)): if self._data[i] > self._data[j]: self._swaps.append((i, j)) self._data[i], self._data[j] = self._data[j], self._data[i] def Solve(self): self.ReadData() self.GenerateSwaps() self.WriteResponse() if __name__ == '__main__': heap_builder = HeapBuilder() heap_builder.Solve()
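The TODO in GenerateSwaps can be closed by sifting every internal node down, which builds a min-heap with a linear number of swaps instead of the selection sort's quadratic count. A sketch of a drop-in replacement for the method above (_sift_down is a new helper, not a name from the original file):

# Sketch: O(n) heap construction that records each swap it performs.
def GenerateSwaps(self):
    n = len(self._data)
    # sift down every internal node, starting from the last parent
    for i in range(n // 2 - 1, -1, -1):
        self._sift_down(i)

def _sift_down(self, i):
    n = len(self._data)
    while True:
        smallest = i
        left, right = 2 * i + 1, 2 * i + 2
        if left < n and self._data[left] < self._data[smallest]:
            smallest = left
        if right < n and self._data[right] < self._data[smallest]:
            smallest = right
        if smallest == i:
            break
        self._swaps.append((i, smallest))
        self._data[i], self._data[smallest] = \
            self._data[smallest], self._data[i]
        i = smallest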
Som-Energia/invoice-janitor
Taxes/municipaltax.py
Python
agpl-3.0
9,574
0.004493
# -*- coding: utf-8 -*- import StringIO import csv from xml.etree.ElementTree import Element, SubElement, Comment, tostring from xml.dom import minidom import configdb def prettify(elem): """Return a pretty-printed XML string for the Element. """ rough_string = tostring(elem, 'utf-8') reparsed = minidom.parseString(rough_string) return reparsed.toprettyxml(indent=" ") ## SYNTAX # script.py cities.csv 2015-01-01 2015-04-01 csv|xml # cities.csv obtained from "Gestió agrupada impost 1.5%" class MunicipalTaxesInvoicingReport: def __init__(self, cursor, start_date, end_date, tax, aggregated): self.cursor = cursor self.start_date = start_date self.end_date = end_date self.tax = tax self.aggregated = aggregated pass def by_city(self, ids, file_type): sql = ''' SELECT municipi.name AS name, municipi.ine AS ine, EXTRACT(YEAR FROM invoice.date_invoice) AS invoice_year, EXTRACT(QUARTER FROM invoice.date_invoice) AS invoice_quarter, COALESCE(SUM(invoice_line.price_subtotal::float*( CASE WHEN factura_line.tipus IN ('subtotal_xml') AND invoice.type='in_invoice' THEN 1 WHEN factura_line.tipus IN ('subtotal_xml') AND invoice.type='in_refund' THEN -1 ELSE 0 END )),0.0) AS provider_amount, COALESCE(SUM(invoice_line.price_subtotal::float*( CASE WHEN factura_line.tipus IN ('energia','reactiva','potencia') AND invoice.type='out_invoice' THEN 1 WHEN factura_line.tipus IN ('energia','reactiva','potencia') AND invoice.type='out_refund' THEN -1 ELSE 0 END )),0.0) AS client_amount FROM giscedata_facturacio_factura_linia AS factura_line LEFT JOIN account_invoice_line AS invoice_line ON invoice_line.id = factura_line.invoice_line_id LEFT JOIN giscedata_facturacio_factura AS factura ON factura.id = factura_line.factura_id LEFT JOIN account_invoice AS invoice ON invoice.id = factura.invoice_id LEFT JOIN giscedata_polissa AS polissa ON polissa.id = factura.polissa_id LEFT JOIN giscedata_cups_ps AS cups ON cups.id = polissa.cups LEFT JOIN res_municipi as municipi on municipi.id = cups.id_municipi WHERE municipi.ID IN ({0}) AND ((invoice.date_invoice >= '{1}') AND (invoice.date_invoice < '{2}')) AND (((invoice.type LIKE 'out_%%') AND ((invoice.state = 'open') OR (invoice.state = 'paid'))) OR (invoice.type LIKE 'in_%%')) GROUP BY 1,2,3,4 ORDER BY 1,2,3,4 '''.format(','.join(map(str, ids)), self.start_date, self.end_date) self.cursor.execute(sql, {'start_date': self.start_date, 'end_date': self.end_date, 'ids': ids}) return self.build_report(self.cursor.fetchall(), file_type) def build_report(self, records, file_type): invoicing_by_name = {} invoicing_by_date = {} ines = {} for record in records: name = record[0] ine = record[1] year = record[2] quarter = record[3] invoicing_by_name.setdefault(name, {'total_provider_amount': 0, 'total_client_amount': 0, 'quarters': []}) invoicing_by_name[name]['total_provider_amount'] += record[4] invoicing_by_name[name]['total_client_amount'] += record[5] invoicing_by_name[name]['quarters'].append({ 'year': record[2], 'quarter': record[3], 'provider_amount': record[4], 'client_amount': record[5] }) invoicing_by_date.setdefault(year, {}) invoicing_by_date[year].setdefault(quarter, {'total_provider_amount': 0, 'total_client_amount': 0}) invoicing_by_date[year][quarter]['total_provider_amount'] += record[4] invoicing_by_date[year][quarter]['total_client_amount'] += record[5] ines.setdefault(name, ine) if file_type=='csv': ## CSV csv_doc=StringIO.StringIO() writer_report = csv.writer(csv_doc) for name,v in sorted(invoicing_by_name.items()): writer_report.writerow([name]) 
writer_report.writerow(['Año', 'Trimestre', 'Pagos a distribuidora', 'Facturas a clientes']) for quarter in v['quarters']: writer_report.writerow([ quarter['year'], quarter['quarter'], round(quarter['provider_amount'], 2), round(quarter['client_amount'], 2) ]) writer_report.writerow([]) writer_report.writerow(['', '', '', '', 'Ingresos brutos', 'Tasa', 'Total']) diff = v['total_client_amount'] - v['total_provider_amount'] writer_report.writerow(['Total', '', round(v['total_provider_amount'], 2), round(v['total_client_amount'], 2), round(diff, 2), self.tax, round(diff*(self.tax/100.0), 2) ]) writer_report.writerow([]) writer_report.writerow([]) writer_report.writerow(['Año', 'Trimestre', 'Pagos a distribuidora', 'Facturas a clientes', 'Ingresos', 'Tasa', 'Total'])
for year, v in sorted(invoicing_by_date.items()):
for quarter, v in sorted(invoicing_by_date[year].items()): diff = v['total_client_amount'] - v['total_provider_amount'] writer_report.writerow([ year, quarter, round(v['total_provider_amount'], 2), round(v['total_client_amount'], 2), round(diff, 2), self.tax, round(diff*(self.tax/100.0), 2) ]) doc = csv_doc.getvalue() if file_type == 'xml': ## XML _empresa = Element("EMPRESA") _datos = SubElement(_empresa, 'DATOS') _nombre = SubElement(_datos, 'NOMBRE') _nombre.text = "Som Energia SCCL" _nif = SubElement(_datos, 'NIF') _nif.text = "F55091367" _municipios = SubElement(_empresa, 'MUNICIPIOS') for name,v in sorted(invoicing_by_name.items()): for quarter in v['quarters']: _municipio = SubElement(_municipios, 'MUNICIPIO') _ine = SubElement(_municipio, 'INEMUNICIPIO') _ine.text = ines[name] _ejercicio = SubElement(_municipio, 'EJERCICIO') _ejercicio.text = str(int(quarter['year'])) _periodo = SubElement(_municipio, 'PERIODO') _periodo.text = str(int(quarter['quarter'])) _fechaalta = SubElement(_municipio, 'FECHAALTA') _fechabaja = SubElement(_municipio, 'FECHABAJA') _tiposumin = SubElement(_municipio, 'TIPOSUMIN') _tiposumin.text = '2' _descsum = SubElement(_municipio, 'DESCSUM') _descsum.text = 'Electricidad' _descperi = SubElement(_municipio, 'DESCPERI') _facturacion = SubElement(_municipio, 'FACTURACION') _facturacion.text = '%0.2f' % quarter['client_amount'] _derechosacceso = SubElement(_municipio, 'DERECHOSACCESO'
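The prettify helper at the top of this file is the usual ElementTree-to-minidom round trip; a minimal usage sketch (output indentation follows the indent argument passed to toprettyxml):

from xml.etree.ElementTree import Element, SubElement

root = Element('EMPRESA')
SubElement(root, 'NIF').text = 'F55091367'
print(prettify(root))
# <?xml version="1.0" ?>
# <EMPRESA>
#   <NIF>F55091367</NIF>
# </EMPRESA>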
locksmith47/turing-sim-kivy
src/plyer_lach/platforms/android/camera.py
Python
mit
2,169
0.000461
import android import android.activity from os import unlink from jnius import autoclass, cast from plyer_lach.facades import Camera from plyer_lach.platforms.android import activity Intent = autoclass('android.content.Intent') PythonActivity = autoclass('org.renpy.android.PythonActivity') MediaStore = autoclass('android.provider.MediaStore') Uri = autoclass('android.net.Uri') class AndroidCamera(Camera): def _take_picture(self, on_complete, filename=None): assert(on_complete is not None) self.on_complete = on_complete self.filename = filename android.activity.unbind(on_activity_result=self._on_activity_result) android.activity.bind(on_activity_result=self._on_activity_result) intent = Intent(MediaStore.ACTION_IMAGE_CAPTURE) uri = Uri.parse('file://' + filename) parcelable = cast('android.os.Parcelable', uri) intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable) activity.startActivityForResult(intent, 0x123) def _take_video(self, on_complete, filename=None): assert(on_complete is not None) self.on_complete = on_complete self.filename = filename android.activity.unbind(on_activity_result=self._on_activity_result) android.activity.bind(on_activity_result=self._on_activity_result) intent = Intent(MediaStore.ACTION_VIDEO_CAPTURE) uri = Uri
.parse('file://' + filename) parcelable = cast('android.os.Parcelable', uri) intent.putExtra(MediaStore.EXTRA_OUTPUT, parcelable) # 0 = low quality, suitable for MMS messages, # 1 = high quality intent.putExtra(MediaStore.EXTRA_VIDEO_QUALITY, 1) activity.startActivityForResult(intent, 0x123) def _on_activity_result(self, requestCode, resultCode, i
ntent): if requestCode != 0x123: return android.activity.unbind(on_activity_result=self._on_activity_result) if self.on_complete(self.filename): self._unlink(self.filename) def _unlink(self, fn): try: unlink(fn) except: pass def instance(): return AndroidCamera()
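Driving this backend follows the usual plyer facade pattern: a public take_picture wrapper on the Camera facade forwards to _take_picture above. A hedged usage sketch; the wrapper name is assumed from mainline plyer (this plyer_lach fork may differ) and the output path is purely illustrative:

# Sketch: typical call into the facade above (names assumed from the
# standard plyer Camera facade; the path is illustrative).
def picture_taken(filename):
    # a truthy return value asks the backend to unlink the file
    return False

camera = instance()
camera.take_picture(filename='/sdcard/DCIM/test.jpg',
                    on_complete=picture_taken)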
getyourguide/fbthrift
build/fbcode_builder_config.py
Python
apache-2.0
449
0
#!/usr/bin/env python from __future__
import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals 'fbcode_builder steps to build Facebook Thrift' import sp
ecs.fbthrift as fbthrift def fbcode_builder_spec(builder): return { 'depends_on': [fbthrift], } config = { 'github_project': 'facebook/fbthrift', 'fbcode_builder_spec': fbcode_builder_spec, }
tnadeau/pybvc
setup.py
Python
bsd-3-clause
1,243
0.000805
from setuptools import setup import pybvc setup( name='pybvc', version=pybvc.__version__, description='A python library for programming your network via the Brocade Vyatta Controller (BVC)', long_description=open('README.rst').read(), author='Elbrys Networks', author_email='jeb@elbrys.com', url='https://github.com/brcdcomm/pybvc', packages=['pybvc', 'pybvc.common', 'pybvc.controller', 'pybvc.netconfdev', 'pybvc.netconfdev.vrouter', 'pybvc.netconfdev.vdx',
'pybvc.openflowdev' ], install_requires=['requests>=1.0.0', 'PyYAML', 'xmltodict'], zip_safe=False, include_package_data=True, platforms='any', license='BSD', keywords='sdn nfv bvc brocade vyatta controller network vrouter', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: System Administrators', 'Topic :: System ::
Networking', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ] )