Dataset schema (column: type, value range):

repo_name        - stringlengths, 5 to 92
path             - stringlengths, 4 to 221
copies           - stringclasses, 19 values
size             - stringlengths, 4 to 6
content          - stringlengths, 766 to 896k
license          - stringclasses, 15 values
hash             - int64, -9,223,277,421,539,062,000 to 9,223,102,107B
line_mean        - float64, 6.51 to 99.9
line_max         - int64, 32 to 997
alpha_frac       - float64, 0.25 to 0.96
autogenerated    - bool, 1 class
ratio            - float64, 1.5 to 13.6
config_test      - bool, 2 classes
has_no_keywords  - bool, 2 classes
few_assignments  - bool, 1 class
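The rows that follow pair this per-file metadata with the raw file contents. As a minimal, hedged sketch of how rows with this schema could be consumed (the Hub identifier below is a placeholder, not the real name of this corpus), the `datasets` library can stream the split and filter on the metadata columns:

from datasets import load_dataset

# Placeholder dataset identifier; substitute the actual Hub repo for this corpus.
ds = load_dataset("example-org/python-source-files", split="train", streaming=True)

# Keep hand-written files under permissive licenses that appear in the rows below.
wanted_licenses = {"mit", "apache-2.0", "bsd-2-clause"}
for row in ds:
    if row["autogenerated"] or row["license"] not in wanted_licenses:
        continue
    if not (0.25 <= row["alpha_frac"] <= 0.96):
        continue
    print(row["repo_name"], row["path"], row["size"])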
calinerd/AWS
LAMBDA/Lambda_AutoUpdate_SecurityGroup_to_Allow_inbound_All_CloudFront_IPs_443.py
1
6268
''' Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/ or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import boto3 import hashlib import json import urllib2 # Name of the service, as seen in the ip-groups.json file, to extract information for SERVICE = "CLOUDFRONT" # Ports your application uses that need inbound permissions from the service for INGRESS_PORTS = [ 443 ] # Tags which identify the security groups you want to update SECURITY_GROUP_TAGS = { 'Name': 'SG_Allow_CF_IPs_443', 'AutoUpdate': 'true' } def lambda_handler(event, context): print("Received event: " + json.dumps(event, indent=2)) message = json.loads(event['Records'][0]['Sns']['Message']) # Load the ip ranges from the url ip_ranges = json.loads(get_ip_groups_json(message['url'], message['md5'])) # extract the service ranges cf_ranges = get_ranges_for_service(ip_ranges, SERVICE) # update the security groups result = update_security_groups(cf_ranges) return result def get_ip_groups_json(url, expected_hash): print("Updating from " + url) response = urllib2.urlopen(url) ip_json = response.read() m = hashlib.md5() m.update(ip_json) hash = m.hexdigest() if hash != expected_hash: raise Exception('MD5 Mismatch: got ' + hash + ' expected ' + expected_hash) return ip_json def get_ranges_for_service(ranges, service): service_ranges = list() for prefix in ranges['prefixes']: if prefix['service'] == service: print('Found ' + service + ' range: ' + prefix['ip_prefix']) service_ranges.append(prefix['ip_prefix']) return service_ranges def update_security_groups(new_ranges): client = boto3.client('ec2') groups = get_security_groups_for_update(client) print ('Found ' + str(len(groups)) + ' SecurityGroups to update') result = list() updated = 0 for group in groups: if update_security_group(client, group, new_ranges): updated += 1 result.append('Updated ' + group['GroupId']) result.append('Updated ' + str(updated) + ' of ' + str(len(groups)) + ' SecurityGroups') return result def update_security_group(client, group, new_ranges): added = 0 removed = 0 if len(group['IpPermissions']) > 0: for permission in group['IpPermissions']: if INGRESS_PORTS.count(permission['ToPort']) > 0: old_prefixes = list() to_revoke = list() to_add = list() for range in permission['IpRanges']: cidr = range['CidrIp'] old_prefixes.append(cidr) if new_ranges.count(cidr) == 0: to_revoke.append(range) print(group['GroupId'] + ": Revoking " + cidr + ":" + str(permission['ToPort'])) for range in new_ranges: if old_prefixes.count(range) == 0: to_add.append({ 'CidrIp': range }) print(group['GroupId'] + ": Adding " + range + ":" + str(permission['ToPort'])) removed += revoke_permissions(client, group, permission, to_revoke) added += add_permissions(client, group, permission, to_add) else: for port in INGRESS_PORTS: to_add = list() for range in new_ranges: to_add.append({ 'CidrIp': range }) print(group['GroupId'] + ": Adding " + range + ":" + str(port)) permission = { 'ToPort': port, 'FromPort': port, 'IpProtocol': 'tcp'} added += add_permissions(client, group, permission, to_add) print (group['GroupId'] + ": Added " + str(added) + ", Revoked " + 
str(removed)) return (added > 0 or removed > 0) def revoke_permissions(client, group, permission, to_revoke): if len(to_revoke) > 0: revoke_params = { 'ToPort': permission['ToPort'], 'FromPort': permission['FromPort'], 'IpRanges': to_revoke, 'IpProtocol': permission['IpProtocol'] } client.revoke_security_group_ingress(GroupId=group['GroupId'], IpPermissions=[revoke_params]) return len(to_revoke) def add_permissions(client, group, permission, to_add): if len(to_add) > 0: add_params = { 'ToPort': permission['ToPort'], 'FromPort': permission['FromPort'], 'IpRanges': to_add, 'IpProtocol': permission['IpProtocol'] } client.authorize_security_group_ingress(GroupId=group['GroupId'], IpPermissions=[add_params]) return len(to_add) def get_security_groups_for_update(client): filters = list(); for key, value in SECURITY_GROUP_TAGS.iteritems(): filters.extend( [ { 'Name': "tag-key", 'Values': [ key ] }, { 'Name': "tag-value", 'Values': [ value ] } ] ) response = client.describe_security_groups(Filters=filters) return response['SecurityGroups'] ''' Sample Event From SNS: { "Records": [ { "EventVersion": "1.0", "EventSubscriptionArn": "arn:aws:sns:EXAMPLE", "EventSource": "aws:sns", "Sns": { "SignatureVersion": "1", "Timestamp": "1970-01-01T00:00:00.000Z", "Signature": "EXAMPLE", "SigningCertUrl": "EXAMPLE", "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", "Message": "{\"create-time\": \"yyyy-mm-ddThh:mm:ss+00:00\", \"synctoken\": \"0123456789\", \"md5\": \"03a8199d0c03ddfec0e542f8bf650ee7\", \"url\": \"https://ip-ranges.amazonaws.com/ip-ranges.json\"}", "Type": "Notification", "UnsubscribeUrl": "EXAMPLE", "TopicArn": "arn:aws:sns:EXAMPLE", "Subject": "TestInvoke" } } ] } '''
unlicense
4,925,142,042,631,168,000
34.619318
266
0.596522
false
3.755542
false
false
false
Xdynix/PixivPixie
bundle_cli.py
1
2691
import os
import subprocess
import sys

from pixiv_pixie.cli import main as cli_main, NAME

BINARY_PATH = 'lib'
DATA_PATH = 'data'


def is_packaged():
    # Return true if executing from packaged file
    return hasattr(sys, 'frozen')


def get_path(path, package_prefix=DATA_PATH):
    if os.path.isabs(path) or not is_packaged():
        return path
    else:
        return os.path.join(
            sys.prefix,
            os.path.join(package_prefix, path)
        )


def build(
        script,
        name=None,
        one_file=False,
        no_console=False,
        icon=None,
        binary_path=BINARY_PATH,
        addition_binary=None,
        data_path=DATA_PATH,
        addition_data=None,
        hidden_import=None,
        distpath=None,
        workpath=None,
        specpath=None,
        addition_args=None,
):
    args = []

    if name is not None:
        args.extend(('-n', name))
    if one_file:
        args.append('-F')
    if no_console:
        args.append('-w')
    if icon is not None:
        args.extend(('-i', icon))
    if addition_args is None:
        addition_args = []

    def add_resource(add_type, path, resources):
        for resource in resources:
            args.append('--add-{}'.format(add_type))
            if isinstance(resource, tuple) or isinstance(resource, list):
                src = resource[0]
                dest = resource[1]
                args.append(src + os.path.pathsep + os.path.join(path, dest))
            else:
                args.append(
                    resource + os.path.pathsep + os.path.join(path, resource),
                )

    if addition_binary is not None:
        add_resource(
            add_type='binary',
            path=binary_path,
            resources=addition_binary,
        )
    if addition_data is not None:
        add_resource(
            add_type='data',
            path=data_path,
            resources=addition_data,
        )
    if hidden_import is not None:
        for m in hidden_import:
            args.extend(('--hidden-import', m))
    if distpath is not None:
        args.extend(('--distpath', distpath))
    if workpath is not None:
        args.extend(('--workpath', workpath))
    if specpath is not None:
        args.extend(('--specpath', specpath))

    subprocess.call(['pyinstaller'] + args + addition_args + [script])


def main():
    if not is_packaged():
        build(
            __file__,
            name=NAME,
            one_file=True,
            addition_binary=[
                ('freeimage-3.15.1-win64.dll', '')
            ],
            addition_args=[
                '-y',
                '--clean',
            ],
        )
    else:
        cli_main()


if __name__ == '__main__':
    main()
apache-2.0
-7,565,490,886,109,477,000
24.628571
78
0.531401
false
3.860832
false
false
false
tortugueta/multilayers
examples/radcenter_distribution.py
1
8087
# -*- coding: utf-8 -*- """ Name : radcenter_distribution Author : Joan Juvert <trust.no.one.51@gmail.com> Version : 1.0 Description : This script calculates the influence of the distribution of : radiative centers in the active layer on the observed : spectrum. Copyright 2012 Joan Juvert This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import multilayers as ml import numpy as np import bphysics as bp import scipy.integrate as integ import argparse as ap import sys import pdb # Argument parsing parser = ap.ArgumentParser( description = "This script calculates the effect of the " + \ "distribution of radiative centers in the active layer on " + \ "the modificator to the spectrum. The observation angle is " + \ "a fixed parameter. Optionally, the output can be plotted " + \ "and output to the standard output or to a file. The matrix " + \ "containing the values of F(z, lambda) can be saved to a file " + \ "and recovered in a following run of the program to avoid " + \ "recalculating it in case we want to calculate the effect of " + \ "different distributions on the same system.") parser.add_argument( "--graph", help = "Plot the results", action = "store_true") parser.add_argument( "-o", "--output", help = "Dump the results to a file") parser.add_argument( "-s", "--savematrix", help = "Save the matrix with the F(z, lambda) values to a file") parser.add_argument( "-l", "--loadmatrix", help = "Load the matrix with the F(z, lambda) values from a file") args = parser.parse_args() # Load the depth distribution of radiative centers. Note that the origin # and units of z must be the same as in the multilayer.The distribution # should be normalized to 1. print("Loading the distribution...") path = "/home/joan/Dropbox/CNM/projectes/simulations_report/figures/" + \ "rcdistributions/" distribution = bp.rdfile(path + "gaussian_m25_s07.dat", usecols = [0, 1])[1] print("Done") print("Checking the distribution...") integral = integ.simps(distribution[:, 1], distribution[:, 0], 0) np.testing.assert_almost_equal(integral, 1, 2) print("Done") # If we load the values of F(z, lambda) calculated in a previous # execution we do not need to build the multilayer and repeat the # calculation of the F function. Notice that the values of z at which # the new distribution is sampled should be the same as the previous # one. if args.loadmatrix: print("Loading matrix...") fmatrix = np.load(args.loadmatrix) zlist = fmatrix['zlist'] np.testing.assert_array_equal(zlist, distribution[:, 0]) wlist = fmatrix['wlist'] angle = fmatrix['angle'] fte = fmatrix['fte'] ftm = fmatrix['ftm'] print("Done") else: # Create the materials print("Loading materials... ") silicon = ml.Medium("silicon.dat") air = ml.Medium("air.dat") sio2 = ml.Medium("sio2.dat") poly = ml.Medium("polysilicon.dat") print("Done") # Set the fixed parameters. angle = np.deg2rad(0) # Create the multilayer print("Building multilayer and allocating memory... 
") thicknesses = [300, 50] multilayer = ml.Multilayer([ air, [poly, thicknesses[0]], [sio2, thicknesses[1]], silicon]) # Define the wavelengths and z coordinates at which F will be calculated # and allocate memory for the results. We will use a structured array to # store the values of F(z, lambda). wstep = 1 wmin = multilayer.getMinMaxWlength()[0] wmax = multilayer.getMinMaxWlength()[1] wlist = np.arange(wmin, wmax, wstep) zlist = distribution[:, 0] ftype = np.dtype([ ('fx', np.complex128), ('fy', np.complex128), ('fz', np.complex128)]) resmatrix = np.empty((zlist.size, wlist.size), dtype = ftype) print("Done") # I(wavelength, theta) = s(wavelength) * F'(wavelength, theta), where # F'(wav, theta) = integral[z](|F|^2 * rcdist(z). Therefore, we # calculate the new spectrum as a modification to the original spectrum. # The modification factor F'(wav, theta) is an integral over z. # First calculate |Fy|^2 for te and |Fx*cos^2 + Fz*sin^2|^2 for tm. We # do fx and fz in one loop and fy in another independent loop to avoid # recalculating the characteristic matrix at every iteration due to the # change of polarization. print("Calculating F...") for (widx, wlength) in enumerate(wlist): percent = (float(widx) / wlist.size) * 100 print("%.2f%%" % percent) for (zidx, z) in enumerate(zlist): resmatrix[zidx][widx]['fx'] = multilayer.calculateFx(z, wlength, angle) resmatrix[zidx][widx]['fz'] = multilayer.calculateFz(z, wlength, angle) for (zidx, z) in enumerate(zlist): resmatrix[zidx][widx]['fy'] = multilayer.calculateFy(z, wlength, angle) # We are probably more interesed on the effect of the multilayer on the # energy rather than the electric field. What we want is |Fy(z)|^2 for # TE waves and |Fx(z) cosA^2 + Fz(z) sinA^2|^2 for TM waves. ftm = np.absolute( resmatrix['fx'] * np.cos(angle) ** 2 + \ resmatrix['fz'] * np.sin(angle) ** 2) ** 2 fte = np.absolute(resmatrix['fy']) ** 2 print("Done") # Notice that until now we have not used the distribution of the # radiative ceneters, but the calculation of ftm and fte is costly. # If requested, we can save fte and ftm to a file. In a following # execution of the script, the matrix can be loaded from the file # instead of recalculated. if args.savematrix: print("Saving matrix...") np.savez(args.savematrix, fte = fte, ftm = ftm, zlist = zlist, wlist = wlist, angle = angle) print("Done") # Build or load the original spectrum. It should be sampled at the same # wavelengths defined in wlist. If we are interested only in the # modificator to the spectrum, not in the modified spectrum, we can # leave it at 1. original_spec = 1 # Multiply each F(z, lambda) by the distribution. 
print("Integrating...") distval = distribution[:, 1].reshape(distribution[:, 1].size, 1) fte_mplied = fte * distval ftm_mplied = ftm * distval fte_int = integ.simps(fte_mplied, zlist, axis = 0) ftm_int = integ.simps(ftm_mplied, zlist, axis = 0) spectrum_modte = original_spec * fte_int spectrum_modtm = original_spec * ftm_int print("Done") # Dump data to file or stdout comments = "# F_TE = |Fy^2|^2\n" + \ "# F_TM = |Fx * cosA^2 + Fz * sinA^2|^2\n" + \ "# Modified spectrum for TE and TM waves for a\n" + \ "# distributions of the radiative centers.\n" + \ "# wlength\tF_TE\tF_TM" if args.output: bp.wdfile(args.output, comments, np.array([wlist, spectrum_modte, spectrum_modtm]).T, '%.6e') else: print(comments) for i in xrange(wlist.size): print("%.6e\t%.6e\t%.6e" % (wlist[i], spectrum_modte[i], spectrum_modtm[i])) # Plot data if requested if args.graph: import matplotlib.pyplot as plt plt.plot(wlist, spectrum_modte, label='TE', color = 'r') plt.plot(wlist, spectrum_modtm, label='TM', color = 'b') plt.xlabel('Wavelength (nm)') plt.ylabel('Energy ratio') plt.grid() plt.legend(loc=2) plt.title('%.1f rad' % angle) plt.show() plt.close()
gpl-3.0
-2,529,649,230,264,011,300
36.967136
83
0.649808
false
3.451558
false
false
false
linaro-technologies/jobserv
jobserv/storage/local_storage.py
1
3989
# Copyright (C) 2017 Linaro Limited
# Author: Andy Doan <andy.doan@linaro.org>

import hmac
import os
import mimetypes
import shutil

from flask import Blueprint, request, send_file, url_for

from jobserv.jsend import get_or_404
from jobserv.models import Build, Project, Run
from jobserv.settings import INTERNAL_API_KEY, LOCAL_ARTIFACTS_DIR
from jobserv.storage.base import BaseStorage

blueprint = Blueprint('local_storage', __name__, url_prefix='/local-storage')


class Storage(BaseStorage):
    blueprint = blueprint

    def __init__(self):
        super().__init__()
        self.artifacts = LOCAL_ARTIFACTS_DIR

    def _get_local(self, storage_path):
        assert storage_path[0] != '/'
        path = os.path.join(self.artifacts, storage_path)
        dirname = os.path.dirname(path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        return path

    def _create_from_string(self, storage_path, contents):
        path = self._get_local(storage_path)
        with open(path, 'w') as f:
            f.write(contents)

    def _create_from_file(self, storage_path, filename, content_type):
        path = self._get_local(storage_path)
        with open(filename, 'rb') as fin, open(path, 'wb') as fout:
            shutil.copyfileobj(fin, fout)

    def _get_as_string(self, storage_path):
        assert storage_path[0] != '/'
        path = os.path.join(self.artifacts, storage_path)
        with open(path, 'r') as f:
            return f.read()

    def list_artifacts(self, run):
        path = '%s/%s/%s/' % (
            run.build.project.name, run.build.build_id, run.name)
        path = os.path.join(self.artifacts, path)
        for base, _, names in os.walk(path):
            for name in names:
                if name != '.rundef.json':
                    yield os.path.join(base, name)[len(path):]

    def get_download_response(self, request, run, path):
        try:
            p = os.path.join(self.artifacts, self._get_run_path(run), path)
            mt = mimetypes.guess_type(p)[0]
            return send_file(open(p, 'rb'), mimetype=mt)
        except FileNotFoundError:
            return 'File not found', 404

    def _generate_put_url(self, run, path, expiration, content_type):
        p = os.path.join(self.artifacts, self._get_run_path(run), path)
        msg = '%s,%s,%s' % ('PUT', p, content_type)
        sig = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
        return url_for(
            'local_storage.run_upload_artifact', sig=sig,
            proj=run.build.project.name, build_id=run.build.build_id,
            run=run.name, path=path, _external=True)


def _get_run(proj, build_id, run):
    p = get_or_404(Project.query.filter_by(name=proj))
    b = get_or_404(Build.query.filter_by(project=p, build_id=build_id))
    return Run.query.filter_by(
        name=run
    ).filter(
        Run.build.has(Build.id == b.id)
    ).first_or_404()


@blueprint.route('/<sig>/<proj>/builds/<int:build_id>/runs/<run>/<path:path>',
                 methods=('PUT',))
def run_upload_artifact(sig, proj, build_id, run, path):
    run = _get_run(proj, build_id, run)

    # validate the signature
    ls = Storage()
    p = os.path.join(ls.artifacts, ls._get_run_path(run), path)
    msg = '%s,%s,%s' % (request.method, p, request.headers.get('Content-Type'))
    computed = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
    if not hmac.compare_digest(sig, computed):
        return 'Invalid signature', 401

    dirname = os.path.dirname(p)
    try:
        # we could have 2 uploads trying this, so just do it this way to avoid
        # race conditions
        os.makedirs(dirname)
    except FileExistsError:
        pass

    # stream the contents to disk
    with open(p, 'wb') as f:
        chunk_size = 4096
        while True:
            chunk = request.stream.read(chunk_size)
            if len(chunk) == 0:
                break
            f.write(chunk)
    return 'ok'
agpl-3.0
-3,963,570,515,246,286,300
33.094017
79
0.603159
false
3.426976
false
false
false
jadref/buffer_bci
python/echoClient/eventForwarder.py
1
2911
#!/usr/bin/env python3
bufferpath = "../../python/signalProc"
fieldtripPath="../../dataAcq/buffer/python"

import os, sys, random, math, time, socket, struct
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),bufferpath))
import bufhelp
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),fieldtripPath))
import FieldTrip

# Configuration of buffer
buffer1_hostname='localhost'
buffer1_port=1972

# Configuration of forwarding buffer
buffer2_hostname=None
buffer2_port=None

# holder for the buffer2 connection
ftc2=None
# flag to stop running when used from another function
running=True

def connectBuffers(buffer1_hostname,buffer1_port,buffer2_hostname,buffer2_port):
    if buffer1_hostname==buffer2_hostname and buffer1_port==buffer2_port :
        print("WARNING:: fowarding to the same port may result in infinite loops!!!!")

    #Connect to Buffer2 -- do this first so the global state is for ftc1
    print("Connecting to " + buffer2_hostname + ":" + str(buffer2_port))
    (ftc2,hdr2) = bufhelp.connect(buffer2_hostname,buffer2_port)
    print("Connected"); print(hdr2)

    #Connect to Buffer1
    print("Connecting to " + buffer1_hostname + ":" + str(buffer1_port))
    (ftc1,hdr1) = bufhelp.connect(buffer1_hostname,buffer1_port)
    print("Connected!"); print(hdr1)
    return (ftc1,ftc2)

# Receive events from the buffer1 and send them to buffer2
def forwardBufferEvents(ftc1,ftc2):
    global running
    global ftc
    ftc=ftc1
    while ( running ):
        events = bufhelp.buffer_newevents()
        for evt in events:
            print(str(evt.sample) + ": " + str(evt))
            evt.sample=-1
            ftc2.putEvents(evt)

def guiGetBuffer2():
    print("GUI info not supported yet!!")
    return;
    import tkinter as tk
    master = tk.Tk()
    tk.Label(master, text="HostName").grid(row=0)
    tk.Label(master, text="Port").grid(row=1)
    e1 = tk.Entry(master)
    e2 = tk.Entry(master)
    e1.grid(row=0, column=1)
    e2.grid(row=1, column=1)
    master.mainloop()

if __name__ == "__main__":
    if len(sys.argv)>0:  # called with options, i.e. commandline
        buffer2_hostname = sys.argv[1]
        if len(sys.argv)>1:
            try:
                buffer2_port = int(sys.argv[2])
            except:
                print('Error: second argument (%s) must be a valid (=integer) port number'%sys.argv[2])
                sys.exit(1)

    if buffer2_hostname is None :
        (buffer2_hostname,buffer2_port)=guiGetBuffer2()

    (ftc1,ftc2)=connectBuffers(buffer1_hostname,buffer1_port,buffer2_hostname,buffer2_port)
    forwardBufferEvents(ftc1,ftc2)
gpl-3.0
-8,554,221,728,104,355,000
34.938272
103
0.605634
false
3.602723
false
false
false
ozgurakgun/minion
mini-scripts/testallconstraints.py
1
3983
#!/usr/bin/python
# Generate two minion input files, run them then compare dumptree outputs to
# detect bugs in constraint propagators.

import sys, os, getopt
from constraint_test_common import *
from multiprocessing import Pool, Manager
import random
#from sendemail import *
import time

(optargs, other)=getopt.gnu_getopt(sys.argv, "", ["minion=", "numtests=", "email", "fullprop", "64bit", "procs=", "seed=", "conslist="])

if len(other)>1:
    print("Usage: testallconstraints.py [--minion=<location of minion binary>] [--numtests=...] [--email] [--procs=...] [--seed=...] [--conslist=...]")
    sys.exit(1)

# This one tests all the constraints in the following list.
conslist=[]

# equality constraints
conslist+=["diseq", "eq", "gaceq"]

# alldiffs
conslist+=["alldiff", "gacalldiff", "alldiffmatrix"]

# capacity constraints
conslist+=["gcc", "gccweak", "occurrence", "occurrenceleq", "occurrencegeq"]

#element constraints
conslist+=["element", "element_undefzero", "watchelement", "watchelement_undefzero"]
conslist+=["watchelement_one", "element_one"]

# arithmetic constraints
conslist+=["modulo", "modulo_undefzero", "pow", "minuseq", "product", "div", "div_undefzero", "abs"]
conslist+=["watchsumleq", "watchsumgeq", "watchvecneq", "hamming", "not-hamming"]
conslist+=["weightedsumleq", "weightedsumgeq"]
conslist+=["litsumgeq"]
# should test table to test reifytable? and reifyimplytable
conslist+=["sumgeq", "sumleq", "weightedsumleq", "weightedsumgeq"]
conslist+=["ineq"]
conslist+=["difference"]
conslist+=["negativetable", "lighttable"]

# symmetry-breaking constraints
conslist+=["lexleq", "lexless", "lexleq_rv", "lexleq_quick", "lexless_quick"]

conslist+=["max", "min"]
conslist+=["watchneq", "watchless"]
conslist+=["w-inset", "w-inintervalset", "w-notinset", "w-inrange", "w-notinrange", "w-literal", "w-notliteral"]
conslist+=["watchsumgeq", "litsumgeq", "watchneq", "watchless", "not-hamming"]
conslist+=["not-hamming"]
conslist+=["gacschema", "haggisgac", "haggisgac-stable", "str2plus", "shortstr2", "shortctuplestr2", "mddc"]
conslist+=["nvalueleq", "nvaluegeq"]

# add reifyimply variant of all constraints,
# and reify variant of all except those in reifyexceptions
it=conslist[:]
for c in it:
    conslist+=["reifyimply"+c]
    conslist+=["reify"+c]

numtests=100
minionbin="bin/minion"
email=False
fullprop=False   # compare the constraint against itself with fullprop. Needs DEBUG=1.
bit64=False
procs=1
seed=12345

for i in optargs:
    (a1, a2)=i
    if a1=="--minion":
        minionbin=a2
    elif a1=="--numtests":
        numtests=int(a2)
    elif a1=="--email":
        email=True
    elif a1=="--fullprop":
        fullprop=True
    elif a1=="--64bit":
        bit64=True
    elif a1=="--procs":
        procs=int(a2)
    elif a1=="--seed":
        seed=int(a2)
    elif a1=="--conslist":
        conslist=a2.split(",")

def runtest(consname):
    cachename = consname
    starttime=time.time()
    sys.stdout.flush()
    random.seed(seed)

    reify=False
    reifyimply=False
    if consname[0:10]=="reifyimply":
        reifyimply=True
        consname=consname[10:]
    if consname[0:5]=="reify":
        reify=True
        consname=consname[5:]
    consname=consname.replace("-", "__minus__")
    testobj=eval("test"+consname+"()")
    testobj.solver=minionbin

    for testnum in range(numtests):
        options = {'reify': reify, 'reifyimply': reifyimply, 'fullprop': fullprop, 'printcmd': False, 'fixlength':False, 'getsatisfyingassignment':True}
        if not testobj.runtest(options):
            print("Failed when testing %s"%cachename)
            sys.stdout.flush()
            return False

    print("Completed testing %s, duration: %d"%(cachename, time.time()-starttime))
    return True

if __name__ == '__main__':
    p = Pool(procs)
    retval = p.map(runtest, conslist)
    if all(retval):
        print("Success")
        exit(0)
    else:
        print("Failure")
        exit(1)
gpl-2.0
-5,649,680,209,550,508,000
27.654676
152
0.651017
false
3.061491
true
false
false
rabramley/telomere
app/model/batch.py
1
2972
from app import db
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql import select, func
from app.model.outstandingError import OutstandingError
import numpy
import decimal

class Batch(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    robot = db.Column(db.String(20))
    temperature = db.Column(db.Numeric(precision=3, scale=1))
    datetime = db.Column(db.DateTime())
    userId = db.Column(db.Integer, db.ForeignKey('user.id'))
    version_id = db.Column(db.Integer, nullable=False)
    plateName = db.Column(db.String(50))
    halfPlate = db.Column(db.String(1))
    humidity = db.Column(db.Integer())
    primerBatch = db.Column(db.Integer())
    enzymeBatch = db.Column(db.Integer())
    rotorGene = db.Column(db.Integer())
    operatorUserId = db.Column(db.Integer, db.ForeignKey('user.id'))
    batchFailureReason = db.Column(db.Integer())
    processType = db.Column(db.String(20))

    __mapper_args__ = {
        "version_id_col": version_id
    }

    def __init__(self, *args, **kwargs):
        self.id = kwargs.get('id')
        self.robot = kwargs.get('robot')
        self.temperature = kwargs.get('temperature')
        self.datetime = kwargs.get('datetime')
        self.userId = kwargs.get('userId')
        self.plateName = kwargs.get('plateName')
        self.halfPlate = kwargs.get('halfPlate')
        self.humidity = kwargs.get('humidity')
        self.primerBatch = kwargs.get('primerBatch')
        self.enzymeBatch = kwargs.get('enzymeBatch')
        self.rotorGene = kwargs.get('rotorGene')
        self.operatorUserId = kwargs.get('operatorUserId')
        self.batchFailureReason = kwargs.get('batchFailureReason')
        self.processType = kwargs.get('processType')

    @hybrid_property
    def outstandingErrorCount(self):
        return len(self.outstandingErrors)

    @outstandingErrorCount.expression
    def outstandingErrorCount(cls):
        return (select([func.count(OutstandingError.id)]).
                where(OutstandingError.batchId == cls.id).
                label("outstandingErrorCount")
                )

    def get_measurements_for_sample_code(self, sampleCode):
        return [m for m in self.measurements if m.sample.sampleCode == sampleCode]

    def has_no_pool_samples(self):
        return not any(m.sample.is_pool_sample() for m in self.measurements)

    def has_no_non_pool_samples(self):
        return not any(not m.sample.is_pool_sample() for m in self.measurements)

    def has_invalid_pool_ts_average(self):
        poolTsValues = [
            decimal.Decimal(m.ts)
            for m in self.measurements
            if m.ts is not None and m.sample.is_pool_sample()]

        averagePoolTs = numpy.mean(poolTsValues)

        return averagePoolTs < 0.99 or averagePoolTs > 1.01

    def is_duplicate(self):
        return self.processType == "Duplicate"

    def is_replate(self):
        return self.processType == "Re-Plate"

    def is_initial(self):
        return self.processType == "Initial"
mit
7,113,725,852,751,646,000
37.102564
124
0.664536
false
3.580723
false
false
false
DarioGT/OMS-PluginXML
org.modelsphere.sms/lib/jython-2.2.1/Lib/uu.py
1
6092
#! /usr/bin/env python # Copyright 1994 by Lance Ellinghouse # Cathedral City, California Republic, United States of America. # All Rights Reserved # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, # provided that the above copyright notice appear in all copies and that # both that copyright notice and this permission notice appear in # supporting documentation, and that the name of Lance Ellinghouse # not be used in advertising or publicity pertaining to distribution # of the software without specific, written prior permission. # LANCE ELLINGHOUSE DISCLAIMS ALL WARRANTIES WITH REGARD TO # THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND # FITNESS, IN NO EVENT SHALL LANCE ELLINGHOUSE CENTRUM BE LIABLE # FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # # Modified by Jack Jansen, CWI, July 1995: # - Use binascii module to do the actual line-by-line conversion # between ascii and binary. This results in a 1000-fold speedup. The C # version is still 5 times faster, though. # - Arguments more compliant with python standard """Implementation of the UUencode and UUdecode functions. encode(in_file, out_file [,name, mode]) decode(in_file [, out_file, mode]) """ import binascii import os import sys from types import StringType __all__ = ["Error", "encode", "decode"] class Error(Exception): pass def encode(in_file, out_file, name=None, mode=None): """Uuencode file""" # # If in_file is a pathname open it and change defaults # if in_file == '-': in_file = sys.stdin elif isinstance(in_file, StringType): if name is None: name = os.path.basename(in_file) if mode is None: try: mode = os.stat(in_file)[0] except AttributeError: pass in_file = open(in_file, 'rb') # # Open out_file if it is a pathname # if out_file == '-': out_file = sys.stdout elif isinstance(out_file, StringType): out_file = open(out_file, 'w') # # Set defaults for name and mode # if name is None: name = '-' if mode is None: mode = 0666 # # Write the data # out_file.write('begin %o %s\n' % ((mode&0777),name)) str = in_file.read(45) while len(str) > 0: out_file.write(binascii.b2a_uu(str)) str = in_file.read(45) out_file.write(' \nend\n') def decode(in_file, out_file=None, mode=None, quiet=0): """Decode uuencoded file""" # # Open the input file, if needed. 
# if in_file == '-': in_file = sys.stdin elif isinstance(in_file, StringType): in_file = open(in_file) # # Read until a begin is encountered or we've exhausted the file # while 1: hdr = in_file.readline() if not hdr: raise Error, 'No valid begin line found in input file' if hdr[:5] != 'begin': continue hdrfields = hdr.split(" ", 2) if len(hdrfields) == 3 and hdrfields[0] == 'begin': try: int(hdrfields[1], 8) break except ValueError: pass if out_file is None: out_file = hdrfields[2].rstrip() if os.path.exists(out_file): raise Error, 'Cannot overwrite existing file: %s' % out_file if mode is None: mode = int(hdrfields[1], 8) # # Open the output file # opened = False if out_file == '-': out_file = sys.stdout elif isinstance(out_file, StringType): fp = open(out_file, 'wb') try: os.path.chmod(out_file, mode) except AttributeError: pass out_file = fp opened = True # # Main decoding loop # s = in_file.readline() while s and s.strip() != 'end': try: data = binascii.a2b_uu(s) except binascii.Error, v: # Workaround for broken uuencoders by /Fredrik Lundh nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3 data = binascii.a2b_uu(s[:nbytes]) if not quiet: sys.stderr.write("Warning: %s\n" % str(v)) out_file.write(data) s = in_file.readline() if not s: raise Error, 'Truncated input file' if opened: out_file.close() def test(): """uuencode/uudecode main program""" import getopt dopt = 0 topt = 0 input = sys.stdin output = sys.stdout ok = 1 try: optlist, args = getopt.getopt(sys.argv[1:], 'dt') except getopt.error: ok = 0 if not ok or len(args) > 2: print 'Usage:', sys.argv[0], '[-d] [-t] [input [output]]' print ' -d: Decode (in stead of encode)' print ' -t: data is text, encoded format unix-compatible text' sys.exit(1) for o, a in optlist: if o == '-d': dopt = 1 if o == '-t': topt = 1 if len(args) > 0: input = args[0] if len(args) > 1: output = args[1] if dopt: if topt: if isinstance(output, StringType): output = open(output, 'w') else: print sys.argv[0], ': cannot do -t to stdout' sys.exit(1) decode(input, output) else: if topt: if isinstance(input, StringType): input = open(input, 'r') else: print sys.argv[0], ': cannot do -t from stdin' sys.exit(1) encode(input, output) if __name__ == '__main__': test()
gpl-3.0
8,905,765,275,347,266,000
29.241026
72
0.559094
false
3.732843
false
false
false
jhogg41/gm-o-matic
gom_server/gom_server/urls.py
1
1187
"""gom_server URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin from rest_framework import routers import core.router import char_attr.router router = routers.DefaultRouter() core.router.addRoutes(router) char_attr.router.addRoutes(router) urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^api-auth/', include('rest_framework.urls', namespace='rest-framework')), url(r'^', include(router.urls)), url(r'^rest-auth/', include('rest_auth.urls')), url(r'^rest-auth/registration', include('rest_auth.registration.urls')), ]
bsd-2-clause
-3,051,245,291,478,614,500
36.09375
83
0.708509
false
3.420749
false
false
false
kevin-coder/tensorflow-fork
tensorflow/python/keras/layers/normalization_test.py
1
22900
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for normalization layers.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import parameterized import numpy as np from tensorflow.python import keras from tensorflow.python.framework import constant_op from tensorflow.python.framework import test_util as tf_test_util from tensorflow.python.keras import keras_parameterized from tensorflow.python.keras import testing_utils from tensorflow.python.keras.layers import normalization from tensorflow.python.keras.layers import normalization_v2 from tensorflow.python.keras.mixed_precision.experimental import policy from tensorflow.python.platform import test from tensorflow.python.training import gradient_descent class BatchNormalizationTest(keras_parameterized.TestCase): @keras_parameterized.run_all_keras_modes def test_basic_batchnorm(self): testing_utils.layer_test( keras.layers.BatchNormalization, kwargs={ 'momentum': 0.9, 'epsilon': 0.1, 'gamma_regularizer': keras.regularizers.l2(0.01), 'beta_regularizer': keras.regularizers.l2(0.01) }, input_shape=(3, 4, 2)) testing_utils.layer_test( keras.layers.BatchNormalization, kwargs={ 'gamma_initializer': 'ones', 'beta_initializer': 'ones', 'moving_mean_initializer': 'zeros', 'moving_variance_initializer': 'ones' }, input_shape=(3, 4, 2)) testing_utils.layer_test( keras.layers.BatchNormalization, kwargs={'scale': False, 'center': False}, input_shape=(3, 3)) @tf_test_util.run_in_graph_and_eager_modes def test_batchnorm_weights(self): layer = keras.layers.BatchNormalization(scale=False, center=False) layer.build((None, 3, 4)) self.assertEqual(len(layer.trainable_weights), 0) self.assertEqual(len(layer.weights), 2) layer = keras.layers.BatchNormalization() layer.build((None, 3, 4)) self.assertEqual(len(layer.trainable_weights), 2) self.assertEqual(len(layer.weights), 4) @tf_test_util.run_in_graph_and_eager_modes def test_batchnorm_regularization(self): layer = keras.layers.BatchNormalization( gamma_regularizer='l1', beta_regularizer='l1') layer.build((None, 3, 4)) self.assertEqual(len(layer.losses), 2) max_norm = keras.constraints.max_norm layer = keras.layers.BatchNormalization( gamma_constraint=max_norm, beta_constraint=max_norm) layer.build((None, 3, 4)) self.assertEqual(layer.gamma.constraint, max_norm) self.assertEqual(layer.beta.constraint, max_norm) @keras_parameterized.run_all_keras_modes def test_batchnorm_convnet(self): if test.is_gpu_available(cuda_only=True): with self.session(use_gpu=True): model = keras.models.Sequential() norm = keras.layers.BatchNormalization( axis=1, input_shape=(3, 4, 4), momentum=0.8) model.add(norm) model.compile(loss='mse', optimizer=gradient_descent.GradientDescentOptimizer(0.01), run_eagerly=testing_utils.should_run_eagerly()) # centered on 5.0, variance 10.0 x = np.random.normal(loc=5.0, scale=10.0, 
size=(1000, 3, 4, 4)) model.fit(x, x, epochs=4, verbose=0) out = model.predict(x) out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1)) out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1)) np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1) np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1) @keras_parameterized.run_all_keras_modes def test_batchnorm_convnet_channel_last(self): model = keras.models.Sequential() norm = keras.layers.BatchNormalization( axis=-1, input_shape=(4, 4, 3), momentum=0.8) model.add(norm) model.compile(loss='mse', optimizer=gradient_descent.GradientDescentOptimizer(0.01), run_eagerly=testing_utils.should_run_eagerly()) # centered on 5.0, variance 10.0 x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3)) model.fit(x, x, epochs=4, verbose=0) out = model.predict(x) out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3)) out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3)) np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1) np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1) @keras_parameterized.run_all_keras_modes def test_batchnorm_correctness(self): _run_batchnorm_correctness_test( normalization.BatchNormalization, dtype='float32') _run_batchnorm_correctness_test( normalization_v2.BatchNormalization, dtype='float32') @keras_parameterized.run_all_keras_modes def test_batchnorm_mixed_precision(self): _run_batchnorm_correctness_test( normalization.BatchNormalization, dtype='float16') _run_batchnorm_correctness_test( normalization_v2.BatchNormalization, dtype='float16') @tf_test_util.run_in_graph_and_eager_modes def test_batchnorm_policy(self): norm = keras.layers.BatchNormalization( axis=-1, input_shape=(4, 4, 3), momentum=0.8, dtype=policy.Policy('infer_float32_vars')) x = np.random.normal(size=(10, 4, 4, 3)).astype('float16') y = norm(x) self.assertEqual(y.dtype, 'float16') self.assertEqual(norm.beta.dtype.base_dtype, 'float32') self.assertEqual(norm.gamma.dtype.base_dtype, 'float32') class BatchNormalizationV1Test(test.TestCase): @tf_test_util.run_in_graph_and_eager_modes def test_v1_fused_attribute(self): norm = normalization.BatchNormalization() inp = keras.layers.Input((4, 4, 4)) norm(inp) self.assertEqual(norm.fused, True) norm = normalization.BatchNormalization(fused=False) self.assertEqual(norm.fused, False) inp = keras.layers.Input(shape=(4, 4, 4)) norm(inp) self.assertEqual(norm.fused, False) norm = normalization.BatchNormalization(virtual_batch_size=2) self.assertEqual(norm.fused, True) inp = keras.layers.Input(shape=(2, 2, 2)) norm(inp) self.assertEqual(norm.fused, False) class BatchNormalizationV2Test(keras_parameterized.TestCase): @keras_parameterized.run_all_keras_modes def test_basic_batchnorm_v2(self): testing_utils.layer_test( normalization_v2.BatchNormalization, kwargs={'fused': True}, input_shape=(3, 3, 3, 3)) testing_utils.layer_test( normalization_v2.BatchNormalization, kwargs={'fused': None}, input_shape=(3, 3, 3)) @tf_test_util.run_in_graph_and_eager_modes def test_v2_fused_attribute(self): norm = normalization_v2.BatchNormalization() self.assertEqual(norm.fused, None) inp = keras.layers.Input(shape=(4, 4, 4)) norm(inp) self.assertEqual(norm.fused, True) norm = normalization_v2.BatchNormalization() self.assertEqual(norm.fused, None) inp = keras.layers.Input(shape=(4, 4)) norm(inp) self.assertEqual(norm.fused, False) norm = normalization_v2.BatchNormalization(virtual_batch_size=2) self.assertEqual(norm.fused, False) 
inp = keras.layers.Input(shape=(4, 4, 4)) norm(inp) self.assertEqual(norm.fused, False) norm = normalization_v2.BatchNormalization(fused=False) self.assertEqual(norm.fused, False) inp = keras.layers.Input(shape=(4, 4, 4)) norm(inp) self.assertEqual(norm.fused, False) norm = normalization_v2.BatchNormalization(fused=True, axis=[3]) self.assertEqual(norm.fused, True) inp = keras.layers.Input(shape=(4, 4, 4)) norm(inp) self.assertEqual(norm.fused, True) with self.assertRaisesRegexp(ValueError, 'fused.*renorm'): normalization_v2.BatchNormalization(fused=True, renorm=True) with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'): normalization_v2.BatchNormalization(fused=True, axis=2) with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'): normalization_v2.BatchNormalization(fused=True, axis=[1, 3]) with self.assertRaisesRegexp(ValueError, 'fused.*virtual_batch_size'): normalization_v2.BatchNormalization(fused=True, virtual_batch_size=2) with self.assertRaisesRegexp(ValueError, 'fused.*adjustment'): normalization_v2.BatchNormalization(fused=True, adjustment=lambda _: (1, 0)) norm = normalization_v2.BatchNormalization(fused=True) self.assertEqual(norm.fused, True) inp = keras.layers.Input(shape=(4, 4)) with self.assertRaisesRegexp(ValueError, '4D input tensors'): norm(inp) def _run_batchnorm_correctness_test(layer, dtype='float32', fused=False): model = keras.models.Sequential() model.add(keras.Input(shape=(2, 2, 2), dtype=dtype)) norm = layer(momentum=0.8, fused=fused) model.add(norm) if dtype == 'float16': # Keras models require float32 losses. model.add(keras.layers.Lambda(lambda x: keras.backend.cast(x, 'float32'))) model.compile(loss='mse', optimizer=gradient_descent.GradientDescentOptimizer(0.01), run_eagerly=testing_utils.should_run_eagerly()) # centered on 5.0, variance 10.0 x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2)) .astype(dtype)) model.fit(x, x, epochs=4, verbose=0) out = model.predict(x) out -= keras.backend.eval(norm.beta) out /= keras.backend.eval(norm.gamma) np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1) np.testing.assert_allclose(out.std(), 1.0, atol=1e-1) @parameterized.parameters( [normalization.BatchNormalization, normalization_v2.BatchNormalization]) class NormalizationLayersGraphModeOnlyTest( test.TestCase, parameterized.TestCase): def test_shared_batchnorm(self, layer): """Test that a BN layer can be shared across different data streams.""" with self.cached_session(): # Test single layer reuse bn = layer() x1 = keras.layers.Input(shape=(10,)) _ = bn(x1) x2 = keras.layers.Input(shape=(10,)) y2 = bn(x2) x = np.random.normal(loc=5.0, scale=10.0, size=(2, 10)) model = keras.models.Model(x2, y2) model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse') model.train_on_batch(x, x) self.assertEqual(len(bn.updates), 4) self.assertEqual(len(model.updates), 2) self.assertEqual(len(model.get_updates_for(x2)), 2) # Test model-level reuse x3 = keras.layers.Input(shape=(10,)) y3 = model(x3) new_model = keras.models.Model(x3, y3, name='new_model') self.assertEqual(len(new_model.updates), 2) self.assertEqual(len(model.updates), 4) self.assertEqual(len(new_model.get_updates_for(x3)), 2) new_model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse') new_model.train_on_batch(x, x) def test_that_trainable_disables_updates(self, layer): with self.cached_session(): val_a = np.random.random((10, 4)) val_out = np.random.random((10, 4)) a = keras.layers.Input(shape=(4,)) layer = layer(input_shape=(4,)) b = layer(a) 
model = keras.models.Model(a, b) model.trainable = False assert not model.updates model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse') assert not model.updates x1 = model.predict(val_a) model.train_on_batch(val_a, val_out) x2 = model.predict(val_a) self.assertAllClose(x1, x2, atol=1e-7) model.trainable = True model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse') assert model.updates model.train_on_batch(val_a, val_out) x2 = model.predict(val_a) assert np.abs(np.sum(x1 - x2)) > 1e-5 layer.trainable = False model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse') assert not model.updates x1 = model.predict(val_a) model.train_on_batch(val_a, val_out) x2 = model.predict(val_a) self.assertAllClose(x1, x2, atol=1e-7) @tf_test_util.run_deprecated_v1 def test_batchnorm_trainable(self, layer): """Tests that batchnorm layer is trainable when learning phase is enabled. Computes mean and std for current inputs then applies batch normalization using them. Args: layer: Either V1 or V2 of BatchNormalization layer. """ # TODO(fchollet): enable in all execution modes when issue with # learning phase setting is resolved. with self.cached_session(): bn_mean = 0.5 bn_std = 10. val_a = np.expand_dims(np.arange(10.), axis=1) def get_model(bn_mean, bn_std): inp = keras.layers.Input(shape=(1,)) x = layer()(inp) model1 = keras.models.Model(inp, x) model1.set_weights([ np.array([1.]), np.array([0.]), np.array([bn_mean]), np.array([bn_std**2]) ]) return model1 # Simulates training-mode with trainable layer. # Should use mini-batch statistics. with keras.backend.learning_phase_scope(1): model = get_model(bn_mean, bn_std) model.compile(loss='mse', optimizer='rmsprop') out = model.predict(val_a) self.assertAllClose( (val_a - np.mean(val_a)) / np.std(val_a), out, atol=1e-3) def _run_layernorm_correctness_test(layer, dtype='float32'): model = keras.models.Sequential() norm = layer(input_shape=(2, 2, 2)) model.add(norm) model.compile(loss='mse', optimizer=gradient_descent.GradientDescentOptimizer(0.01), run_eagerly=testing_utils.should_run_eagerly()) # centered on 5.0, variance 10.0 x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2)) .astype(dtype)) model.fit(x, x, epochs=4, verbose=0) out = model.predict(x) out -= keras.backend.eval(norm.beta) out /= keras.backend.eval(norm.gamma) np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1) np.testing.assert_allclose(out.std(), 1.0, atol=1e-1) class LayerNormalizationTest(keras_parameterized.TestCase): @keras_parameterized.run_all_keras_modes def test_basic_layernorm(self): testing_utils.layer_test( keras.layers.LayerNormalization, kwargs={ 'gamma_regularizer': keras.regularizers.l2(0.01), 'beta_regularizer': keras.regularizers.l2(0.01) }, input_shape=(3, 4, 2)) testing_utils.layer_test( keras.layers.LayerNormalization, kwargs={ 'gamma_initializer': 'ones', 'beta_initializer': 'ones', }, input_shape=(3, 4, 2)) testing_utils.layer_test( keras.layers.LayerNormalization, kwargs={'scale': False, 'center': False}, input_shape=(3, 3)) @tf_test_util.run_in_graph_and_eager_modes def test_layernorm_weights(self): layer = keras.layers.LayerNormalization(scale=False, center=False) layer.build((None, 3, 4)) self.assertEqual(len(layer.trainable_weights), 0) self.assertEqual(len(layer.weights), 0) layer = keras.layers.LayerNormalization() layer.build((None, 3, 4)) self.assertEqual(len(layer.trainable_weights), 2) self.assertEqual(len(layer.weights), 2) @tf_test_util.run_in_graph_and_eager_modes def 
test_layernorm_regularization(self): layer = keras.layers.LayerNormalization( gamma_regularizer='l1', beta_regularizer='l1') layer.build((None, 3, 4)) self.assertEqual(len(layer.losses), 2) max_norm = keras.constraints.max_norm layer = keras.layers.LayerNormalization( gamma_constraint=max_norm, beta_constraint=max_norm) layer.build((None, 3, 4)) self.assertEqual(layer.gamma.constraint, max_norm) self.assertEqual(layer.beta.constraint, max_norm) @keras_parameterized.run_all_keras_modes def test_layernorm_convnet(self): if test.is_gpu_available(cuda_only=True): with self.session(use_gpu=True): model = keras.models.Sequential() norm = keras.layers.LayerNormalization( input_shape=(3, 4, 4), params_axis=1) model.add(norm) model.compile(loss='mse', optimizer=gradient_descent.GradientDescentOptimizer(0.01), run_eagerly=testing_utils.should_run_eagerly()) # centered on 5.0, variance 10.0 x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4)) model.fit(x, x, epochs=4, verbose=0) out = model.predict(x) out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1)) out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1)) np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1) np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1) @keras_parameterized.run_all_keras_modes def test_layernorm_convnet_channel_last(self): model = keras.models.Sequential() norm = keras.layers.LayerNormalization(input_shape=(4, 4, 3)) model.add(norm) model.compile(loss='mse', optimizer=gradient_descent.GradientDescentOptimizer(0.01), run_eagerly=testing_utils.should_run_eagerly()) # centered on 5.0, variance 10.0 x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3)) model.fit(x, x, epochs=4, verbose=0) out = model.predict(x) out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3)) out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3)) np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1) np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1) @keras_parameterized.run_all_keras_modes def test_layernorm_correctness(self): _run_layernorm_correctness_test( normalization.LayerNormalization, dtype='float32') @keras_parameterized.run_all_keras_modes def test_layernorm_mixed_precision(self): _run_layernorm_correctness_test( normalization.LayerNormalization, dtype='float16') def doOutputTest(self, input_shape, tol=1e-5, norm_axis=None, params_axis=-1, dtype=None): ndim = len(input_shape) if norm_axis is None: moments_axis = range(1, ndim) elif isinstance(norm_axis, int): if norm_axis < 0: moments_axis = [norm_axis + ndim] else: moments_axis = [norm_axis] else: moments_axis = [] for dim in norm_axis: if dim < 0: dim = dim + ndim moments_axis.append(dim) moments_axis = tuple(moments_axis) expected_shape = [] for i in range(ndim): if i not in moments_axis: expected_shape.append(input_shape[i]) expected_mean = np.zeros(expected_shape) expected_var = np.ones(expected_shape) for mu in [0.0, 1e2]: for sigma in [1.0, 0.1]: inputs = np.random.randn(*input_shape) * sigma + mu inputs_t = constant_op.constant(inputs, shape=input_shape) layer = normalization.LayerNormalization( norm_axis=norm_axis, params_axis=params_axis, dtype=dtype) outputs = layer(inputs_t) beta = layer.beta gamma = layer.gamma for weight in layer.weights: self.evaluate(weight.initializer) outputs = self.evaluate(outputs) beta = self.evaluate(beta) gamma = self.evaluate(gamma) # The mean and variance of the output should be close to 0 and 1 # respectively. 
# Make sure that there are no NaNs self.assertFalse(np.isnan(outputs).any()) mean = np.mean(outputs, axis=moments_axis) var = np.var(outputs, axis=moments_axis) # Layer-norm implemented in numpy eps = 1e-12 expected_out = ( (gamma * (inputs - np.mean( inputs, axis=moments_axis, keepdims=True)) / np.sqrt(eps + np.var( inputs, axis=moments_axis, keepdims=True))) + beta) self.assertAllClose(expected_mean, mean, atol=tol, rtol=tol) self.assertAllClose(expected_var, var, atol=tol) # The full computation gets a bigger tolerance self.assertAllClose(expected_out, outputs, atol=5 * tol) @tf_test_util.run_in_graph_and_eager_modes def testOutput2DInput(self): self.doOutputTest((10, 300)) self.doOutputTest((10, 300), norm_axis=[0]) self.doOutputTest((10, 300), params_axis=[0, 1]) @tf_test_util.run_in_graph_and_eager_modes def testOutput2DInputDegenerateNormAxis(self): with self.assertRaisesRegexp(ValueError, r'Invalid axis: 2'): self.doOutputTest((10, 300), norm_axis=2) @tf_test_util.run_in_graph_and_eager_modes def testOutput4DInput(self): self.doOutputTest((100, 10, 10, 3)) @tf_test_util.run_in_graph_and_eager_modes def testOutput4DInputNormOnInnermostAxis(self): # Equivalent tests shape = (100, 10, 10, 3) self.doOutputTest( shape, norm_axis=list(range(3, len(shape))), tol=1e-4, dtype='float64') self.doOutputTest(shape, norm_axis=-1, tol=1e-4, dtype='float64') @tf_test_util.run_in_graph_and_eager_modes def testOutputSmallInput(self): self.doOutputTest((10, 10, 10, 30)) @tf_test_util.run_in_graph_and_eager_modes def testOutputSmallInputNormOnInnermostAxis(self): self.doOutputTest((10, 10, 10, 30), norm_axis=3) @tf_test_util.run_in_graph_and_eager_modes def testOutputSmallInputNormOnMixedAxes(self): self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3]) self.doOutputTest((10, 10, 10, 30), params_axis=[-2, -1]) self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3], params_axis=[-3, -2, -1]) @tf_test_util.run_in_graph_and_eager_modes def testOutputBigInput(self): self.doOutputTest((1, 100, 100, 1)) self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2]) self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2], params_axis=[-2, -1]) if __name__ == '__main__': test.main()
apache-2.0
2,995,893,033,428,413,000
36.115073
80
0.650524
false
3.394101
true
false
false
meisamhe/GPLshared
Programming/MPI — AMath 483 583, Spring 2013 1.0 documentation_files/s2.py
1
1744
import time
import threading


# @include
class SpellCheckService:
    w_last = closest_to_last_word = None
    lock = threading.Lock()

    @staticmethod
    def service(req, resp):
        w = req.extract_word_to_check_from_request()
        result = None
        with SpellCheckService.lock:
            if w == SpellCheckService.w_last:
                result = SpellCheckService.closest_to_last_word.copy()
        if result is None:
            result = closest_in_dictionary(w)
            with SpellCheckService.lock:
                SpellCheckService.w_last = w
                SpellCheckService.closest_to_last_word = result
        resp.encode_into_response(result)
# @exclude


class ServiceRequest:
    def __init__(self, s):
        self.request = s

    def extract_word_to_check_from_request(self):
        return self.request


class ServiceResponse:
    response = None

    def encode_into_response(self, s):
        self.response = s


def closest_in_dictionary(w):
    time.sleep(0.2)
    return [w + '_result']


class ServiceThread(threading.Thread):
    def __init__(self, data):
        super().__init__()
        self.data = data

    def run(self):
        start_time = time.time()
        req = ServiceRequest(self.data)
        resp = ServiceResponse()
        SpellCheckService.service(req, resp)
        print(self.data, '->', resp.response,
              '(%.3f sec)' % (time.time() - start_time))


def main():
    i = 0
    while True:
        ServiceThread('req:%d' % (i + 1)).start()
        if i > 0:
            # while req:i+1 is computed we could return req:i from the cache
            ServiceThread('req:%d' % i).start()
        time.sleep(0.5)
        i += 1


if __name__ == '__main__':
    main()
gpl-3.0
-6,428,526,257,127,913,000
22.567568
76
0.575115
false
3.62578
false
false
false
gypsymauro/gestione-cantiere
build/lib.linux-x86_64-2.7/cantiere/admin.py
1
1533
from django.contrib import admin

# Register your models here.

from .models import Squadra
from .models import StatoSegnalazione
from .models import Segnalazione
from .models import StatoIntervento
from .models import Intervento
from .models import Risorsa
from .models import InterventoRisorsa
from .models import Costo
from .models import CentroCosto
from .models import Allegato

class InterventoRisorsaInline(admin.TabularInline):
    model = InterventoRisorsa
    exclude = ['created','created_by','modified','modified_by','deleted','note']

class RisorsaAdmin(admin.ModelAdmin):
    inlines = (InterventoRisorsaInline,)
    exclude = ['created','created_by','modified','modified_by','deleted']

class InterventoAdmin(admin.ModelAdmin):
    inlines = (InterventoRisorsaInline,)
    list_display = ['oggetto','data_inizio','stato','stampa_intervento']
    list_editable = ['stato']
    ordering = ['created']
    exclude = ['created','created_by','modified','modified_by','deleted']
    list_filter = ('stato','data_inizio','centro_costo','responsabile')
    save_on_top = True
    search_fields = ('oggetto','data_inizio')

admin.site.register(Squadra)
admin.site.register(StatoSegnalazione)
admin.site.register(Segnalazione)
admin.site.register(StatoIntervento)
admin.site.register(Intervento,InterventoAdmin)
admin.site.register(Risorsa,RisorsaAdmin)
admin.site.register(Costo)
admin.site.register(CentroCosto)
admin.site.register(Allegato)

#admin.site.register(InterventoMezzo)
#admin.site.register(InterventoPersona)
gpl-2.0
-2,517,305,136,308,949,000
32.326087
80
0.763862
false
3.200418
false
false
false
SembeiNorimaki/Bioinformatics
EulerianCycle.py
1
1903
# Test passed :)
# TODO: split right here before the conditional.

import sys


def handle_input_output():
    # handle input
    graph = {}
    while True:
        try:
            line = sys.stdin.readline().rstrip('\n')
            left, right = line.split(' -> ')
            if left in graph.keys():
                graph[left].append(right)
            else:
                graph[left] = right.split(',')
        except:
            break  # EOF
    # print(graph)

    # Execute main function
    r = EulerianCycle(graph)

    # handle output
    print('->'.join(r))


def EulerianCycle(graph):
    stack = []
    location = None
    circuit = []

    # since it's an Eulerian Cycle we can start at any vertex
    location = list(graph)[0]

    # Repeat until the current vertex has no more out-going edges (neighbors)
    # and the stack is empty.
    while len(graph[location]) > 0 or len(stack) > 0:
        if len(graph[location]) == 0:
            # If current vertex has no out-going edges
            circuit.append(location)  # add it to circuit
            # remove the last vertex from the stack and set it as the current one
            location = stack.pop()
        else:  # otherwise
            stack.append(location)  # add the vertex to the stack
            # take any of its neighbors,
            # remove the edge between that vertex and selected neighbor
            # and set that neighbor as the current vertex
            location = graph[location].pop()

    # Here we must append the first element at the end to close the cycle
    # but since circuit is reversed, we append the last element at the beginning
    circuit.insert(0, circuit[-1])

    return circuit[::-1]  # return the reversed circuit


if __name__ == '__main__':
    handle_input_output()
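# Worked example (not part of the original file; the graph below is an
# illustrative assumption). EulerianCycle expects a dict mapping each vertex to
# a list of out-neighbours and consumes those lists as it walks the edges, so
# pass it a throw-away copy. Every vertex here has equal in- and out-degree, so
# an Eulerian cycle exists:
#
#     demo_graph = {
#         '0': ['1', '3'],
#         '1': ['2'],
#         '2': ['0'],
#         '3': ['0'],
#     }
#     print('->'.join(EulerianCycle(demo_graph)))
#
# One valid output is 3->0->1->2->0->3; the exact cycle depends on dict and
# pop() ordering, and any Eulerian cycle over the five edges is acceptable.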
mit
-4,300,464,103,968,979,000
33
115
0.553337
false
4.541766
false
false
false
kittiu/account-payment
account_payment_return/models/payment_return.py
1
15028
# Copyright 2011-2012 7 i TRIA <http://www.7itria.cat> # Copyright 2011-2012 Avanzosc <http://www.avanzosc.com> # Copyright 2013 Pedro M. Baeza <pedro.baeza@tecnativa.com> # Copyright 2014 Markus Schneider <markus.schneider@initos.com> # Copyright 2016 Carlos Dauden <carlos.dauden@tecnativa.com> # Copyright 2017 Luis M. Ontalba <luis.martinez@tecnativa.com> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). from odoo import _, api, fields, models from odoo.exceptions import Warning as UserError import odoo.addons.decimal_precision as dp class PaymentReturn(models.Model): _name = "payment.return" _inherit = ['mail.thread'] _description = 'Payment return' _order = 'date DESC, id DESC' company_id = fields.Many2one( 'res.company', string='Company', required=True, states={'done': [('readonly', True)], 'cancelled': [('readonly', True)]}, default=lambda self: self.env['res.company']._company_default_get( 'account')) date = fields.Date( string='Return date', help="This date will be used as the account entry date.", states={'done': [('readonly', True)], 'cancelled': [('readonly', True)]}, default=lambda x: fields.Date.today()) name = fields.Char( string="Reference", required=True, states={'done': [('readonly', True)], 'cancelled': [('readonly', True)]}, default=lambda self: self.env['ir.sequence'].next_by_code( 'payment.return')) line_ids = fields.One2many( comodel_name='payment.return.line', inverse_name='return_id', states={'done': [('readonly', True)], 'cancelled': [('readonly', True)]}) journal_id = fields.Many2one( comodel_name='account.journal', string='Bank journal', required=True, states={'done': [('readonly', True)], 'cancelled': [('readonly', True)]}) move_id = fields.Many2one( comodel_name='account.move', string='Reference to the created journal entry', states={'done': [('readonly', True)], 'cancelled': [('readonly', True)]}) state = fields.Selection( selection=[('draft', 'Draft'), ('imported', 'Imported'), ('done', 'Done'), ('cancelled', 'Cancelled')], string='State', readonly=True, default='draft', track_visibility='onchange') @api.multi @api.constrains('line_ids') def _check_duplicate_move_line(self): def append_error(error_line): error_list.append( _("Payment Line: %s (%s) in Payment Return: %s") % ( ', '.join(error_line.mapped('move_line_ids.name')), error_line.partner_id.name, error_line.return_id.name ) ) error_list = [] all_move_lines = self.env['account.move.line'] for line in self.mapped('line_ids'): for move_line in line.move_line_ids: if move_line in all_move_lines: append_error(line) all_move_lines |= move_line if (not error_list) and all_move_lines: duplicate_lines = self.env['payment.return.line'].search([ ('move_line_ids', 'in', all_move_lines.ids), ('return_id.state', '=', 'done'), ]) if duplicate_lines: for line in duplicate_lines: append_error(line) if error_list: raise UserError( _("Payment reference must be unique" "\n%s") % '\n'.join(error_list) ) def _get_move_amount(self, return_line): return return_line.amount def _prepare_invoice_returned_vals(self): return {'returned_payment': True} @api.multi def unlink(self): if self.filtered(lambda x: x.state == 'done'): raise UserError(_( "You can not remove a payment return if state is 'Done'")) return super(PaymentReturn, self).unlink() @api.multi def button_match(self): self.mapped('line_ids').filtered(lambda x: ( (not x.move_line_ids) and x.reference))._find_match() self._check_duplicate_move_line() @api.multi def _prepare_return_move_vals(self): """Prepare the values for the journal entry created from the 
return. :return: Dictionary with the record values. """ self.ensure_one() return { 'name': '/', 'ref': _('Return %s') % self.name, 'journal_id': self.journal_id.id, 'date': self.date, 'company_id': self.company_id.id, } @api.multi def action_confirm(self): self.ensure_one() # Check for incomplete lines if self.line_ids.filtered(lambda x: not x.move_line_ids): raise UserError( _("You must input all moves references in the payment " "return.")) invoices = self.env['account.invoice'] move_line_obj = self.env['account.move.line'] move = self.env['account.move'].create( self._prepare_return_move_vals() ) total_amount = 0.0 for return_line in self.line_ids: move_amount = self._get_move_amount(return_line) move_line2 = self.env['account.move.line'].with_context( check_move_validity=False).create({ 'name': move.ref, 'debit': move_amount, 'credit': 0.0, 'account_id': return_line.move_line_ids[0].account_id.id, 'move_id': move.id, 'partner_id': return_line.partner_id.id, 'journal_id': move.journal_id.id, }) total_amount += move_amount for move_line in return_line.move_line_ids: returned_moves = move_line.matched_debit_ids.mapped( 'debit_move_id') invoices |= returned_moves.mapped('invoice_id') move_line.remove_move_reconcile() (move_line | move_line2).reconcile() return_line.move_line_ids.mapped('matched_debit_ids').write( {'origin_returned_move_ids': [(6, 0, returned_moves.ids)]}) if return_line.expense_amount: expense_lines_vals = [] expense_lines_vals.append({ 'name': move.ref, 'move_id': move.id, 'debit': 0.0, 'credit': return_line.expense_amount, 'partner_id': return_line.expense_partner_id.id, 'account_id': (return_line.return_id.journal_id. default_credit_account_id.id), }) expense_lines_vals.append({ 'move_id': move.id, 'debit': return_line.expense_amount, 'name': move.ref, 'credit': 0.0, 'partner_id': return_line.expense_partner_id.id, 'account_id': return_line.expense_account.id, }) for expense_line_vals in expense_lines_vals: move_line_obj.with_context( check_move_validity=False).create(expense_line_vals) extra_lines_vals = return_line._prepare_extra_move_lines(move) for extra_line_vals in extra_lines_vals: move_line_obj.create(extra_line_vals) move_line_obj.create({ 'name': move.ref, 'debit': 0.0, 'credit': total_amount, 'account_id': self.journal_id.default_credit_account_id.id, 'move_id': move.id, 'journal_id': move.journal_id.id, }) # Write directly because we returned payments just now invoices.write(self._prepare_invoice_returned_vals()) move.post() self.write({'state': 'done', 'move_id': move.id}) return True @api.multi def action_cancel(self): invoices = self.env['account.invoice'] for move_line in self.mapped('move_id.line_ids').filtered( lambda x: x.user_type_id.type == 'receivable'): for partial_line in move_line.matched_credit_ids: invoices |= partial_line.origin_returned_move_ids.mapped( 'invoice_id') lines2reconcile = (partial_line.origin_returned_move_ids | partial_line.credit_move_id) partial_line.credit_move_id.remove_move_reconcile() lines2reconcile.reconcile() self.move_id.button_cancel() self.move_id.unlink() self.write({'state': 'cancelled', 'move_id': False}) invoices.check_payment_return() return True @api.multi def action_draft(self): self.write({'state': 'draft'}) return True class PaymentReturnLine(models.Model): _name = "payment.return.line" _description = 'Payment return lines' return_id = fields.Many2one( comodel_name='payment.return', string='Payment return', required=True, ondelete='cascade') concept = fields.Char( string='Concept', help="Read from imported 
file. Only for reference.") reason_id = fields.Many2one( comodel_name='payment.return.reason', oldname="reason", string='Return reason', ) reference = fields.Char( string='Reference', help="Reference to match moves from related documents") move_line_ids = fields.Many2many( comodel_name='account.move.line', string='Payment Reference') date = fields.Date( string='Return date', help="Only for reference", ) partner_name = fields.Char( string='Partner name', readonly=True, help="Read from imported file. Only for reference.") partner_id = fields.Many2one( comodel_name='res.partner', string='Customer', domain="[('customer', '=', True)]") amount = fields.Float( string='Amount', help="Returned amount. Can be different from the move amount", digits=dp.get_precision('Account')) expense_account = fields.Many2one( comodel_name='account.account', string='Charges Account') expense_amount = fields.Float(string='Charges Amount') expense_partner_id = fields.Many2one( comodel_name="res.partner", string="Charges Partner", domain=[('supplier', '=', True)], ) @api.multi def _compute_amount(self): for line in self: line.amount = sum(line.move_line_ids.mapped('credit')) @api.multi def _get_partner_from_move(self): for line in self.filtered(lambda x: not x.partner_id): partners = line.move_line_ids.mapped('partner_id') if len(partners) > 1: raise UserError( _("All payments must be owned by the same partner")) line.partner_id = partners[:1].id line.partner_name = partners[:1].name @api.onchange('move_line_ids') def _onchange_move_line(self): self._compute_amount() @api.onchange('expense_amount') def _onchange_expense_amount(self): if self.expense_amount: journal = self.return_id.journal_id self.expense_account = journal.default_expense_account_id self.expense_partner_id = journal.default_expense_partner_id @api.multi def match_invoice(self): for line in self: domain = line.partner_id and [ ('partner_id', '=', line.partner_id.id)] or [] domain.append(('number', '=', line.reference)) invoice = self.env['account.invoice'].search(domain) if invoice: payments = invoice.payment_move_line_ids if payments: line.move_line_ids = payments[0].ids if not line.concept: line.concept = _('Invoice: %s') % invoice.number @api.multi def match_move_lines(self): for line in self: domain = line.partner_id and [ ('partner_id', '=', line.partner_id.id)] or [] if line.return_id.journal_id: domain.append(('journal_id', '=', line.return_id.journal_id.id)) domain.extend([ ('account_id.internal_type', '=', 'receivable'), ('reconciled', '=', True), '|', ('name', '=', line.reference), ('ref', '=', line.reference), ]) move_lines = self.env['account.move.line'].search(domain) if move_lines: line.move_line_ids = move_lines.ids if not line.concept: line.concept = (_('Move lines: %s') % ', '.join(move_lines.mapped('name'))) @api.multi def match_move(self): for line in self: domain = line.partner_id and [ ('partner_id', '=', line.partner_id.id)] or [] domain.append(('name', '=', line.reference)) move = self.env['account.move'].search(domain) if move: if len(move) > 1: raise UserError( _("More than one matches to move reference: %s") % self.reference) line.move_line_ids = move.line_ids.filtered(lambda l: ( l.user_type_id.type == 'receivable' and l.reconciled )).ids if not line.concept: line.concept = _('Move: %s') % move.ref @api.multi def _find_match(self): # we filter again to remove all ready matched lines in inheritance lines2match = self.filtered(lambda x: ( (not x.move_line_ids) and x.reference)) lines2match.match_invoice() lines2match = 
lines2match.filtered(lambda x: ( (not x.move_line_ids) and x.reference)) lines2match.match_move_lines() lines2match = lines2match.filtered(lambda x: ( (not x.move_line_ids) and x.reference)) lines2match.match_move() self._get_partner_from_move() self.filtered(lambda x: not x.amount)._compute_amount() @api.multi def _prepare_extra_move_lines(self, move): """Include possible extra lines in the return journal entry for other return concepts. :param self: Reference to the payment return line. :param move: Reference to the journal entry created for the return. :return: A list with dictionaries of the extra move lines to add """ self.ensure_one() return []
agpl-3.0
5,260,136,324,633,751,000
39.506739
79
0.544118
false
4.058331
false
false
false
dpshelio/sunpy
examples/units_and_coordinates/planet_locations.py
1
1252
""" =================================== Getting the location of the planets =================================== How to get the position of planetary bodies im the solar system using `astropy's solar system ephemeris <http://docs.astropy.org/en/stable/coordinates/solarsystem.html#solar-system-ephemerides>`__ information and SunPy. """ import matplotlib.pyplot as plt from astropy.time import Time from sunpy.coordinates import get_body_heliographic_stonyhurst ############################################################################## # Lets grab the positions of each of the planets in Heliographic Stonyhurst # coordinates. obstime = Time('2014-05-15T07:54:00.005') planet_list = ['earth', 'venus', 'mars', 'mercury', 'jupiter', 'neptune', 'uranus', 'sun'] planet_coord = [get_body_heliographic_stonyhurst(this_planet, time=obstime) for this_planet in planet_list] ############################################################################## # Let's plot the results. Remember the Sun is at the center of this coordinate # system. ax = plt.subplot(projection='polar') for this_planet, this_coord in zip(planet_list, planet_coord): plt.polar(this_coord.lon.to('rad'), this_coord.radius, 'o', label=this_planet) plt.legend() plt.show()
bsd-2-clause
-3,296,977,724,421,778,000
42.172414
149
0.615815
false
3.639535
false
false
false
Djimmer/obts
Fuzzer/function_scanner.py
1
6412
#!/usr/bin/python # -*- coding: utf-8 -*- import socket import time import binascii import os import sys from libmich.formats import * import gsm_um import smarter_fuzzer_function_def as fuzzer import itertools from random import randint from math import factorial import logging from pythonjsonlogger import jsonlogger # Fill in current mobile device if len(sys.argv) > 2: device = sys.argv[1]; imsi = sys.argv[2]; else: print("ERROR: Device name not found.") print("Call the script with: ./smarter_fuzzer #DEVICE #IMSI"); print("Where #DEVICE is the name and #IMSI is the IMSI of the mobile device."); sys.exit(0); ############################################### SETTINGS ############################################# # Default OpenBTS port TESTCALL_PORT = 28670; # Log file location date = str(time.strftime("%Y%m%d-%H%M%S")); log_all_functions_JSON = "logs/functions/" + device + "_log_" + date + ".json"; # Creat socket tcsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) tcsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) tcsock.settimeout(2) ocsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) ocsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) HOST = 'localhost' # Symbolic name meaning all available interfaces PORT = 21337 # Arbitrary non-privileged port ocsock.bind((HOST, PORT)) ocsock.settimeout(20) # Initialize JSON logger logger = logging.getLogger() logger.setLevel(logging.INFO) # create a file handler handler = logging.FileHandler(log_all_functions_JSON) handler.setLevel(logging.INFO) # create a logging format formatter = jsonlogger.JsonFormatter() handler.setFormatter(formatter) # add the handlers to the logger logger.addHandler(handler) logger.info({ "message": "Function Scanner; Device and SIM information", "device": device, "imsi" : imsi}); ################################################# LOG ################################################ def log_packets(run, maxRun, packet, parsed_packet, reply, parsed_reply): if "ERROR" in parsed_reply: parsed_reply = "libmich ERROR"; logger.info({ "message": run, "maxRun" : maxRun, "packet": str(packet).encode("hex"), "parsed_packet": parsed_packet, "reply": str(reply).encode("hex"), "parsed_reply": parsed_reply }) ############################################## CHANNEL ############################################### # Send a restart to OpenBTS to establish a new channel def establishNewChannel(): restart = "RESTART"; print("Channel restart: Establishing a new channel, this may take a second."); tcsock.sendto(restart, ('127.0.0.1', TESTCALL_PORT)); # Wait for OpenBTS to confirm new channel. 
try: reply = ocsock.recv(20000) except: print "Could not establish a new channel."; return False; print "New channel established, fuzzing will continue."; time.sleep(1); return True; def send(tcsock, packet): try: tcsock.sendto(packet, ('127.0.0.1', TESTCALL_PORT)) reply = tcsock.recv(1024) except socket.timeout: print "socket.timeout: Mobile device is not responding"; return False return packetImplemented(reply) def packetImplemented(reply): parsed_reply = repr(L3Mobile.parse_L3(reply)); print "Received packet: ", str(reply).encode("hex") + "\n"; print "GSM_UM interpetation: " + '\n' + parsed_reply + "\n\n"; if "RELEASE_COMPLETE" in parsed_reply: return "Restart"; elif((str(reply).encode("hex") == "786e430200")): #MDL_ERROR_INDICATION return "Restart"; elif((str(reply).encode("hex") == "789ea400")): #MDL_ERROR_INDICATION return "Restart"; elif((str(reply).encode("hex") == "06126100")): return "Skip"; elif "Message type non-existent or not implemented" in parsed_reply: return "Skip"; else: return reply; ############################################### UTILS ################################################ def printPacket(packet, currentRun, total_runs): print('------------------------------- INPUT -------------------------------' + '\n'); print('Run ' + str(currentRun) + "/" + str(total_runs) + '\n'); # Make the packet readable if(len(packet) % 2 == 0): printable = str(packet).encode("hex"); print "Current complete packet: " + printable + '\n'; # Decode printable hex to make it usable for L3Mobile. # Adding the \x for the bytes. l3msg_input = repr(L3Mobile.parse_L3(str(packet))); print "GSM_UM interpetation: \n " + l3msg_input + '\n\n'; print "------------------------------- OUTPUT -------------------------------" + '\n'; ############################################ SMART FUZZER ############################################ # This fuzzer targets fields with variable length # Tries all different bytes for length byte # Tries random bytes for a range of lengths ###################################################################################################### # Fuzzer specific settings maxPacketAttempt = 5; currentPacketAttempt = 1; protocols = [3]; currentRun = 1; total_runs = len(protocols) * 256; print "Total amount of runs: " + str(total_runs); time.sleep(1); for i in protocols: firstByte = "{0:0{1}x}".format(i,2); n = 1; while n < 256: secondByte = "{0:0{1}x}".format(n,2); if(i == 5 and n == 17): # Skip because the packet 0511 is a Authentication Reject # and disconnects the mobile device secondByte = "{0:0{1}x}".format(n+1,2); packet = "\\x" + str(firstByte) + "\\x" + str(secondByte); packet = packet.replace('\\x', '').decode('hex'); print "Packet: " + str(packet).encode("hex"); printPacket(packet, currentRun, total_runs); # Send packet to the mobile device. result = send(tcsock, packet); if(result == "Restart" or result == False): currentPacketAttempt = currentPacketAttempt + 1; establishNewChannel(); if(currentPacketAttempt >= maxPacketAttempt): parsed_packet = repr(L3Mobile.parse_L3(packet)); log_packets(currentRun, total_runs, packet, parsed_packet, "None", "None"); currentRun = currentRun + 1; n = n + 1; elif(result =="Skip"): currentRun = currentRun + 1; currentPacketAttempt = 0; n = n + 1; else: parsed_result = repr(L3Mobile.parse_L3(result)); parsed_packet = repr(L3Mobile.parse_L3(packet)); log_packets(currentRun, total_runs, packet, parsed_packet, result, parsed_result); currentRun = currentRun + 1; currentPacketAttempt = 0; n = n + 1;
agpl-3.0
7,928,618,940,592,154,000
29.980676
102
0.611822
false
3.385428
false
false
false
gyurisc/stackjobs
clean_data.py
1
1758
# Ad-hoc fixing of mongo database
from datetime import datetime
import pymongo

client = pymongo.MongoClient('localhost', 27017)
db = client['stackoverflow']
jobs = db['jobs']

# total jobs
total_jobs = jobs.count()
print "Total jobs: %s" % total_jobs

print "=== Fixing Date Stamp ==="
date_stamp = datetime(2016, 6, 1, 7, 01, 01)
jobs.update_many({ "date" : { "$exists" : False}}, {"$set" : {"date" : date_stamp}})

count = 0
for job in jobs.find( { "date" : { "$exists" : False}}):
    count = count + 1
    # print(job)

print "=== Fixing Date Stamp ==="
print "Number of jobs with no date is %s." % count

count = 0
for job in jobs.find( { "date" : date_stamp}):
    count = count + 1
    # print(job)

print "Number of jobs with default date is %s." % count

# Week number
print "=== Fixing Week Number ==="
wkcount = jobs.find( {"weeknum" : {"$exists" : True}}).count()
print "Week number exists with %s and missing for %s jobs." % (wkcount, total_jobs - wkcount)

for job in jobs.find({"weeknum" : {"$exists": False}}):
    d = datetime.strptime(job["date"], '%Y-%m-%d')
    wk = d.isocalendar()[1]
    jobs.update({"_id" : job["_id"]}, {"$set" : {"weeknum" : wk}})

# Employee and Location Whitespace
print "=== Fixing Employee & Location ==="
print "Striping strings from white space in employer and location strings"
for job in jobs.find():
    _emp = job["employer"].strip()
    _loc = job["location"].strip()
    jobs.update({"_id" : job["_id"]}, {"$set" : {"employer" : _emp, "location" : _loc}})

print "Stripping strings from whitespace where salary exists"
for job in jobs.find({ "salary" : { "$exists" : True }}):
    _salary = job["salary"].strip()
    jobs.update({"_id" : job["_id"]}, {"$set" : {"salary" : _salary}})
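# Illustration of the week-number derivation used above (not part of the
# original file; no MongoDB needed, values come from the standard library):
#
#     from datetime import datetime
#     d = datetime.strptime('2016-06-01', '%Y-%m-%d')
#     d.isocalendar()      # -> (2016, 22, 3): ISO year, ISO week, ISO weekday
#     d.isocalendar()[1]   # -> 22, the value stored in the 'weeknum' field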
mit
6,723,297,113,947,829,000
31.555556
93
0.610353
false
3.106007
false
false
false
rodo/ansible-tsung
ec2tool.py
1
5117
#!/usr/bin/env python import boto.ec2 import jinja2 import sys import json import yaml class Tsing(boto.ec2.instance.Instance): def shortname(self): return self.private_dns_name.split('.')[0] @property def private_short_name(self): return self.private_dns_name.split('.')[0] def get_specs(instance, region, data): """ region (string) : the region name data (dict) """ datas = get_data_region(region, data) instance_spec = get_instance(instance, datas) return instance_spec def get_instance(instance, data): """ instance (string) data (dict) """ result = None for inst in data['instanceTypes']: for size in inst['sizes']: if instance == size['size']: result = size break return result def get_data_region(region, data): """ region (string) : the region name data (dict) """ config = data['config'] ec2_regions = {"us-east-1": "us-east", "us-west-1": "us-west", "us-west-2": "us-west-2", "eu-west-1": "eu-ireland", "ap-southeast-1": "apac-sin", "ap-southeast-2": "apac-syd", "ap-northeast-1": "apac-tokyo", "sa-east-1": "sa-east-1" } for reg in config['regions']: if reg['region'] == ec2_regions[region]: return reg def write_nodes(controller, injectors, data): """ controller (dict) injectors (dict) """ hosts = open("playbooks/roles/tsung/vars/nodes.yml", 'w') hosts.write("---\n") contr_str = "controller: { private_dns_name: '%s', private_ip_address: '%s', private_short_name: '%s' }\n\n" hosts.write(contr_str % (controller.private_dns_name, controller.private_ip_address, controller.private_short_name)) hosts.write("injectors:\n") for injec in injectors: print injec.__dict__ specs = get_specs(injec.instance_type, region, data) injector = {"private_dns_name": str(injec.private_dns_name), "private_ip_address": str(injec.private_ip_address), "private_short_name": str(injec.private_short_name), "instance_type": str(injec.instance_type), "cpu": int(specs['vCPU'])} hosts.write(" - {}".format(yaml.dump(injector, encoding='utf-8'))) hosts.close() def instance_weights(injectors, region, data): """ Define instances weights """ assw = {} weights = [] for injec in injectors: specs = get_specs(injec['instance_type'], region, data) weights.append(float(specs['memoryGiB'])) minweight = min(weights) for injec in injectors: specs = get_specs(injec['instance_type'], region, data) iid = injec['id'] assw[iid] = int(round(float(specs['memoryGiB']) / minweight)) return assw def parse_instances(instances): """ Wait for instance in running state """ controller = None injectors = [] for instance in instances: inst = instance.instances[0] inst.__class__ = Tsing if inst.state == 'running': tags = inst.tags if 'tsung_role' in tags: if tags['tsung_role'] == 'controller': controller = inst else: injectors.append(inst) else: injectors.append(inst) return controller, injectors def cloud_connect(region): """ Connect on cloud """ print "connect on {}...".format(region) conn = boto.ec2.connect_to_region(region) return conn def write_ini(injectors, controller): """ Write ansible .ini file """ templateLoader = jinja2.FileSystemLoader(searchpath=".") templateEnv = jinja2.Environment(loader=templateLoader) templateVars = {"injectors": injectors, "controller": controller} # # Configure the cluster # template = templateEnv.get_template("cluster.j2") clients = open("cluster.ini", 'w') clients.write(template.render(templateVars)) clients.close() if __name__ == "__main__": try: region = sys.argv[1] except: print "usage : ec2tool.py REGI0N" sys.exit(1) conn = cloud_connect(region) print "connected" instances = conn.get_all_instances() controller, 
injectors = parse_instances(instances) print "found\n {} injectors".format(len(injectors)) if controller is None: print "ERROR didn't found any controller" sys.exit(1) else: print " controller : tsung@{} ".format(controller.ip_address) # # with open("linux-od.json") as data_file: data = json.load(data_file) # # write_nodes(controller, injectors, data) write_ini(injectors, controller) # print 'ansible-playbook -i cluster.ini -u ubuntu playbooks/tsung.yml'
gpl-3.0
6,011,133,720,753,712,000
24.713568
112
0.560876
false
3.737765
false
false
false
nemesisdesign/openwisp2
openwisp_controller/config/controller/views.py
1
14788
import json from ipaddress import ip_address from django.core.exceptions import FieldDoesNotExist, ValidationError from django.db import transaction from django.db.models import Q from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.views.generic.base import View from django.views.generic.detail import SingleObjectMixin from swapper import load_model from .. import settings as app_settings from ..signals import checksum_requested, config_download_requested, device_registered from ..utils import ( ControllerResponse, forbid_unallowed, get_object_or_404, invalid_response, send_device_config, send_vpn_config, update_last_ip, ) Device = load_model('config', 'Device') OrganizationConfigSettings = load_model('config', 'OrganizationConfigSettings') Vpn = load_model('config', 'Vpn') class BaseConfigView(SingleObjectMixin, View): """ Base view that implements a ``get_object`` method Subclassed by all views dealing with existing objects """ def get_object(self, *args, **kwargs): kwargs['config__isnull'] = False return get_object_or_404(self.model, *args, **kwargs) class CsrfExtemptMixin(object): """ Mixin that makes the view extempt from CSFR protection """ @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): return super().dispatch(request, *args, **kwargs) class UpdateLastIpMixin(object): def update_last_ip(self, device, request): result = update_last_ip(device, request) if result: # avoid that any other device in the # same org stays with the same management_ip # This can happen when management interfaces are using DHCP # and they get a new address which was previously used by another # device that may now be offline, without this fix, we will end up # with two devices having the same management_ip, which will # cause OpenWISP to be confused self.model.objects.filter( organization=device.organization, management_ip=device.management_ip ).exclude(pk=device.pk).update(management_ip='') # in the case of last_ip, we take a different approach, # because it may be a public IP. 
If it's a public IP we will # allow it to be duplicated if ip_address(device.last_ip).is_private: Device.objects.filter( organization=device.organization, last_ip=device.last_ip ).exclude(pk=device.pk).update(last_ip='') return result class ActiveOrgMixin(object): """ adds check to organization.is_active to ``get_object`` method """ def get_object(self, *args, **kwargs): kwargs['organization__is_active'] = True return super().get_object(*args, **kwargs) class DeviceChecksumView(ActiveOrgMixin, UpdateLastIpMixin, BaseConfigView): """ returns device's configuration checksum """ model = Device def get(self, request, *args, **kwargs): device = self.get_object(*args, **kwargs) bad_request = forbid_unallowed(request, 'GET', 'key', device.key) if bad_request: return bad_request self.update_last_ip(device, request) checksum_requested.send( sender=device.__class__, instance=device, request=request ) return ControllerResponse(device.config.checksum, content_type='text/plain') class DeviceDownloadConfigView(ActiveOrgMixin, BaseConfigView): """ returns configuration archive as attachment """ model = Device def get(self, request, *args, **kwargs): device = self.get_object(*args, **kwargs) bad_request = forbid_unallowed(request, 'GET', 'key', device.key) if bad_request: return bad_request config_download_requested.send( sender=device.__class__, instance=device, request=request ) return send_device_config(device.config, request) class DeviceUpdateInfoView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView): """ updates general information about the device """ model = Device UPDATABLE_FIELDS = ['os', 'model', 'system'] def post(self, request, *args, **kwargs): device = self.get_object(*args, **kwargs) bad_request = forbid_unallowed(request, 'POST', 'key', device.key) if bad_request: return bad_request # update device information for attr in self.UPDATABLE_FIELDS: if attr in request.POST: setattr(device, attr, request.POST.get(attr)) # validate and save everything or fail otherwise try: with transaction.atomic(): device.full_clean() device.save() except ValidationError as e: # dump message_dict as JSON, # this should make it easy to debug return ControllerResponse( json.dumps(e.message_dict, indent=4, sort_keys=True), content_type='text/plain', status=400, ) return ControllerResponse('update-info: success', content_type='text/plain') class DeviceReportStatusView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView): """ updates status of config objects """ model = Device def post(self, request, *args, **kwargs): device = self.get_object(*args, **kwargs) config = device.config # ensure request is well formed and authorized allowed_status = [choices[0] for choices in config.STATUS] allowed_status.append('running') # backward compatibility required_params = [('key', device.key), ('status', allowed_status)] for key, value in required_params: bad_response = forbid_unallowed(request, 'POST', key, value) if bad_response: return bad_response status = request.POST.get('status') # mantain backward compatibility with old agents # ("running" was changed to "applied") status = status if status != 'running' else 'applied' # call set_status_{status} method on Config model method_name = f'set_status_{status}' getattr(config, method_name)() return ControllerResponse( f'report-result: success\ncurrent-status: {config.status}\n', content_type='text/plain', ) class DeviceRegisterView(UpdateLastIpMixin, CsrfExtemptMixin, View): """ registers new Config objects """ model = Device org_config_settings_model = OrganizationConfigSettings 
UPDATABLE_FIELDS = ['os', 'model', 'system'] def init_object(self, **kwargs): """ initializes Config object with incoming POST data """ device_model = self.model config_model = device_model.get_config_model() options = {} for attr in kwargs.keys(): # skip attributes that are not model fields try: device_model._meta.get_field(attr) except FieldDoesNotExist: continue options[attr] = kwargs.get(attr) # do not specify key if: # app_settings.CONSISTENT_REGISTRATION is False # if key is ``None`` (it would cause exception) if 'key' in options and ( app_settings.CONSISTENT_REGISTRATION is False or options['key'] is None ): del options['key'] if 'hardware_id' in options and options['hardware_id'] == "": options['hardware_id'] = None config = config_model(device=device_model(**options), backend=kwargs['backend']) config.organization = self.organization config.device.organization = self.organization return config def get_template_queryset(self, config): """ returns Template model queryset """ queryset = config.get_template_model().objects.all() # filter templates of the same organization or shared templates return queryset.filter(Q(organization=self.organization) | Q(organization=None)) def add_tagged_templates(self, config, request): """ adds templates specified in incoming POST tag setting """ tags = request.POST.get('tags') if not tags: return # retrieve tags and add them to current config tags = tags.split() queryset = self.get_template_queryset(config) templates = queryset.filter(tags__name__in=tags).only('id').distinct() for template in templates: config.templates.add(template) def invalid(self, request): """ ensures request is well formed """ allowed_backends = [path for path, name in app_settings.BACKENDS] required_params = [ ('secret', None), ('name', None), ('mac_address', None), ('backend', allowed_backends), ] # valid required params or forbid for key, value in required_params: invalid_response = forbid_unallowed(request, 'POST', key, value) if invalid_response: return invalid_response def forbidden(self, request): """ ensures request is authorized: - secret matches an organization's shared_secret - the organization has registration_enabled set to True """ try: secret = request.POST.get('secret') org_settings = self.org_config_settings_model.objects.select_related( 'organization' ).get(shared_secret=secret, organization__is_active=True) except self.org_config_settings_model.DoesNotExist: return invalid_response(request, 'error: unrecognized secret', status=403) if not org_settings.registration_enabled: return invalid_response(request, 'error: registration disabled', status=403) # set an organization attribute as a side effect # this attribute will be used in ``init_object`` self.organization = org_settings.organization def post(self, request, *args, **kwargs): """ POST logic """ if not app_settings.REGISTRATION_ENABLED: return ControllerResponse('error: registration disabled', status=403) # ensure request is valid bad_response = self.invalid(request) if bad_response: return bad_response # ensure request is allowed forbidden = self.forbidden(request) if forbidden: return forbidden # prepare model attributes key = None if app_settings.CONSISTENT_REGISTRATION: key = request.POST.get('key') # try retrieving existing Device first # (key is not None only if CONSISTENT_REGISTRATION is enabled) new = False try: device = self.model.objects.get(key=key) # update hw info for attr in self.UPDATABLE_FIELDS: if attr in request.POST: setattr(device, attr, request.POST.get(attr)) config = device.config # 
if get queryset fails, instantiate a new Device and Config except self.model.DoesNotExist: if not app_settings.REGISTRATION_SELF_CREATION: return ControllerResponse( 'Device not found in the system, please create it first.', status=404, ) new = True config = self.init_object(**request.POST.dict()) device = config.device # if get queryset succedes but device has no related config # instantiate new Config but reuse existing device except self.model.config.RelatedObjectDoesNotExist: config = self.init_object(**request.POST.dict()) config.device = device # update last_ip field of device device.last_ip = request.META.get('REMOTE_ADDR') # validate and save everything or fail otherwise try: with transaction.atomic(): device.full_clean() device.save() config.full_clean() config.save() except ValidationError as e: # dump message_dict as JSON, # this should make it easy to debug return ControllerResponse( json.dumps(e.message_dict, indent=4, sort_keys=True), content_type='text/plain', status=400, ) # add templates specified in tags self.add_tagged_templates(config, request) # emit device registered signal device_registered.send(sender=device.__class__, instance=device, is_new=new) # prepare response s = ( 'registration-result: success\n' 'uuid: {id}\n' 'key: {key}\n' 'hostname: {name}\n' 'is-new: {is_new}\n' ) attributes = device.__dict__.copy() attributes.update({'id': device.pk.hex, 'key': device.key, 'is_new': int(new)}) return ControllerResponse( s.format(**attributes), content_type='text/plain', status=201 ) class VpnChecksumView(BaseConfigView): """ returns vpn's configuration checksum """ model = Vpn def get(self, request, *args, **kwargs): vpn = self.get_object(*args, **kwargs) bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key) if bad_request: return bad_request checksum_requested.send(sender=vpn.__class__, instance=vpn, request=request) return ControllerResponse(vpn.checksum, content_type='text/plain') class VpnDownloadConfigView(BaseConfigView): """ returns configuration archive as attachment """ model = Vpn def get(self, request, *args, **kwargs): vpn = self.get_object(*args, **kwargs) bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key) if bad_request: return bad_request config_download_requested.send( sender=vpn.__class__, instance=vpn, request=request ) return send_vpn_config(vpn, request) device_checksum = DeviceChecksumView.as_view() device_download_config = DeviceDownloadConfigView.as_view() device_update_info = DeviceUpdateInfoView.as_view() device_report_status = DeviceReportStatusView.as_view() device_register = DeviceRegisterView.as_view() vpn_checksum = VpnChecksumView.as_view() vpn_download_config = VpnDownloadConfigView.as_view()
gpl-3.0
6,931,061,912,724,383,000
35.78607
88
0.61719
false
4.388131
true
false
false
pinax/pinax-blog
pinax/blog/admin.py
1
3056
from functools import partial as curry

from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _

from pinax.images.admin import ImageInline
from pinax.images.models import ImageSet

from .conf import settings
from .forms import AdminPostForm
from .models import Blog, Post, ReviewComment, Section


class PostImageSet(ImageSet):
    class Meta:
        proxy = True


class ReviewInline(admin.TabularInline):
    model = ReviewComment


def make_published(modeladmin, request, queryset):
    queryset = queryset.exclude(state=Post.STATE_CHOICES[-1][0], published__isnull=False)
    queryset.update(state=Post.STATE_CHOICES[-1][0])
    queryset.filter(published__isnull=True).update(published=timezone.now())
make_published.short_description = _("Publish selected posts")


class PostAdmin(admin.ModelAdmin):
    list_display = ["title", "state", "section", "published", "show_secret_share_url"]
    list_filter = ["section", "state"]
    form = AdminPostForm
    actions = [make_published]
    fields = [
        "section",
        "title",
        "slug",
        "author",
        "markup",
        "teaser",
        "content",
        "description",
        "sharable_url",
        "state",
        "published",
        "image_set"  # maybe this https://github.com/anziem/django_reverse_admin
    ]
    readonly_fields = ["sharable_url"]
    prepopulated_fields = {"slug": ("title",)}
    inlines = [
        ReviewInline,
    ]

    def show_secret_share_url(self, obj):
        return '<a href="{}">{}</a>'.format(obj.sharable_url, obj.sharable_url)
    show_secret_share_url.short_description = _("Share this url")
    show_secret_share_url.allow_tags = True

    def formfield_for_dbfield(self, db_field, **kwargs):
        request = kwargs.get("request")
        if db_field.name == "author":
            ff = super().formfield_for_dbfield(db_field, **kwargs)
            ff.initial = request.user.id
            return ff
        return super().formfield_for_dbfield(db_field, **kwargs)

    def get_form(self, request, obj=None, **kwargs):
        kwargs.update({
            "formfield_callback": curry(self.formfield_for_dbfield, request=request),
        })
        return super().get_form(request, obj, **kwargs)

    def save_form(self, request, form, change):
        # this is done for explicitness that we want form.save to commit
        # form.save doesn't take a commit kwarg for this reason
        return form.save(Blog.objects.first() if not settings.PINAX_BLOG_SCOPING_MODEL else None)


if settings.PINAX_BLOG_SCOPING_MODEL:
    PostAdmin.fields.insert(0, "blog")
    PostAdmin.list_filter.append("blog__scoper")


class SectionAdmin(admin.ModelAdmin):
    prepopulated_fields = {"slug": ("name",)}


admin.site.register(Post, PostAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(
    PostImageSet,
    list_display=["blog_post", "primary_image", "created_by", "created_at"],
    raw_id_fields=["created_by"],
    inlines=[ImageInline],
)
mit
7,357,990,425,241,163,000
29.56
97
0.659359
false
3.690821
false
false
false
roberthodgen/thought-jot
src/utilities.py
1
2732
""" The MIT License (MIT) Copyright (c) 2015 Robert Hodgen Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from ndb_users import users import string import re import google.net.proto.ProtocolBuffer from google.appengine.ext import ndb from google.appengine.api import mail def permalinkify(string): """ Return a clean URL-friendly version of `string`. """ clean = string.lower().strip() # lowercase, striped of whitespace clean = re.sub(r'\s(\s*)?', '-', clean) # Replace spaces with dashes "-" clean = re.sub(r'[^a-z0-9-]', '', clean) # Strip non-alphanumeric return clean def key_for_urlsafe_id(key_id): """ Try returning an NDB Key for `key_id`. None otherwise. """ key = None try: key = ndb.Key(urlsafe=key_id) except google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError, e: return key finally: return key def send_project_contributor_email(email_address, user, project): """ Send `email` an email notifying them they've been added as a contributor on `project`. """ sender_email_address = users._email_sender() subject = ''.join([project.name, ' invite']) with open('resource/email/project_contributor.txt', 'r') as f: body_text = f.read() body_text = body_text.format(login='http://thought-jot.appspot.com/login', from_email=user.email, to_email=email_address, project_name=project.name) mail.send_mail(sender_email_address, email_address, subject, body_text) def str_to_bool(string, allow_none=False): """ Return a Boolean value for `string`. """ if allow_none and string is None: return None if string == 'True' or string == 'true': return True else: return False
mit
7,328,222,909,209,888,000
34.025641
80
0.712299
false
3.91404
false
false
false
quokkaproject/quokka-classes
pipelines.py
1
2318
# coding: utf-8

from flask import request
from quokka.modules.cart.pipelines.base import CartPipeline
from quokka.utils import get_current_user

from .models import CourseSubscription, Subscriber


class SetSubscriber(CartPipeline):
    def process(self):
        name = request.form.get("name")
        email = request.form.get("email")
        area_code = request.form.get("area_code")
        phone = request.form.get("phone")
        document = request.form.get("document")
        address = request.form.get("address")

        confirm = request.form.get("classes_setsubscriber_confirm")
        if not confirm:
            return self.render('classes/setsubscriber.html', cart=self.cart)

        formdata = dict(name=name, email=email, area_code=area_code,
                        phone=phone, document=document, address=address)

        subscriptions = CourseSubscription.objects.filter(
            cart=self.cart
        )

        user = get_current_user()
        for subscription in subscriptions:
            subscription.subscriber = self.get_subscriber(user, **formdata)
            subscription.save()

        self.cart.sender_data = {
            "name": name or user.name,
            "email": email or user.email,
            "area_code": area_code,
            "phone": phone.replace('-', '').replace('(', '').replace(')', ''),
        }

        self.cart.addlog("SetSubscriber Pipeline: defined sender data")

        return self.go()

    def get_subscriber(self, user, **kwargs):
        if not user:
            return None
        try:
            sub = Subscriber.objects.get(user=user)
            sub.name = kwargs.get('name')
            sub.email = kwargs.get('email')
            sub.document = kwargs.get('document')
            sub.address = kwargs.get('address')
            sub.phone = u"%(area_code)s%(phone)s" % kwargs
            sub.save()
            return sub
        except:
            self.cart.addlog("Creating a new subscriber", save=False)
            return Subscriber.objects.create(
                name=kwargs.get('name'),
                email=kwargs.get('email'),
                user=user,
                document=kwargs.get('document'),
                address=kwargs.get('address'),
                phone=u"%(area_code)s%(phone)s" % kwargs
            )
mit
7,357,983,847,842,146,000
33.088235
78
0.572045
false
4.23766
false
false
false
walshjon/openmc
openmc/region.py
1
18303
from abc import ABCMeta, abstractmethod from collections import OrderedDict from collections.abc import Iterable, MutableSequence from copy import deepcopy import numpy as np from openmc.checkvalue import check_type class Region(metaclass=ABCMeta): """Region of space that can be assigned to a cell. Region is an abstract base class that is inherited by :class:`openmc.Halfspace`, :class:`openmc.Intersection`, :class:`openmc.Union`, and :class:`openmc.Complement`. Each of those respective classes are typically not instantiated directly but rather are created through operators of the Surface and Region classes. """ def __and__(self, other): return Intersection((self, other)) def __or__(self, other): return Union((self, other)) def __invert__(self): return Complement(self) @abstractmethod def __contains__(self, point): pass @abstractmethod def __str__(self): pass def __eq__(self, other): if not isinstance(other, type(self)): return False else: return str(self) == str(other) def __ne__(self, other): return not self == other def get_surfaces(self, surfaces=None): """ Recursively find all the surfaces referenced by a region and return them Parameters ---------- surfaces: collections.OrderedDict, optional Dictionary mapping surface IDs to :class:`openmc.Surface` instances Returns ------- surfaces: collections.OrderedDict Dictionary mapping surface IDs to :class:`openmc.Surface` instances """ if surfaces is None: surfaces = OrderedDict() for region in self: surfaces = region.get_surfaces(surfaces) return surfaces @staticmethod def from_expression(expression, surfaces): """Generate a region given an infix expression. Parameters ---------- expression : str Boolean expression relating surface half-spaces. The possible operators are union '|', intersection ' ', and complement '~'. For example, '(1 -2) | 3 ~(4 -5)'. surfaces : dict Dictionary whose keys are suface IDs that appear in the Boolean expression and whose values are Surface objects. """ # Strip leading and trailing whitespace expression = expression.strip() # Convert the string expression into a list of tokens, i.e., operators # and surface half-spaces, representing the expression in infix # notation. 
i = 0 i_start = -1 tokens = [] while i < len(expression): if expression[i] in '()|~ ': # If special character appears immediately after a non-operator, # create a token with the apporpriate half-space if i_start >= 0: j = int(expression[i_start:i]) if j < 0: tokens.append(-surfaces[abs(j)]) else: tokens.append(+surfaces[abs(j)]) if expression[i] in '()|~': # For everything other than intersection, add the operator # to the list of tokens tokens.append(expression[i]) else: # Find next non-space character while expression[i+1] == ' ': i += 1 # If previous token is a halfspace or right parenthesis and next token # is not a left parenthese or union operator, that implies that the # whitespace is to be interpreted as an intersection operator if (i_start >= 0 or tokens[-1] == ')') and \ expression[i+1] not in ')|': tokens.append(' ') i_start = -1 else: # Check for invalid characters if expression[i] not in '-+0123456789': raise SyntaxError("Invalid character '{}' in expression" .format(expression[i])) # If we haven't yet reached the start of a word, start one if i_start < 0: i_start = i i += 1 # If we've reached the end and we're still in a word, create a # half-space token and add it to the list if i_start >= 0: j = int(expression[i_start:]) if j < 0: tokens.append(-surfaces[abs(j)]) else: tokens.append(+surfaces[abs(j)]) # The functions below are used to apply an operator to operands on the # output queue during the shunting yard algorithm. def can_be_combined(region): return isinstance(region, Complement) or hasattr(region, 'surface') def apply_operator(output, operator): r2 = output.pop() if operator == ' ': r1 = output.pop() if isinstance(r1, Intersection): r1 &= r2 output.append(r1) elif isinstance(r2, Intersection) and can_be_combined(r1): r2.insert(0, r1) output.append(r2) else: output.append(r1 & r2) elif operator == '|': r1 = output.pop() if isinstance(r1, Union): r1 |= r2 output.append(r1) elif isinstance(r2, Union) and can_be_combined(r1): r2.insert(0, r1) output.append(r2) else: output.append(r1 | r2) elif operator == '~': output.append(~r2) # The following is an implementation of the shunting yard algorithm to # generate an abstract syntax tree for the region expression. output = [] stack = [] precedence = {'|': 1, ' ': 2, '~': 3} associativity = {'|': 'left', ' ': 'left', '~': 'right'} for token in tokens: if token in (' ', '|', '~'): # Normal operators while stack: op = stack[-1] if (op not in ('(', ')') and ((associativity[token] == 'right' and precedence[token] < precedence[op]) or (associativity[token] == 'left' and precedence[token] <= precedence[op]))): apply_operator(output, stack.pop()) else: break stack.append(token) elif token == '(': # Left parentheses stack.append(token) elif token == ')': # Right parentheses while stack[-1] != '(': apply_operator(output, stack.pop()) if len(stack) == 0: raise SyntaxError('Mismatched parentheses in ' 'region specification.') stack.pop() else: # Surface halfspaces output.append(token) while stack: if stack[-1] in '()': raise SyntaxError('Mismatched parentheses in region ' 'specification.') apply_operator(output, stack.pop()) # Since we are generating an abstract syntax tree rather than a reverse # Polish notation expression, the output queue should have a single item # at the end return output[0] @abstractmethod def clone(self, memo=None): """Create a copy of this region - each of the surfaces in the region's nodes will be cloned and will have new unique IDs. 
Parameters ---------- memo : dict or None A nested dictionary of previously cloned objects. This parameter is used internally and should not be specified by the user. Returns ------- clone : openmc.Region The clone of this region Raises ------ NotImplementedError This method is not implemented for the abstract region class. """ raise NotImplementedError('The clone method is not implemented for ' 'the abstract region class.') class Intersection(Region, MutableSequence): r"""Intersection of two or more regions. Instances of Intersection are generally created via the & operator applied to two instances of :class:`openmc.Region`. This is illustrated in the following example: >>> equator = openmc.ZPlane(z0=0.0) >>> earth = openmc.Sphere(R=637.1e6) >>> northern_hemisphere = -earth & +equator >>> southern_hemisphere = -earth & -equator >>> type(northern_hemisphere) <class 'openmc.region.Intersection'> Instances of this class behave like a mutable sequence, e.g., they can be indexed and have an append() method. Parameters ---------- nodes : iterable of openmc.Region Regions to take the intersection of Attributes ---------- bounding_box : tuple of numpy.array Lower-left and upper-right coordinates of an axis-aligned bounding box """ def __init__(self, nodes): self._nodes = list(nodes) def __and__(self, other): new = Intersection(self) new &= other return new def __iand__(self, other): if isinstance(other, Intersection): self.extend(other) else: self.append(other) return self # Implement mutable sequence protocol by delegating to list def __getitem__(self, key): return self._nodes[key] def __setitem__(self, key, value): self._nodes[key] = value def __delitem__(self, key): del self._nodes[key] def __len__(self): return len(self._nodes) def insert(self, index, value): self._nodes.insert(index, value) def __contains__(self, point): """Check whether a point is contained in the region. Parameters ---------- point : 3-tuple of float Cartesian coordinates, :math:`(x',y',z')`, of the point Returns ------- bool Whether the point is in the region """ return all(point in n for n in self) def __str__(self): return '(' + ' '.join(map(str, self)) + ')' @property def bounding_box(self): lower_left = np.array([-np.inf, -np.inf, -np.inf]) upper_right = np.array([np.inf, np.inf, np.inf]) for n in self: lower_left_n, upper_right_n = n.bounding_box lower_left[:] = np.maximum(lower_left, lower_left_n) upper_right[:] = np.minimum(upper_right, upper_right_n) return lower_left, upper_right def clone(self, memo=None): """Create a copy of this region - each of the surfaces in the intersection's nodes will be cloned and will have new unique IDs. Parameters ---------- memo : dict or None A nested dictionary of previously cloned objects. This parameter is used internally and should not be specified by the user. Returns ------- clone : openmc.Intersection The clone of this intersection """ if memo is None: memo = {} clone = deepcopy(self) clone[:] = [n.clone(memo) for n in self] return clone class Union(Region, MutableSequence): r"""Union of two or more regions. Instances of Union are generally created via the | operator applied to two instances of :class:`openmc.Region`. This is illustrated in the following example: >>> s1 = openmc.ZPlane(z0=0.0) >>> s2 = openmc.Sphere(R=637.1e6) >>> type(-s2 | +s1) <class 'openmc.region.Union'> Instances of this class behave like a mutable sequence, e.g., they can be indexed and have an append() method. 
Parameters ---------- nodes : iterable of openmc.Region Regions to take the union of Attributes ---------- bounding_box : 2-tuple of numpy.array Lower-left and upper-right coordinates of an axis-aligned bounding box """ def __init__(self, nodes): self._nodes = list(nodes) def __or__(self, other): new = Union(self) new |= other return new def __ior__(self, other): if isinstance(other, Union): self.extend(other) else: self.append(other) return self # Implement mutable sequence protocol by delegating to list def __getitem__(self, key): return self._nodes[key] def __setitem__(self, key, value): self._nodes[key] = value def __delitem__(self, key): del self._nodes[key] def __len__(self): return len(self._nodes) def insert(self, index, value): self._nodes.insert(index, value) def __contains__(self, point): """Check whether a point is contained in the region. Parameters ---------- point : 3-tuple of float Cartesian coordinates, :math:`(x',y',z')`, of the point Returns ------- bool Whether the point is in the region """ return any(point in n for n in self) def __str__(self): return '(' + ' | '.join(map(str, self)) + ')' @property def bounding_box(self): lower_left = np.array([np.inf, np.inf, np.inf]) upper_right = np.array([-np.inf, -np.inf, -np.inf]) for n in self: lower_left_n, upper_right_n = n.bounding_box lower_left[:] = np.minimum(lower_left, lower_left_n) upper_right[:] = np.maximum(upper_right, upper_right_n) return lower_left, upper_right def clone(self, memo=None): """Create a copy of this region - each of the surfaces in the union's nodes will be cloned and will have new unique IDs. Parameters ---------- memo : dict or None A nested dictionary of previously cloned objects. This parameter is used internally and should not be specified by the user. Returns ------- clone : openmc.Union The clone of this union """ if memo is None: memo = {} clone = deepcopy(self) clone[:] = [n.clone(memo) for n in self] return clone class Complement(Region): """Complement of a region. The Complement of an existing :class:`openmc.Region` can be created by using the ~ operator as the following example demonstrates: >>> xl = openmc.XPlane(x0=-10.0) >>> xr = openmc.XPlane(x0=10.0) >>> yl = openmc.YPlane(y0=-10.0) >>> yr = openmc.YPlane(y0=10.0) >>> inside_box = +xl & -xr & +yl & -yl >>> outside_box = ~inside_box >>> type(outside_box) <class 'openmc.region.Complement'> Parameters ---------- node : openmc.Region Region to take the complement of Attributes ---------- node : openmc.Region Regions to take the complement of bounding_box : tuple of numpy.array Lower-left and upper-right coordinates of an axis-aligned bounding box """ def __init__(self, node): self.node = node def __contains__(self, point): """Check whether a point is contained in the region. Parameters ---------- point : 3-tuple of float Cartesian coordinates, :math:`(x',y',z')`, of the point Returns ------- bool Whether the point is in the region """ return point not in self.node def __str__(self): return '~' + str(self.node) @property def node(self): return self._node @node.setter def node(self, node): check_type('node', node, Region) self._node = node @property def bounding_box(self): # Use De Morgan's laws to distribute the complement operator so that it # only applies to surface half-spaces, thus allowing us to calculate the # bounding box in the usual recursive manner. 
if isinstance(self.node, Union): temp_region = Intersection(~n for n in self.node) elif isinstance(self.node, Intersection): temp_region = Union(~n for n in self.node) elif isinstance(self.node, Complement): temp_region = self.node.node else: temp_region = ~self.node return temp_region.bounding_box def get_surfaces(self, surfaces=None): """ Recursively find and return all the surfaces referenced by the node Parameters ---------- surfaces: collections.OrderedDict, optional Dictionary mapping surface IDs to :class:`openmc.Surface` instances Returns ------- surfaces: collections.OrderedDict Dictionary mapping surface IDs to :class:`openmc.Surface` instances """ if surfaces is None: surfaces = OrderedDict() for region in self.node: surfaces = region.get_surfaces(surfaces) return surfaces def clone(self, memo=None): """Create a copy of this region - each of the surfaces in the complement's node will be cloned and will have new unique IDs. Parameters ---------- memo : dict or None A nested dictionary of previously cloned objects. This parameter is used internally and should not be specified by the user. Returns ------- clone : openmc.Complement The clone of this complement """ if memo is None: memo = {} clone = deepcopy(self) clone.node = self.node.clone(memo) return clone
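A minimal usage sketch of the region algebra defined in the file above, assuming the openmc package is importable and using only the surfaces and operators shown in its own docstrings (ZPlane, Sphere, &, ~, bounding_box); it is illustrative only, not part of the stored file, and exercises the De Morgan-based Complement.bounding_box on the hemisphere example.

# Illustrative sketch only: assumes openmc (as documented above) is installed.
import openmc

equator = openmc.ZPlane(z0=0.0)
earth = openmc.Sphere(R=637.1e6)

northern_hemisphere = -earth & +equator   # Intersection of two half-spaces
outside = ~northern_hemisphere            # Complement node

# Intersection.bounding_box intersects the per-node boxes, so the hemisphere
# is bounded by the sphere's box clipped at the equatorial plane.
lower_left, upper_right = northern_hemisphere.bounding_box
print(lower_left, upper_right)

# Complement.bounding_box distributes ~ via De Morgan: ~(A & B) == ~A | ~B.
# The resulting Union of half-space complements is unbounded in this case.
print(outside.bounding_box)

# Point membership delegates recursively through __contains__.
print((0.0, 0.0, 1.0e6) in northern_hemisphere)    # inside sphere, above equator
print((0.0, 0.0, -1.0e6) in northern_hemisphere)   # below the equator -> False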
mit
-4,142,554,628,031,096,300
30.233788
90
0.543845
false
4.538309
false
false
false
squilter/ardupilot
Tools/autotest/arduplane.py
1
85180
#!/usr/bin/env python # Fly ArduPlane in SITL from __future__ import print_function import math import os import time from pymavlink import quaternion from pymavlink import mavutil from common import AutoTest from common import AutoTestTimeoutException from common import NotAchievedException from common import PreconditionFailedException import operator # get location of scripts testdir = os.path.dirname(os.path.realpath(__file__)) SITL_START_LOCATION = mavutil.location(-35.362938, 149.165085, 585, 354) WIND = "0,180,0.2" # speed,direction,variance class AutoTestPlane(AutoTest): @staticmethod def get_not_armable_mode_list(): return [] @staticmethod def get_not_disarmed_settable_modes_list(): return ["FOLLOW"] @staticmethod def get_no_position_not_settable_modes_list(): return [] @staticmethod def get_position_armable_modes_list(): return ["GUIDED", "AUTO"] @staticmethod def get_normal_armable_modes_list(): return ["MANUAL", "STABILIZE", "ACRO"] def log_name(self): return "ArduPlane" def test_filepath(self): return os.path.realpath(__file__) def sitl_start_location(self): return SITL_START_LOCATION def defaults_filepath(self): return os.path.join(testdir, 'default_params/plane-jsbsim.parm') def set_current_test_name(self, name): self.current_test_name_directory = "ArduPlane_Tests/" + name + "/" def default_frame(self): return "plane-elevrev" def apply_defaultfile_parameters(self): # plane passes in a defaults_filepath in place of applying # parameters afterwards. pass def is_plane(self): return True def get_stick_arming_channel(self): return int(self.get_parameter("RCMAP_YAW")) def get_disarm_delay(self): return int(self.get_parameter("LAND_DISARMDELAY")) def set_autodisarm_delay(self, delay): self.set_parameter("LAND_DISARMDELAY", delay) def takeoff(self, alt=150, alt_max=None, relative=True): """Takeoff to altitude.""" if alt_max is None: alt_max = alt + 30 self.change_mode("FBWA") self.wait_ready_to_arm() self.arm_vehicle() # some rudder to counteract the prop torque self.set_rc(4, 1700) # some up elevator to keep the tail down self.set_rc(2, 1200) # get it moving a bit first self.set_rc(3, 1300) self.wait_groundspeed(6, 100) # a bit faster again, straighten rudder self.set_rc(3, 1600) self.set_rc(4, 1500) self.wait_groundspeed(12, 100) # hit the gas harder now, and give it some more elevator self.set_rc(2, 1100) self.set_rc(3, 2000) # gain a bit of altitude self.wait_altitude(alt, alt_max, timeout=30, relative=relative) # level off self.set_rc(2, 1500) self.progress("TAKEOFF COMPLETE") def fly_left_circuit(self): """Fly a left circuit, 200m on a side.""" self.mavproxy.send('switch 4\n') self.wait_mode('FBWA') self.set_rc(3, 2000) self.wait_level_flight() self.progress("Flying left circuit") # do 4 turns for i in range(0, 4): # hard left self.progress("Starting turn %u" % i) self.set_rc(1, 1000) self.wait_heading(270 - (90*i), accuracy=10) self.set_rc(1, 1500) self.progress("Starting leg %u" % i) self.wait_distance(100, accuracy=20) self.progress("Circuit complete") def fly_RTL(self): """Fly to home.""" self.progress("Flying home in RTL") self.mavproxy.send('switch 2\n') self.wait_mode('RTL') self.wait_location(self.homeloc, accuracy=120, target_altitude=self.homeloc.alt+100, height_accuracy=20, timeout=180) self.progress("RTL Complete") def fly_LOITER(self, num_circles=4): """Loiter where we are.""" self.progress("Testing LOITER for %u turns" % num_circles) self.mavproxy.send('loiter\n') self.wait_mode('LOITER') m = self.mav.recv_match(type='VFR_HUD', blocking=True) initial_alt = m.alt 
self.progress("Initial altitude %u\n" % initial_alt) while num_circles > 0: self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) num_circles -= 1 self.progress("Loiter %u circles left" % num_circles) m = self.mav.recv_match(type='VFR_HUD', blocking=True) final_alt = m.alt self.progress("Final altitude %u initial %u\n" % (final_alt, initial_alt)) self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') if abs(final_alt - initial_alt) > 20: raise NotAchievedException("Failed to maintain altitude") self.progress("Completed Loiter OK") def fly_CIRCLE(self, num_circles=1): """Circle where we are.""" self.progress("Testing CIRCLE for %u turns" % num_circles) self.mavproxy.send('mode CIRCLE\n') self.wait_mode('CIRCLE') m = self.mav.recv_match(type='VFR_HUD', blocking=True) initial_alt = m.alt self.progress("Initial altitude %u\n" % initial_alt) while num_circles > 0: self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) num_circles -= 1 self.progress("CIRCLE %u circles left" % num_circles) m = self.mav.recv_match(type='VFR_HUD', blocking=True) final_alt = m.alt self.progress("Final altitude %u initial %u\n" % (final_alt, initial_alt)) self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') if abs(final_alt - initial_alt) > 20: raise NotAchievedException("Failed to maintain altitude") self.progress("Completed CIRCLE OK") def wait_level_flight(self, accuracy=5, timeout=30): """Wait for level flight.""" tstart = self.get_sim_time() self.progress("Waiting for level flight") self.set_rc(1, 1500) self.set_rc(2, 1500) self.set_rc(4, 1500) while self.get_sim_time_cached() < tstart + timeout: m = self.mav.recv_match(type='ATTITUDE', blocking=True) roll = math.degrees(m.roll) pitch = math.degrees(m.pitch) self.progress("Roll=%.1f Pitch=%.1f" % (roll, pitch)) if math.fabs(roll) <= accuracy and math.fabs(pitch) <= accuracy: self.progress("Attained level flight") return raise NotAchievedException("Failed to attain level flight") def change_altitude(self, altitude, accuracy=30): """Get to a given altitude.""" self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') alt_error = self.mav.messages['VFR_HUD'].alt - altitude if alt_error > 0: self.set_rc(2, 2000) else: self.set_rc(2, 1000) self.wait_altitude(altitude-accuracy/2, altitude+accuracy/2) self.set_rc(2, 1500) self.progress("Reached target altitude at %u" % self.mav.messages['VFR_HUD'].alt) return self.wait_level_flight() def axial_left_roll(self, count=1): """Fly a left axial roll.""" # full throttle! self.set_rc(3, 2000) self.change_altitude(self.homeloc.alt+300) # fly the roll in manual self.mavproxy.send('switch 6\n') self.wait_mode('MANUAL') while count > 0: self.progress("Starting roll") self.set_rc(1, 1000) try: self.wait_roll(-150, accuracy=90) self.wait_roll(150, accuracy=90) self.wait_roll(0, accuracy=90) except Exception as e: self.set_rc(1, 1500) raise e count -= 1 # back to FBWA self.set_rc(1, 1500) self.mavproxy.send('switch 4\n') self.wait_mode('FBWA') self.set_rc(3, 1700) return self.wait_level_flight() def inside_loop(self, count=1): """Fly a inside loop.""" # full throttle! 
self.set_rc(3, 2000) self.change_altitude(self.homeloc.alt+300) # fly the loop in manual self.mavproxy.send('switch 6\n') self.wait_mode('MANUAL') while count > 0: self.progress("Starting loop") self.set_rc(2, 1000) self.wait_pitch(-60, accuracy=20) self.wait_pitch(0, accuracy=20) count -= 1 # back to FBWA self.set_rc(2, 1500) self.mavproxy.send('switch 4\n') self.wait_mode('FBWA') self.set_rc(3, 1700) return self.wait_level_flight() def set_attitude_target(self, tolerance=10): """Test setting of attitude target in guided mode.""" self.change_mode("GUIDED") # self.set_parameter("STALL_PREVENTION", 0) state_roll_over = "roll-over" state_stabilize_roll = "stabilize-roll" state_hold = "hold" state_roll_back = "roll-back" state_done = "done" tstart = self.get_sim_time() try: state = state_roll_over while state != state_done: m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=0.1) now = self.get_sim_time_cached() if now - tstart > 20: raise AutoTestTimeoutException("Manuevers not completed") if m is None: continue r = math.degrees(m.roll) if state == state_roll_over: target_roll_degrees = 60 if abs(r - target_roll_degrees) < tolerance: state = state_stabilize_roll stabilize_start = now elif state == state_stabilize_roll: # just give it a little time to sort it self out if now - stabilize_start > 2: state = state_hold hold_start = now elif state == state_hold: target_roll_degrees = 60 if now - hold_start > tolerance: state = state_roll_back if abs(r - target_roll_degrees) > tolerance: raise NotAchievedException("Failed to hold attitude") elif state == state_roll_back: target_roll_degrees = 0 if abs(r - target_roll_degrees) < tolerance: state = state_done else: raise ValueError("Unknown state %s" % str(state)) m_nav = self.mav.messages['NAV_CONTROLLER_OUTPUT'] self.progress("%s Roll: %f desired=%f set=%f" % (state, r, m_nav.nav_roll, target_roll_degrees)) time_boot_millis = 0 # FIXME target_system = 1 # FIXME target_component = 1 # FIXME type_mask = 0b10000001 ^ 0xFF # FIXME # attitude in radians: q = quaternion.Quaternion([math.radians(target_roll_degrees), 0, 0]) roll_rate_radians = 0.5 pitch_rate_radians = 0 yaw_rate_radians = 0 thrust = 1.0 self.mav.mav.set_attitude_target_send(time_boot_millis, target_system, target_component, type_mask, q, roll_rate_radians, pitch_rate_radians, yaw_rate_radians, thrust) except Exception as e: self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') self.set_rc(3, 1700) raise e # back to FBWA self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') self.set_rc(3, 1700) self.wait_level_flight() def test_stabilize(self, count=1): """Fly stabilize mode.""" # full throttle! self.set_rc(3, 2000) self.set_rc(2, 1300) self.change_altitude(self.homeloc.alt+300) self.set_rc(2, 1500) self.mavproxy.send("mode STABILIZE\n") self.wait_mode('STABILIZE') while count > 0: self.progress("Starting roll") self.set_rc(1, 2000) self.wait_roll(-150, accuracy=90) self.wait_roll(150, accuracy=90) self.wait_roll(0, accuracy=90) count -= 1 self.set_rc(1, 1500) self.wait_roll(0, accuracy=5) # back to FBWA self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') self.set_rc(3, 1700) return self.wait_level_flight() def test_acro(self, count=1): """Fly ACRO mode.""" # full throttle! 
self.set_rc(3, 2000) self.set_rc(2, 1300) self.change_altitude(self.homeloc.alt+300) self.set_rc(2, 1500) self.mavproxy.send("mode ACRO\n") self.wait_mode('ACRO') while count > 0: self.progress("Starting roll") self.set_rc(1, 1000) self.wait_roll(-150, accuracy=90) self.wait_roll(150, accuracy=90) self.wait_roll(0, accuracy=90) count -= 1 self.set_rc(1, 1500) # back to FBWA self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') self.wait_level_flight() self.mavproxy.send("mode ACRO\n") self.wait_mode('ACRO') count = 2 while count > 0: self.progress("Starting loop") self.set_rc(2, 1000) self.wait_pitch(-60, accuracy=20) self.wait_pitch(0, accuracy=20) count -= 1 self.set_rc(2, 1500) # back to FBWA self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') self.set_rc(3, 1700) return self.wait_level_flight() def test_FBWB(self, mode='FBWB'): """Fly FBWB or CRUISE mode.""" self.mavproxy.send("mode %s\n" % mode) self.wait_mode(mode) self.set_rc(3, 1700) self.set_rc(2, 1500) # lock in the altitude by asking for an altitude change then releasing self.set_rc(2, 1000) self.wait_distance(50, accuracy=20) self.set_rc(2, 1500) self.wait_distance(50, accuracy=20) m = self.mav.recv_match(type='VFR_HUD', blocking=True) initial_alt = m.alt self.progress("Initial altitude %u\n" % initial_alt) self.progress("Flying right circuit") # do 4 turns for i in range(0, 4): # hard left self.progress("Starting turn %u" % i) self.set_rc(1, 1800) try: self.wait_heading(0 + (90*i), accuracy=20, timeout=60) except Exception as e: self.set_rc(1, 1500) raise e self.set_rc(1, 1500) self.progress("Starting leg %u" % i) self.wait_distance(100, accuracy=20) self.progress("Circuit complete") self.progress("Flying rudder left circuit") # do 4 turns for i in range(0, 4): # hard left self.progress("Starting turn %u" % i) self.set_rc(4, 1900) try: self.wait_heading(360 - (90*i), accuracy=20, timeout=60) except Exception as e: self.set_rc(4, 1500) raise e self.set_rc(4, 1500) self.progress("Starting leg %u" % i) self.wait_distance(100, accuracy=20) self.progress("Circuit complete") m = self.mav.recv_match(type='VFR_HUD', blocking=True) final_alt = m.alt self.progress("Final altitude %u initial %u\n" % (final_alt, initial_alt)) # back to FBWA self.mavproxy.send('mode FBWA\n') self.wait_mode('FBWA') if abs(final_alt - initial_alt) > 20: raise NotAchievedException("Failed to maintain altitude") return self.wait_level_flight() def fly_mission(self, filename, mission_timeout=60.0): """Fly a mission from a file.""" self.progress("Flying mission %s" % filename) self.load_mission(filename) self.mavproxy.send('switch 1\n') # auto mode self.wait_mode('AUTO') self.wait_waypoint(1, 7, max_dist=60) self.wait_groundspeed(0, 0.5, timeout=mission_timeout) self.mavproxy.expect("Auto disarmed") self.progress("Mission OK") def fly_do_reposition(self): self.progress("Takeoff") self.takeoff(alt=50) self.set_rc(3, 1500) self.progress("Entering guided and flying somewhere constant") self.change_mode("GUIDED") loc = self.mav.location() self.location_offset_ne(loc, 500, 500) new_alt = 100 self.run_cmd_int( mavutil.mavlink.MAV_CMD_DO_REPOSITION, 0, 0, 0, 0, int(loc.lat*1e7), int(loc.lng*1e7), new_alt, # alt frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, ) self.wait_altitude(new_alt-10, new_alt, timeout=30, relative=True) self.fly_home_land_and_disarm() def fly_deepstall(self): # self.fly_deepstall_absolute() self.fly_deepstall_relative() def fly_deepstall_absolute(self): self.start_subtest("DeepStall Relative Absolute") 
self.set_parameter("LAND_TYPE", 1) deepstall_elevator_pwm = 1661 self.set_parameter("LAND_DS_ELEV_PWM", deepstall_elevator_pwm) self.load_mission("plane-deepstall-mission.txt") self.change_mode("AUTO") self.wait_ready_to_arm() self.arm_vehicle() self.progress("Waiting for deepstall messages") self.wait_text("Deepstall: Entry: ", timeout=240) # assume elevator is on channel 2: self.wait_servo_channel_value(2, deepstall_elevator_pwm) self.disarm_wait(timeout=120) self.progress("Flying home") self.takeoff(10) self.set_parameter("LAND_TYPE", 0) self.fly_home_land_and_disarm() def fly_deepstall_relative(self): self.start_subtest("DeepStall Relative") self.set_parameter("LAND_TYPE", 1) deepstall_elevator_pwm = 1661 self.set_parameter("LAND_DS_ELEV_PWM", deepstall_elevator_pwm) self.load_mission("plane-deepstall-relative-mission.txt") self.change_mode("AUTO") self.wait_ready_to_arm() self.arm_vehicle() self.progress("Waiting for deepstall messages") self.wait_text("Deepstall: Entry: ", timeout=240) # assume elevator is on channel 2: self.wait_servo_channel_value(2, deepstall_elevator_pwm) self.disarm_wait(timeout=120) self.progress("Flying home") self.takeoff(100) self.set_parameter("LAND_TYPE", 0) self.fly_home_land_and_disarm(timeout=240) def fly_do_change_speed(self): # the following lines ensure we revert these parameter values # - DO_CHANGE_AIRSPEED is a permanent vehicle change! self.set_parameter("TRIM_ARSPD_CM", self.get_parameter("TRIM_ARSPD_CM")) self.set_parameter("MIN_GNDSPD_CM", self.get_parameter("MIN_GNDSPD_CM")) self.progress("Takeoff") self.takeoff(alt=100) self.set_rc(3, 1500) # ensure we know what the airspeed is: self.progress("Entering guided and flying somewhere constant") self.change_mode("GUIDED") self.run_cmd_int( mavutil.mavlink.MAV_CMD_DO_REPOSITION, 0, 0, 0, 0, 12345, # lat*1e7 12345, # lon*1e7 100 # alt ) self.delay_sim_time(10) self.progress("Ensuring initial speed is known and relatively constant") initial_speed = 21.5; timeout = 10 tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > timeout: break m = self.mav.recv_match(type='VFR_HUD', blocking=True) self.progress("GroundSpeed: %f want=%f" % (m.groundspeed, initial_speed)) if abs(initial_speed - m.groundspeed) > 1: raise NotAchievedException("Initial speed not as expected (want=%f got=%f" % (initial_speed, m.groundspeed)) self.progress("Setting groundspeed") new_target_groundspeed = initial_speed + 5 self.run_cmd( mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED, 1, # groundspeed new_target_groundspeed, -1, # throttle / no change 0, # absolute values 0, 0, 0) self.wait_groundspeed(new_target_groundspeed-0.5, new_target_groundspeed+0.5, timeout=40) self.progress("Adding some wind, ensuring groundspeed holds") self.set_parameter("SIM_WIND_SPD", 5) self.delay_sim_time(5) self.wait_groundspeed(new_target_groundspeed-0.5, new_target_groundspeed+0.5, timeout=40) self.set_parameter("SIM_WIND_SPD", 0) self.progress("Setting airspeed") new_target_airspeed = initial_speed + 5 self.run_cmd( mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED, 0, # airspeed new_target_airspeed, -1, # throttle / no change 0, # absolute values 0, 0, 0) self.wait_groundspeed(new_target_airspeed-0.5, new_target_airspeed+0.5) self.progress("Adding some wind, hoping groundspeed increases/decreases") self.set_parameter("SIM_WIND_SPD", 5) self.set_parameter("SIM_WIND_DIR", 270) self.delay_sim_time(5) timeout = 10 tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > timeout: raise NotAchievedException("Did not 
achieve groundspeed delta") m = self.mav.recv_match(type='VFR_HUD', blocking=True) delta = abs(m.airspeed - m.groundspeed) want_delta = 4 self.progress("groundspeed and airspeed should be different (have=%f want=%f)" % (delta, want_delta)) if delta > want_delta: break self.fly_home_land_and_disarm() def fly_home_land_and_disarm(self, timeout=120): filename = "flaps.txt" self.progress("Using %s to fly home" % filename) num_wp = self.load_mission(filename) self.change_mode("AUTO") self.mavproxy.send('wp set 7\n') self.drain_mav() # TODO: reflect on file to find this magic waypoint number? # self.wait_waypoint(7, num_wp-1, timeout=500) # we tend to miss the final waypoint by a fair bit, and this is probably too noisy anyway? self.wait_disarmed(timeout=timeout) def fly_flaps(self): """Test flaps functionality.""" filename = "flaps.txt" self.context_push() ex = None try: flaps_ch = 5 servo_ch = 5 self.set_parameter("SERVO%u_FUNCTION" % servo_ch, 3) # flapsauto self.set_parameter("RC%u_OPTION" % flaps_ch, 208) # Flaps RCx_OPTION self.set_parameter("LAND_FLAP_PERCNT", 50) self.set_parameter("LOG_DISARMED", 1) flaps_ch_min = 1000 flaps_ch_trim = 1500 flaps_ch_max = 2000 self.set_parameter("RC%u_MIN" % flaps_ch, flaps_ch_min) self.set_parameter("RC%u_MAX" % flaps_ch, flaps_ch_max) self.set_parameter("RC%u_TRIM" % flaps_ch, flaps_ch_trim) servo_ch_min = 1200 servo_ch_trim = 1300 servo_ch_max = 1800 self.set_parameter("SERVO%u_MIN" % servo_ch, servo_ch_min) self.set_parameter("SERVO%u_MAX" % servo_ch, servo_ch_max) self.set_parameter("SERVO%u_TRIM" % servo_ch, servo_ch_trim) self.progress("check flaps are not deployed") self.set_rc(flaps_ch, flaps_ch_min) self.wait_servo_channel_value(servo_ch, servo_ch_min) self.progress("deploy the flaps") self.set_rc(flaps_ch, flaps_ch_max) tstart = self.get_sim_time() self.wait_servo_channel_value(servo_ch, servo_ch_max) tstop = self.get_sim_time_cached() delta_time = tstop - tstart delta_time_min = 0.5 delta_time_max = 1.5 if delta_time < delta_time_min or delta_time > delta_time_max: raise NotAchievedException(( "Flaps Slew not working (%f seconds)" % (delta_time,))) self.progress("undeploy flaps") self.set_rc(flaps_ch, flaps_ch_min) self.wait_servo_channel_value(servo_ch, servo_ch_min) self.progress("Flying mission %s" % filename) self.load_mission(filename) self.mavproxy.send('wp set 1\n') self.mavproxy.send('switch 1\n') # auto mode self.wait_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() last_mission_current_msg = 0 last_seq = None while self.armed(): m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True) time_delta = (self.get_sim_time_cached() - last_mission_current_msg) if (time_delta > 1 or m.seq != last_seq): dist = None x = self.mav.messages.get("NAV_CONTROLLER_OUTPUT", None) if x is not None: dist = x.wp_dist self.progress("MISSION_CURRENT.seq=%u (dist=%s)" % (m.seq, str(dist))) last_mission_current_msg = self.get_sim_time_cached() last_seq = m.seq # flaps should undeploy at the end self.wait_servo_channel_value(servo_ch, servo_ch_min, timeout=30) # do a short flight in FBWA, watching for flaps # self.mavproxy.send('switch 4\n') # self.wait_mode('FBWA') # self.delay_sim_time(10) # self.mavproxy.send('switch 6\n') # self.wait_mode('MANUAL') # self.delay_sim_time(10) self.progress("Flaps OK") except Exception as e: ex = e self.context_pop() if ex: if self.armed(): self.disarm_vehicle() raise ex def test_rc_relay(self): '''test toggling channel 12 toggles relay''' self.set_parameter("RC12_OPTION", 28) # Relay On/Off self.set_rc(12, 
1000) self.reboot_sitl() # needed for RC12_OPTION to take effect off = self.get_parameter("SIM_PIN_MASK") if off: raise PreconditionFailedException("SIM_MASK_PIN off") # allow time for the RC library to register initial value: self.delay_sim_time(1) self.set_rc(12, 2000) self.wait_heartbeat() self.wait_heartbeat() on = self.get_parameter("SIM_PIN_MASK") if not on: raise NotAchievedException("SIM_PIN_MASK doesn't reflect ON") self.set_rc(12, 1000) self.wait_heartbeat() self.wait_heartbeat() off = self.get_parameter("SIM_PIN_MASK") if off: raise NotAchievedException("SIM_PIN_MASK doesn't reflect OFF") def test_rc_option_camera_trigger(self): '''test toggling channel 12 takes picture''' self.set_parameter("RC12_OPTION", 9) # CameraTrigger self.reboot_sitl() # needed for RC12_OPTION to take effect x = self.mav.messages.get("CAMERA_FEEDBACK", None) if x is not None: raise PreconditionFailedException("Receiving CAMERA_FEEDBACK?!") self.set_rc(12, 2000) tstart = self.get_sim_time() while self.get_sim_time_cached() - tstart < 10: x = self.mav.messages.get("CAMERA_FEEDBACK", None) if x is not None: break self.wait_heartbeat() self.set_rc(12, 1000) if x is None: raise NotAchievedException("No CAMERA_FEEDBACK message received") def test_throttle_failsafe(self): self.change_mode('MANUAL') m = self.mav.recv_match(type='SYS_STATUS', blocking=True) receiver_bit = mavutil.mavlink.MAV_SYS_STATUS_SENSOR_RC_RECEIVER self.progress("Testing receiver enabled") if (not (m.onboard_control_sensors_enabled & receiver_bit)): raise PreconditionFailedException() self.progress("Testing receiver present") if (not (m.onboard_control_sensors_present & receiver_bit)): raise PreconditionFailedException() self.progress("Testing receiver health") if (not (m.onboard_control_sensors_health & receiver_bit)): raise PreconditionFailedException() self.progress("Ensure we know original throttle value") self.wait_rc_channel_value(3, 1000) self.set_parameter("THR_FS_VALUE", 960) self.progress("Failing receiver (throttle-to-950)") self.context_collect("HEARTBEAT") self.set_parameter("SIM_RC_FAIL", 2) # throttle-to-950 self.wait_mode('RTL') # long failsafe if (not self.get_mode_from_mode_mapping("CIRCLE") in [x.custom_mode for x in self.context_stop_collecting("HEARTBEAT")]): raise NotAchievedException("Did not go via circle mode") self.progress("Ensure we've had our throttle squashed to 950") self.wait_rc_channel_value(3, 950) self.drain_mav_unparsed() m = self.mav.recv_match(type='SYS_STATUS', blocking=True) print("%s" % str(m)) self.progress("Testing receiver enabled") if (not (m.onboard_control_sensors_enabled & receiver_bit)): raise NotAchievedException("Receiver not enabled") self.progress("Testing receiver present") if (not (m.onboard_control_sensors_present & receiver_bit)): raise NotAchievedException("Receiver not present") # skip this until RC is fixed # self.progress("Testing receiver health") # if (m.onboard_control_sensors_health & receiver_bit): # raise NotAchievedException("Sensor healthy when it shouldn't be") self.set_parameter("SIM_RC_FAIL", 0) self.drain_mav_unparsed() # have to allow time for RC to be fetched from SITL self.delay_sim_time(0.5) m = self.mav.recv_match(type='SYS_STATUS', blocking=True) self.progress("Testing receiver enabled") if (not (m.onboard_control_sensors_enabled & receiver_bit)): raise NotAchievedException("Receiver not enabled") self.progress("Testing receiver present") if (not (m.onboard_control_sensors_present & receiver_bit)): raise NotAchievedException("Receiver not present") 
self.progress("Testing receiver health") if (not (m.onboard_control_sensors_health & receiver_bit)): raise NotAchievedException("Receiver not healthy2") self.change_mode('MANUAL') self.progress("Failing receiver (no-pulses)") self.context_collect("HEARTBEAT") self.set_parameter("SIM_RC_FAIL", 1) # no-pulses self.wait_mode('RTL') # long failsafe if (not self.get_mode_from_mode_mapping("CIRCLE") in [x.custom_mode for x in self.context_stop_collecting("HEARTBEAT")]): raise NotAchievedException("Did not go via circle mode") self.drain_mav_unparsed() m = self.mav.recv_match(type='SYS_STATUS', blocking=True) print("%s" % str(m)) self.progress("Testing receiver enabled") if (not (m.onboard_control_sensors_enabled & receiver_bit)): raise NotAchievedException("Receiver not enabled") self.progress("Testing receiver present") if (not (m.onboard_control_sensors_present & receiver_bit)): raise NotAchievedException("Receiver not present") self.progress("Testing receiver health") if (m.onboard_control_sensors_health & receiver_bit): raise NotAchievedException("Sensor healthy when it shouldn't be") self.progress("Making RC work again") self.set_parameter("SIM_RC_FAIL", 0) # have to allow time for RC to be fetched from SITL self.progress("Giving receiver time to recover") self.delay_sim_time(0.5) self.drain_mav_unparsed() m = self.mav.recv_match(type='SYS_STATUS', blocking=True) self.progress("Testing receiver enabled") if (not (m.onboard_control_sensors_enabled & receiver_bit)): raise NotAchievedException("Receiver not enabled") self.progress("Testing receiver present") if (not (m.onboard_control_sensors_present & receiver_bit)): raise NotAchievedException("Receiver not present") self.progress("Testing receiver health") if (not (m.onboard_control_sensors_health & receiver_bit)): raise NotAchievedException("Receiver not healthy") self.change_mode('MANUAL') self.progress("Ensure long failsafe can trigger when short failsafe disabled") self.context_push() self.context_collect("STATUSTEXT") ex = None try: self.set_parameter("FS_SHORT_ACTN", 3) # 3 means disabled self.set_parameter("SIM_RC_FAIL", 1) self.wait_statustext("Long event on", check_context=True) self.wait_mode("RTL") # self.context_clear_collection("STATUSTEXT") self.set_parameter("SIM_RC_FAIL", 0) self.wait_text("Long event off", check_context=True) self.change_mode("MANUAL") self.progress("Trying again with THR_FS_VALUE") self.set_parameter("THR_FS_VALUE", 960) self.set_parameter("SIM_RC_FAIL", 2) self.wait_statustext("Long event on", check_context=True) self.wait_mode("RTL") except Exception as e: self.progress("Exception caught:") self.progress(self.get_exception_stacktrace(e)) ex = e self.context_pop() if ex is not None: raise ex def test_throttle_failsafe_fence(self): fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE self.progress("Checking fence is not present before being configured") m = self.mav.recv_match(type='SYS_STATUS', blocking=True) print("%s" % str(m)) if (m.onboard_control_sensors_enabled & fence_bit): raise NotAchievedException("Fence enabled before being configured") self.change_mode('MANUAL') self.wait_ready_to_arm() self.load_fence("CMAC-fence.txt") self.set_parameter("FENCE_CHANNEL", 7) self.set_parameter("FENCE_ACTION", 4) self.set_rc(3, 1000) self.set_rc(7, 2000) self.progress("Checking fence is initially OK") m = self.mav.recv_match(type='SYS_STATUS', blocking=True) print("%s" % str(m)) if (not (m.onboard_control_sensors_enabled & fence_bit)): raise NotAchievedException("Fence not initially enabled") 
self.set_parameter("THR_FS_VALUE", 960) self.progress("Failing receiver (throttle-to-950)") self.set_parameter("SIM_RC_FAIL", 2) # throttle-to-950 self.wait_mode("CIRCLE") self.delay_sim_time(1) # give self.drain_mav_unparsed() self.progress("Checking fence is OK after receiver failure (bind-values)") fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE m = self.mav.recv_match(type='SYS_STATUS', blocking=True) print("%s" % str(m)) if (not (m.onboard_control_sensors_enabled & fence_bit)): raise NotAchievedException("Fence not enabled after RC fail") def test_gripper_mission(self): self.context_push() ex = None try: self.load_mission("plane-gripper-mission.txt") self.mavproxy.send("wp set 1\n") self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.mavproxy.expect("Gripper Grabbed") self.mavproxy.expect("Gripper Released") self.mavproxy.expect("Auto disarmed") except Exception as e: self.progress("Exception caught:") self.progress(self.get_exception_stacktrace(e)) ex = e self.context_pop() if ex is not None: raise ex def assert_fence_sys_status(self, present, enabled, health): self.delay_sim_time(1) self.drain_mav_unparsed() m = self.mav.recv_match(type='SYS_STATUS', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not receive SYS_STATUS") tests = [ ( "present", present, m.onboard_control_sensors_present ), ( "enabled", enabled, m.onboard_control_sensors_enabled ), ( "health", health, m.onboard_control_sensors_health ), ] bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE for test in tests: (name, want, field) = test got = (field & bit) != 0 if want != got: raise NotAchievedException("fence status incorrect; %s want=%u got=%u" % (name, want, got)) def do_fence_en_or_dis_able(self, value, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED): if value: p1 = 1 else: p1 = 0 self.run_cmd(mavutil.mavlink.MAV_CMD_DO_FENCE_ENABLE, p1, # param1 0, # param2 0, # param3 0, # param4 0, # param5 0, # param6 0, # param7 want_result=want_result) def do_fence_enable(self, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED): self.do_fence_en_or_dis_able(True, want_result=want_result) def do_fence_disable(self, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED): self.do_fence_en_or_dis_able(False, want_result=want_result) def wait_circling_point_with_radius(self, loc, want_radius, epsilon=5.0, min_circle_time=5, timeout=120): on_radius_start_heading = None average_radius = 0.0 circle_time_start = 0 done_time = False done_angle = False tstart = self.get_sim_time() while True: if self.get_sim_time() - tstart > timeout: raise AutoTestTimeoutException("Did not get onto circle") here = self.mav.location() got_radius = self.get_distance(loc, here) average_radius = 0.95*average_radius + 0.05*got_radius on_radius = abs(got_radius - want_radius) < epsilon m = self.mav.recv_match(type='VFR_HUD', blocking=True) heading = m.heading on_string = "off" got_angle = "" if on_radius_start_heading is not None: got_angle = "%0.2f" % abs(on_radius_start_heading - heading) # FIXME on_string = "on" want_angle = 180 # we don't actually get this (angle-substraction issue. But we get enough... 
self.progress("wait-circling: got-r=%0.2f want-r=%f avg-r=%f %s want-a=%0.1f got-a=%s" % (got_radius, want_radius, average_radius, on_string, want_angle, got_angle)) if on_radius: if on_radius_start_heading is None: on_radius_start_heading = heading average_radius = got_radius circle_time_start = self.get_sim_time() continue if abs(on_radius_start_heading - heading) > want_angle: # FIXME done_angle = True if self.get_sim_time() - circle_time_start > min_circle_time: done_time = True if done_time and done_angle: return continue if on_radius_start_heading is not None: average_radius = 0.0 on_radius_start_heading = None circle_time_start = 0 def test_fence_static(self): ex = None try: self.progress("Checking for bizarre healthy-when-not-present-or-enabled") self.assert_fence_sys_status(False, False, True) self.load_fence("CMAC-fence.txt") m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2) if m is not None: raise NotAchievedException("Got FENCE_STATUS unexpectedly"); self.drain_mav_unparsed() self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_NONE) # report only self.assert_fence_sys_status(False, False, True) self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL) # report only self.assert_fence_sys_status(True, False, True) self.mavproxy.send('fence enable\n') self.mavproxy.expect("fence enabled") self.assert_fence_sys_status(True, True, True) m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2) if m is None: raise NotAchievedException("Did not get FENCE_STATUS"); if m.breach_status: raise NotAchievedException("Breached fence unexpectedly (%u)" % (m.breach_status)) self.mavproxy.send('fence disable\n') self.mavproxy.expect("fence disabled") self.assert_fence_sys_status(True, False, True) self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_NONE) self.assert_fence_sys_status(False, False, True) self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL) self.assert_fence_sys_status(True, False, True) self.mavproxy.send("fence clear\n") self.mavproxy.expect("fence removed") if self.get_parameter("FENCE_TOTAL") != 0: raise NotAchievedException("Expected zero points remaining") self.assert_fence_sys_status(False, False, True) self.progress("Trying to enable fence with no points") self.do_fence_enable(want_result=mavutil.mavlink.MAV_RESULT_FAILED) # test a rather unfortunate behaviour: self.progress("Killing a live fence with fence-clear") self.load_fence("CMAC-fence.txt") self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL) self.do_fence_enable() self.assert_fence_sys_status(True, True, True) self.mavproxy.send("fence clear\n") self.mavproxy.expect("fence removed") if self.get_parameter("FENCE_TOTAL") != 0: raise NotAchievedException("Expected zero points remaining") self.assert_fence_sys_status(False, False, True) except Exception as e: self.progress("Exception caught:") self.progress(self.get_exception_stacktrace(e)) ex = e self.mavproxy.send('fence clear\n') if ex is not None: raise ex def test_fence_breach_circle_at(self, loc, disable_on_breach=False): ex = None try: self.load_fence("CMAC-fence.txt") want_radius = 100 # when ArduPlane is fixed, remove this fudge factor REALLY_BAD_FUDGE_FACTOR = 1.16 expected_radius = REALLY_BAD_FUDGE_FACTOR * want_radius self.set_parameter("RTL_RADIUS", want_radius) self.set_parameter("NAVL1_LIM_BANK", 60) self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL) self.do_fence_enable() self.assert_fence_sys_status(True, True, True) 
self.takeoff(alt=45, alt_max=300) tstart = self.get_sim_time() while True: if self.get_sim_time() - tstart > 30: raise NotAchievedException("Did not breach fence") m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2) if m is None: raise NotAchievedException("Did not get FENCE_STATUS"); if m.breach_status == 0: continue # we've breached; check our state; if m.breach_type != mavutil.mavlink.FENCE_BREACH_BOUNDARY: raise NotAchievedException("Unexpected breach type %u" % (m.breach_type,)) if m.breach_count == 0: raise NotAchievedException("Unexpected breach count %u" % (m.breach_count,)) self.assert_fence_sys_status(True, True, False) break if disable_on_breach: self.do_fence_disable() self.wait_circling_point_with_radius(loc, expected_radius) self.disarm_vehicle(force=True) self.reboot_sitl() except Exception as e: self.progress("Exception caught:") self.progress(self.get_exception_stacktrace(e)) ex = e self.mavproxy.send('fence clear\n') if ex is not None: raise ex def test_fence_rtl(self): self.progress("Testing FENCE_ACTION_RTL no rally point") # have to disable the fence once we've breached or we breach # it as part of the loiter-at-home! self.test_fence_breach_circle_at(self.home_position_as_mav_location(), disable_on_breach=True) def test_fence_rtl_rally(self): ex = None target_system = 1 target_component = 1 try: self.progress("Testing FENCE_ACTION_RTL with rally point") self.wait_ready_to_arm() loc = self.home_position_as_mav_location() self.location_offset_ne(loc, 50, -50) self.set_parameter("RALLY_TOTAL", 1) self.mav.mav.rally_point_send(target_system, target_component, 0, # sequence number 1, # total count int(loc.lat * 1e7), int(loc.lng * 1e7), 15, 0, # "break" alt?! 0, # "land dir" 0) # flags self.delay_sim_time(1) self.mavproxy.send("rally list\n") self.test_fence_breach_circle_at(loc) except Exception as e: self.progress("Exception caught:") self.progress(self.get_exception_stacktrace(e)) ex = e self.mavproxy.send('rally clear\n') if ex is not None: raise ex def test_parachute(self): self.set_rc(9, 1000) self.set_parameter("CHUTE_ENABLED", 1) self.set_parameter("CHUTE_TYPE", 10) self.set_parameter("SERVO9_FUNCTION", 27) self.set_parameter("SIM_PARA_ENABLE", 1) self.set_parameter("SIM_PARA_PIN", 9) self.load_mission("plane-parachute-mission.txt") self.mavproxy.send("wp set 1\n") self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.mavproxy.expect("BANG") self.disarm_vehicle(force=True) self.reboot_sitl() def test_parachute_sinkrate(self): self.set_rc(9, 1000) self.set_parameter("CHUTE_ENABLED", 1) self.set_parameter("CHUTE_TYPE", 10) self.set_parameter("SERVO9_FUNCTION", 27) self.set_parameter("SIM_PARA_ENABLE", 1) self.set_parameter("SIM_PARA_PIN", 9) self.set_parameter("CHUTE_CRT_SINK", 9) self.progress("Takeoff") self.takeoff(alt=300) self.progress("Diving") self.set_rc(2, 2000) self.mavproxy.expect("BANG") self.disarm_vehicle(force=True) self.reboot_sitl() def run_subtest(self, desc, func): self.start_subtest(desc) func() def test_main_flight(self): self.change_mode('MANUAL') self.progress("Asserting we don't support transfer of fence via mission item protocol") self.assert_no_capability(mavutil.mavlink.MAV_PROTOCOL_CAPABILITY_MISSION_FENCE) # grab home position: self.mav.recv_match(type='HOME_POSITION', blocking=True) self.homeloc = self.mav.location() self.run_subtest("Takeoff", self.takeoff) self.run_subtest("Set Attitude Target", self.set_attitude_target) self.run_subtest("Fly left circuit", self.fly_left_circuit) 
self.run_subtest("Left roll", lambda: self.axial_left_roll(1)) self.run_subtest("Inside loop", self.inside_loop) self.run_subtest("Stablize test", self.test_stabilize) self.run_subtest("ACRO test", self.test_acro) self.run_subtest("FBWB test", self.test_FBWB) self.run_subtest("CRUISE test", lambda: self.test_FBWB(mode='CRUISE')) self.run_subtest("RTL test", self.fly_RTL) self.run_subtest("LOITER test", self.fly_LOITER) self.run_subtest("CIRCLE test", self.fly_CIRCLE) self.run_subtest("Mission test", lambda: self.fly_mission("ap1.txt")) def airspeed_autocal(self): self.progress("Ensure no AIRSPEED_AUTOCAL on ground") self.set_parameter("ARSPD_AUTOCAL", 1) m = self.mav.recv_match(type='AIRSPEED_AUTOCAL', blocking=True, timeout=5) if m is not None: raise NotAchievedException("Got autocal on ground") mission_filepath = "flaps.txt" num_wp = self.load_mission(mission_filepath) self.wait_ready_to_arm() self.arm_vehicle() self.change_mode("AUTO") self.progress("Ensure AIRSPEED_AUTOCAL in air") m = self.mav.recv_match(type='AIRSPEED_AUTOCAL', blocking=True, timeout=5) self.wait_waypoint(7, num_wp-1, timeout=500) self.wait_disarmed(timeout=120) def deadreckoning_main(self, disable_airspeed_sensor=False): self.gpi = None self.simstate = None self.last_print = 0 self.max_divergence = 0 def validate_global_position_int_against_simstate(mav, m): if m.get_type() == 'GLOBAL_POSITION_INT': self.gpi = m elif m.get_type() == 'SIMSTATE': self.simstate = m if self.gpi is None: return if self.simstate is None: return divergence = self.get_distance_int(self.gpi, self.simstate) max_allowed_divergence = 200 if time.time() - self.last_print > 1: self.progress("position-estimate-divergence=%fm" % (divergence,)) self.last_print = time.time() if divergence > max_allowed_divergence: raise NotAchievedException("global-position-int diverged from simstate by >%fm" % (max_allowed_divergence,)) if divergence > self.max_divergence: self.max_divergence = divergence self.install_message_hook(validate_global_position_int_against_simstate) try: # wind is from the West: self.set_parameter("SIM_WIND_DIR", 270) # light winds: self.set_parameter("SIM_WIND_SPD", 10) if disable_airspeed_sensor: self.set_parameter("ARSPD_USE", 0) self.takeoff(50) loc = self.mav.location() loc.lat = -35.35690712 loc.lng = 149.17083386 self.run_cmd_int( mavutil.mavlink.MAV_CMD_DO_REPOSITION, 0, mavutil.mavlink.MAV_DO_REPOSITION_FLAGS_CHANGE_MODE, 0, 0, int(loc.lat*1e7), int(loc.lng*1e7), 100, # alt frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, ) self.wait_location(loc, accuracy=100) self.progress("Stewing") self.delay_sim_time(20) self.set_parameter("SIM_GPS_DISABLE", 1) self.progress("Roasting") self.delay_sim_time(20) self.change_mode("RTL") self.wait_distance_to_home(100, 200, timeout=200) self.set_parameter("SIM_GPS_DISABLE", 0) self.delay_sim_time(10) self.set_rc(3, 1000) self.fly_home_land_and_disarm() self.progress("max-divergence: %fm" % (self.max_divergence,)) finally: self.remove_message_hook(validate_global_position_int_against_simstate) def deadreckoning(self): self.deadreckoning_main() self.deadreckoning_main(disable_airspeed_sensor=True) def sample_enable_parameter(self): return "Q_ENABLE" def test_rangefinder(self): ex = None self.context_push() self.progress("Making sure we don't ordinarily get RANGEFINDER") m = None try: m = self.mav.recv_match(type='RANGEFINDER', blocking=True, timeout=5) except Exception as e: self.progress("Caught exception: %s" % self.get_exception_stacktrace(e)) if m is not None: raise 
NotAchievedException("Received unexpected RANGEFINDER msg") try: self.set_analog_rangefinder_parameters() self.reboot_sitl() '''ensure rangefinder gives height-above-ground''' self.load_mission("plane-gripper-mission.txt") # borrow this self.mavproxy.send("wp set 1\n") self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.wait_waypoint(5, 5, max_dist=100) rf = self.mav.recv_match(type="RANGEFINDER", timeout=1, blocking=True) if rf is None: raise NotAchievedException("Did not receive rangefinder message") gpi = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1) if gpi is None: raise NotAchievedException("Did not receive GLOBAL_POSITION_INT message") if abs(rf.distance - gpi.relative_alt/1000.0) > 3: raise NotAchievedException("rangefinder alt (%s) disagrees with global-position-int.relative_alt (%s)" % (rf.distance, gpi.relative_alt/1000.0)) self.mavproxy.expect("Auto disarmed") self.progress("Ensure RFND messages in log") if not self.current_onboard_log_contains_message("RFND"): raise NotAchievedException("No RFND messages in log") except Exception as e: self.progress("Exception caught:") self.progress(self.get_exception_stacktrace(e)) ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def rc_defaults(self): ret = super(AutoTestPlane, self).rc_defaults() ret[3] = 1000 ret[8] = 1800 return ret def default_mode(self): return "MANUAL" def test_pid_tuning(self): self.change_mode("FBWA") # we don't update PIDs in MANUAL super(AutoTestPlane, self).test_pid_tuning() def test_setting_modes_via_auxswitches(self): self.set_parameter("FLTMODE5", 1) self.mavproxy.send('switch 1\n') # random mode self.wait_heartbeat() self.change_mode('MANUAL') self.mavproxy.send('switch 5\n') # acro mode self.wait_mode("CIRCLE") self.set_rc(9, 1000) self.set_rc(10, 1000) self.set_parameter("RC9_OPTION", 4) # RTL self.set_parameter("RC10_OPTION", 55) # guided self.set_rc(9, 1900) self.wait_mode("RTL") self.set_rc(10, 1900) self.wait_mode("GUIDED") self.progress("resetting both switches - should go back to CIRCLE") self.set_rc(9, 1000) self.set_rc(10, 1000) self.wait_mode("CIRCLE") self.set_rc(9, 1900) self.wait_mode("RTL") self.set_rc(10, 1900) self.wait_mode("GUIDED") self.progress("Resetting switch should repoll mode switch") self.set_rc(10, 1000) # this re-polls the mode switch self.wait_mode("CIRCLE") self.set_rc(9, 1000) def wait_for_collision_threat_to_clear(self): '''wait to get a "clear" collision message", then slurp remaining messages''' last_collision = self.get_sim_time() while True: now = self.get_sim_time() if now - last_collision > 5: return self.progress("Waiting for collision message") m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=1) self.progress("Got (%s)" % str(m)) if m is None: continue last_collision = now def test_adsb_send_threatening_adsb_message(self, here): self.progress("Sending ABSD_VEHICLE message") self.mav.mav.adsb_vehicle_send(37, # ICAO address int(here.lat * 1e7), int(here.lng * 1e7), mavutil.mavlink.ADSB_ALTITUDE_TYPE_PRESSURE_QNH, int(here.alt*1000 + 10000), # 10m up 0, # heading in cdeg 0, # horizontal velocity cm/s 0, # vertical velocity cm/s "bob".encode("ascii"), # callsign mavutil.mavlink.ADSB_EMITTER_TYPE_LIGHT, 1, # time since last communication 65535, # flags 17 # squawk ) def test_adsb(self): self.context_push() ex = None try: # message ADSB_VEHICLE 37 -353632614 1491652305 0 584070 0 0 0 "bob" 3 1 255 17 self.set_parameter("RC12_OPTION", 38) # avoid-adsb self.set_rc(12, 2000) 
self.set_parameter("ADSB_ENABLE", 1) self.set_parameter("AVD_ENABLE", 1) self.set_parameter("AVD_F_ACTION", mavutil.mavlink.MAV_COLLISION_ACTION_RTL) self.reboot_sitl() self.wait_ready_to_arm() here = self.mav.location() self.change_mode("FBWA") self.delay_sim_time(2) # TODO: work out why this is required... self.test_adsb_send_threatening_adsb_message(here) self.progress("Waiting for collision message") m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=4) if m is None: raise NotAchievedException("Did not get collision message") if m.threat_level != 2: raise NotAchievedException("Expected some threat at least") if m.action != mavutil.mavlink.MAV_COLLISION_ACTION_RTL: raise NotAchievedException("Incorrect action; want=%u got=%u" % (mavutil.mavlink.MAV_COLLISION_ACTION_RTL, m.action)) self.wait_mode("RTL") self.progress("Sending far-away ABSD_VEHICLE message") self.mav.mav.adsb_vehicle_send(37, # ICAO address int(here.lat+1 * 1e7), int(here.lng * 1e7), mavutil.mavlink.ADSB_ALTITUDE_TYPE_PRESSURE_QNH, int(here.alt*1000 + 10000), # 10m up 0, # heading in cdeg 0, # horizontal velocity cm/s 0, # vertical velocity cm/s "bob".encode("ascii"), # callsign mavutil.mavlink.ADSB_EMITTER_TYPE_LIGHT, 1, # time since last communication 65535, # flags 17 # squawk ) self.wait_for_collision_threat_to_clear() self.change_mode("FBWA") self.progress("Disabling ADSB-avoidance with RC channel") self.set_rc(12, 1000) self.delay_sim_time(1) # let the switch get polled self.test_adsb_send_threatening_adsb_message(here) m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=4) print("Got (%s)" % str(m)) if m is not None: raise NotAchievedException("Got collision message when I shouldn't have") except Exception as e: ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def fly_do_guided_request(self, target_system=1, target_component=1): self.progress("Takeoff") self.takeoff(alt=50) self.set_rc(3, 1500) self.start_subtest("Ensure command bounced outside guided mode") desired_relative_alt = 33 loc = self.mav.location() self.location_offset_ne(loc, 300, 300) loc.alt += desired_relative_alt self.mav.mav.mission_item_int_send( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 2, # current - guided-mode request 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(loc.lat *1e7), # latitude int(loc.lng *1e7), # longitude loc.alt, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION) m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get MISSION_ACK") if m.type != mavutil.mavlink.MAV_MISSION_ERROR: raise NotAchievedException("Did not get appropriate error") self.start_subtest("Enter guided and flying somewhere constant") self.change_mode("GUIDED") self.mav.mav.mission_item_int_send( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 2, # current - guided-mode request 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(loc.lat *1e7), # latitude int(loc.lng *1e7), # longitude desired_relative_alt, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION) m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get MISSION_ACK") if m.type != mavutil.mavlink.MAV_MISSION_ACCEPTED: raise NotAchievedException("Did not get accepted response") self.wait_location(loc, accuracy=100) # based on loiter radius 
self.delay_sim_time(20) self.wait_altitude(altitude_min=desired_relative_alt-3, altitude_max=desired_relative_alt+3, relative=True) self.fly_home_land_and_disarm() def LOITER(self): self.takeoff(alt=200) self.set_rc(3, 1500) self.change_mode("LOITER") self.progress("Doing a bit of loitering to start with") tstart = self.get_sim_time() while True: now = self.get_sim_time_cached() if now - tstart > 60: break m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get VFR_HUD") new_throttle = m.throttle alt = m.alt m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get ATTITUDE") pitch = math.degrees(m.pitch) self.progress("Pitch:%f throttle:%u alt:%f" % (pitch, new_throttle, alt)) m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get VFR_HUD") initial_throttle = m.throttle initial_alt = m.alt self.progress("Initial throttle: %u" % initial_throttle) # pitch down, ensure throttle decreases: rc2_max = self.get_parameter("RC2_MAX") self.set_rc(2, rc2_max) tstart = self.get_sim_time() while True: now = self.get_sim_time_cached() '''stick-mixing is pushing the aircraft down. It doesn't want to go down (the target loiter altitude hasn't changed), so it tries to add energy by increasing the throttle. ''' if now - tstart > 60: raise NotAchievedException("Did not see increase in throttle") m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get VFR_HUD") new_throttle = m.throttle alt = m.alt m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get ATTITUDE") pitch = math.degrees(m.pitch) self.progress("Pitch:%f throttle:%u alt:%f" % (pitch, new_throttle, alt)) if new_throttle - initial_throttle > 20: self.progress("Throttle delta achieved") break self.progress("Centering elevator and ensuring we get back to loiter altitude") self.set_rc(2, 1500) self.wait_altitude(initial_alt-1, initial_alt+1) self.fly_home_land_and_disarm() def CPUFailsafe(self): '''In lockup Plane should copy RC inputs to RC outputs''' self.plane_CPUFailsafe() def test_large_missions(self): self.load_mission("Kingaroy-vlarge.txt") self.load_mission("Kingaroy-vlarge2.txt") def fly_soaring(self): model="plane-soaring" self.customise_SITL_commandline([], model=model, defaults_filepath=self.model_defaults_filepath("ArduPlane",model), wipe=True) self.load_mission('CMAC-soar.txt') self.mavproxy.send("wp set 1\n") self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() # Enable thermalling RC rc_chan = 0 for i in range(8): rcx_option = self.get_parameter('RC{0}_OPTION'.format(i+1)) if rcx_option==88: rc_chan = i+1; break if rc_chan==0: raise NotAchievedException("Did not find soaring enable channel option.") self.send_set_rc(rc_chan, 1900) # Use trim airspeed. 
self.send_set_rc(3, 1500) # Wait to detect thermal self.progress("Waiting for thermal") self.wait_mode('THERMAL',timeout=600) # Wait to climb to SOAR_ALT_MAX self.progress("Waiting for climb to max altitude") alt_max = self.get_parameter('SOAR_ALT_MAX') self.wait_altitude(alt_max-10, alt_max, timeout=600, relative=True) # Wait for AUTO self.progress("Waiting for AUTO mode") self.wait_mode('AUTO') # Disable thermals self.set_parameter("SIM_THML_SCENARI", 0) # Wait to descend to SOAR_ALT_MIN self.progress("Waiting for glide to min altitude") alt_min = self.get_parameter('SOAR_ALT_MIN') self.wait_altitude(alt_min-10, alt_min, timeout=600, relative=True) self.progress("Waiting for throttle up") self.wait_servo_channel_value(3, 1200, timeout=2, comparator=operator.gt) self.progress("Waiting for climb to cutoff altitude") alt_ctf = self.get_parameter('SOAR_ALT_CUTOFF') self.wait_altitude(alt_ctf-10, alt_ctf, timeout=600, relative=True) # Allow time to suppress throttle and start descent. self.delay_sim_time(20) # Now set FBWB mode self.change_mode('FBWB') self.delay_sim_time(5) # Now disable soaring (should hold altitude) self.set_parameter("SOAR_ENABLE", 0) self.delay_sim_time(10) #And reenable. This should force throttle-down self.set_parameter("SOAR_ENABLE", 1) self.delay_sim_time(10) # Now wait for descent and check throttle up self.wait_altitude(alt_min-10, alt_min, timeout=600, relative=True) self.progress("Waiting for climb") self.wait_altitude(alt_ctf-10, alt_ctf, timeout=600, relative=True) # Back to auto self.change_mode('AUTO') # Reenable thermals self.set_parameter("SIM_THML_SCENARI", 1) # Disable soaring using RC channel. self.send_set_rc(rc_chan, 1100) # Wait to get back to waypoint before thermal. self.progress("Waiting to get back to position") self.wait_current_waypoint(3,timeout=1200) # Enable soaring with mode changes suppressed) self.send_set_rc(rc_chan, 1500) # Make sure this causes throttle down. 
self.wait_servo_channel_value(3, 1200, timeout=2, comparator=operator.lt) self.progress("Waiting for next WP with no thermalling") self.wait_waypoint(4,4,timeout=1200,max_dist=120) # Disarm self.disarm_vehicle() self.progress("Mission OK") def fly_terrain_mission(self): self.customise_SITL_commandline([], wipe=True) self.mavproxy.send("wp set 1\n") self.wait_ready_to_arm() self.arm_vehicle() self.fly_mission("ap-terrain.txt", mission_timeout=600) def ekf_lane_switch(self): self.context_push() ex = None # new lane swtich available only with EK3 self.set_parameter("EK3_ENABLE", 1) self.set_parameter("EK2_ENABLE", 0) self.set_parameter("AHRS_EKF_TYPE", 3) self.set_parameter("EK3_AFFINITY", 15) # enable affinity for all sensors self.set_parameter("EK3_IMU_MASK", 3) # use only 2 IMUs self.set_parameter("GPS_TYPE2", 1) self.set_parameter("SIM_GPS2_DISABLE", 0) self.set_parameter("SIM_BARO2_DISABL", 0) self.set_parameter("SIM_BARO_COUNT", 2) self.set_parameter("ARSPD2_TYPE", 2) self.set_parameter("ARSPD2_USE", 1) self.set_parameter("ARSPD2_PIN", 2) # some parameters need reboot to take effect self.reboot_sitl() self.lane_switches = [] # add an EKF lane switch hook def statustext_hook(mav, message): if message.get_type() != 'STATUSTEXT': return # example msg: EKF3 lane switch 1 if not message.text.startswith("EKF3 lane switch "): return newlane = int(message.text[-1]) self.lane_switches.append(newlane) self.install_message_hook(statustext_hook) # get flying self.takeoff(alt=50) self.change_mode('CIRCLE') try: ##################################################################################################################################################### self.progress("Checking EKF3 Lane Switching trigger from all sensors") ##################################################################################################################################################### self.start_subtest("ACCELEROMETER: Change z-axis offset") # create an accelerometer error by changing the Z-axis offset self.context_collect("STATUSTEXT") old_parameter = self.get_parameter("INS_ACCOFFS_Z") self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=self.set_parameter("INS_ACCOFFS_Z", old_parameter + 5), check_context=True) if self.lane_switches != [1]: raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches[-1])) # Cleanup self.set_parameter("INS_ACCOFFS_Z", old_parameter) self.context_clear_collection("STATUSTEXT") self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) ##################################################################################################################################################### self.start_subtest("BAROMETER: Freeze to last measured value") self.context_collect("STATUSTEXT") # create a barometer error by inhibiting any pressure change while changing altitude old_parameter = self.get_parameter("SIM_BARO2_FREEZE") self.set_parameter("SIM_BARO2_FREEZE", 1) self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=lambda: self.set_rc(2, 2000), check_context=True) if self.lane_switches != [1, 0]: raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches[-1])) # Cleanup self.set_rc(2, 1500) self.set_parameter("SIM_BARO2_FREEZE", old_parameter) self.context_clear_collection("STATUSTEXT") self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) 
##################################################################################################################################################### self.start_subtest("GPS: Apply GPS Velocity Error in NED") self.context_push() self.context_collect("STATUSTEXT") # create a GPS velocity error by adding a random 2m/s noise on each axis def sim_gps_verr(): self.set_parameter("SIM_GPS_VERR_X", self.get_parameter("SIM_GPS_VERR_X") + 2) self.set_parameter("SIM_GPS_VERR_Y", self.get_parameter("SIM_GPS_VERR_Y") + 2) self.set_parameter("SIM_GPS_VERR_Z", self.get_parameter("SIM_GPS_VERR_Z") + 2) self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=sim_gps_verr(), check_context=True) if self.lane_switches != [1, 0, 1]: raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches[-1])) # Cleanup self.context_pop() self.context_clear_collection("STATUSTEXT") self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) ##################################################################################################################################################### self.start_subtest("MAGNETOMETER: Change X-Axis Offset") self.context_collect("STATUSTEXT") # create a magnetometer error by changing the X-axis offset old_parameter = self.get_parameter("SIM_MAG2_OFS_X") self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=self.set_parameter("SIM_MAG2_OFS_X", old_parameter + 150), check_context=True) if self.lane_switches != [1, 0, 1, 0]: raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches[-1])) # Cleanup self.set_parameter("SIM_MAG2_OFS_X", old_parameter) self.context_clear_collection("STATUSTEXT") self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) ##################################################################################################################################################### self.start_subtest("AIRSPEED: Fail to constant value") self.context_push() self.context_collect("STATUSTEXT") # create an airspeed sensor error by freezing to the current airspeed then changing the groundspeed old_parameter = self.get_parameter("SIM_ARSPD_FAIL") m = self.mav.recv_match(type='VFR_HUD', blocking=True) self.set_parameter("SIM_ARSPD_FAIL", m.airspeed) def change_speed(): self.change_mode("GUIDED") self.run_cmd_int( mavutil.mavlink.MAV_CMD_DO_REPOSITION, 0, 0, 0, 0, 12345, # lat*1e7 12345, # lon*1e7 50 # alt ) self.delay_sim_time(5) new_target_groundspeed = m.groundspeed + 5 self.run_cmd( mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED, 1, # groundspeed new_target_groundspeed, -1, # throttle / no change 0, # absolute values 0, 0, 0 ) self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=change_speed(), check_context=True) if self.lane_switches != [1, 0, 1, 0, 1]: raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches[-1])) # Cleanup self.change_mode('CIRCLE') self.context_pop() self.context_clear_collection("STATUSTEXT") self.wait_heading(0, accuracy=10, timeout=60) self.wait_heading(180, accuracy=10, timeout=60) ##################################################################################################################################################### self.progress("GYROSCOPE: Change Y-Axis Offset") self.context_collect("STATUSTEXT") # create a gyroscope error by changing the Y-axis offset old_parameter = self.get_parameter("INS_GYR2OFFS_Y") self.wait_statustext(text="EKF3 lane switch", 
timeout=30, the_function=self.set_parameter("INS_GYR2OFFS_Y", old_parameter + 1), check_context=True) if self.lane_switches != [1, 0, 1, 0, 1, 0]: raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches[-1])) # Cleanup self.set_parameter("INS_GYR2OFFS_Y", old_parameter) self.context_clear_collection("STATUSTEXT") ##################################################################################################################################################### self.disarm_vehicle() except Exception as e: self.progress("Caught exception: %s" % self.get_exception_stacktrace(e)) ex = e self.remove_message_hook(statustext_hook) self.context_pop() if ex is not None: raise ex def tests(self): '''return list of all tests''' ret = super(AutoTestPlane, self).tests() ret.extend([ ("AuxModeSwitch", "Set modes via auxswitches", self.test_setting_modes_via_auxswitches), ("TestRCCamera", "Test RC Option - Camera Trigger", self.test_rc_option_camera_trigger), ("TestRCRelay", "Test Relay RC Channel Option", self.test_rc_relay), ("ThrottleFailsafe", "Fly throttle failsafe", self.test_throttle_failsafe), ("ThrottleFailsafeFence", "Fly fence survives throttle failsafe", self.test_throttle_failsafe_fence), ("TestFlaps", "Flaps", self.fly_flaps), ("DO_CHANGE_SPEED", "Test mavlink DO_CHANGE_SPEED command", self.fly_do_change_speed), ("DO_REPOSITION", "Test mavlink DO_REPOSITION command", self.fly_do_reposition), ("GuidedRequest", "Test handling of MISSION_ITEM in guided mode", self.fly_do_guided_request), ("MainFlight", "Lots of things in one flight", self.test_main_flight), ("TestGripperMission", "Test Gripper mission items", self.test_gripper_mission), ("Parachute", "Test Parachute", self.test_parachute), ("ParachuteSinkRate", "Test Parachute (SinkRate triggering)", self.test_parachute_sinkrate), ("AIRSPEED_AUTOCAL", "Test AIRSPEED_AUTOCAL", self.airspeed_autocal), ("RangeFinder", "Test RangeFinder Basic Functionality", self.test_rangefinder), ("FenceStatic", "Test Basic Fence Functionality", self.test_fence_static), ("FenceRTL", "Test Fence RTL", self.test_fence_rtl), ("FenceRTLRally", "Test Fence RTL Rally", self.test_fence_rtl_rally), ("ADSB", "Test ADSB", self.test_adsb), ("Button", "Test Buttons", self.test_button), ("FRSkySPort", "Test FrSky SPort mode", self.test_frsky_sport), ("FRSkyPassThrough", "Test FrSky PassThrough serial output", self.test_frsky_passthrough), ("FRSkyD", "Test FrSkyD serial output", self.test_frsky_d), ("LTM", "Test LTM serial output", self.test_ltm), ("AdvancedFailsafe", "Test Advanced Failsafe", self.test_advanced_failsafe), ("LOITER", "Test Loiter mode", self.LOITER), ("DeepStall", "Test DeepStall Landing", self.fly_deepstall), ("LargeMissions", "Test Manipulation of Large missions", self.test_large_missions), ("Soaring", "Test Soaring feature", self.fly_soaring), ("Terrain", "Test terrain following in mission", self.fly_terrain_mission), ("Deadreckoning", "Test deadreckoning support", self.deadreckoning), ("EKFlaneswitch", "Test EKF3 Affinity and Lane Switching", self.ekf_lane_switch), ("LogUpload", "Log upload", self.log_upload), ]) return ret def disabled_tests(self): return { "Button": "See https://github.com/ArduPilot/ardupilot/issues/15259", }
gpl-3.0
-238,449,084,716,790,980
38.840973
161
0.547441
false
3.816308
true
false
false
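The EKF lane-switch subtests in the file above each inject a sensor fault while waiting for an "EKF3 lane switch" STATUSTEXT. A minimal sketch of that pattern with the fault injection wrapped in a deferred callable, assuming the same AutoTest helpers (wait_statustext, get_parameter, set_parameter, context_collect) as the test code; it is illustrative only and not part of the suite:

def trigger_accel_lane_switch(self):
    # Collect STATUSTEXT messages so check_context can find the lane-switch text.
    self.context_collect("STATUSTEXT")
    old = self.get_parameter("INS_ACCOFFS_Z")
    # Pass a callable so the offset change fires while wait_statustext is listening.
    self.wait_statustext(text="EKF3 lane switch",
                         timeout=30,
                         the_function=lambda: self.set_parameter("INS_ACCOFFS_Z", old + 5),
                         check_context=True)
    # Restore the original accelerometer offset afterwards.
    self.set_parameter("INS_ACCOFFS_Z", old)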
akinaru/ffmpeg-image-sequencer
ffmpeg-appender-test.py
1
3157
#!/usr/bin/python ##################################################################################### ##################################################################################### # # title : ffmpeg-appender-test.py # authors : Bertrand Martel # copyrights : Copyright (c) 2015 Bertrand Martel # license : The MIT License (MIT) # date : 16/08/2015 # description : create video if not exist and append a series of image to this video taken from WEB # usage : python ffmpeg-appender-test.py # ##################################################################################### ##################################################################################### import sys, getopt, os, subprocess def main(argv): output_file_name = "video_space" temporary_file_name = "temp_space" temporary_file_name_video = "temp_video" picture_array = [ "https://upload.wikimedia.org/wikipedia/commons/4/4e/Anttlers101.jpg", \ "https://upload.wikimedia.org/wikipedia/commons/3/3b/NASA-SpiralGalaxyM101-20140505.jpg", \ "https://upload.wikimedia.org/wikipedia/commons/b/b0/Supernova_in_M101_2011-08-25.jpg", \ "http://1.1.1.5/bmi/images.nationalgeographic.com/wpf/media-live/photos/000/061/cache/earth-full-view_6125_990x742.jpg" ] this_dir = os.path.dirname(os.path.abspath(__file__)) os.chdir(this_dir) output_file_path = ''.join([this_dir , "/",output_file_name,".avi"]) temporary_file_path_avi = ''.join([this_dir,"/",temporary_file_name,".avi"]) temporary_file_name_jpg = ''.join([this_dir,"/",temporary_file_name,".jpg"]) temporary_file_name_video = ''.join([this_dir,"/",temporary_file_name_video,".avi"]) #remove files try: os.remove(output_file_path) except OSError: pass try: os.remove(temporary_file_path_avi) except OSError: pass try: os.remove(temporary_file_name_jpg) except OSError: pass try: os.remove(temporary_file_name_video) except OSError: pass for picture in picture_array: subprocess.call(["wget", picture, "-O", temporary_file_name_jpg]) subprocess.call(["ffmpeg -nostdin -v verbose -f image2 -pattern_type sequence -start_number 0 -r 1 -i " + temporary_file_name_jpg + " -s 1920x1080 " + temporary_file_path_avi],shell=True) try: os.remove(temporary_file_name_jpg) except OSError: pass if os.path.exists(output_file_path): # concat this video and former video subprocess.call(['cd ' + this_dir + ' | ffmpeg -nostdin -v verbose -i "concat:' + output_file_name + '.avi|' + temporary_file_name + '.avi" -c copy ' + temporary_file_name_video],shell=True) try: os.remove(temporary_file_path_avi) except OSError: pass try: os.remove(output_file_path) except OSError: pass os.rename(temporary_file_name_video, output_file_path) else: os.rename(temporary_file_path_avi, output_file_path) if __name__ == "__main__": main(sys.argv[1:]) __author__ = "Bertrand Martel" __copyright__ = "Copyright 2015, Bertrand Martel" __credits__ = ["Bertrand Martel"] __license__ = "MIT" __version__ = "1.0.0" __maintainer__ = "Bertrand Martel" __email__ = "bmartel.fr@gmail.com" __status__ = "POC"
mit
-8,222,219,400,538,782,000
30.58
193
0.602471
false
3.095098
false
false
false
csangani/ReproducingSprout
extract_traces.py
1
1317
## Create a network trace from the saturator output import glob import os import sys INPUT_PATH = 'raw_traces' OUTPUT_PATH = 'cleaned_traces' def extract_trace(filePath, targetFilePath): with open(filePath) as f: with open(targetFilePath, 'w+') as wf: firstLine = True for line in f: value = long(line.lstrip('recv_time=').rstrip(',\n')) if firstLine: base = value firstLine = False value = (value - base) / 1000000 wf.write('%s\n' % value) if __name__ == '__main__': if len(sys.argv) >= 2: source = sys.argv[1] else: source = INPUT_PATH if len(sys.argv) >= 3: destination = sys.argv[2] else: destination = OUTPUT_PATH if not os.path.exists(destination): os.makedirs(destination) networks = glob.glob('%s/*' % source) for network in networks: if not os.path.exists(network.replace(source, destination)): os.makedirs(network.replace(source, destination)) files = glob.glob('%s/*.rx' % network) for file in files: extract_trace(file, file.replace(source, destination).replace('.rx', '.pps'))
mit
-2,875,549,895,375,812,000
27.630435
89
0.535308
false
4.115625
false
false
false
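extract_traces.py turns each saturator .rx file (lines of the form recv_time=<nanoseconds>,) into a .pps file of millisecond offsets from the first packet. A small illustration of the conversion performed by extract_trace; the file names and timestamps are hypothetical:

# Hypothetical input raw_traces/verizon/downlink.rx:
#   recv_time=1400000000000000000,
#   recv_time=1400000000012000000,
#   recv_time=1400000000047000000,
# extract_trace writes the millisecond offsets 0, 12 and 47 to the .pps file.
extract_trace('raw_traces/verizon/downlink.rx', 'cleaned_traces/verizon/downlink.pps')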
meshulam/sly
deps/shapely/geos.py
1
25191
""" Proxies for the libgeos_c shared lib, GEOS-specific exceptions, and utilities """ import os import re import sys import atexit import logging import threading from ctypes import CDLL, cdll, pointer, c_void_p, c_size_t, c_char_p, string_at from ctypes.util import find_library from . import ftools from .ctypes_declarations import prototype, EXCEPTION_HANDLER_FUNCTYPE # Add message handler to this module's logger LOG = logging.getLogger(__name__) if 'all' in sys.warnoptions: # show GEOS messages in console with: python -W all logging.basicConfig() else: # no handler messages shown class NullHandler(logging.Handler): def emit(self, record): pass LOG.addHandler(NullHandler()) # Find and load the GEOS and C libraries # If this ever gets any longer, we'll break it into separate modules def load_dll(libname, fallbacks=None): lib = find_library(libname) if lib is not None: try: return CDLL(lib) except OSError: pass if fallbacks is not None: for name in fallbacks: try: return CDLL(name) except OSError: # move on to the next fallback pass # No shared library was loaded. Raise OSError. raise OSError( "Could not find library %s or load any of its variants %s" % ( libname, fallbacks or [])) if sys.platform.startswith('linux'): _lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so']) free = load_dll('c').free free.argtypes = [c_void_p] free.restype = None elif sys.platform == 'darwin': if hasattr(sys, 'frozen'): # .app file from py2app alt_paths = [os.path.join(os.environ['RESOURCEPATH'], '..', 'Frameworks', 'libgeos_c.dylib')] else: alt_paths = [ # The Framework build from Kyng Chaos: "/Library/Frameworks/GEOS.framework/Versions/Current/GEOS", # macports '/opt/local/lib/libgeos_c.dylib', ] _lgeos = load_dll('geos_c', fallbacks=alt_paths) free = load_dll('c').free free.argtypes = [c_void_p] free.restype = None elif sys.platform == 'win32': try: egg_dlls = os.path.abspath(os.path.join(os.path.dirname(__file__), "DLLs")) wininst_dlls = os.path.abspath(os.__file__ + "../../../DLLs") original_path = os.environ['PATH'] os.environ['PATH'] = "%s;%s;%s" % \ (egg_dlls, wininst_dlls, original_path) _lgeos = CDLL("geos.dll") except (ImportError, WindowsError, OSError): raise def free(m): try: cdll.msvcrt.free(m) except WindowsError: # XXX: See http://trac.gispython.org/projects/PCL/ticket/149 pass elif sys.platform == 'sunos5': _lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so']) free = CDLL('libc.so.1').free free.argtypes = [c_void_p] free.restype = None else: # other *nix systems _lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so']) free = load_dll('c', fallbacks=['libc.so.6']).free free.argtypes = [c_void_p] free.restype = None def _geos_version(): # extern const char GEOS_DLL *GEOSversion(); GEOSversion = _lgeos.GEOSversion GEOSversion.restype = c_char_p GEOSversion.argtypes = [] #define GEOS_CAPI_VERSION "@VERSION@-CAPI-@CAPI_VERSION@" geos_version_string = GEOSversion() if sys.version_info[0] >= 3: geos_version_string = geos_version_string.decode('ascii') res = re.findall(r'(\d+)\.(\d+)\.(\d+)', geos_version_string) assert len(res) == 2, res geos_version = tuple(int(x) for x in res[0]) capi_version = tuple(int(x) for x in res[1]) return geos_version_string, geos_version, capi_version geos_version_string, geos_version, geos_capi_version = _geos_version() # If we have the new interface, then record a baseline so that we know what # additional functions are declared in ctypes_declarations. 
if geos_version >= (3, 1, 0): start_set = set(_lgeos.__dict__) # Apply prototypes for the libgeos_c functions prototype(_lgeos, geos_version) # If we have the new interface, automatically detect all function # declarations, and declare their re-entrant counterpart. if geos_version >= (3, 1, 0): end_set = set(_lgeos.__dict__) new_func_names = end_set - start_set for func_name in new_func_names: new_func_name = "%s_r" % func_name if hasattr(_lgeos, new_func_name): new_func = getattr(_lgeos, new_func_name) old_func = getattr(_lgeos, func_name) new_func.restype = old_func.restype if old_func.argtypes is None: # Handle functions that didn't take an argument before, # finishGEOS. new_func.argtypes = [c_void_p] else: new_func.argtypes = [c_void_p] + old_func.argtypes if old_func.errcheck is not None: new_func.errcheck = old_func.errcheck # Handle special case. _lgeos.initGEOS_r.restype = c_void_p _lgeos.initGEOS_r.argtypes = \ [EXCEPTION_HANDLER_FUNCTYPE, EXCEPTION_HANDLER_FUNCTYPE] _lgeos.finishGEOS_r.argtypes = [c_void_p] # Exceptions class ReadingError(Exception): pass class DimensionError(Exception): pass class TopologicalError(Exception): pass class PredicateError(Exception): pass def error_handler(fmt, *args): if sys.version_info[0] >= 3: fmt = fmt.decode('ascii') args = [arg.decode('ascii') for arg in args] LOG.error(fmt, *args) def notice_handler(fmt, args): if sys.version_info[0] >= 3: fmt = fmt.decode('ascii') args = args.decode('ascii') LOG.warning(fmt, args) error_h = EXCEPTION_HANDLER_FUNCTYPE(error_handler) notice_h = EXCEPTION_HANDLER_FUNCTYPE(notice_handler) class WKTReader(object): _lgeos = None _reader = None def __init__(self, lgeos): """Create WKT Reader""" self._lgeos = lgeos self._reader = self._lgeos.GEOSWKTReader_create() def __del__(self): """Destroy WKT Reader""" if self._lgeos is not None: self._lgeos.GEOSWKTReader_destroy(self._reader) self._reader = None self._lgeos = None def read(self, text): """Returns geometry from WKT""" if sys.version_info[0] >= 3: text = text.encode('ascii') geom = self._lgeos.GEOSWKTReader_read(self._reader, c_char_p(text)) if not geom: raise ReadingError("Could not create geometry because of errors " "while reading input.") # avoid circular import dependency from shapely.geometry.base import geom_factory return geom_factory(geom) class WKTWriter(object): _lgeos = None _writer = None # Establish default output settings defaults = {} if geos_version >= (3, 3, 0): defaults['trim'] = True defaults['output_dimension'] = 3 # GEOS' defaults for methods without "get" _trim = False _rounding_precision = -1 _old_3d = False @property def trim(self): """Trimming of unnecessary decimals (default: True)""" return getattr(self, '_trim') @trim.setter def trim(self, value): self._trim = bool(value) self._lgeos.GEOSWKTWriter_setTrim(self._writer, self._trim) @property def rounding_precision(self): """Rounding precision when writing the WKT. 
A precision of -1 (default) disables it.""" return getattr(self, '_rounding_precision') @rounding_precision.setter def rounding_precision(self, value): self._rounding_precision = int(value) self._lgeos.GEOSWKTWriter_setRoundingPrecision( self._writer, self._rounding_precision) @property def output_dimension(self): """Output dimension, either 2 or 3 (default)""" return self._lgeos.GEOSWKTWriter_getOutputDimension( self._writer) @output_dimension.setter def output_dimension(self, value): self._lgeos.GEOSWKTWriter_setOutputDimension( self._writer, int(value)) @property def old_3d(self): """Show older style for 3D WKT, without 'Z' (default: False)""" return getattr(self, '_old_3d') @old_3d.setter def old_3d(self, value): self._old_3d = bool(value) self._lgeos.GEOSWKTWriter_setOld3D(self._writer, self._old_3d) def __init__(self, lgeos, **settings): """Create WKT Writer Note: writer defaults are set differently for GEOS 3.3.0 and up. For example, with 'POINT Z (1 2 3)': newer: POINT Z (1 2 3) older: POINT (1.0000000000000000 2.0000000000000000) The older formatting can be achieved for GEOS 3.3.0 and up by setting the properties: trim = False output_dimension = 2 """ self._lgeos = lgeos self._writer = self._lgeos.GEOSWKTWriter_create() applied_settings = self.defaults.copy() applied_settings.update(settings) for name in applied_settings: setattr(self, name, applied_settings[name]) def __setattr__(self, name, value): """Limit setting attributes""" if hasattr(self, name): object.__setattr__(self, name, value) else: raise AttributeError('%r object has no attribute %r' % (self.__class__.__name__, name)) def __del__(self): """Destroy WKT Writer""" if self._lgeos is not None: self._lgeos.GEOSWKTWriter_destroy(self._writer) self._writer = None self._lgeos = None def write(self, geom): """Returns WKT string for geometry""" if geom is None or geom._geom is None: raise ValueError("Null geometry supports no operations") result = self._lgeos.GEOSWKTWriter_write(self._writer, geom._geom) text = string_at(result) lgeos.GEOSFree(result) if sys.version_info[0] >= 3: return text.decode('ascii') else: return text class WKBReader(object): _lgeos = None _reader = None def __init__(self, lgeos): """Create WKB Reader""" self._lgeos = lgeos self._reader = self._lgeos.GEOSWKBReader_create() def __del__(self): """Destroy WKB Reader""" if self._lgeos is not None: self._lgeos.GEOSWKBReader_destroy(self._reader) self._reader = None self._lgeos = None def read(self, data): """Returns geometry from WKB""" geom = self._lgeos.GEOSWKBReader_read( self._reader, c_char_p(data), c_size_t(len(data))) if not geom: raise ReadingError("Could not create geometry because of errors " "while reading input.") # avoid circular import dependency from shapely import geometry return geometry.base.geom_factory(geom) def read_hex(self, data): """Returns geometry from WKB hex""" if sys.version_info[0] >= 3: data = data.encode('ascii') geom = self._lgeos.GEOSWKBReader_readHEX( self._reader, c_char_p(data), c_size_t(len(data))) if not geom: raise ReadingError("Could not create geometry because of errors " "while reading input.") # avoid circular import dependency from shapely import geometry return geometry.base.geom_factory(geom) class WKBWriter(object): _lgeos = None _writer = None # EndianType enum in ByteOrderValues.h _ENDIAN_BIG = 0 _ENDIAN_LITTLE = 1 # Establish default output setting defaults = {'output_dimension': 3} @property def output_dimension(self): """Output dimension, either 2 or 3 (default)""" return 
self._lgeos.GEOSWKBWriter_getOutputDimension(self._writer) @output_dimension.setter def output_dimension(self, value): self._lgeos.GEOSWKBWriter_setOutputDimension( self._writer, int(value)) @property def big_endian(self): """Byte order is big endian, True (default) or False""" return (self._lgeos.GEOSWKBWriter_getByteOrder(self._writer) == self._ENDIAN_BIG) @big_endian.setter def big_endian(self, value): self._lgeos.GEOSWKBWriter_setByteOrder( self._writer, self._ENDIAN_BIG if value else self._ENDIAN_LITTLE) @property def include_srid(self): """Include SRID, True or False (default)""" return bool(self._lgeos.GEOSWKBWriter_getIncludeSRID(self._writer)) @include_srid.setter def include_srid(self, value): self._lgeos.GEOSWKBWriter_setIncludeSRID(self._writer, bool(value)) def __init__(self, lgeos, **settings): """Create WKB Writer""" self._lgeos = lgeos self._writer = self._lgeos.GEOSWKBWriter_create() applied_settings = self.defaults.copy() applied_settings.update(settings) for name in applied_settings: setattr(self, name, applied_settings[name]) def __setattr__(self, name, value): """Limit setting attributes""" if hasattr(self, name): object.__setattr__(self, name, value) else: raise AttributeError('%r object has no attribute %r' % (self.__class__.__name__, name)) def __del__(self): """Destroy WKB Writer""" if self._lgeos is not None: self._lgeos.GEOSWKBWriter_destroy(self._writer) self._writer = None self._lgeos = None def write(self, geom): """Returns WKB byte string for geometry""" if geom is None or geom._geom is None: raise ValueError("Null geometry supports no operations") size = c_size_t() result = self._lgeos.GEOSWKBWriter_write( self._writer, geom._geom, pointer(size)) data = string_at(result, size.value) lgeos.GEOSFree(result) return data def write_hex(self, geom): """Returns WKB hex string for geometry""" if geom is None or geom._geom is None: raise ValueError("Null geometry supports no operations") size = c_size_t() result = self._lgeos.GEOSWKBWriter_writeHEX( self._writer, geom._geom, pointer(size)) data = string_at(result, size.value) lgeos.GEOSFree(result) if sys.version_info[0] >= 3: return data.decode('ascii') else: return data # Errcheck functions for ctypes def errcheck_wkb(result, func, argtuple): '''Returns bytes from a C pointer''' if not result: return None size_ref = argtuple[-1] size = size_ref.contents retval = string_at(result, size.value)[:] lgeos.GEOSFree(result) return retval def errcheck_just_free(result, func, argtuple): '''Returns string from a C pointer''' retval = string_at(result) lgeos.GEOSFree(result) if sys.version_info[0] >= 3: return retval.decode('ascii') else: return retval def errcheck_predicate(result, func, argtuple): '''Result is 2 on exception, 1 on True, 0 on False''' if result == 2: raise PredicateError("Failed to evaluate %s" % repr(func)) return result class LGEOSBase(threading.local): """Proxy for GEOS C API This is a base class. Do not instantiate. 
""" methods = {} def __init__(self, dll): self._lgeos = dll self.geos_handle = None def __del__(self): """Cleanup GEOS related processes""" if self._lgeos is not None: self._lgeos.finishGEOS() self._lgeos = None self.geos_handle = None class LGEOS300(LGEOSBase): """Proxy for GEOS 3.0.0-CAPI-1.4.1 """ geos_version = (3, 0, 0) geos_capi_version = (1, 4, 0) def __init__(self, dll): super(LGEOS300, self).__init__(dll) self.geos_handle = self._lgeos.initGEOS(notice_h, error_h) keys = list(self._lgeos.__dict__.keys()) for key in keys: setattr(self, key, getattr(self._lgeos, key)) self.GEOSFree = self._lgeos.free # Deprecated self.GEOSGeomToWKB_buf.errcheck = errcheck_wkb self.GEOSGeomToWKT.errcheck = errcheck_just_free self.GEOSRelate.errcheck = errcheck_just_free for pred in ( self.GEOSDisjoint, self.GEOSTouches, self.GEOSIntersects, self.GEOSCrosses, self.GEOSWithin, self.GEOSContains, self.GEOSOverlaps, self.GEOSEquals, self.GEOSEqualsExact, self.GEOSisEmpty, self.GEOSisValid, self.GEOSisSimple, self.GEOSisRing, self.GEOSHasZ): pred.errcheck = errcheck_predicate self.methods['area'] = self.GEOSArea self.methods['boundary'] = self.GEOSBoundary self.methods['buffer'] = self.GEOSBuffer self.methods['centroid'] = self.GEOSGetCentroid self.methods['representative_point'] = self.GEOSPointOnSurface self.methods['convex_hull'] = self.GEOSConvexHull self.methods['distance'] = self.GEOSDistance self.methods['envelope'] = self.GEOSEnvelope self.methods['length'] = self.GEOSLength self.methods['has_z'] = self.GEOSHasZ self.methods['is_empty'] = self.GEOSisEmpty self.methods['is_ring'] = self.GEOSisRing self.methods['is_simple'] = self.GEOSisSimple self.methods['is_valid'] = self.GEOSisValid self.methods['disjoint'] = self.GEOSDisjoint self.methods['touches'] = self.GEOSTouches self.methods['intersects'] = self.GEOSIntersects self.methods['crosses'] = self.GEOSCrosses self.methods['within'] = self.GEOSWithin self.methods['contains'] = self.GEOSContains self.methods['overlaps'] = self.GEOSOverlaps self.methods['equals'] = self.GEOSEquals self.methods['equals_exact'] = self.GEOSEqualsExact self.methods['relate'] = self.GEOSRelate self.methods['difference'] = self.GEOSDifference self.methods['symmetric_difference'] = self.GEOSSymDifference self.methods['union'] = self.GEOSUnion self.methods['intersection'] = self.GEOSIntersection self.methods['simplify'] = self.GEOSSimplify self.methods['topology_preserve_simplify'] = \ self.GEOSTopologyPreserveSimplify class LGEOS310(LGEOSBase): """Proxy for GEOS 3.1.0-CAPI-1.5.0 """ geos_version = (3, 1, 0) geos_capi_version = (1, 5, 0) def __init__(self, dll): super(LGEOS310, self).__init__(dll) self.geos_handle = self._lgeos.initGEOS_r(notice_h, error_h) keys = list(self._lgeos.__dict__.keys()) for key in [x for x in keys if not x.endswith('_r')]: if key + '_r' in keys: reentr_func = getattr(self._lgeos, key + '_r') attr = ftools.partial(reentr_func, self.geos_handle) attr.__name__ = reentr_func.__name__ setattr(self, key, attr) else: setattr(self, key, getattr(self._lgeos, key)) if not hasattr(self, 'GEOSFree'): # GEOS < 3.1.1 self.GEOSFree = self._lgeos.free # Deprecated self.GEOSGeomToWKB_buf.func.errcheck = errcheck_wkb self.GEOSGeomToWKT.func.errcheck = errcheck_just_free self.GEOSRelate.func.errcheck = errcheck_just_free for pred in ( self.GEOSDisjoint, self.GEOSTouches, self.GEOSIntersects, self.GEOSCrosses, self.GEOSWithin, self.GEOSContains, self.GEOSOverlaps, self.GEOSEquals, self.GEOSEqualsExact, self.GEOSisEmpty, self.GEOSisValid, self.GEOSisSimple, 
self.GEOSisRing, self.GEOSHasZ): pred.func.errcheck = errcheck_predicate self.GEOSisValidReason.func.errcheck = errcheck_just_free self.methods['area'] = self.GEOSArea self.methods['boundary'] = self.GEOSBoundary self.methods['buffer'] = self.GEOSBuffer self.methods['centroid'] = self.GEOSGetCentroid self.methods['representative_point'] = self.GEOSPointOnSurface self.methods['convex_hull'] = self.GEOSConvexHull self.methods['distance'] = self.GEOSDistance self.methods['envelope'] = self.GEOSEnvelope self.methods['length'] = self.GEOSLength self.methods['has_z'] = self.GEOSHasZ self.methods['is_empty'] = self.GEOSisEmpty self.methods['is_ring'] = self.GEOSisRing self.methods['is_simple'] = self.GEOSisSimple self.methods['is_valid'] = self.GEOSisValid self.methods['disjoint'] = self.GEOSDisjoint self.methods['touches'] = self.GEOSTouches self.methods['intersects'] = self.GEOSIntersects self.methods['crosses'] = self.GEOSCrosses self.methods['within'] = self.GEOSWithin self.methods['contains'] = self.GEOSContains self.methods['overlaps'] = self.GEOSOverlaps self.methods['equals'] = self.GEOSEquals self.methods['equals_exact'] = self.GEOSEqualsExact self.methods['relate'] = self.GEOSRelate self.methods['difference'] = self.GEOSDifference self.methods['symmetric_difference'] = self.GEOSSymDifference self.methods['union'] = self.GEOSUnion self.methods['intersection'] = self.GEOSIntersection self.methods['prepared_intersects'] = self.GEOSPreparedIntersects self.methods['prepared_contains'] = self.GEOSPreparedContains self.methods['prepared_contains_properly'] = \ self.GEOSPreparedContainsProperly self.methods['prepared_covers'] = self.GEOSPreparedCovers self.methods['simplify'] = self.GEOSSimplify self.methods['topology_preserve_simplify'] = \ self.GEOSTopologyPreserveSimplify self.methods['cascaded_union'] = self.GEOSUnionCascaded class LGEOS311(LGEOS310): """Proxy for GEOS 3.1.1-CAPI-1.6.0 """ geos_version = (3, 1, 1) geos_capi_version = (1, 6, 0) def __init__(self, dll): super(LGEOS311, self).__init__(dll) class LGEOS320(LGEOS311): """Proxy for GEOS 3.2.0-CAPI-1.6.0 """ geos_version = (3, 2, 0) geos_capi_version = (1, 6, 0) def __init__(self, dll): super(LGEOS320, self).__init__(dll) self.methods['parallel_offset'] = self.GEOSSingleSidedBuffer self.methods['project'] = self.GEOSProject self.methods['project_normalized'] = self.GEOSProjectNormalized self.methods['interpolate'] = self.GEOSInterpolate self.methods['interpolate_normalized'] = \ self.GEOSInterpolateNormalized self.methods['buffer_with_style'] = self.GEOSBufferWithStyle class LGEOS330(LGEOS320): """Proxy for GEOS 3.3.0-CAPI-1.7.0 """ geos_version = (3, 3, 0) geos_capi_version = (1, 7, 0) def __init__(self, dll): super(LGEOS330, self).__init__(dll) # GEOS 3.3.8 from homebrew has, but doesn't advertise # GEOSPolygonize_full. We patch it in explicitly here. 
key = 'GEOSPolygonize_full' func = getattr(self._lgeos, key + '_r') attr = ftools.partial(func, self.geos_handle) attr.__name__ = func.__name__ setattr(self, key, attr) for pred in (self.GEOSisClosed,): pred.func.errcheck = errcheck_predicate self.methods['unary_union'] = self.GEOSUnaryUnion self.methods['is_closed'] = self.GEOSisClosed self.methods['cascaded_union'] = self.methods['unary_union'] self.methods['snap'] = self.GEOSSnap class LGEOS340(LGEOS330): """Proxy for GEOS 3.4.0-CAPI-1.8.0 """ geos_version = (3, 4, 0) geos_capi_version = (1, 8, 0) def __init__(self, dll): super(LGEOS340, self).__init__(dll) self.methods['delaunay_triangulation'] = self.GEOSDelaunayTriangulation self.methods['nearest_points'] = self.GEOSNearestPoints if geos_version >= (3, 4, 0): L = LGEOS340 elif geos_version >= (3, 3, 0): L = LGEOS330 elif geos_version >= (3, 2, 0): L = LGEOS320 elif geos_version >= (3, 1, 1): L = LGEOS311 elif geos_version >= (3, 1, 0): L = LGEOS310 else: L = LGEOS300 lgeos = L(_lgeos) def cleanup(proxy): del proxy atexit.register(cleanup, lgeos)
mit
9,089,816,898,955,361,000
32.277411
79
0.596403
false
3.756487
false
false
false
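geos.py finishes by instantiating the version-appropriate proxy as lgeos. A brief sketch of exercising the WKT reader/writer classes defined above through that proxy; it assumes GEOS 3.3 or newer so the trim setting is available, and the printed output is indicative:

from shapely.geos import lgeos, WKTReader, WKTWriter

reader = WKTReader(lgeos)
writer = WKTWriter(lgeos, trim=True)   # settings are applied attribute-by-attribute in __init__
geom = reader.read('POINT (1 2)')      # geometry built via shapely's geom_factory
print(writer.write(geom))              # e.g. 'POINT (1 2)' with trimming enabled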
philipgian/pre-commit
tests/make_archives_test.py
1
1979
from __future__ import absolute_import from __future__ import unicode_literals import os.path import tarfile import mock import pytest from pre_commit import make_archives from pre_commit.util import cmd_output from pre_commit.util import cwd from testing.fixtures import git_dir from testing.util import get_head_sha from testing.util import skipif_slowtests_false def test_make_archive(tempdir_factory): output_dir = tempdir_factory.get() git_path = git_dir(tempdir_factory) # Add a files to the git directory with cwd(git_path): cmd_output('touch', 'foo') cmd_output('git', 'add', '.') cmd_output('git', 'commit', '-m', 'foo') # We'll use this sha head_sha = get_head_sha('.') # And check that this file doesn't exist cmd_output('touch', 'bar') cmd_output('git', 'add', '.') cmd_output('git', 'commit', '-m', 'bar') # Do the thing archive_path = make_archives.make_archive( 'foo', git_path, head_sha, output_dir, ) assert archive_path == os.path.join(output_dir, 'foo.tar.gz') assert os.path.exists(archive_path) extract_dir = tempdir_factory.get() # Extract the tar with tarfile.open(archive_path) as tf: tf.extractall(extract_dir) # Verify the contents of the tar assert os.path.exists(os.path.join(extract_dir, 'foo')) assert os.path.exists(os.path.join(extract_dir, 'foo', 'foo')) assert not os.path.exists(os.path.join(extract_dir, 'foo', '.git')) assert not os.path.exists(os.path.join(extract_dir, 'foo', 'bar')) @skipif_slowtests_false @pytest.mark.integration def test_main(tempdir_factory): path = tempdir_factory.get() # Don't actually want to make these in the current repo with mock.patch.object(make_archives, 'RESOURCES_DIR', path): make_archives.main() for archive, _, _ in make_archives.REPOS: assert os.path.exists(os.path.join(path, archive + '.tar.gz'))
mit
-4,190,765,153,830,089,700
29.921875
71
0.656897
false
3.417962
true
false
false
slimta/python-slimta
slimta/util/__init__.py
1
4971
# Copyright (c) 2016 Ian C. Good # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # """Package containing a variety of useful modules utilities that didn't really belong anywhere else. """ from __future__ import absolute_import from gevent import socket __all__ = ['build_ipv4_socket_creator', 'create_connection_ipv4', 'create_listeners'] def build_ipv4_socket_creator(only_ports=None): """Returns a function that will act like :py:func:`socket.create_connection` but only using IPv4 addresses. This function can be used as the ``socket_creator`` argument to some classes like :class:`~slimta.relay.smtp.mx.MxSmtpRelay`. :param only_ports: If given, can be a list to limit which ports are restricted to IPv4. Connections to all other ports may be IPv6. """ def socket_creator(*args, **kwargs): return create_connection_ipv4(*args, only_ports=only_ports, **kwargs) return socket_creator def create_connection_ipv4(address, timeout=None, source_address=None, only_ports=None): """Attempts to mimick to :py:func:`socket.create_connection`, but connections are only made to IPv4 addresses. :param only_ports: If given, can be a list to limit which ports are restricted to IPv4. Connections to all other ports may be IPv6. """ host, port = address if only_ports and port not in only_ports: return socket.create_connection(address, timeout, source_address) last_exc = None for res in socket.getaddrinfo(host, port, socket.AF_INET): _, _, _, _, sockaddr = res try: return socket.create_connection(sockaddr, timeout, source_address) except socket.error as exc: last_exc = exc if last_exc is not None: raise last_exc else: raise socket.error('getaddrinfo returns an empty list') def create_listeners(address, family=socket.AF_UNSPEC, socktype=socket.SOCK_STREAM, proto=socket.IPPROTO_IP): """Uses :func:`socket.getaddrinfo` to create listening sockets for available socket parameters. For example, giving *address* as ``('localhost', 80)`` on a system with IPv6 would return one socket bound to ``127.0.0.1`` and one bound to ``::1`. May also be used for ``socket.AF_UNIX`` with a file path to produce a single unix domain socket listening on that path. :param address: A ``(host, port)`` tuple to listen on. :param family: the socket family, default ``AF_UNSPEC``. :param socktype: the socket type, default ``SOCK_STREAM``. :param proto: the socket protocol, default ``IPPROTO_IP``. 
""" if family == socket.AF_UNIX: sock = socket.socket(family, socktype, proto) _init_socket(sock, address) return [sock] elif not isinstance(address, tuple) or len(address) != 2: raise ValueError(address) flags = socket.AI_PASSIVE host, port = address listeners = [] last_exc = None for res in socket.getaddrinfo(host, port, family, socktype, proto, flags): fam, typ, prt, _, sockaddr = res try: sock = socket.socket(fam, typ, prt) _init_socket(sock, sockaddr) except socket.error as exc: last_exc = exc else: listeners.append(sock) if last_exc and not listeners: raise last_exc return listeners def _init_socket(sock, sockaddr): try: sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) except socket.error: pass try: sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) except socket.error: pass sock.setblocking(0) sock.bind(sockaddr) if sock.type != socket.SOCK_DGRAM: sock.listen(socket.SOMAXCONN) # vim:et:fdm=marker:sts=4:sw=4:ts=4
mit
1,704,426,083,830,367,700
36.097015
79
0.662442
false
4.028363
false
false
false
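A short usage sketch for the helpers defined in slimta/util/__init__.py above; the host names and port numbers are illustrative:

from slimta.util import build_ipv4_socket_creator, create_listeners

# One listening socket per address family that ('localhost', 2500) resolves to.
listeners = create_listeners(('localhost', 2500))

# A creator that forces IPv4, but only for the listed ports.
creator = build_ipv4_socket_creator(only_ports=[25, 587])
sock = creator(('mx.example.com', 25), 10)   # same call shape as socket.create_connection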
Azure/azure-sdk-for-python
sdk/databoxedge/azure-mgmt-databoxedge/azure/mgmt/datab/aio/_configuration.py
1
3204
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMHttpLoggingPolicy if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential VERSION = "unknown" class DataBoxEdgeManagementClientConfiguration(Configuration): """Configuration for DataBoxEdgeManagementClient. Note that all parameters used to create this instance are saved as instance attributes. :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription ID. :type subscription_id: str """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any ) -> None: if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") super(DataBoxEdgeManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id self.api_version = "2020-12-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-databoxedge/{}'.format(VERSION)) self._configure(**kwargs) def _configure( self, **kwargs: Any ) -> None: self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
mit
-890,126,408,029,941,400
47.545455
134
0.682272
false
4.506329
true
false
false
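A minimal sketch of constructing the configuration class above; the credential implementation and subscription ID are placeholders:

from azure.identity.aio import DefaultAzureCredential   # assumed async credential type

config = DataBoxEdgeManagementClientConfiguration(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)
print(config.api_version)   # "2020-12-01", as set in __init__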
jeromecc/doctoctocbot
src/crowdfunding/migrations/0013_tiers.py
1
1118
# Generated by Django 2.0.13 on 2019-02-25 05:21 from decimal import Decimal from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('crowdfunding', '0012_auto_20190224_0523'), ] operations = [ migrations.CreateModel( name='Tiers', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tag', models.CharField(max_length=191)), ('description', models.CharField(max_length=191)), ('emoji', models.CharField(blank=True, max_length=4)), ('image', models.ImageField(blank=True, upload_to='')), ('min', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=12)), ('max', models.DecimalField(decimal_places=2, default=Decimal('Infinity'), max_digits=12)), ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='crowdfunding.Project')), ], ), ]
mpl-2.0
-3,936,454,583,790,252,500
38.928571
119
0.601073
false
4.007168
false
false
false
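The CreateModel operation above corresponds to a model definition roughly like the following; it is reconstructed from the migration, so the app's actual model (and the related crowdfunding.Project) may differ:

from decimal import Decimal
from django.db import models

class Tiers(models.Model):
    tag = models.CharField(max_length=191)
    description = models.CharField(max_length=191)
    emoji = models.CharField(blank=True, max_length=4)
    image = models.ImageField(blank=True, upload_to='')
    min = models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=12)
    max = models.DecimalField(decimal_places=2, default=Decimal('Infinity'), max_digits=12)
    project = models.ForeignKey('crowdfunding.Project', on_delete=models.CASCADE)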
yanni4night/ursa-django
app/settings.py
1
2208
""" Django settings for ursa-django project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) PROJECT_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'dev') # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'uq==k2a4+j^3i3)wns^+3%9)ww+eysjo0)-sg(hu5q$6=uqg^+' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'app.urls' WSGI_APPLICATION = 'app.wsgi.application' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases # DATABASES = { # 'default': { # 'ENGINE': 'django.db.backends.sqlite3', # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), # } # } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'zh-cn' TIME_ZONE = 'Asia/Shanghai' USE_I18N = False USE_L10N = False USE_TZ = False # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/' TEMPLATE_DIRS = [os.path.join(PROJECT_ROOT, 'templates')]
mit
2,840,151,740,766,177,000
24.988235
84
0.717391
false
3.190751
false
false
false
iotaledger/iota.lib.py
iota/multisig/commands/prepare_multisig_transfer.py
1
5102
from typing import List, Optional import filters as f from iota import Address, ProposedTransaction from iota.commands import FilterCommand, RequestFilter from iota.commands.core import GetBalancesCommand from iota.exceptions import with_context from iota.filters import Trytes from iota.multisig.transaction import ProposedMultisigBundle from iota.multisig.types import MultisigAddress __all__ = [ 'PrepareMultisigTransferCommand', ] class PrepareMultisigTransferCommand(FilterCommand): """ Implements `prepare_multisig_transfer` multisig API command. References: - :py:meth:`iota.multisig.api.MultisigIota.prepare_multisig_transfer` """ command = 'prepareMultisigTransfer' def get_request_filter(self) -> 'PrepareMultisigTransferRequestFilter': return PrepareMultisigTransferRequestFilter() def get_response_filter(self): pass async def _execute(self, request: dict) -> dict: change_address: Optional[Address] = request['changeAddress'] multisig_input: MultisigAddress = request['multisigInput'] transfers: List[ProposedTransaction] = request['transfers'] bundle = ProposedMultisigBundle(transfers) want_to_spend = bundle.balance if want_to_spend > 0: gb_response = await GetBalancesCommand(self.adapter)( addresses=[multisig_input], ) multisig_input.balance = gb_response['balances'][0] if multisig_input.balance < want_to_spend: raise with_context( exc=ValueError( 'Insufficient balance; found {found}, need {need} ' '(``exc.context`` has more info).'.format( found=multisig_input.balance, need=want_to_spend, ), ), # The structure of this context object is intended # to match the one from ``PrepareTransferCommand``. context={ 'available_to_spend': multisig_input.balance, 'confirmed_inputs': [multisig_input], 'request': request, 'want_to_spend': want_to_spend, }, ) bundle.add_inputs([multisig_input]) if bundle.balance < 0: if change_address: bundle.send_unspent_inputs_to(change_address) else: # # Unlike :py:meth:`iota.api.Iota.prepare_transfer` # where all of the inputs are owned by the same # seed, creating a multisig transfer usually # involves multiple people. # # It would be unfair to the participants of the # transaction if we were to automatically generate a # change address using the seed of whoever happened # to invoke the # :py:meth:`MultisigIota.prepare_multisig_transfer` # method! # raise with_context( exc=ValueError( 'Bundle has unspent inputs, ' 'but no change address specified.', ), context={ 'available_to_spend': multisig_input.balance, 'balance': bundle.balance, 'confirmed_inputs': [multisig_input], 'request': request, 'want_to_spend': want_to_spend, }, ) else: raise with_context( exc=ValueError( 'Use ``prepare_transfer`` ' 'to create a bundle without spending IOTAs.', ), context={ 'request': request, }, ) bundle.finalize() # Return the bundle with inputs unsigned. return { 'trytes': bundle.as_tryte_strings(), } class PrepareMultisigTransferRequestFilter(RequestFilter): def __init__(self) -> None: super(PrepareMultisigTransferRequestFilter, self).__init__( { 'changeAddress': Trytes(Address), 'multisigInput': f.Required | f.Type(MultisigAddress), 'transfers': f.Required | f.Array | f.FilterRepeater( f.Required | f.Type(ProposedTransaction), ), }, allow_missing_keys={ 'changeAddress', }, )
mit
-2,375,090,809,166,574,000
35.971014
83
0.488828
false
5.232821
false
false
false
alexis-roche/nipy
nipy/testing/__init__.py
2
1369
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The testing directory contains a small set of imaging files to be used for doctests only. More thorough tests and example data will be stored in nipy data packages that you can download separately. .. note: We use the ``nose`` testing framework for tests. Nose is a dependency for the tests, but should not be a dependency for running the algorithms in the NIPY library. This file should import without nose being present on the python path. Examples -------- >>> from nipy.testing import funcfile >>> from nipy.io.api import load_image >>> img = load_image(funcfile) >>> img.shape (17, 21, 3, 20) """ from __future__ import absolute_import import os #__all__ = ['funcfile', 'anatfile'] # Discover directory path filepath = os.path.abspath(__file__) basedir = os.path.dirname(filepath) funcfile = os.path.join(basedir, 'functional.nii.gz') anatfile = os.path.join(basedir, 'anatomical.nii.gz') from numpy.testing import * # Overwrites numpy.testing.Tester from .nosetester import NipyNoseTester as Tester test = Tester().test bench = Tester().bench from . import decorators as dec # Allow failed import of nose if not now running tests try: from nose.tools import assert_true, assert_false except ImportError: pass
bsd-3-clause
-6,188,143,799,394,715,000
26.38
73
0.723156
false
3.413965
true
false
false
openstack/mistral
mistral/api/controllers/v2/execution.py
1
17181
# Copyright 2013 - Mirantis, Inc. # Copyright 2015 - StackStorm, Inc. # Copyright 2015 Huawei Technologies Co., Ltd. # Copyright 2016 - Brocade Communications Systems, Inc. # Copyright 2018 - Extreme Networks, Inc. # Copyright 2019 - NetCracker Technology Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_log import log as logging from oslo_utils import uuidutils from pecan import rest from wsme import types as wtypes import wsmeext.pecan as wsme_pecan from mistral.api import access_control as acl from mistral.api.controllers.v2 import execution_report from mistral.api.controllers.v2 import resources from mistral.api.controllers.v2 import sub_execution from mistral.api.controllers.v2 import task from mistral.api.controllers.v2 import types from mistral import context from mistral.db.v2 import api as db_api from mistral.db.v2.sqlalchemy import models as db_models from mistral import exceptions as exc from mistral.rpc import clients as rpc from mistral.services import workflows as wf_service from mistral.utils import filter_utils from mistral.utils import rest_utils from mistral.workflow import data_flow from mistral.workflow import states from mistral_lib.utils import merge_dicts LOG = logging.getLogger(__name__) STATE_TYPES = wtypes.Enum( str, states.IDLE, states.RUNNING, states.SUCCESS, states.ERROR, states.PAUSED, states.CANCELLED ) def _get_workflow_execution_resource_with_output(wf_ex): rest_utils.load_deferred_fields(wf_ex, ['params', 'input', 'output']) return resources.Execution.from_db_model(wf_ex) def _get_workflow_execution_resource(wf_ex): rest_utils.load_deferred_fields(wf_ex, ['params', 'input']) return resources.Execution.from_db_model(wf_ex) # Use retries to prevent possible failures. @rest_utils.rest_retry_on_db_error def _get_workflow_execution(id, must_exist=True): with db_api.transaction(): if must_exist: wf_ex = db_api.get_workflow_execution(id) else: wf_ex = db_api.load_workflow_execution(id) return rest_utils.load_deferred_fields( wf_ex, ['params', 'input', 'output', 'context', 'spec'] ) # TODO(rakhmerov): Make sure to make all needed renaming on public API. class ExecutionsController(rest.RestController): tasks = task.ExecutionTasksController() report = execution_report.ExecutionReportController() executions = sub_execution.SubExecutionsController() @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Execution, wtypes.text) def get(self, id): """Return the specified Execution. :param id: UUID of execution to retrieve. """ acl.enforce("executions:get", context.ctx()) LOG.debug("Fetch execution [id=%s]", id) wf_ex = _get_workflow_execution(id) resource = resources.Execution.from_db_model(wf_ex) resource.published_global = ( data_flow.get_workflow_execution_published_global(wf_ex) ) return resource @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose( resources.Execution, wtypes.text, body=resources.Execution ) def put(self, id, wf_ex): """Update the specified workflow execution. :param id: UUID of execution to update. 
:param wf_ex: Execution object. """ acl.enforce('executions:update', context.ctx()) LOG.debug('Update execution [id=%s, execution=%s]', id, wf_ex) @rest_utils.rest_retry_on_db_error def _compute_delta(wf_ex): with db_api.transaction(): # ensure that workflow execution exists db_api.get_workflow_execution( id, fields=(db_models.WorkflowExecution.id,) ) delta = {} if wf_ex.state: delta['state'] = wf_ex.state if wf_ex.description: delta['description'] = wf_ex.description if wf_ex.params and wf_ex.params.get('env'): delta['env'] = wf_ex.params.get('env') # Currently we can change only state, description, or env. if len(delta.values()) <= 0: raise exc.InputException( 'The property state, description, or env ' 'is not provided for update.' ) # Description cannot be updated together with state. if delta.get('description') and delta.get('state'): raise exc.InputException( 'The property description must be updated ' 'separately from state.' ) # If state change, environment cannot be updated # if not RUNNING. if (delta.get('env') and delta.get('state') and delta['state'] != states.RUNNING): raise exc.InputException( 'The property env can only be updated when workflow ' 'execution is not running or on resume from pause.' ) if delta.get('description'): wf_ex = db_api.update_workflow_execution( id, {'description': delta['description']} ) if not delta.get('state') and delta.get('env'): wf_ex = db_api.get_workflow_execution(id) wf_ex = wf_service.update_workflow_execution_env( wf_ex, delta.get('env') ) return delta, wf_ex delta, wf_ex = _compute_delta(wf_ex) if delta.get('state'): if states.is_paused(delta.get('state')): wf_ex = rpc.get_engine_client().pause_workflow(id) elif delta.get('state') == states.RUNNING: wf_ex = rpc.get_engine_client().resume_workflow( id, env=delta.get('env') ) elif states.is_completed(delta.get('state')): msg = wf_ex.state_info if wf_ex.state_info else None wf_ex = rpc.get_engine_client().stop_workflow( id, delta.get('state'), msg ) else: # To prevent changing state in other cases throw a message. raise exc.InputException( "Cannot change state to %s. Allowed states are: '%s" % ( wf_ex.state, ', '.join([ states.RUNNING, states.PAUSED, states.SUCCESS, states.ERROR, states.CANCELLED ]) ) ) return resources.Execution.from_dict( wf_ex if isinstance(wf_ex, dict) else wf_ex.to_dict() ) @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose( resources.Execution, body=resources.Execution, status_code=201 ) def post(self, wf_ex): """Create a new Execution. :param wf_ex: Execution object with input content. """ acl.enforce('executions:create', context.ctx()) LOG.debug("Create execution [execution=%s]", wf_ex) exec_dict = wf_ex.to_dict() exec_id = exec_dict.get('id') if not exec_id: exec_id = uuidutils.generate_uuid() LOG.debug("Generated execution id [exec_id=%s]", exec_id) exec_dict.update({'id': exec_id}) wf_ex = None else: # If ID is present we need to check if such execution exists. # If yes, the method just returns the object. If not, the ID # will be used to create a new execution. wf_ex = _get_workflow_execution(exec_id, must_exist=False) if wf_ex: return resources.Execution.from_db_model(wf_ex) source_execution_id = exec_dict.get('source_execution_id') source_exec_dict = None if source_execution_id: # If source execution is present we will perform a lookup for # previous workflow execution model and the information to start # a new workflow based on that information. 
source_exec_dict = db_api.get_workflow_execution( source_execution_id).to_dict() exec_dict['description'] = "{} Based on the execution '{}'".format( exec_dict['description'], source_execution_id ) exec_dict['description'] = exec_dict['description'].strip() result_exec_dict = merge_dicts(source_exec_dict, exec_dict) if not (result_exec_dict.get('workflow_id') or result_exec_dict.get('workflow_name')): raise exc.WorkflowException( "Workflow ID or workflow name must be provided. Workflow ID is" " recommended." ) engine = rpc.get_engine_client() result = engine.start_workflow( result_exec_dict.get( 'workflow_id', result_exec_dict.get('workflow_name') ), result_exec_dict.get('workflow_namespace', ''), result_exec_dict.get('id'), result_exec_dict.get('input'), description=result_exec_dict.get('description', ''), **result_exec_dict.get('params') or {} ) return resources.Execution.from_dict(result) @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(None, wtypes.text, bool, status_code=204) def delete(self, id, force=False): """Delete the specified Execution. :param id: UUID of execution to delete. :param force: Optional. Force the deletion of unfinished executions. Default: false. While the api is backward compatible the behaviour is not the same. The new default is the safer option """ acl.enforce('executions:delete', context.ctx()) LOG.debug("Delete execution [id=%s]", id) if not force: state = db_api.get_workflow_execution( id, fields=(db_models.WorkflowExecution.state,) )[0] if not states.is_completed(state): raise exc.NotAllowedException( "Only completed executions can be deleted. " "Use --force to override this. " "Execution {} is in {} state".format(id, state) ) return rest_utils.rest_retry_on_db_error( db_api.delete_workflow_execution )(id) @rest_utils.wrap_wsme_controller_exception @wsme_pecan.wsexpose(resources.Executions, types.uuid, int, types.uniquelist, types.list, types.uniquelist, wtypes.text, types.uuid, wtypes.text, types.uniquelist, types.jsontype, types.uuid, types.uuid, STATE_TYPES, wtypes.text, types.jsontype, types.jsontype, wtypes.text, wtypes.text, bool, types.uuid, bool, types.list) def get_all(self, marker=None, limit=None, sort_keys='created_at', sort_dirs='asc', fields='', workflow_name=None, workflow_id=None, description=None, tags=None, params=None, task_execution_id=None, root_execution_id=None, state=None, state_info=None, input=None, output=None, created_at=None, updated_at=None, include_output=None, project_id=None, all_projects=False, nulls=''): """Return all Executions. :param marker: Optional. Pagination marker for large data sets. :param limit: Optional. Maximum number of resources to return in a single result. Default value is None for backward compatibility. :param sort_keys: Optional. Columns to sort results by. Default: created_at, which is backward compatible. :param sort_dirs: Optional. Directions to sort corresponding to sort_keys, "asc" or "desc" can be chosen. Default: desc. The length of sort_dirs can be equal or less than that of sort_keys. :param fields: Optional. A specified list of fields of the resource to be returned. 'id' will be included automatically in fields if it's provided, since it will be used when constructing 'next' link. :param workflow_name: Optional. Keep only resources with a specific workflow name. :param workflow_id: Optional. Keep only resources with a specific workflow ID. :param description: Optional. Keep only resources with a specific description. :param tags: Optional. 
Keep only resources containing specific tags. :param params: Optional. Keep only resources with specific parameters. :param task_execution_id: Optional. Keep only resources with a specific task execution ID. :param root_execution_id: Optional. Keep only resources with a specific root execution ID. :param state: Optional. Keep only resources with a specific state. :param state_info: Optional. Keep only resources with specific state information. :param input: Optional. Keep only resources with a specific input. :param output: Optional. Keep only resources with a specific output. :param created_at: Optional. Keep only resources created at a specific time and date. :param updated_at: Optional. Keep only resources with specific latest update time and date. :param include_output: Optional. Include the output for all executions in the list. :param project_id: Optional. Only get executions belong to the project. Admin required. :param all_projects: Optional. Get resources of all projects. Admin required. :param nulls: Optional. The names of the columns with null value in the query. """ acl.enforce('executions:list', context.ctx()) db_models.WorkflowExecution.check_allowed_none_values(nulls) if all_projects or project_id: acl.enforce('executions:list:all_projects', context.ctx()) filters = filter_utils.create_filters_from_request_params( none_values=nulls, created_at=created_at, workflow_name=workflow_name, workflow_id=workflow_id, tags=tags, params=params, task_execution_id=task_execution_id, state=state, state_info=state_info, input=input, output=output, updated_at=updated_at, description=description, project_id=project_id, root_execution_id=root_execution_id, ) LOG.debug( "Fetch executions. marker=%s, limit=%s, sort_keys=%s, " "sort_dirs=%s, filters=%s, all_projects=%s", marker, limit, sort_keys, sort_dirs, filters, all_projects ) if include_output: resource_function = _get_workflow_execution_resource_with_output else: resource_function = _get_workflow_execution_resource return rest_utils.get_all( resources.Executions, resources.Execution, db_api.get_workflow_executions, db_api.get_workflow_execution, resource_function=resource_function, marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs, fields=fields, all_projects=all_projects, **filters )
apache-2.0
-1,259,276,599,923,299,800
37.436242
79
0.569117
false
4.408776
false
false
false
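The workflow-execution REST controller in the record above only allows three mutable fields (state, description, env) and maps the requested state onto engine RPC calls: a paused state triggers pause_workflow, RUNNING triggers resume_workflow, and any completed state triggers stop_workflow. A minimal, dependency-free sketch of that dispatch rule; the constants below are illustrative stand-ins, not the real mistral.workflow.states module.

# Sketch only: mirrors the controller's state dispatch, not Mistral itself.
PAUSED, RUNNING, SUCCESS, ERROR, CANCELLED = (
    'PAUSED', 'RUNNING', 'SUCCESS', 'ERROR', 'CANCELLED')

def pick_engine_call(state):
    # paused -> pause, RUNNING -> resume, terminal states -> stop, else reject.
    if state == PAUSED:
        return 'pause_workflow'
    if state == RUNNING:
        return 'resume_workflow'
    if state in (SUCCESS, ERROR, CANCELLED):
        return 'stop_workflow'
    raise ValueError('Cannot change state to %s' % state)

assert pick_engine_call(RUNNING) == 'resume_workflow'
assert pick_engine_call(ERROR) == 'stop_workflow'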
kgullikson88/TS23-Scripts
CheckSyntheticTemperature.py
1
14868
import os import re from collections import defaultdict from operator import itemgetter import logging import pandas from scipy.interpolate import InterpolatedUnivariateSpline as spline from george import kernels import matplotlib.pyplot as plt import numpy as np import george import emcee import StarData import SpectralTypeRelations def classify_filename(fname, type='bright'): """ Given a CCF filename, it classifies the star combination, temperature, metallicity, and vsini :param fname: :return: """ # First, remove any leading directories fname = fname.split('/')[-1] # Star combination m1 = re.search('\.[0-9]+kps', fname) stars = fname[:m1.start()] star1 = stars.split('+')[0].replace('_', ' ') star2 = stars.split('+')[1].split('_{}'.format(type))[0].replace('_', ' ') # secondary star vsini vsini = float(fname[m1.start() + 1:].split('kps')[0]) # Temperature m2 = re.search('[0-9]+\.0K', fname) temp = float(m2.group()[:-1]) # logg m3 = re.search('K\+[0-9]\.[0-9]', fname) logg = float(m3.group()[1:]) # metallicity metal = float(fname.split(str(logg))[-1]) return star1, star2, vsini, temp, logg, metal def get_ccf_data(basedir, primary_name=None, secondary_name=None, vel_arr=np.arange(-900.0, 900.0, 0.1), type='bright'): """ Searches the given directory for CCF files, and classifies by star, temperature, metallicity, and vsini :param basedir: The directory to search for CCF files :keyword primary_name: Optional keyword. If given, it will only get the requested primary star data :keyword secondary_name: Same as primary_name, but only reads ccfs for the given secondary :keyword vel_arr: The velocities to interpolate each ccf at :return: pandas DataFrame """ if not basedir.endswith('/'): basedir += '/' all_files = ['{}{}'.format(basedir, f) for f in os.listdir(basedir) if type in f.lower()] primary = [] secondary = [] vsini_values = [] temperature = [] gravity = [] metallicity = [] ccf = [] for fname in all_files: star1, star2, vsini, temp, logg, metal = classify_filename(fname, type=type) if primary_name is not None and star1.lower() != primary_name.lower(): continue if secondary_name is not None and star2.lower() != secondary_name.lower(): continue vel, corr = np.loadtxt(fname, unpack=True) fcn = spline(vel, corr) ccf.append(fcn(vel_arr)) primary.append(star1) secondary.append(star2) vsini_values.append(vsini) temperature.append(temp) gravity.append(logg) metallicity.append(metal) # Make a pandas dataframe with all this data df = pandas.DataFrame(data={'Primary': primary, 'Secondary': secondary, 'Temperature': temperature, 'vsini': vsini_values, 'logg': gravity, '[Fe/H]': metallicity, 'CCF': ccf}) return df def get_ccf_summary(basedir, vel_arr=np.arange(-900.0, 900.0, 0.1), velocity='highest', type='bright'): """ Very similar to get_ccf_data, but does it in a way that is more memory efficient :param basedir: The directory to search for CCF files :keyword velocity: The velocity to measure the CCF at. 
The default is 'highest', and uses the maximum of the ccf :keyword vel_arr: The velocities to interpolate each ccf at :return: pandas DataFrame """ if not basedir.endswith('/'): basedir += '/' all_files = ['{}{}'.format(basedir, f) for f in os.listdir(basedir) if type in f.lower()] file_dict = defaultdict(lambda: defaultdict(list)) for fname in all_files: star1, star2, vsini, temp, logg, metal = classify_filename(fname, type=type) file_dict[star1][star2].append(fname) # Now, read the ccfs for each primary/secondary combo, and find the best combination summary_dfs = [] for primary in file_dict.keys(): for secondary in file_dict[primary].keys(): data = get_ccf_data(basedir, primary_name=primary, secondary_name=secondary, vel_arr=vel_arr, type=type) summary_dfs.append(find_best_pars(data, velocity=velocity, vel_arr=vel_arr)) return pandas.concat(summary_dfs, ignore_index=True) def find_best_pars(df, velocity='highest', vel_arr=np.arange(-900.0, 900.0, 0.1)): """ Find the 'best-fit' parameters for each combination of primary and secondary star :param df: the dataframe to search in :keyword velocity: The velocity to measure the CCF at. The default is 'highest', and uses the maximum of the ccf :keyword vel_arr: The velocities to interpolate each ccf at :return: a dataframe with keys of primary, secondary, and the parameters """ # Get the names of the primary and secondary stars primary_names = pandas.unique(df.Primary) secondary_names = pandas.unique(df.Secondary) # Find the ccf value at the given velocity if velocity == 'highest': fcn = lambda row: (np.max(row), vel_arr[np.argmax(row)]) vals = df['CCF'].map(fcn) df['ccf_max'] = vals.map(lambda l: l[0]) df['rv'] = vals.map(lambda l: l[1]) # df['ccf_max'] = df['CCF'].map(np.max) else: df['ccf_max'] = df['CCF'].map(lambda arr: arr[np.argmin(np.abs(vel_arr - velocity))]) # Find the best parameter for each combination d = defaultdict(list) for primary in primary_names: for secondary in secondary_names: good = df.loc[(df.Primary == primary) & (df.Secondary == secondary)] best = good.loc[good.ccf_max == good.ccf_max.max()] d['Primary'].append(primary) d['Secondary'].append(secondary) d['Temperature'].append(best['Temperature'].item()) d['vsini'].append(best['vsini'].item()) d['logg'].append(best['logg'].item()) d['[Fe/H]'].append(best['[Fe/H]'].item()) d['rv'].append(best['rv'].item()) return pandas.DataFrame(data=d) def get_detected_objects(df, tol=1.0): """ Takes a summary dataframe with RV information. Finds the median rv for each star, and removes objects that are 'tol' km/s from the median value :param df: A summary dataframe, such as created by find_best_pars :param tol: The tolerance, in km/s, to accept an observation as detected :return: a dataframe containing only detected companions """ secondary_names = pandas.unique(df.Secondary) secondary_to_rv = defaultdict(float) for secondary in secondary_names: rv = df.loc[df.Secondary == secondary]['rv'].median() secondary_to_rv[secondary] = rv print secondary, rv keys = df.Secondary.values good = df.loc[abs(df.rv.values - np.array(itemgetter(*keys)(secondary_to_rv))) < tol] return good def add_actual_temperature(df, method='spt'): """ Add the actual temperature to a given summary dataframe :param df: The dataframe to which we will add the actual secondary star temperature :param method: How to get the actual temperature. 
Options are: - 'spt': Use main-sequence relationships to go from spectral type --> temperature - 'excel': Use tabulated data, available in the file 'SecondaryStar_Temperatures.xls' :return: copy of the original dataframe, with an extra column for the secondary star temperature """ # First, get a list of the secondary stars in the data secondary_names = pandas.unique(df.Secondary) secondary_to_temperature = defaultdict(float) secondary_to_error = defaultdict(float) if method.lower() == 'spt': MS = SpectralTypeRelations.MainSequence() for secondary in secondary_names: star_data = StarData.GetData(secondary) spt = star_data.spectype[0] + re.search('[0-9]\.*[0-9]*', star_data.spectype).group() T_sec = MS.Interpolate(MS.Temperature, spt) secondary_to_temperature[secondary] = T_sec elif method.lower() == 'excel': table = pandas.read_excel('SecondaryStar_Temperatures.xls', 0) for secondary in secondary_names: T_sec = table.loc[table.Star.str.lower().str.contains(secondary.strip().lower())]['Literature_Temp'].item() T_error = table.loc[table.Star.str.lower().str.contains(secondary.strip().lower())][ 'Literature_error'].item() secondary_to_temperature[secondary] = T_sec secondary_to_error[secondary] = T_error df['Tactual'] = df['Secondary'].map(lambda s: secondary_to_temperature[s]) df['Tact_err'] = df['Secondary'].map(lambda s: secondary_to_error[s]) return def make_gaussian_process_samples(df): """ Make a gaussian process fitting the Tactual-Tmeasured relationship :param df: pandas DataFrame with columns 'Temperature' (with the measured temperature) and 'Tactual' (for the actual temperature) :return: emcee sampler instance """ # First, find the uncertainties at each actual temperature # Tactual = df['Tactual'].values #Tmeasured = df['Temperature'].values #error = df['Tact_err'].values temp = df.groupby('Temperature').mean()['Tactual'] Tmeasured = temp.keys().values Tactual = temp.values error = np.nan_to_num(df.groupby('Temperature').std(ddof=1)['Tactual'].values) default = np.median(error[error > 1]) error = np.maximum(error, np.ones(error.size) * default) for Tm, Ta, e in zip(Tmeasured, Tactual, error): print Tm, Ta, e plt.figure(1) plt.errorbar(Tmeasured, Tactual, yerr=error, fmt='.k', capsize=0) plt.plot(Tmeasured, Tmeasured, 'r--') plt.xlim((min(Tmeasured) - 100, max(Tmeasured) + 100)) plt.xlabel('Measured Temperature') plt.ylabel('Actual Temperature') plt.show(block=False) # Define some functions to use in the GP fit def model(pars, T): #polypars = pars[2:] #return np.poly1d(polypars)(T) return T def lnlike(pars, Tact, Tmeas, Terr): a, tau = np.exp(pars[:2]) gp = george.GP(a * kernels.ExpSquaredKernel(tau)) gp.compute(Tmeas, Terr) return gp.lnlikelihood(Tact - model(pars, Tmeas)) def lnprior(pars): lna, lntau = pars[:2] polypars = pars[2:] if -20 < lna < 20 and 4 < lntau < 20: return 0.0 return -np.inf def lnprob(pars, x, y, yerr): lp = lnprior(pars) return lp + lnlike(pars, x, y, yerr) if np.isfinite(lp) else -np.inf # Set up the emcee fitter initial = np.array([0, 6])#, 1.0, 0.0]) ndim = len(initial) nwalkers = 100 p0 = [np.array(initial) + 1e-8 * np.random.randn(ndim) for i in xrange(nwalkers)] sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob, args=(Tactual, Tmeasured, error)) print 'Running first burn-in' p1, lnp, _ = sampler.run_mcmc(p0, 500) sampler.reset() print "Running second burn-in..." p_best = p1[np.argmax(lnp)] p2 = [p_best + 1e-8 * np.random.randn(ndim) for i in xrange(nwalkers)] p3, _, _ = sampler.run_mcmc(p2, 250) sampler.reset() print "Running production..." 
sampler.run_mcmc(p3, 1000) # Plot a bunch of the fits print "Plotting..." N = 100 Tvalues = np.arange(3300, 7000, 20) idx = np.argsort(-sampler.lnprobability.flatten())[:N] # Get N 'best' curves par_vals = sampler.flatchain[idx] for i, pars in enumerate(par_vals): a, tau = np.exp(pars[:2]) gp = george.GP(a * kernels.ExpSquaredKernel(tau)) gp.compute(Tmeasured, error) s = gp.sample_conditional(Tactual - model(pars, Tmeasured), Tvalues) + model(pars, Tvalues) plt.plot(Tvalues, s, 'b-', alpha=0.1) plt.draw() # Finally, get posterior samples at all the possibly measured temperatures print 'Generating posterior samples at all temperatures...' N = 10000 # This is 1/10th of the total number of samples! idx = np.argsort(-sampler.lnprobability.flatten())[:N] # Get N 'best' curves par_vals = sampler.flatchain[idx] Tvalues = np.arange(3000, 6900, 100) gp_posterior = [] for pars in par_vals: a, tau = np.exp(pars[:2]) gp = george.GP(a * kernels.ExpSquaredKernel(tau)) gp.compute(Tmeasured, error) s = gp.sample_conditional(Tactual - model(pars, Tmeasured), Tvalues) + model(pars, Tvalues) gp_posterior.append(s) # Finally, make confidence intervals for the actual temperatures gp_posterior = np.array(gp_posterior) l, m, h = np.percentile(gp_posterior, [16.0, 50.0, 84.0], axis=0) conf = pandas.DataFrame(data={'Measured Temperature': Tvalues, 'Actual Temperature': m, 'Lower Bound': l, 'Upper bound': h}) conf.to_csv('Confidence_Intervals.csv', index=False) return sampler, np.array(gp_posterior) def check_posterior(df, posterior, Tvalues): """ Checks the posterior samples: Are 95% of the measurements within 2-sigma of the prediction? :param df: The summary dataframe :param posterior: The MCMC predicted values :param Tvalues: The measured temperatures the posterior was made with :return: boolean, as well as some warning messages if applicable """ # First, make 2-sigma confidence intervals l, m, h = np.percentile(posterior, [5.0, 50.0, 95.0], axis=0) # Save the confidence intervals # conf = pandas.DataFrame(data={'Measured Temperature': Tvalues, 'Actual Temperature': m, # 'Lower Bound': l, 'Upper bound': h}) #conf.to_csv('Confidence_Intervals.csv', index=False) Ntot = [] # The total number of observations with the given measured temperature Nacc = [] # The number that have actual temperatures within the confidence interval g = df.groupby('Temperature') for i, T in enumerate(Tvalues): if T in g.groups.keys(): Ta = g.get_group(T)['Tactual'] low, high = l[i], h[i] Ntot.append(len(Ta)) Nacc.append(len(Ta.loc[(Ta >= low) & (Ta <= high)])) p = float(Nacc[-1]) / float(Ntot[-1]) if p < 0.95: logging.warn( 'Only {}/{} of the samples ({:.2f}%) were accepted for T = {} K'.format(Nacc[-1], Ntot[-1], p * 100, T)) print low, high print sorted(Ta) else: Ntot.append(0) Nacc.append(0) p = float(sum(Nacc)) / float(sum(Ntot)) if p < 0.95: logging.warn('Only {:.2f}% of the total samples were accepted!'.format(p * 100)) return False return True if __name__ == '__main__': pass
gpl-3.0
-920,756,524,181,589,000
39.402174
120
0.625572
false
3.54
false
false
false
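classify_filename in the CheckSyntheticTemperature.py record above recovers the star pair, vsini, temperature, logg and [Fe/H] from a CCF filename with regular expressions. A self-contained sketch of the same parsing on a made-up filename; the name and the resulting values are illustrative only, not taken from the dataset.

import re

# Hypothetical filename following the pattern the regexes above expect.
fname = 'HIP_1234+GJ_567_bright.20kps_5500.0K+4.5+0.0'
m1 = re.search(r'\.[0-9]+kps', fname)
stars = fname[:m1.start()]
star1 = stars.split('+')[0].replace('_', ' ')
star2 = stars.split('+')[1].split('_bright')[0].replace('_', ' ')
vsini = float(fname[m1.start() + 1:].split('kps')[0])
temp = float(re.search(r'[0-9]+\.0K', fname).group()[:-1])
logg = float(re.search(r'K\+[0-9]\.[0-9]', fname).group()[1:])
metal = float(fname.split(str(logg))[-1])
print(star1, star2, vsini, temp, logg, metal)
# HIP 1234 GJ 567 20.0 5500.0 4.5 0.0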
MattDevo/edk2
BaseTools/Source/Python/Workspace/MetaFileTable.py
1
16975
## @file # This file is used to create/update/query/erase a meta file table # # Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR> # This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ## # Import Modules # from __future__ import absolute_import import uuid import Common.EdkLogger as EdkLogger from Common.BuildToolError import FORMAT_INVALID from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \ MODEL_FILE_OTHERS from Common.DataType import * class MetaFileTable(): # TRICK: use file ID as the part before '.' _ID_STEP_ = 1 _ID_MAX_ = 99999999 ## Constructor def __init__(self, DB, MetaFile, FileType, Temporary, FromItem=None): self.MetaFile = MetaFile self.TableName = "" self.DB = DB self._NumpyTab = None self.CurrentContent = [] DB.TblFile.append([MetaFile.Name, MetaFile.Ext, MetaFile.Dir, MetaFile.Path, FileType, MetaFile.TimeStamp, FromItem]) self.FileId = len(DB.TblFile) self.ID = self.FileId * 10**8 if Temporary: self.TableName = "_%s_%s_%s" % (FileType, len(DB.TblFile), uuid.uuid4().hex) else: self.TableName = "_%s_%s" % (FileType, len(DB.TblFile)) def IsIntegrity(self): try: TimeStamp = self.MetaFile.TimeStamp if not self.CurrentContent: Result = False else: Result = self.CurrentContent[-1][0] < 0 if not Result: # update the timestamp in database self.DB.SetFileTimeStamp(self.FileId, TimeStamp) return False if TimeStamp != self.DB.GetFileTimeStamp(self.FileId): # update the timestamp in database self.DB.SetFileTimeStamp(self.FileId, TimeStamp) return False except Exception as Exc: EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc)) return False return True def SetEndFlag(self): self.CurrentContent.append(self._DUMMY_) def GetAll(self): return [item for item in self.CurrentContent if item[0] >= 0 ] ## Python class representation of table storing module data class ModuleTable(MetaFileTable): _COLUMN_ = ''' ID REAL PRIMARY KEY, Model INTEGER NOT NULL, Value1 TEXT NOT NULL, Value2 TEXT, Value3 TEXT, Scope1 TEXT, Scope2 TEXT, BelongsToItem REAL NOT NULL, StartLine INTEGER NOT NULL, StartColumn INTEGER NOT NULL, EndLine INTEGER NOT NULL, EndColumn INTEGER NOT NULL, Enabled INTEGER DEFAULT 0 ''' # used as table end flag, in case the changes to database is not committed to db file _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1] ## Constructor def __init__(self, Db, MetaFile, Temporary): MetaFileTable.__init__(self, Db, MetaFile, MODEL_FILE_INF, Temporary) ## Insert a record into table Inf # # @param Model: Model of a Inf item # @param Value1: Value1 of a Inf item # @param Value2: Value2 of a Inf item # @param Value3: Value3 of a Inf item # @param Scope1: Arch of a Inf item # @param Scope2 Platform os a Inf item # @param BelongsToItem: The item belongs to which another item # @param StartLine: StartLine of a Inf item # @param StartColumn: StartColumn of a Inf item # @param EndLine: EndLine of a Inf item # @param EndColumn: EndColumn of a Inf item # @param Enabled: If this item enabled # def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, 
Enabled=0): (Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip()) self.ID = self.ID + self._ID_STEP_ if self.ID >= (MODEL_FILE_INF + self._ID_MAX_): self.ID = MODEL_FILE_INF + self._ID_STEP_ row = [ self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, StartLine, StartColumn, EndLine, EndColumn, Enabled ] self.CurrentContent.append(row) return self.ID ## Query table # # @param Model: The Model of Record # @param Arch: The Arch attribute of Record # @param Platform The Platform attribute of Record # # @retval: A recordSet of all found records # def Query(self, Model, Arch=None, Platform=None, BelongsToItem=None): QueryTab = self.CurrentContent result = [item for item in QueryTab if item[1] == Model and item[-1]>=0 ] if Arch is not None and Arch != TAB_ARCH_COMMON: ArchList = set(['COMMON']) ArchList.add(Arch) result = [item for item in result if item[5] in ArchList] if Platform is not None and Platform != TAB_COMMON: Platformlist = set( ['COMMON','DEFAULT']) Platformlist.add(Platform) result = [item for item in result if item[6] in Platformlist] if BelongsToItem is not None: result = [item for item in result if item[7] == BelongsToItem] result = [ [r[2],r[3],r[4],r[5],r[6],r[0],r[9]] for r in result ] return result ## Python class representation of table storing package data class PackageTable(MetaFileTable): _COLUMN_ = ''' ID REAL PRIMARY KEY, Model INTEGER NOT NULL, Value1 TEXT NOT NULL, Value2 TEXT, Value3 TEXT, Scope1 TEXT, Scope2 TEXT, BelongsToItem REAL NOT NULL, StartLine INTEGER NOT NULL, StartColumn INTEGER NOT NULL, EndLine INTEGER NOT NULL, EndColumn INTEGER NOT NULL, Enabled INTEGER DEFAULT 0 ''' # used as table end flag, in case the changes to database is not committed to db file _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1] ## Constructor def __init__(self, Cursor, MetaFile, Temporary): MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DEC, Temporary) ## Insert table # # Insert a record into table Dec # # @param Model: Model of a Dec item # @param Value1: Value1 of a Dec item # @param Value2: Value2 of a Dec item # @param Value3: Value3 of a Dec item # @param Scope1: Arch of a Dec item # @param Scope2: Module type of a Dec item # @param BelongsToItem: The item belongs to which another item # @param StartLine: StartLine of a Dec item # @param StartColumn: StartColumn of a Dec item # @param EndLine: EndLine of a Dec item # @param EndColumn: EndColumn of a Dec item # @param Enabled: If this item enabled # def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0): (Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip()) self.ID = self.ID + self._ID_STEP_ row = [ self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, StartLine, StartColumn, EndLine, EndColumn, Enabled ] self.CurrentContent.append(row) return self.ID ## Query table # # @param Model: The Model of Record # @param Arch: The Arch attribute of Record # # @retval: A recordSet of all found records # def Query(self, Model, Arch=None): QueryTab = self.CurrentContent result = [item for item in QueryTab if item[1] == Model and item[-1]>=0 ] if Arch is not None and Arch != TAB_ARCH_COMMON: ArchList = set(['COMMON']) ArchList.add(Arch) result = [item for item in result if item[5] in ArchList] return 
[[r[2], r[3], r[4], r[5], r[6], r[0], r[8]] for r in result] def GetValidExpression(self, TokenSpaceGuid, PcdCName): QueryTab = self.CurrentContent result = [[item[2], item[8]] for item in QueryTab if item[3] == TokenSpaceGuid and item[4] == PcdCName] validateranges = [] validlists = [] expressions = [] try: for row in result: comment = row[0] LineNum = row[1] comment = comment.strip("#") comment = comment.strip() oricomment = comment if comment.startswith("@ValidRange"): comment = comment.replace("@ValidRange", "", 1) validateranges.append(comment.split("|")[1].strip()) if comment.startswith("@ValidList"): comment = comment.replace("@ValidList", "", 1) validlists.append(comment.split("|")[1].strip()) if comment.startswith("@Expression"): comment = comment.replace("@Expression", "", 1) expressions.append(comment.split("|")[1].strip()) except Exception as Exc: ValidType = "" if oricomment.startswith("@ValidRange"): ValidType = "@ValidRange" if oricomment.startswith("@ValidList"): ValidType = "@ValidList" if oricomment.startswith("@Expression"): ValidType = "@Expression" EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType, TokenSpaceGuid, PcdCName), ExtraData=oricomment, File=self.MetaFile, Line=LineNum) return set(), set(), set() return set(validateranges), set(validlists), set(expressions) ## Python class representation of table storing platform data class PlatformTable(MetaFileTable): _COLUMN_ = ''' ID REAL PRIMARY KEY, Model INTEGER NOT NULL, Value1 TEXT NOT NULL, Value2 TEXT, Value3 TEXT, Scope1 TEXT, Scope2 TEXT, Scope3 TEXT, BelongsToItem REAL NOT NULL, FromItem REAL NOT NULL, StartLine INTEGER NOT NULL, StartColumn INTEGER NOT NULL, EndLine INTEGER NOT NULL, EndColumn INTEGER NOT NULL, Enabled INTEGER DEFAULT 0 ''' # used as table end flag, in case the changes to database is not committed to db file _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====','====', -1, -1, -1, -1, -1, -1, -1] ## Constructor def __init__(self, Cursor, MetaFile, Temporary, FromItem=0): MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DSC, Temporary, FromItem) ## Insert table # # Insert a record into table Dsc # # @param Model: Model of a Dsc item # @param Value1: Value1 of a Dsc item # @param Value2: Value2 of a Dsc item # @param Value3: Value3 of a Dsc item # @param Scope1: Arch of a Dsc item # @param Scope2: Module type of a Dsc item # @param BelongsToItem: The item belongs to which another item # @param FromItem: The item belongs to which dsc file # @param StartLine: StartLine of a Dsc item # @param StartColumn: StartColumn of a Dsc item # @param EndLine: EndLine of a Dsc item # @param EndColumn: EndColumn of a Dsc item # @param Enabled: If this item enabled # def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, Scope3=TAB_DEFAULT_STORES_DEFAULT,BelongsToItem=-1, FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1): (Value1, Value2, Value3, Scope1, Scope2, Scope3) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip(), Scope3.strip()) self.ID = self.ID + self._ID_STEP_ row = [ self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, Scope3, BelongsToItem, FromItem, StartLine, StartColumn, EndLine, EndColumn, Enabled ] self.CurrentContent.append(row) return self.ID ## Query table # # @param Model: The Model of Record # @param Scope1: Arch of a Dsc item # @param Scope2: Module type of a Dsc item # @param BelongsToItem: The item belongs to which another 
item # @param FromItem: The item belongs to which dsc file # # @retval: A recordSet of all found records # def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None): QueryTab = self.CurrentContent result = [item for item in QueryTab if item[1] == Model and item[-1]>0 ] if Scope1 is not None and Scope1 != TAB_ARCH_COMMON: Sc1 = set(['COMMON']) Sc1.add(Scope1) result = [item for item in result if item[5] in Sc1] Sc2 = set( ['COMMON','DEFAULT']) if Scope2 and Scope2 != TAB_COMMON: if '.' in Scope2: Index = Scope2.index('.') NewScope = TAB_COMMON + Scope2[Index:] Sc2.add(NewScope) Sc2.add(Scope2) result = [item for item in result if item[6] in Sc2] if BelongsToItem is not None: result = [item for item in result if item[8] == BelongsToItem] else: result = [item for item in result if item[8] < 0] if FromItem is not None: result = [item for item in result if item[9] == FromItem] result = [ [r[2],r[3],r[4],r[5],r[6],r[7],r[0],r[9]] for r in result ] return result ## Factory class to produce different storage for different type of meta-file class MetaFileStorage(object): _FILE_TABLE_ = { MODEL_FILE_INF : ModuleTable, MODEL_FILE_DEC : PackageTable, MODEL_FILE_DSC : PlatformTable, MODEL_FILE_OTHERS : MetaFileTable, } _FILE_TYPE_ = { ".inf" : MODEL_FILE_INF, ".dec" : MODEL_FILE_DEC, ".dsc" : MODEL_FILE_DSC, } _ObjectCache = {} ## Constructor def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False, FromItem=None): # no type given, try to find one key = (MetaFile.Path, FileType,Temporary,FromItem) if key in Class._ObjectCache: return Class._ObjectCache[key] if not FileType: if MetaFile.Type in self._FILE_TYPE_: FileType = Class._FILE_TYPE_[MetaFile.Type] else: FileType = MODEL_FILE_OTHERS # don't pass the type around if it's well known if FileType == MODEL_FILE_OTHERS: Args = (Cursor, MetaFile, FileType, Temporary) else: Args = (Cursor, MetaFile, Temporary) if FromItem: Args = Args + (FromItem,) # create the storage object and return it to caller reval = Class._FILE_TABLE_[FileType](*Args) if not Temporary: Class._ObjectCache[key] = reval return reval
bsd-2-clause
8,104,280,331,112,390,000
36.492063
155
0.544035
false
3.958722
false
false
false
ssharpjr/taskbuster-boilerplate
taskbuster/apps/taskmanager/models.py
1
2262
# -*- coding: utf-8 -*- from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.dispatch import receiver from django.db.models.signals import post_save from django.core.validators import RegexValidator from . import managers class Profile(models.Model): # Relations user = models.OneToOneField( settings.AUTH_USER_MODEL, related_name="profile", verbose_name=_("user") ) # Attributes - Mandatory interaction = models.PositiveIntegerField( default=0, verbose_name=_("interaction") ) # Attributes - Optional # Object Manager objects = managers.ProfileManager() # Custom Properties @property def username(self): return self.user.username # Methods # Meta and String class Meta: verbose_name = _("Profile") verbose_name_plural = _("Profiles") ordering = ("user",) def __str__(self): return self.user.username @receiver(post_save, sender=settings.AUTH_USER_MODEL) def create_profile_for_new_user(sender, created, instance, **kwargs): if created: profile = Profile(user=instance) profile.save() class Project(models.Model): # Relations user = models.ForeignKey( Profile, related_name="projects", verbose_name=_("user") ) # Attributes - Mandatory name = models.CharField( max_length=100, verbose_name=_("name"), help_text=_("Enter the project name") ) color = models.CharField( max_length=7, default="#fff", validators=[RegexValidator( "(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)")], verbose_name=_("color"), help_text=_("Enter the hex color code, like #ccc or #cccccc") ) # Attributes - Optional # Object Manager objects = managers.ProjectManager() # Custom Properties # Methods # Meta and String class Meta: verbose_name = _("Project") verbose_name_plural = _("Projects") ordering = ("user", "name") unique_together = ("user", "name") def __str__(self): return "%s - %s" % (self.user, self.name)
mit
2,343,749,543,026,243,000
25
69
0.599912
false
4.097826
false
false
false
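The Project.color field in the taskmanager models.py record above is constrained by a RegexValidator to 3- or 6-digit hex codes. The pattern can be exercised on its own, outside Django:

import re

HEX_COLOR = re.compile(r'(^#[0-9a-fA-F]{3}$)|(^#[0-9a-fA-F]{6}$)')
assert HEX_COLOR.match('#ccc')
assert HEX_COLOR.match('#00FFaa')
assert HEX_COLOR.match('#cccc') is None   # 4 digits rejected
assert HEX_COLOR.match('cccccc') is None  # missing leading '#'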
CitoEngine/cito_engine
app/cito_engine/actions/json_formatter.py
1
1266
"""Copyright 2014 Cyrus Dasadia Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import re import simplejson def create_json_parameters(event_action, incident, message=None): plugin_parameters = event_action.pluginParameters plugin_parameters = re.sub('"__EVENTID__"', simplejson.dumps(unicode(incident.event.id)), plugin_parameters) plugin_parameters = re.sub('"__INCIDENTID__"', simplejson.dumps(unicode(incident.id)), plugin_parameters) plugin_parameters = re.sub('"__ELEMENT__"', simplejson.dumps(unicode(incident.element)), plugin_parameters) plugin_parameters = re.sub('"__MESSAGE__"', simplejson.dumps(unicode(message)), plugin_parameters) return '{"plugin": %s, "parameters": %s}' % (simplejson.dumps(unicode(event_action.plugin.name)), plugin_parameters)
apache-2.0
1,861,107,108,582,375,000
47.730769
120
0.756714
false
4.044728
false
false
false
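create_json_parameters in the json_formatter.py record above fills the __EVENTID__/__INCIDENTID__/__ELEMENT__/__MESSAGE__ placeholders by regex substitution and wraps the result in a plugin payload. A Python 3 sketch of the same idea; the plugin name, fields and values below are made up for illustration.

import json
import re

plugin_parameters = '{"ticket": "__INCIDENTID__", "host": "__ELEMENT__"}'
substitutions = {'__INCIDENTID__': '42', '__ELEMENT__': 'web01'}

# Replace each quoted placeholder with a JSON-encoded value, as above.
for placeholder, value in substitutions.items():
    plugin_parameters = re.sub('"%s"' % placeholder, json.dumps(value),
                               plugin_parameters)

payload = '{"plugin": %s, "parameters": %s}' % (json.dumps('example_plugin'),
                                                plugin_parameters)
print(payload)
# {"plugin": "example_plugin", "parameters": {"ticket": "42", "host": "web01"}}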
quantumlib/Cirq
dev_tools/profiling/benchmark_serializers.py
1
4296
# Copyright 2020 The Cirq Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tool for benchmarking serialization of large circuits. This tool was originally introduced to enable comparison of the two JSON serialization protocols (gzip and non-gzip): https://github.com/quantumlib/Cirq/pull/3662 This is part of the "efficient serialization" effort: https://github.com/quantumlib/Cirq/issues/3438 Run this benchmark with the following command (make sure to install cirq-dev): python3 dev_tools/profiling/benchmark_serializers.py \ --num_gates=<int> --nesting_depth=<int> --num_repetitions=<int> WARNING: runtime increases exponentially with nesting_depth. Values much higher than nesting_depth=10 are not recommended. """ import argparse import sys import timeit import numpy as np import cirq _JSON_GZIP = 'json_gzip' _JSON = 'json' NUM_QUBITS = 8 SUFFIXES = ['B', 'kB', 'MB', 'GB', 'TB'] def serialize(serializer: str, num_gates: int, nesting_depth: int) -> int: """"Runs a round-trip of the serializer.""" circuit = cirq.Circuit() for _ in range(num_gates): which = np.random.choice(['expz', 'expw', 'exp11']) if which == 'expw': q1 = cirq.GridQubit(0, np.random.randint(NUM_QUBITS)) circuit.append( cirq.PhasedXPowGate( phase_exponent=np.random.random(), exponent=np.random.random() ).on(q1) ) elif which == 'expz': q1 = cirq.GridQubit(0, np.random.randint(NUM_QUBITS)) circuit.append(cirq.Z(q1) ** np.random.random()) elif which == 'exp11': q1 = cirq.GridQubit(0, np.random.randint(NUM_QUBITS - 1)) q2 = cirq.GridQubit(0, q1.col + 1) circuit.append(cirq.CZ(q1, q2) ** np.random.random()) cs = [circuit] for _ in range(1, nesting_depth): fc = cs[-1].freeze() cs.append(cirq.Circuit(fc.to_op(), fc.to_op())) test_circuit = cs[-1] if serializer == _JSON: json_data = cirq.to_json(test_circuit) assert json_data is not None data_size = len(json_data) cirq.read_json(json_text=json_data) elif serializer == _JSON_GZIP: gzip_data = cirq.to_json_gzip(test_circuit) assert gzip_data is not None data_size = len(gzip_data) cirq.read_json_gzip(gzip_raw=gzip_data) return data_size def main( num_gates: int, nesting_depth: int, num_repetitions: int, setup: str = 'from __main__ import serialize', ): for serializer in [_JSON_GZIP, _JSON]: print() print(f'Using serializer "{serializer}":') command = f'serialize(\'{serializer}\', {num_gates}, {nesting_depth})' time = timeit.timeit(command, setup, number=num_repetitions) print(f'Round-trip serializer time: {time / num_repetitions}s') data_size = float(serialize(serializer, num_gates, nesting_depth)) suffix_idx = 0 while data_size > 1000: data_size /= 1024 suffix_idx += 1 print(f'Serialized data size: {data_size} {SUFFIXES[suffix_idx]}.') def parse_arguments(args): parser = argparse.ArgumentParser('Benchmark a serializer.') parser.add_argument( '--num_gates', default=100, type=int, help='Number of gates at the bottom nesting layer.' ) parser.add_argument( '--nesting_depth', default=1, type=int, help='Depth of nested subcircuits. 
Total gate count will be 2^nesting_depth * num_gates.', ) parser.add_argument( '--num_repetitions', default=10, type=int, help='Number of times to repeat serialization.' ) return vars(parser.parse_args(args)) if __name__ == '__main__': main(**parse_arguments(sys.argv[1:]))
apache-2.0
1,166,759,302,246,157,000
33.368
98
0.64176
false
3.458937
false
false
false
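The benchmark_serializers.py record above times a round trip through Cirq's plain and gzip JSON serializers and reports the payload size. A minimal round trip with a tiny circuit, assuming a recent Cirq release; the read_json/read_json_gzip keyword arguments are the same ones the benchmark itself uses.

import cirq

q0, q1 = cirq.LineQubit.range(2)
circuit = cirq.Circuit([cirq.H(q0), cirq.CZ(q0, q1)])

plain = cirq.to_json(circuit)          # str
gzipped = cirq.to_json_gzip(circuit)   # bytes
assert cirq.read_json(json_text=plain) == circuit
assert cirq.read_json_gzip(gzip_raw=gzipped) == circuit
print('plain:', len(plain), 'gzip:', len(gzipped))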
Chetox/RCode
Cannon_Avanzado/client.py
1
2002
#!/usr/bin/python # -*- coding:utf-8; tab-width:4; mode:python -*- import sys import Ice Ice.loadSlice('-I {} cannon.ice'.format(Ice.getSliceDir())) import Cannon import time from matrix_utils import matrix_multiply def load_matrix_from_file(filename): with file(filename) as f: rows = f.readlines() order = len(rows[0].split()) retval = Cannon.Matrix(order, []) for row in rows: rowdata = row.split() assert len(rowdata) == order for n in rowdata: retval.data.append(float(n)) assert len(retval.data) == order ** 2 return retval class Client(Ice.Application): def run(self, argv): t_dist = 0; t_secu = 0; loader = self.string_to_proxy(argv[1], Cannon.OperationsPrx) example = argv[2] A = load_matrix_from_file('m/{}A'.format(example)) B = load_matrix_from_file('m/{}B'.format(example)) t_dist = time.time() C = loader.matrixMultiply(A, B) t_dist = time.time() - t_dist t_secu = time.time() c = matrix_multiply(A,B) t_secu = time.time() - t_secu expected = load_matrix_from_file('m/{}C'.format(example)) retval = (C == expected) print("OK" if retval else "FAIL") print("El tiempo que ha tardado en distribuido ha sido {}".format(t_dist)) print("El tiempo que ha tardado en secuencial ha sido {}".format(t_secu)) if(C == None): print("Timeout expired") return not retval def string_to_proxy(self, str_proxy, iface): proxy = self.communicator().stringToProxy(str_proxy) retval = iface.checkedCast(proxy) if not retval: raise RuntimeError('Invalid proxy %s' % str_proxy) return retval def print_matrix(self, M): ncols = M.ncols nrows = len(M.data) / ncols for r in range(nrows): print M.data[r * ncols:(r + 1) * ncols] if __name__ == '__main__': sys.exit(Client().main(sys.argv))
apache-2.0
-3,968,228,681,157,256,000
25.342105
82
0.586414
false
3.260586
false
false
false
bitcraze/crazyflie-lib-python
test/crtp/test_crtpstack.py
1
2875
# -*- coding: utf-8 -*- # # || ____ _ __ # +------+ / __ )(_) /_______________ _____ ___ # | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \ # +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/ # || || /_____/_/\__/\___/_/ \__,_/ /___/\___/ # # Copyright (C) Bitcraze AB # # Crazyflie Nano Quadcopter Client # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. import unittest from cflib.crtp.crtpstack import CRTPPacket class CRTPPacketTest(unittest.TestCase): def setUp(self): self.callback_count = 0 self.sut = CRTPPacket() def test_that_port_and_channle_is_encoded_in_header(self): # Fixture self.sut.set_header(2, 1) # Test actual = self.sut.get_header() # Assert expected = 0x2d self.assertEqual(expected, actual) def test_that_port_is_truncated_in_header(self): # Fixture port = 0xff self.sut.set_header(port, 0) # Test actual = self.sut.get_header() # Assert expected = 0xfc self.assertEqual(expected, actual) def test_that_channel_is_truncated_in_header(self): # Fixture channel = 0xff self.sut.set_header(0, channel) # Test actual = self.sut.get_header() # Assert expected = 0x0f self.assertEqual(expected, actual) def test_that_port_and_channel_is_encoded_in_header_when_set_separat(self): # Fixture self.sut.port = 2 self.sut.channel = 1 # Test actual = self.sut.get_header() # Assert expected = 0x2d self.assertEqual(expected, actual) def test_that_default_header_is_set_when_constructed(self): # Fixture # Test actual = self.sut.get_header() # Assert expected = 0x0c self.assertEqual(expected, actual) def test_that_header_is_set_when_constructed(self): # Fixture sut = CRTPPacket(header=0x21) # Test actual = sut.get_header() # Assert self.assertEqual(0x2d, actual) self.assertEqual(2, sut.port) self.assertEqual(1, sut.channel)
gpl-2.0
3,989,698,133,982,647
26.644231
79
0.575652
false
3.394333
true
false
false
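The expected values in the test_crtpstack.py record above (0x2d for port 2 / channel 1, 0xfc and 0x0f for the truncation cases, 0x0c as the default) imply the header bit layout sketched below. The real encoding lives in cflib.crtp.crtpstack; this is only an illustration of the packing those assertions describe.

def crtp_header(port, channel):
    # bits 7..4: port, bits 3..2: always set (0x0c), bits 1..0: channel
    return ((port & 0x0F) << 4) | 0x0C | (channel & 0x03)

assert crtp_header(2, 1) == 0x2D
assert crtp_header(0xFF, 0) == 0xFC   # port truncated to 4 bits
assert crtp_header(0, 0xFF) == 0x0F   # channel truncated to 2 bits
assert crtp_header(0, 0) == 0x0C      # default header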
csixteen/HackerRank_Python
Algorithms/magic_square.py
1
1071
class Solution(object): MAGIC_SQUARES = [ [4, 9, 2, 3, 5, 7, 8, 1, 6], [2, 9, 4, 7, 5, 3, 6, 1, 8], [8, 3, 4, 1, 5, 9, 6, 7, 2], [4, 3, 8, 9, 5, 1, 2, 7, 6], [6, 1, 8, 7, 5, 3, 2, 9, 4], [8, 1, 6, 3, 5, 7, 4, 9, 2], [6, 7, 2, 1, 5, 9, 8, 3, 4], [2, 7, 6, 9, 5, 1, 4, 3, 8] ] def magic_square(self, s): totals = [] for ms in self.MAGIC_SQUARES: totals.append(sum([abs(ms_e - s_e) for ms_e, s_e in zip(ms, s)])) return min(totals) import unittest class SolutionTest(unittest.TestCase): def test_magic_square(self): s = Solution() self.assertEqual(0, s.magic_square([6, 1, 8, 7, 5, 3, 2, 9, 4])) self.assertEqual(1, s.magic_square([4, 9, 2, 3, 5, 7, 8, 1, 5])) self.assertEqual(4, s.magic_square([4, 8, 2, 4, 5, 7, 6, 1, 6])) self.assertEqual(45, s.magic_square([0, 0, 0, 0, 0, 0, 0, 0, 0])) self.assertEqual(36, s.magic_square([9, 9, 9, 9, 9, 9, 9, 9, 9])) if __name__ == "__main__": unittest.main()
mit
-3,675,657,614,335,557,600
32.46875
77
0.459384
false
2.428571
true
false
false
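magic_square in the record above scores the input against each of the eight 3x3 magic squares and returns the cheapest total change. Worked through for the second test case, the first reference square already gives the minimum, since only one cell differs:

reference = [4, 9, 2, 3, 5, 7, 8, 1, 6]   # first entry in MAGIC_SQUARES
candidate = [4, 9, 2, 3, 5, 7, 8, 1, 5]   # test input expecting cost 1
cost = sum(abs(r - c) for r, c in zip(reference, candidate))
print(cost)  # 1 -- only the last cell differs (6 vs 5)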
alexpap/exareme
exareme-tools/madis/src/functionslocal/aggregate/approximatedmedian.py
1
2110
import inspect import math import random import numpy from fractions import Fraction import sys import json from array import * class approximatedmedian: registered = True #Value to define db operator def __init__(self): self.n = 0 self.totalnums = 0 self.numberofcolumns = 5 self.colname = [] self.buckets = [] self.minvalues = [] self.maxvalues = [] self.nums = [] def step(self, *args): try: self.colname.append(args[0]) self.buckets.append(int(args[1])) self.minvalues.append(float(args[2])) self.maxvalues.append(float(args[3])) self.nums.append(int(args[4])) self.totalnums += int(args[4]) self.n += 1 except (ValueError, TypeError): raise def final(self): # print self.nums # print self.totalnums / 2.0 yield ('colname0', 'val', 'bucket', 'numsBeforeMedian', 'numsAfterMedian') # yield ('attr1', 'attr2', 'val', 'reccount') currentsum = 0 for i in xrange(0,self.n): # print i,self.totalnums / 2.0,self.nums[i],currentsum currentsum += self.nums[i] if currentsum >= (self.totalnums / 2.0): break median = self.minvalues[i]+(currentsum-self.totalnums / 2.0) * (self.maxvalues[i]-self.minvalues[i]) / self.nums[i] # print (self.totalnums / 2.0), currentsum, currentsum -self.nums[i] numsBeforeMedian = (self.totalnums / 2.0) - (currentsum - self.nums[i]) numsAfterMedian = currentsum - (self.totalnums / 2.0) yield self.colname[0], median, i, numsBeforeMedian,numsAfterMedian if not ('.' in __name__): """ This is needed to be able to test the function, put it at the end of every new function you create """ import sys import setpath #from functions import * #testfunction() if __name__ == "__main__": reload(sys) sys.setdefaultencoding('utf-8') import doctest doctest.testmod()
mit
-1,993,861,639,533,290,000
23.823529
123
0.572512
false
3.675958
false
false
false
fishtown-analytics/dbt
test/integration/041_presto_test/test_simple_presto_view.py
1
2230
from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile import random import time class TestBasePrestoRun(DBTIntegrationTest): @property def schema(self): return "presto_test_41" @property def models(self): return "models" @property def project_config(self): return { 'config-version': 2, 'data-paths': ['data'], 'macro-paths': ['macros'], 'seeds': { 'quote_columns': False, }, } @property def profile_config(self): return self.presto_profile() def assert_nondupes_pass(self): # The 'dupe' model should fail, but all others should pass test_results = self.run_dbt(['test'], expect_pass=False) for result in test_results: if 'dupe' in result.node.name: self.assertIsNone(result.error) self.assertFalse(result.skipped) self.assertTrue(result.status > 0) # assert that actual tests pass else: self.assertIsNone(result.error) self.assertFalse(result.skipped) # status = # of failing rows self.assertEqual(result.status, 0) class TestSimplePrestoRun(TestBasePrestoRun): def setUp(self): super().setUp() for conn in self.adapter.connections.in_use.values(): conn.transaction_open @use_profile('presto') def test__presto_simple_run(self): # make sure seed works twice. Full-refresh is a no-op self.run_dbt(['seed']) self.run_dbt(['seed', '--full-refresh']) results = self.run_dbt() self.assertEqual(len(results), 2) self.assert_nondupes_pass() class TestUnderscorePrestoRun(TestBasePrestoRun): prefix = "_test{}{:04}".format(int(time.time()), random.randint(0, 9999)) @use_profile('presto') def test_presto_run_twice(self): self.run_dbt(['seed']) results = self.run_dbt() self.assertEqual(len(results), 2) self.assert_nondupes_pass() results = self.run_dbt() self.assertEqual(len(results), 2) self.assert_nondupes_pass()
apache-2.0
3,608,262,232,624,162,000
27.961039
77
0.583857
false
3.891798
true
false
false
nihlaeth/Nagios_check_slackpkg
check_slackpkg_nonpriv.py
1
1673
#!/usr/bin/env python """Nagios module for monitoring available updates via slackpkg.""" import subprocess import sys import os # pylint: disable=invalid-name # run check-updates to poll mirror for changes result = [] try: result = subprocess.check_output("myslackpkg check-updates", shell=True).split("\n") except (OSError, subprocess.CalledProcessError) as error: print "Failed to check for updates: %s" % error sys.exit(3) updates = "idk" for line in result: if "good news" in line: updates = "no" elif "News on" in line: updates = "yes" if updates == "idk": print "Error parsing slackpkg check-updates status" sys.exit(3) elif updates == "yes": # fetch updated package list try: _ = subprocess.check_output("myslackpkg update &> /dev/null", shell=True) except (OSError, subprocess.CalledProcessError) as error: print "Failed to update package list: %s" % error sys.exit(3) # Now the packages list is up to date, check if we need to upgrade anything result = [] devnull = open(os.devnull, 'w') try: result = subprocess.check_output([ "myslackpkg", "upgrade-all"], stderr=devnull).split("\n") except (OSError, subprocess.CalledProcessError) as error: print "Failed to check for upgrades: %s" % error sys.exit(3) packages = [] for line in result: if ".txz" in line: packages.append(line.strip()) if "update gpg" in line: print "Error: need up-to-date gpg key!" sys.exit(3) if len(packages) == 0: print "OK: everything up-to-date" sys.exit(0) else: print "Updates available: " + " ".join(packages) sys.exit(2)
gpl-3.0
4,253,797,037,185,081,000
27.355932
88
0.654513
false
3.529536
false
false
false
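The exit statuses used in the check_slackpkg_nonpriv.py record above follow the standard Nagios plugin convention, which is why the script exits 0 when everything is current, 2 when updates are pending, and 3 on parsing or execution errors:

# Standard Nagios plugin exit codes (the script uses 0, 2 and 3):
NAGIOS_OK, NAGIOS_WARNING, NAGIOS_CRITICAL, NAGIOS_UNKNOWN = 0, 1, 2, 3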
haphaeu/yoshimi
sql/data_analysis/database.py
1
3122
from os import path from sqlalchemy import (create_engine, Column, String, Integer, Boolean, Table, ForeignKey) from sqlalchemy.orm import sessionmaker, relationship from sqlalchemy.ext.declarative import declarative_base database_filename = 'twitter.sqlite3' directory = path.abspath(path.dirname(__file__)) database_filepath = path.join(directory, database_filename) engine_url = 'sqlite:///{}'.format(database_filepath) engine = create_engine(engine_url) # Our database class objects are going to inherit from # this class Base = declarative_base(bind=engine) # create a configured “Session” class Session = sessionmaker(bind=engine, autoflush=False) # Create a Session session = Session() hashtag_tweet = Table('hashtag_tweet', Base.metadata, Column('hashtag_id', Integer, ForeignKey('hashtags.id'), nullable=False), Column('tweet_id', Integer, ForeignKey('tweets.id'), nullable=False)) class Tweet(Base): __tablename__ = 'tweets' id = Column(Integer, primary_key=True) tid = Column(String(100), nullable=False) tweet = Column(String(300), nullable=False) user_id = Column(Integer, ForeignKey('users.id'), nullable=False) coordinates = Column(String(50), nullable=True) user = relationship('User', backref='tweets') created_at = Column(String(100), nullable=False) favorite_count = Column(Integer) in_reply_to_screen_name = Column(String) in_reply_to_status_id = Column(Integer) in_reply_to_user_id = Column(Integer) lang = Column(String) quoted_status_id = Column(Integer) retweet_count = Column(Integer) source = Column(String) is_retweet = Column(Boolean) hashtags = relationship('Hashtag', secondary='hashtag_tweet', back_populates='tweets') def __repr__(self): return '<Tweet {}>'.format(self.id) class User(Base): __tablename__ = 'users' id = Column(Integer, primary_key=True) uid = Column(String(50), nullable=False) name = Column(String(100), nullable=False) screen_name = Column(String) created_at = Column(String) # Nullable description = Column(String) followers_count = Column(Integer) friends_count = Column(Integer) statuses_count = Column(Integer) favourites_count = Column(Integer) listed_count = Column(Integer) geo_enabled = Column(Boolean) lang = Column(String) def __repr__(self): return '<User {}>'.format(self.id) class Hashtag(Base): __tablename__ = 'hashtags' id = Column(Integer, primary_key=True) text = Column(String(200), nullable=False) tweets = relationship('Tweet', secondary='hashtag_tweet', back_populates='hashtags') def __repr__(self): return '<Hashtag {}>'.format(self.text) def init_db(): Base.metadata.create_all() if not path.isfile(database_filepath): init_db()
lgpl-3.0
5,123,633,034,681,498,000
29.271845
95
0.626042
false
3.907268
false
false
false
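A hypothetical usage sketch for the SQLAlchemy models in the database.py record above, assuming the module is importable as written; the user, tweet and hashtag values are made up.

# Create the schema, then persist a user with one tweet and one hashtag.
init_db()
user = User(uid='12345', name='Jane Doe', screen_name='jane')
tweet = Tweet(tid='67890', tweet='hello world', user=user,
              created_at='Mon Jan 01 00:00:00 +0000 2024')
tweet.hashtags.append(Hashtag(text='hello'))
session.add_all([user, tweet])
session.commit()
print(user.tweets)      # e.g. [<Tweet 1>]
print(tweet.hashtags)   # e.g. [<Hashtag hello>]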
EnceladOnline/interfaX
icon.py
1
1967
from tkinter import * from tkinter import ttk import func class Icon: def __init__(self, main, icon): # Affiche les icon sur le tab self.main = main self.master = self.main.cache["CurrentTabID"] self.icon = icon if self.icon[1][1] == None: self.icon_label() else: self.icon_image() def icon_label(self): self.cadre = ttk.Button(self.main.cache["CurrentTabID"], text = self.icon[0], command = self.launch, style = "STYLE_B.TButton", takefocus = 0, cursor = "hand2") self.icon_tagorid = self.main.cache["CurrentTabID"].create_window(self.icon[2][0], self.icon[2][1], window = self.cadre, anchor = "se") self.main.cache["CurrentIconID"] = self.cadre self.main.cache["CurrentIcon"] = self.icon # Bind self.cadre.bind("<Button-3>", self.icon_menu_eventhandler) # Utilisé dans InterfaX 1 # self.cadre.bind("<Motion>", self.icon_title_eventhandler) def icon_image(self): try: self.main.cache[self.icon[0]] = PhotoImage(file = self.icon[1][1]) except: self.main.cache[self.icon[0]] = None self.cadre = ttk.Button(self.main.cache["CurrentTabID"], image = self.main.cache[self.icon[0]], takefocus = 0, command = self.launch, cursor = "hand2") self.icon_tagorid = self.main.cache["CurrentTabID"].create_window(self.icon[2][0], self.icon[2][1], window = self.cadre, anchor = "se") # Bind self.cadre.bind("<Button-3>", self.icon_menu_eventhandler) self.cadre.bind("<Motion>", self.icon_title_eventhandler) def launch(self): path_list = self.icon[3] func.launcher(path_list) def icon_menu_eventhandler(self, event): self.main.cache["CurrentIconID"] = self.cadre self.main.cache["CurrentIcon"] = self.icon self.main.cache["CurrentIconTAGORID"] = self.icon_tagorid self.main.icon_menu_eventhandler() def icon_title_eventhandler(self, event): self.main.strvar_icon_title.set(self.icon[0])
gpl-2.0
-8,296,808,383,988,564,000
22.987805
84
0.654629
false
2.788652
false
false
false
wilkinsg/piweb
watched.py
1
2615
#!/usr/bin/python import hash import os import config import video_info watched_cache = {} def prepwatched( conn ): global watched_cache result = conn.execute( "SELECT * FROM history" ) queueitem = result.fetchone() while( queueitem ): watched_cache[ queueitem[ 0 ] ] = True queueitem = result.fetchone() # def is_list_watched( hashlist, conn ): # orlist = ( '?,' * len( hashlist ) ).rstrip( ',' ) # result = conn.execute( "SELECT * FROM history WHERE hash in ({})".format( orlist ), tuple( hashlist ) ) # if( result.rowcount() == len( hashlist ) ): # return( True ) # else: # return( False ) def is_watched( hash, conn ): global watched_cache try: return( watched_cache[ hash ] ) except KeyError: result = conn.execute( "SELECT * FROM history WHERE hash = ?", ( hash, ) ) if( result.fetchone() ): watched_cache[ hash ] = True return( True ) else: watched_cache[ hash ] = False return( False ) def is_directory_watched( dir, conn ): dir = os.path.join( config.get_media_dir(), dir.lstrip( '/' ) ) for root, dirs, files in os.walk( dir ): for filename in files: if( video_info.is_video( filename ) ): file = os.path.join( root, filename ) if( False == is_watched( hash.hash_name( file ), conn ) ): return( False ) return( True ) def mark_all_watched( list, conn ): global watched_cache for filename in list: input = hash.hash_name( filename ) if( input and len( input ) == 32 and not is_watched( input, conn ) ): conn.execute( "INSERT INTO history VALUES( ? )", ( input, ) ) watched_cache[ input ] = True conn.commit() def mark_hash_watched( input, conn, docommit=True ): global watched_cache if( input and len( input ) == 32 and not is_watched( input, conn ) ): conn.execute( "INSERT INTO history VALUES( ? )", ( input, ) ) watched_cache[ input ] = True if( docommit ): conn.commit() return True return( False ) def mark_hash_unwatched( input, conn ): global watched_cache if( input and len( input ) == 32 ): conn.execute( "DELETE FROM history WHERE hash=?", ( input, ) ) watched_cache[ input ] = False conn.commit() return True return( False ) def mark_watched( filename, conn ): input = hash.hash_name( filename ) mark_hash_watched( input, conn )
mit
3,103,893,710,333,927,000
30.130952
109
0.559465
false
3.70922
false
false
false
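The companion hash module used by the watched.py record above is not shown, but the length-32 checks are consistent with an MD5 hex digest of the file path, and the queries only ever touch a single hash column on the history table. A sketch of that assumption; both the schema and the hashing scheme below are inferred, not taken from the repository.

import hashlib
import sqlite3

# Assumed schema and hashing scheme -- illustrative only.
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE history (hash TEXT)')
digest = hashlib.md5(b'/media/movies/example.mkv').hexdigest()
assert len(digest) == 32
conn.execute('INSERT INTO history VALUES (?)', (digest,))
print(conn.execute('SELECT COUNT(*) FROM history').fetchone()[0])  # 1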
yangl1996/libpagure
tests/test_api.py
1
12568
import pytest from libpagure import Pagure @pytest.fixture(scope='module') def simple_pg(): """ Create a simple Pagure object to be used in test """ pg = Pagure(pagure_repository="testrepo") return pg def test_pagure_object(): """ Test the pagure object creation """ pg = Pagure(pagure_token="a token", pagure_repository="test_repo") assert pg.token == "a token" assert pg.repo == "test_repo" assert pg.namespace is None assert pg.username is None assert pg.instance == "https://pagure.io" assert pg.insecure is False assert pg.header == {"Authorization": "token a token"} basic_url_data = [ (None, None, 'testrepo', 'https://pagure.io/api/0/testrepo/'), (None, 'testnamespace', 'testrepo', 'https://pagure.io/api/0/testnamespace/testrepo/'), ('testfork', None, 'testrepo', 'https://pagure.io/api/0/fork/testfork/testrepo/'), ('testfork', 'testnamespace', 'testrepo', 'https://pagure.io/api/0/fork/testfork/testnamespace/testrepo/'), ] @pytest.mark.parametrize("user, namespace, repo, expected", basic_url_data) def test_create_basic_url(user, namespace, repo, expected): """ Test creation of url in function of argument passed to the Pagure class. """ pg = Pagure(pagure_repository=repo, fork_username=user, namespace=namespace) url = pg.create_basic_url() assert url == expected def test_api_version(mocker, simple_pg): """ Test the call to the version API """ mocker.patch('libpagure.Pagure._call_api') simple_pg.api_version() Pagure._call_api.assert_called_once_with('https://pagure.io/api/0/version') def test_list_users(mocker, simple_pg): """ Test the call to the users API """ mocker.patch('libpagure.Pagure._call_api') simple_pg.list_users(pattern='c') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/users', params={'pattern': 'c'}) def test_list_tags(mocker, simple_pg): """ Test the call to the tags API """ mocker.patch('libpagure.Pagure._call_api') simple_pg.list_tags(pattern='easy') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/tags', params={'pattern': 'easy'}) def test_list_groups(mocker, simple_pg): """ Test the call to the groups API """ mocker.patch('libpagure.Pagure._call_api') simple_pg.list_groups() Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/groups', params=None) def test_error_codes(mocker, simple_pg): """ Test the call to the error codes API """ mocker.patch('libpagure.Pagure._call_api') simple_pg.error_codes() Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/error_codes') pr_data = [ ('teststatus', 'testassignee', 'testauthor', {'status': 'teststatus', 'assignee': 'testassignee', 'author': 'testauthor'}), (None, None, None, {}) ] @pytest.mark.parametrize("status, assignee, author, expected", pr_data) def test_list_requests(mocker, simple_pg, status, assignee, author, expected): """ Test the API call to the pull-requests endpoint """ mocker.patch('libpagure.Pagure._call_api') simple_pg.list_requests(status, assignee, author) Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/pull-requests', params=expected) def test_request_info(mocker, simple_pg): """ Test the API call to get pull-request info """ mocker.patch('libpagure.Pagure._call_api') simple_pg.request_info('123') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/pull-request/123') def test_merge_request(mocker, simple_pg): """ Test the API call to merge a pull-request """ mocker.patch('libpagure.Pagure._call_api') simple_pg.merge_request('123') Pagure._call_api.assert_called_once_with( 
'https://pagure.io/api/0/testrepo/pull-request/123/merge', method='POST') def test_close_request(mocker, simple_pg): """ Test the API call to close a pull-request """ mocker.patch('libpagure.Pagure._call_api') simple_pg.close_request('123') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/pull-request/123/close', method='POST') comment_data = [ ("test body", None, None, None, {'comment': 'test body'}), ("test body", "testcommit", "testfilename", "testrow", {'comment': 'test body', 'commit': 'testcommit', 'filename': 'testfilename', 'row': 'testrow'}) ] @pytest.mark.parametrize("body, commit, filename, row, expected", comment_data) def test_comment_request(mocker, simple_pg, body, commit, filename, row, expected): """ Test the API call to comment on a pull-request """ mocker.patch('libpagure.Pagure._call_api') simple_pg.comment_request('123', body, commit, filename, row) Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/pull-request/123/comment', method='POST', data=expected) flag_data = [ ('testuser', 'testpercent', 'testcomment', 'testurl', None, None, {'username': 'testuser', 'percent': 'testpercent', 'comment': 'testcomment', 'url': 'testurl'}), ('testuser', 'testpercent', 'testcomment', 'testurl', 'testuid', 'testcommit', {'username': 'testuser', 'percent': 'testpercent', 'comment': 'testcomment', 'url': 'testurl', 'uid': 'testuid', 'commit': 'testcommit'}) ] @pytest.mark.parametrize("username, percent, comment, url, uid, commit, expected", flag_data) def test_flag_request(mocker, simple_pg, username, percent, comment, url, uid, commit, expected): """ Test the API call to flag a pull-request """ mocker.patch('libpagure.Pagure._call_api') simple_pg.flag_request('123', username, percent, comment, url, uid, commit) Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/pull-request/123/flag', method='POST', data=expected) def test_create_issue(mocker, simple_pg): """ Test the API call to create an issue """ mocker.patch('libpagure.Pagure._call_api') simple_pg.create_issue('A test issue', 'Some issue content', True) Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/new_issue', method='POST', data={'title': 'A test issue', 'issue_content': 'Some issue content', 'priority': True}) def test_list_issues(mocker, simple_pg): """ Test the API call to list all issues of a project """ mocker.patch('libpagure.Pagure._call_api') simple_pg.list_issues('status', 'tags', 'assignee', 'author', 'milestones', 'priority', 'no_stones', 'since') expected = {'status': 'status', 'tags': 'tags', 'assignee': 'assignee', 'author': 'author', 'milestones': 'milestones', 'priority': 'priority', 'no_stones': 'no_stones', 'since': 'since'} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/issues', params=expected) def test_issue_info(mocker, simple_pg): """ Test the API call to info about a project issue """ mocker.patch('libpagure.Pagure._call_api') simple_pg.issue_info('123') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/issue/123') def test_list_comment(mocker, simple_pg): """ Test the API call to info about a project issue """ mocker.patch('libpagure.Pagure._call_api') simple_pg.get_list_comment('123', '001') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/issue/123/comment/001') def test_change_issue_status(mocker, simple_pg): """ Test the API call to change the status of a project issue """ mocker.patch('libpagure.Pagure._call_api') 
simple_pg.change_issue_status('123', 'Closed', 'wontfix') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/issue/123/status', method='POST', data={'status': 'Closed', 'close_status': 'wontfix'}) def test_change_issue_milestone(mocker, simple_pg): """ Test the API call to change the milestone of a project issue """ mocker.patch('libpagure.Pagure._call_api') simple_pg.change_issue_milestone('123', 'Tomorrow') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/issue/123/milestone', method='POST', data={'milestone': 'Tomorrow'}) def test_comment_issue(mocker, simple_pg): """ Test the API call to change the milestone of a project issue """ mocker.patch('libpagure.Pagure._call_api') simple_pg.comment_issue('123', 'A comment') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/issue/123/comment', method='POST', data={'comment': 'A comment'}) def test_project_tags(mocker, simple_pg): """ Test the API call to get a project tags """ mocker.patch('libpagure.Pagure._call_api') simple_pg.project_tags() Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/git/tags') def test_list_projects(mocker, simple_pg): """ Test the API call to list all projects on a pagure instance """ mocker.patch('libpagure.Pagure._call_api') simple_pg.list_projects('tags', 'pattern', 'username', 'owner', 'namespace', 'fork', 'short', 1, 100) expected = {'tags': 'tags', 'pattern': 'pattern', 'username': 'username', 'owner': 'owner', 'namespace': 'namespace', 'fork': 'fork', 'short': 'short', 'page': '1', 'per_page': '100'} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/projects', params=expected) def test_user_info(mocker, simple_pg): """ Test the API call to get info about a user """ mocker.patch('libpagure.Pagure._call_api') simple_pg.user_info('auser') Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/user/auser') def test_new_project(mocker, simple_pg): """ Test the API call to list all projects on a pagure instance """ mocker.patch('libpagure.Pagure._call_api') simple_pg.new_project('name', 'description', 'namespace', 'url', 'avatar_email', True, True) expected = {'name': 'name', 'description': 'description', 'namespace': 'namespace', 'url': 'url', 'avatar_email': 'avatar_email', 'create_readme': True, 'private': True} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/new', data=expected, method='POST') def test_project_branches(mocker, simple_pg): """ Test the API call to get info about a user """ mocker.patch('libpagure.Pagure._call_api') simple_pg.project_branches() Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/testrepo/git/branches') def test_user_activity_stats(mocker, simple_pg): """ Test the API call to get stats about a user activity""" mocker.patch('libpagure.Pagure._call_api') simple_pg.user_activity_stats('auser') expected = {'username': 'auser'} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/user/auser/activity/stats', params=expected) def test_user_activity_stats_by_date(mocker, simple_pg): """ Test the API call to get stats about a user activity by specific date""" mocker.patch('libpagure.Pagure._call_api') simple_pg.user_activity_stats_by_date('auser',"2017-12-30") expected = {'username': 'auser', 'date' : '2017-12-30'} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/user/auser/activity/2017-12-30', params=expected) def test_list_pull_requests(mocker, simple_pg): """ Test the API call to get stats 
about a user's pull requests""" mocker.patch('libpagure.Pagure._call_api') simple_pg.list_pull_requests('auser', 1) expected = {'username': 'auser', 'page': 1} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/user/auser/requests/filed', params=expected) def test_list_prs_actionable_by_user(mocker, simple_pg): """ Test the API call to list PR's actionable for a given user""" mocker.patch('libpagure.Pagure._call_api') simple_pg.list_prs_actionable_by_user('auser', 1) expected = {'username': 'auser', 'page': 1} Pagure._call_api.assert_called_once_with( 'https://pagure.io/api/0/user/auser/requests/actionable', params=expected)
gpl-2.0
1,007,721,609,211,840,800
38.898413
87
0.647279
false
3.289191
true
false
false
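A minimal sketch of the mocker.patch pattern the libpagure test suite above relies on (each test stubs out Pagure._call_api and then asserts on the call). It assumes pytest and the pytest-mock plugin are installed; the json.dumps target and the expected value are arbitrary stand-ins, not anything from libpagure.

import json

def test_patch_pattern(mocker):
    # mocker.patch swaps the target for a MagicMock for the duration of the
    # test, exactly like Pagure._call_api is stubbed out in the tests above.
    mocked = mocker.patch("json.dumps", return_value="{}")
    assert json.dumps({"a": 1}) == "{}"
    mocked.assert_called_once_with({"a": 1})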
nixingyang/Kaggle-Competitions
TalkingData AdTracking Fraud Detection/perform_ensembling.py
1
2489
import os
import glob
import shutil
import datetime
import numpy as np
import pandas as pd

# Dataset
PROJECT_NAME = "TalkingData AdTracking Fraud Detection"
PROJECT_FOLDER_PATH = os.path.join(os.path.expanduser("~"),
                                   "Documents/Dataset", PROJECT_NAME)

# Submission
TEAM_NAME = "Aurora"
SUBMISSION_FOLDER_PATH = os.path.join(PROJECT_FOLDER_PATH, "submission")
os.makedirs(SUBMISSION_FOLDER_PATH, exist_ok=True)

# Ensembling
WORKSPACE_FOLDER_PATH = os.path.join(PROJECT_FOLDER_PATH, "script/Mar_25_3")
KEYWORD = "DL"

# Generate a zip archive for a file
create_zip_archive = lambda file_path: shutil.make_archive(
    file_path[:file_path.rindex(".")], "zip",
    os.path.abspath(os.path.join(file_path, "..")),
    os.path.basename(file_path))


def run():
    print("Searching for submissions with keyword {} at {} ...".format(
        KEYWORD, WORKSPACE_FOLDER_PATH))
    submission_file_path_list = sorted(
        glob.glob(os.path.join(WORKSPACE_FOLDER_PATH, "*{}*".format(KEYWORD))))
    assert len(submission_file_path_list) != 0

    ranking_array_list = []
    for submission_file_path in submission_file_path_list:
        print("Loading {} ...".format(submission_file_path))
        submission_df = pd.read_csv(submission_file_path)

        print("Ranking the entries ...")
        index_series = submission_df["is_attributed"].argsort()
        ranking_array = np.zeros(index_series.shape, dtype=np.uint32)
        ranking_array[index_series] = np.arange(len(index_series))
        ranking_array_list.append(ranking_array)

    ensemble_df = submission_df.copy()
    ensemble_prediction_array = np.mean(ranking_array_list, axis=0)
    apply_normalization = lambda data_array: 1.0 * (data_array - np.min(
        data_array)) / (np.max(data_array) - np.min(data_array))
    ensemble_df["is_attributed"] = apply_normalization(
        ensemble_prediction_array)

    ensemble_file_path = os.path.join(
        SUBMISSION_FOLDER_PATH, "{} {} {}.csv".format(
            TEAM_NAME, KEYWORD,
            str(datetime.datetime.now()).split(".")[0]).replace(" ", "_"))
    print("Saving submission to {} ...".format(ensemble_file_path))
    ensemble_df.to_csv(ensemble_file_path, float_format="%.6f", index=False)
    compressed_ensemble_file_path = create_zip_archive(ensemble_file_path)
    print("Saving compressed submission to {} ...".format(
        compressed_ensemble_file_path))

    print("All done!")


if __name__ == "__main__":
    run()
mit
-8,510,683,944,066,425,000
36.712121
80
0.659703
false
3.409589
false
false
false
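A toy illustration of the rank-averaging step used in perform_ensembling.py above, assuming only NumPy; the two prediction vectors are made up. Each score vector is replaced by its ranking, rankings are averaged across models, and the averaged ranks are min-max normalised back into [0, 1].

import numpy as np

preds_a = np.array([0.10, 0.80, 0.30])   # model A scores (made up)
preds_b = np.array([0.20, 0.70, 0.90])   # model B scores (made up)

def to_ranks(p):
    # replace each score by its rank, mirroring the argsort trick above
    ranks = np.zeros(len(p), dtype=np.uint32)
    ranks[np.argsort(p)] = np.arange(len(p))
    return ranks

mean_rank = np.mean([to_ranks(preds_a), to_ranks(preds_b)], axis=0)
# min-max normalise the averaged ranks back into [0, 1]
ensembled = (mean_rank - mean_rank.min()) / (mean_rank.max() - mean_rank.min())
print(ensembled)  # [0. 1. 1.]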
flennerhag/mlens
mlens/externals/sklearn/validation.py
1
27114
""" Scikit-learn utilities for input validation. """ # Authors: Olivier Grisel # Gael Varoquaux # Andreas Mueller # Lars Buitinck # Alexandre Gramfort # Nicolas Tresegnie # License: BSD 3 clause import warnings import numbers import numpy as np import scipy.sparse as sp from .. import six from ...utils.exceptions import NotFittedError, NonBLASDotWarning, \ DataConversionWarning try: from inspect import signature except ImportError: from mlens.externals.funcsigs import signature FLOAT_DTYPES = (np.float64, np.float32, np.float16) # Silenced by default to reduce verbosity. Turn on at runtime for # performance profiling. warnings.simplefilter('ignore', NonBLASDotWarning) def _assert_all_finite(X): """Like assert_all_finite, but only for ndarray.""" X = np.asanyarray(X) # First try an O(n) time, O(1) space solution for the common case that # everything is finite; fall back to O(n) space np.isfinite to prevent # false positives from overflow in sum method. if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum()) and not np.isfinite(X).all()): raise ValueError("Input contains NaN, infinity" " or a value too large for %r." % X.dtype) def assert_all_finite(X): """Throw a ValueError if X contains NaN or infinity. Parameters ---------- X : array or sparse matrix """ _assert_all_finite(X.data if sp.issparse(X) else X) def as_float_array(X, copy=True, force_all_finite=True): """Converts an array-like to an array of floats. The new dtype will be np.float32 or np.float64, depending on the original type. The function can create a copy or modify the argument depending on the argument copy. Parameters ---------- X : {array-like, sparse matrix} copy : bool, optional If True, a copy of X will be created. If False, a copy may still be returned if X's dtype is not a floating point type. force_all_finite : boolean (default=True) Whether to raise an error on np.inf and np.nan in X. Returns ------- XT : {array, sparse matrix} An array of type np.float """ if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray) and not sp.issparse(X)): return check_array(X, ['csr', 'csc', 'coo'], dtype=np.float64, copy=copy, force_all_finite=force_all_finite, ensure_2d=False) elif sp.issparse(X) and X.dtype in [np.float32, np.float64]: return X.copy() if copy else X elif X.dtype in [np.float32, np.float64]: # is numpy array return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X else: if X.dtype.kind in 'uib' and X.dtype.itemsize <= 4: return_dtype = np.float32 else: return_dtype = np.float64 return X.astype(return_dtype) def _is_arraylike(x): """Returns whether the input is array-like""" return (hasattr(x, '__len__') or hasattr(x, 'shape') or hasattr(x, '__array__')) def _num_samples(x): """Return number of samples in array-like x.""" if hasattr(x, 'fit') and callable(x.fit): # Don't get num_samples from an ensembles length! raise TypeError('Expected sequence or array-like, got ' 'estimator %s' % x) if not hasattr(x, '__len__') and not hasattr(x, 'shape'): if hasattr(x, '__array__'): x = np.asarray(x) else: raise TypeError("Expected sequence or array-like, got %s" % type(x)) if hasattr(x, 'shape'): if len(x.shape) == 0: raise TypeError("Singleton array %r cannot be considered" " a valid collection." % x) return x.shape[0] else: return len(x) def _shape_repr(shape): """Return a platform independent representation of an array shape Under Python 2, the `long` type introduces an 'L' suffix when using the default %r format for tuples of integers (typically used to store the shape of an array). 
Under Windows 64 bit (and Python 2), the `long` type is used by default in numpy shapes even when the integer dimensions are well below 32 bit. The platform specific type causes string messages or doctests to change from one platform to another which is not desirable. Under Python 3, there is no more `long` type so the `L` suffix is never introduced in string representation. >>> _shape_repr((1, 2)) '(1, 2)' >>> one = 2 ** 64 / 2 ** 64 # force an upcast to `long` under Python 2 >>> _shape_repr((one, 2 * one)) '(1, 2)' >>> _shape_repr((1,)) '(1,)' >>> _shape_repr(()) '()' """ if len(shape) == 0: return "()" joined = ", ".join("%d" % e for e in shape) if len(shape) == 1: # special notation for singleton tuples joined += ',' return "(%s)" % joined def check_consistent_length(*arrays): """Check that all arrays have consistent first dimensions. Checks whether all objects in arrays have the same shape or length. Parameters ---------- *arrays : list or tuple of input objects. Objects that will be checked for consistent length. """ lengths = [_num_samples(X) for X in arrays if X is not None] uniques = np.unique(lengths) if len(uniques) > 1: raise ValueError("Found input variables with inconsistent numbers of" " samples: %r" % [int(l) for l in lengths]) def indexable(*iterables): """Make arrays indexable for cross-validation. Checks consistent length, passes through None, and ensures that everything can be indexed by converting sparse matrices to csr and converting non-interable objects to arrays. Parameters ---------- *iterables : lists, dataframes, arrays, sparse matrices List of objects to ensure sliceability. """ result = [] for X in iterables: if sp.issparse(X): result.append(X.tocsr()) elif hasattr(X, "__getitem__") or hasattr(X, "iloc"): result.append(X) elif X is None: result.append(X) else: result.append(np.array(X)) check_consistent_length(*result) return result def _ensure_sparse_format(spmatrix, accept_sparse, dtype, copy, force_all_finite): """Convert a sparse matrix to a given format. Checks the sparse format of spmatrix and converts if necessary. Parameters ---------- spmatrix : scipy sparse matrix Input to validate and convert. accept_sparse : string, boolean or list/tuple of strings String[s] representing allowed sparse matrix formats ('csc', 'csr', 'coo', 'dok', 'bsr', 'lil', 'dia'). If the input is sparse but not in the allowed format, it will be converted to the first listed format. True allows the input to be any format. False means that a sparse matrix input will raise an error. dtype : string, type or None Data type of result. If None, the dtype of the input is preserved. copy : boolean Whether a forced copy will be triggered. If copy=False, a copy might be triggered by a conversion. force_all_finite : boolean Whether to raise an error on np.inf and np.nan in X. Returns ------- spmatrix_converted : scipy sparse matrix. Matrix that is ensured to have an allowed type. """ if dtype is None: dtype = spmatrix.dtype changed_format = False if isinstance(accept_sparse, six.string_types): accept_sparse = [accept_sparse] if accept_sparse is False: raise TypeError('A sparse matrix was passed, but dense ' 'data is required. 
Use X.toarray() to ' 'convert to a dense numpy array.') elif isinstance(accept_sparse, (list, tuple)): if len(accept_sparse) == 0: raise ValueError("When providing 'accept_sparse' " "as a tuple or list, it must contain at " "least one string value.") # ensure correct sparse format if spmatrix.format not in accept_sparse: # create new with correct sparse spmatrix = spmatrix.asformat(accept_sparse[0]) changed_format = True elif accept_sparse is not True: # any other type raise ValueError("Parameter 'accept_sparse' should be a string, " "boolean or list of strings. You provided " "'accept_sparse={}'.".format(accept_sparse)) if dtype != spmatrix.dtype: # convert dtype spmatrix = spmatrix.astype(dtype) elif copy and not changed_format: # force copy spmatrix = spmatrix.copy() if force_all_finite: if not hasattr(spmatrix, "data"): warnings.warn("Can't check %s sparse matrix for nan or inf." % spmatrix.format) else: _assert_all_finite(spmatrix.data) return spmatrix def check_array(array, accept_sparse=False, dtype="numeric", order=None, copy=False, force_all_finite=True, ensure_2d=True, allow_nd=False, ensure_min_samples=1, ensure_min_features=1, warn_on_dtype=False, estimator=None): """Input validation on an array, list, sparse matrix or similar. By default, the input is converted to an at least 2D numpy array. If the dtype of the array is object, attempt converting to float, raising on failure. Parameters ---------- array : object Input object to check / convert. accept_sparse : string, boolean or list/tuple of strings (default=False) String[s] representing allowed sparse matrix formats, such as 'csc', 'csr', etc. If the input is sparse but not in the allowed format, it will be converted to the first listed format. True allows the input to be any format. False means that a sparse matrix input will raise an error. .. deprecated:: 0.19 Passing 'None' to parameter ``accept_sparse`` in methods is deprecated in version 0.19 "and will be removed in 0.21. Use ``accept_sparse=False`` instead. dtype : string, type, list of types or None (default="numeric") Data type of result. If None, the dtype of the input is preserved. If "numeric", dtype is preserved unless array.dtype is object. If dtype is a list of types, conversion on the first type is only performed if the dtype of the input is not in the list. order : 'F', 'C' or None (default=None) Whether an array will be forced to be fortran or c-style. When order is None (default), then if copy=False, nothing is ensured about the memory layout of the output array; otherwise (copy=True) the memory layout of the returned array is kept as close as possible to the original array. copy : boolean (default=False) Whether a forced copy will be triggered. If copy=False, a copy might be triggered by a conversion. force_all_finite : boolean (default=True) Whether to raise an error on np.inf and np.nan in X. ensure_2d : boolean (default=True) Whether to raise a value error if X is not 2d. allow_nd : boolean (default=False) Whether to allow X.ndim > 2. ensure_min_samples : int (default=1) Make sure that the array has a minimum number of samples in its first axis (rows for a 2D array). Setting to 0 disables this check. ensure_min_features : int (default=1) Make sure that the 2D array has some minimum number of features (columns). The default value of 1 rejects empty datasets. This check is only enforced when the input data has effectively 2 dimensions or is originally 1D and ``ensure_2d`` is True. Setting to 0 disables this check. 
warn_on_dtype : boolean (default=False) Raise DataConversionWarning if the dtype of the input data structure does not match the requested dtype, causing a memory copy. estimator : str or estimator instance (default=None) If passed, include the name of the estimator in warning messages. Returns ------- X_converted : object The converted and validated X. """ # accept_sparse 'None' deprecation check if accept_sparse is None: warnings.warn( "Passing 'None' to parameter 'accept_sparse' in methods " "check_array and check_X_y is deprecated in version 0.19 " "and will be removed in 0.21. Use 'accept_sparse=False' " " instead.", DeprecationWarning) accept_sparse = False # store whether originally we wanted numeric dtype dtype_numeric = isinstance(dtype, six.string_types) and dtype == "numeric" dtype_orig = getattr(array, "dtype", None) if not hasattr(dtype_orig, 'kind'): # not a data type (e.g. a column named dtype in a pandas DataFrame) dtype_orig = None if dtype_numeric: if dtype_orig is not None and dtype_orig.kind == "O": # if input is object, convert to float. dtype = np.float64 else: dtype = None if isinstance(dtype, (list, tuple)): if dtype_orig is not None and dtype_orig in dtype: # no dtype conversion required dtype = None else: # dtype conversion required. Let's select the first element of the # list of accepted types. dtype = dtype[0] if estimator is not None: if isinstance(estimator, six.string_types): estimator_name = estimator else: estimator_name = estimator.__class__.__name__ else: estimator_name = "Estimator" context = " by %s" % estimator_name if estimator is not None else "" if sp.issparse(array): array = _ensure_sparse_format(array, accept_sparse, dtype, copy, force_all_finite) else: array = np.array(array, dtype=dtype, order=order, copy=copy) if ensure_2d: if array.ndim == 1: raise ValueError( "Expected 2D array, got 1D array instead:\narray={}.\n" "Reshape your data either using array.reshape(-1, 1) if " "your data has a single feature or array.reshape(1, -1) " "if it contains a single sample.".format(array)) array = np.atleast_2d(array) # To ensure that array flags are maintained array = np.array(array, dtype=dtype, order=order, copy=copy) # make sure we actually converted to numeric: if dtype_numeric and array.dtype.kind == "O": array = array.astype(np.float64) if not allow_nd and array.ndim >= 3: raise ValueError("Found array with dim %d. %s expected <= 2." % (array.ndim, estimator_name)) if force_all_finite: _assert_all_finite(array) shape_repr = _shape_repr(array.shape) if ensure_min_samples > 0: n_samples = _num_samples(array) if n_samples < ensure_min_samples: raise ValueError("Found array with %d sample(s) (shape=%s) while a" " minimum of %d is required%s." % (n_samples, shape_repr, ensure_min_samples, context)) if ensure_min_features > 0 and array.ndim == 2: n_features = array.shape[1] if n_features < ensure_min_features: raise ValueError("Found array with %d feature(s) (shape=%s) while" " a minimum of %d is required%s." % (n_features, shape_repr, ensure_min_features, context)) if warn_on_dtype and dtype_orig is not None and array.dtype != dtype_orig: msg = ("Data with input dtype %s was converted to %s%s." 
% (dtype_orig, array.dtype, context)) warnings.warn(msg, DataConversionWarning) return array def check_X_y(X, y, accept_sparse=False, dtype="numeric", order=None, copy=False, force_all_finite=True, ensure_2d=True, allow_nd=False, multi_output=False, ensure_min_samples=1, ensure_min_features=1, y_numeric=False, warn_on_dtype=False, estimator=None): """Input validation for standard estimators. Checks X and y for consistent length, enforces X 2d and y 1d. Standard input checks are only applied to y, such as checking that y does not have np.nan or np.inf targets. For multi-label y, set multi_output=True to allow 2d and sparse y. If the dtype of X is object, attempt converting to float, raising on failure. Parameters ---------- X : nd-array, list or sparse matrix Input data. y : nd-array, list or sparse matrix Labels. accept_sparse : string, boolean or list of string (default=False) String[s] representing allowed sparse matrix formats, such as 'csc', 'csr', etc. If the input is sparse but not in the allowed format, it will be converted to the first listed format. True allows the input to be any format. False means that a sparse matrix input will raise an error. .. deprecated:: 0.19 Passing 'None' to parameter ``accept_sparse`` in methods is deprecated in version 0.19 "and will be removed in 0.21. Use ``accept_sparse=False`` instead. dtype : string, type, list of types or None (default="numeric") Data type of result. If None, the dtype of the input is preserved. If "numeric", dtype is preserved unless array.dtype is object. If dtype is a list of types, conversion on the first type is only performed if the dtype of the input is not in the list. order : 'F', 'C' or None (default=None) Whether an array will be forced to be fortran or c-style. copy : boolean (default=False) Whether a forced copy will be triggered. If copy=False, a copy might be triggered by a conversion. force_all_finite : boolean (default=True) Whether to raise an error on np.inf and np.nan in X. This parameter does not influence whether y can have np.inf or np.nan values. ensure_2d : boolean (default=True) Whether to make X at least 2d. allow_nd : boolean (default=False) Whether to allow X.ndim > 2. multi_output : boolean (default=False) Whether to allow 2-d y (array or sparse matrix). If false, y will be validated as a vector. y cannot have np.nan or np.inf values if multi_output=True. ensure_min_samples : int (default=1) Make sure that X has a minimum number of samples in its first axis (rows for a 2D array). ensure_min_features : int (default=1) Make sure that the 2D array has some minimum number of features (columns). The default value of 1 rejects empty datasets. This check is only enforced when X has effectively 2 dimensions or is originally 1D and ``ensure_2d`` is True. Setting to 0 disables this check. y_numeric : boolean (default=False) Whether to ensure that y has a numeric type. If dtype of y is object, it is converted to float64. Should only be used for regression algorithms. warn_on_dtype : boolean (default=False) Raise DataConversionWarning if the dtype of the input data structure does not match the requested dtype, causing a memory copy. estimator : str or estimator instance (default=None) If passed, include the name of the estimator in warning messages. Returns ------- X_converted : object The converted and validated X. y_converted : object The converted and validated y. 
""" X = check_array(X, accept_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd, ensure_min_samples, ensure_min_features, warn_on_dtype, estimator) if multi_output: y = check_array(y, 'csr', force_all_finite=True, ensure_2d=False, dtype=None) else: y = column_or_1d(y, warn=True) _assert_all_finite(y) if y_numeric and y.dtype.kind == 'O': y = y.astype(np.float64) check_consistent_length(X, y) return X, y def column_or_1d(y, warn=False): """ Ravel column or 1d numpy array, else raises an error Parameters ---------- y : array-like warn : boolean, default False To control display of warnings. Returns ------- y : array """ shape = np.shape(y) if len(shape) == 1: return np.ravel(y) if len(shape) == 2 and shape[1] == 1: if warn: warnings.warn("A column-vector y was passed when a 1d array was" " expected. Please change the shape of y to " "(n_samples, ), for example using ravel().", DataConversionWarning, stacklevel=2) return np.ravel(y) raise ValueError("bad input shape {0}".format(shape)) def check_random_state(seed): """Turn seed into a np.random.RandomState instance Parameters ---------- seed : None | int | instance of RandomState If seed is None, return the RandomState singleton used by np.random. If seed is an int, return a new RandomState instance seeded with seed. If seed is already a RandomState instance, return it. Otherwise raise ValueError. """ if seed is None or seed is np.random: return np.random.mtrand._rand if isinstance(seed, (numbers.Integral, np.integer)): return np.random.RandomState(seed) if isinstance(seed, np.random.RandomState): return seed raise ValueError('%r cannot be used to seed a numpy.random.RandomState' ' instance' % seed) def has_fit_parameter(estimator, parameter): """Checks whether the estimator's fit method supports the given parameter. Parameters ---------- estimator : object An estimator to inspect. parameter: str The searched parameter. Returns ------- is_parameter: bool Whether the parameter was found to be a named parameter of the estimator's fit method. Examples -------- >>> from sklearn.svm import SVC >>> has_fit_parameter(SVC(), "sample_weight") True """ return parameter in signature(estimator.fit).parameters def check_symmetric(array, tol=1E-10, raise_warning=True, raise_exception=False): """Make sure that array is 2D, square and symmetric. If the array is not symmetric, then a symmetrized version is returned. Optionally, a warning or exception is raised if the matrix is not symmetric. Parameters ---------- array : nd-array or sparse matrix Input object to check / convert. Must be two-dimensional and square, otherwise a ValueError will be raised. tol : float Absolute tolerance for equivalence of arrays. Default = 1E-10. raise_warning : boolean (default=True) If True then raise a warning if conversion is required. raise_exception : boolean (default=False) If True then raise an exception if array is not symmetric. Returns ------- array_sym : ndarray or sparse matrix Symmetrized version of the input array, i.e. the average of array and array.transpose(). If sparse, then duplicate entries are first summed and zeros are eliminated. """ if (array.ndim != 2) or (array.shape[0] != array.shape[1]): raise ValueError("array must be 2-dimensional and square. 
" "shape = {0}".format(array.shape)) if sp.issparse(array): diff = array - array.T # only csr, csc, and coo have `data` attribute if diff.format not in ['csr', 'csc', 'coo']: diff = diff.tocsr() symmetric = np.all(abs(diff.data) < tol) else: symmetric = np.allclose(array, array.T, atol=tol) if not symmetric: if raise_exception: raise ValueError("Array must be symmetric") if raise_warning: warnings.warn("Array is not symmetric, and will be converted " "to symmetric by average with its transpose.") if sp.issparse(array): conversion = 'to' + array.format array = getattr(0.5 * (array + array.T), conversion)() else: array = 0.5 * (array + array.T) return array def check_is_fitted(estimator, attributes, msg=None, all_or_any=all): """Perform is_fitted validation for estimator. Checks if the estimator is fitted by verifying the presence of "all_or_any" of the passed attributes and raises a NotFittedError with the given message. Parameters ---------- estimator : estimator instance. estimator instance for which the check is performed. attributes : attribute name(s) given as string or a list/tuple of strings Eg.: ``["coef_", "estimator_", ...], "coef_"`` msg : string The default error message is, "This %(name)s instance is not fitted yet. Call 'fit' with appropriate arguments before using this method." For custom messages if "%(name)s" is present in the message string, it is substituted for the estimator name. Eg. : "Estimator, %(name)s, must be fitted before sparsifying". all_or_any : callable, {all, any}, default all Specify whether all or any of the given attributes must exist. Returns ------- None Raises ------ NotFittedError If the attributes are not found. """ if msg is None: msg = ("This %(name)s instance is not fitted yet. Call 'fit' with " "appropriate arguments before using this method.") if not hasattr(estimator, 'fit'): raise TypeError("%s is not an estimator instance." % (estimator)) if not isinstance(attributes, (list, tuple)): attributes = [attributes] if not all_or_any([hasattr(estimator, attr) for attr in attributes]): raise NotFittedError(msg % {'name': type(estimator).__name__}) def check_non_negative(X, whom): """ Check if there is any negative value in an array. Parameters ---------- X : array-like or sparse matrix Input data. whom : string Who passed X to this function. """ X = X.data if sp.issparse(X) else X if (X < 0).any(): raise ValueError("Negative values in data passed to %s" % whom)
mit
4,532,605,231,950,397,000
39.050222
79
0.615328
false
4.181678
false
false
false
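A short sketch of what the check_array helper above enforces, assuming scikit-learn is available (the mlens module in this record is a vendored copy of the same scikit-learn code): nested lists are converted to a 2-D array of the requested dtype, while 1-D input is rejected with the "Reshape your data" hint.

import numpy as np
from sklearn.utils import check_array

# nested lists become a 2-D ndarray of the requested dtype
X = check_array([[1, 2], [3, 4]], dtype=np.float64)
print(X.dtype, X.shape)          # float64 (2, 2)

# 1-D input is rejected because ensure_2d defaults to True
try:
    check_array([1, 2, 3])
except ValueError as err:
    print("rejected:", err)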
gregelin/python-ideascaleapi
setup.py
1
1086
from distutils.core import setup
from ideascaleapi import __version__,__license__,__doc__

license_text = open('LICENSE').read()
long_description = open('README.rst').read()

setup(name="python-ideascaleapi",
      version=__version__,
      py_modules=["ideascaleapi"],
      description="Libraries for interacting with the Ideascale API",
      author="Greg Elin (forking James Turk)",
      author_email = "greg@fotonotes.net",
      license=license_text,
      url="http://github.com/gregelin/python-ideascaleapi/tree/master",
      long_description=long_description,
      platforms=["any"],
      classifiers=["Development Status :: 3 - Alpha",
                   "Intended Audience :: Developers",
                   "License :: OSI Approved :: BSD License",
                   "Natural Language :: English",
                   "Operating System :: OS Independent",
                   "Programming Language :: Python",
                   "Topic :: Software Development :: Libraries :: Python Modules",
                   ],
      install_requires=["simplejson >= 1.8"]
      )
bsd-3-clause
1,815,907,163,226,902,500
39.222222
82
0.593923
false
4.414634
false
false
false
marekjm/diaspy
diaspy/models.py
1
23071
#!/usr/bin/env python3 """This module is only imported in other diaspy modules and MUST NOT import anything. """ import json import copy import re BS4_SUPPORT=False try: from bs4 import BeautifulSoup except ImportError: print("[diaspy] BeautifulSoup not found, falling back on regex.") else: BS4_SUPPORT=True from diaspy import errors class Aspect(): """This class represents an aspect. Class can be initialized by passing either an id and/or name as parameters. If both are missing, an exception will be raised. """ def __init__(self, connection, id, name=None): self._connection = connection self.id, self.name = id, name self._cached = [] def getUsers(self, fetch = True): """Returns list of GUIDs of users who are listed in this aspect. """ if fetch: request = self._connection.get('contacts.json?a_id={}'.format(self.id)) self._cached = request.json() return self._cached def removeAspect(self): """ --> POST /aspects/{id} HTTP/1.1 --> _method=delete&authenticity_token={token} <-- HTTP/1.1 302 Found Removes whole aspect. :returns: None """ request = self._connection.tokenFrom('contacts').delete('aspects/{}'.format(self.id)) if request.status_code != 302: raise errors.AspectError('wrong status code: {0}'.format(request.status_code)) def addUser(self, user_id): """Add user to current aspect. :param user_id: user to add to aspect :type user_id: int :returns: JSON from request --> POST /aspect_memberships HTTP/1.1 --> Accept: application/json, text/javascript, */*; q=0.01 --> Content-Type: application/json; charset=UTF-8 --> {"aspect_id":123,"person_id":123} <-- HTTP/1.1 200 OK """ data = {'aspect_id': self.id, 'person_id': user_id} headers = {'content-type': 'application/json', 'accept': 'application/json'} request = self._connection.tokenFrom('contacts').post('aspect_memberships', data=json.dumps(data), headers=headers) if request.status_code == 400: raise errors.AspectError('duplicate record, user already exists in aspect: {0}'.format(request.status_code)) elif request.status_code == 404: raise errors.AspectError('user not found from this pod: {0}'.format(request.status_code)) elif request.status_code != 200: raise errors.AspectError('wrong status code: {0}'.format(request.status_code)) response = None try: response = request.json() except json.decoder.JSONDecodeError: """ Should be OK now, but I'll leave this commentary here at first to see if anything comes up """ # FIXME For some (?) reason removing users from aspects works, but # adding them is a no-go and Diaspora* kicks us out with CSRF errors. # Weird. pass if response is None: raise errors.CSRFProtectionKickedIn() # Now you should fetchguid(fetch_stream=False) on User to update aspect membership_id's # Or update it locally with the response return response def removeUser(self, user): """Remove user from current aspect. 
:param user: user to remove from aspect :type user: diaspy.people.User object """ membership_id = None to_remove = None for each in user.aspectMemberships(): if each.get('aspect', {}).get('id') == self.id: membership_id = each.get('id') to_remove = each break # no need to continue if membership_id is None: raise errors.UserIsNotMemberOfAspect(user, self) request = self._connection.delete('aspect_memberships/{0}'.format(membership_id)) if request.status_code == 404: raise errors.AspectError('cannot remove user from aspect, probably tried too fast after adding: {0}'.format(request.status_code)) elif request.status_code != 200: raise errors.AspectError('cannot remove user from aspect: {0}'.format(request.status_code)) if 'contact' in user.data: # User object if to_remove: user.data['contact']['aspect_memberships'].remove( to_remove ) # remove local aspect membership_id else: # User object from Contacts() if to_remove: user.data['aspect_memberships'].remove( to_remove ) # remove local aspect membership_id return request.json() class Notification(): """This class represents single notification. """ _who_regexp = re.compile(r'/people/([0-9a-f]+)["\']{1} class=["\']{1}hovercardable') _aboutid_regexp = re.compile(r'/posts/[0-9a-f]+') _htmltag_regexp = re.compile('</?[a-z]+( *[a-z_-]+=["\'].*?["\'])* */?>') def __init__(self, connection, data): self._connection = connection self.type = data['type'] self._data = data[self.type] self.id = self._data['id'] self.unread = self._data['unread'] def __getitem__(self, key): """Returns a key from notification data. """ return self._data[key] def __str__(self): """Returns notification note. """ if BS4_SUPPORT: soup = BeautifulSoup(self._data['note_html'], 'lxml') media_body = soup.find('div', {"class": "media-body"}) div = media_body.find('div') if div: div.decompose() return media_body.getText().strip() else: string = re.sub(self._htmltag_regexp, '', self._data['note_html']) string = string.strip().split('\n')[0] while ' ' in string: string = string.replace(' ', ' ') return string def __repr__(self): """Returns notification note with more details. """ return '{0}: {1}'.format(self.when(), str(self)) def about(self): """Returns id of post about which the notification is informing OR: If the id is None it means that it's about user so .who() is called. """ if BS4_SUPPORT: soup = BeautifulSoup(self._data['note_html'], 'lxml') id = soup.find('a', {"data-ref": True}) if id: return id['data-ref'] about = self._aboutid_regexp.search(self._data['note_html']) if about is None: about = self.who()[0] else: about = int(about.group(0)[7:]) return about def who(self): """Returns list of guids of the users who caused you to get the notification. """ if BS4_SUPPORT: # Parse the HTML with BS4 soup = BeautifulSoup(self._data['note_html'], 'lxml') hovercardable_soup = soup.findAll('a', {"class": "hovercardable"}) return list(set([soup['href'][8:] for soup in hovercardable_soup])) else: return list(set([who for who in self._who_regexp.findall(self._data['note_html'])])) def when(self): """Returns UTC time as found in note_html. """ return self._data['created_at'] def mark(self, unread=False): """Marks notification to read/unread. Marks notification to read if `unread` is False. Marks notification to unread if `unread` is True. 
:param unread: which state set for notification :type unread: bool """ headers = {'x-csrf-token': repr(self._connection)} params = {'set_unread': json.dumps(unread)} self._connection.put('notifications/{0}'.format(self['id']), params=params, headers=headers) self._data['unread'] = unread class Conversation(): """This class represents a conversation. .. note:: Remember that you need to have access to the conversation. """ if not BS4_SUPPORT: _message_stream_regexp = re.compile(r'<div class=["\']{1}stream["\']{1}>(.*?)<div class=["\']{1}stream-element new-message["\']{1}>', re.DOTALL) _message_guid_regexp = re.compile(r'data-guid=["\']{1}([0-9]+)["\']{1}') _message_created_at_regexp = re.compile(r'<time datetime=["\']{1}([0-9]{4}-[0-9]{2}-[0-9]{1,2}T[0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2}Z)["\']{1}') _message_body_regexp = re.compile(r'<div class=["\']{1}message-content["\']{1}>\s+<p>(.*?)</p>\s+</div>', re.DOTALL) _message_author_guid_regexp = re.compile(r'<a href=["\']{1}/people/([a-f0-9]+)["\']{1} class=["\']{1}img') _message_author_name_regexp = re.compile(r'<img alt=["\']{1}(.*?)["\']{1}.*') _message_author_avatar_regexp = re.compile(r'src=["\']{1}(.*?)["\']{1}') def __init__(self, connection, id, fetch=True): """ :param conv_id: id of the post and not the guid! :type conv_id: str :param connection: connection object used to authenticate :type connection: connection.Connection """ self._connection = connection self.id = id self._data = {} self._messages = [] if fetch: self._fetch() def __len__(self): return len(self._messages) def __iter__(self): return iter(self._messages) def __getitem__(self, n): return self._messages[n] def _fetch(self): """Fetches JSON data representing conversation. """ request = self._connection.get('conversations/{}.json'.format(self.id)) if request.status_code == 200: self._data = request.json()['conversation'] else: raise errors.ConversationError('cannot download conversation data: {0}'.format(request.status_code)) def _fetch_messages(self): """Fetches HTML data we will use to parse message data. This is a workaround until Diaspora* has it's API plans implemented. """ request = self._connection.get('conversations/{}'.format(self.id)) if request.status_code == 200: # Clear potential old messages self._messages = [] message_template = { 'guid' : None, 'created_at' : None, 'body' : None, 'author' : { 'guid' : None, 'diaspora_id' : None, # TODO? Not able to get from this page. 
'name' : None, 'avatar' : None } } if BS4_SUPPORT: # Parse the HTML with BS4 soup = BeautifulSoup(request.content, 'lxml') messages_soup = soup.findAll('div', {"class": "stream-element message"}) for message_soup in messages_soup: message = copy.deepcopy(message_template) # guid if message_soup and message_soup.has_attr('data-guid'): message['guid'] = message_soup['data-guid'] # created_at time_soup = message_soup.find('time', {"class": "timeago"}) if time_soup and time_soup.has_attr('datetime'): message['created_at'] = time_soup['datetime'] # body body_soup = message_soup.find('div', {"class": "message-content"}) if body_soup: message['body'] = body_soup.get_text().strip() # author author_a_soup = message_soup.find('a', {"class": "img"}) if author_a_soup: # author guid message['author']['guid'] = author_a_soup['href'][8:] # name and avatar author_img_soup = author_a_soup.find('img', {"class": "avatar"}) if author_img_soup: message['author']['name'] = author_img_soup['title'] message['author']['avatar'] = author_img_soup['src'] self._messages.append(message.copy()) else: # Regex fallback messages_stream_html = self._message_stream_regexp.search(request.content.decode('utf-8')) if messages_stream_html: messages_html = messages_stream_html.group(1).split("<div class='stream-element message'") for message_html in messages_html: message = copy.deepcopy(message_template) # Guid guid = self._message_guid_regexp.search(message_html) if guid: message['guid'] = guid.group(1) else: continue # Created at created_at = self._message_created_at_regexp.search(message_html) if created_at: message['created_at'] = created_at.group(1) # Body body = self._message_body_regexp.search(message_html) if body: message['body'] = body.group(1) # Author author_guid = self._message_author_guid_regexp.search(message_html) if author_guid: message['author']['guid'] = author_guid.group(1) author_name = self._message_author_name_regexp.search(message_html) if author_name: message['author']['name'] = author_name.group(1) author_avatar = self._message_author_avatar_regexp.search(author_name.group(0)) if author_avatar: message['author']['avatar'] = author_avatar.group(1) self._messages.append(message.copy()) else: raise errors.ConversationError('cannot download message data from conversation: {0}'.format(request.status_code)) def messages(self): return self._messages def update_messages(self): """(Re-)fetches messages in this conversation. """ self._fetch_messages() def answer(self, text): """Answer that conversation :param text: text to answer. :type text: str """ data = {'message[text]': text, 'utf8': '&#x2713;', 'authenticity_token': repr(self._connection)} request = self._connection.post('conversations/{}/messages'.format(self.id), data=data, headers={'accept': 'application/json'}) if request.status_code != 200: raise errors.ConversationError('{0}: Answer could not be posted.' .format(request.status_code)) return request.json() def delete(self): """Delete this conversation. Has to be implemented. """ data = {'authenticity_token': repr(self._connection)} request = self._connection.delete('conversations/{0}/visibility/' .format(self.id), data=data, headers={'accept': 'application/json'}) if request.status_code != 404: raise errors.ConversationError('{0}: Conversation could not be deleted.' .format(request.status_code)) def get_subject(self): """Returns the subject of this conversation """ return self._data['subject'] class Comment(): """Represents comment on post. Does not require Connection() object. 
Note that you should not manually create `Comment()` objects -- they are designed to be created automatically by `Comments()` objects wich automatically will be created by `Post()` objects. """ def __init__(self, data): self._data = data self.id = data['id'] self.guid = data['guid'] def __str__(self): """Returns comment's text. """ return self._data['text'] def __repr__(self): """Returns comments text and author. Format: AUTHOR (AUTHOR'S GUID): COMMENT """ return '{0} ({1}): {2}'.format(self.author(), self.author('guid'), str(self)) def when(self): """Returns time when the comment had been created. """ return self._data['created_at'] def author(self, key='name'): """Returns author of the comment. """ return self._data['author'][key] class Comments(): def __init__(self, comments=[]): self._comments = comments def __iter__(self): for comment in self._comments: yield comment def __len__(self): return len(self._comments) def __getitem__(self, index): if self._comments: return self._comments[index] def __bool__(self): if self._comments: return True return False def ids(self): return [c.id for c in self._comments] def add(self, comment): """ Expects Comment() object :param comment: Comment() object to add. :type comment: Comment() object.""" if comment and type(comment) == Comment: self._comments.append(comment) def set(self, comments): """Sets comments wich already have a Comment() obj :param comments: list with Comment() objects to set. :type comments: list. """ if comments: self._comments = comments def set_json(self, json_comments): """Sets comments for this post from post data.""" if json_comments: self._comments = [Comment(c) for c in json_comments] class Post(): """This class represents a post. .. note:: Remember that you need to have access to the post. """ def __init__(self, connection, id=0, guid='', fetch=True, comments=True, post_data=None): """ :param id: id of the post (GUID is recommended) :type id: int :param guid: GUID of the post :type guid: str :param connection: connection object used to authenticate :type connection: connection.Connection :param fetch: defines whether to fetch post's data or not :type fetch: bool :param comments: defines whether to fetch post's comments or not (if True also data will be fetched) :type comments: bool :param post_data: contains post data so no need to fetch the post if this is set, until you want to update post data :type: json """ if not (guid or id): raise TypeError('neither guid nor id was provided') self._connection = connection self.id = id self.guid = guid self._data = {} self.comments = Comments() if post_data: self._data = post_data if fetch: self._fetchdata() if comments: if not self._data: self._fetchdata() self._fetchcomments() else: if not self._data: self._fetchdata() self.comments.set_json( self.data()['interactions']['comments'] ) def __repr__(self): """Returns string containing more information then str(). """ return '{0} ({1}): {2}'.format(self._data['author']['name'], self._data['author']['guid'], self._data['text']) def __str__(self): """Returns text of a post. """ return self._data['text'] def _fetchdata(self): """This function retrieves data of the post. 
:returns: guid of post whose data was fetched """ if self.id: id = self.id if self.guid: id = self.guid request = self._connection.get('posts/{0}.json'.format(id)) if request.status_code != 200: raise errors.PostError('{0}: could not fetch data for post: {1}'.format(request.status_code, id)) elif request: self._data = request.json() return self.data()['guid'] def _fetchcomments(self): """Retreives comments for this post. Retrieving comments via GUID will result in 404 error. DIASPORA* does not supply comments through /posts/:guid/ endpoint. """ id = self.data()['id'] if self.data()['interactions']['comments_count']: request = self._connection.get('posts/{0}/comments.json'.format(id)) if request.status_code != 200: raise errors.PostError('{0}: could not fetch comments for post: {1}'.format(request.status_code, id)) else: self.comments.set([Comment(c) for c in request.json()]) def fetch(self, comments = False): """Fetches post data. """ self._fetchdata() if comments: self._fetchcomments() return self def data(self, data = None): if data is not None: self._data = data return self._data def like(self): """This function likes a post. It abstracts the 'Like' functionality. :returns: dict -- json formatted like object. """ data = {'authenticity_token': repr(self._connection)} request = self._connection.post('posts/{0}/likes'.format(self.id), data=data, headers={'accept': 'application/json'}) if request.status_code != 201: raise errors.PostError('{0}: Post could not be liked.' .format(request.status_code)) likes_json = request.json() if likes_json: self._data['interactions']['likes'] = [likes_json] return likes_json def reshare(self): """This function reshares a post """ data = {'root_guid': self._data['guid'], 'authenticity_token': repr(self._connection)} request = self._connection.post('reshares', data=data, headers={'accept': 'application/json'}) if request.status_code != 201: raise Exception('{0}: Post could not be reshared'.format(request.status_code)) return request.json() def comment(self, text): """This function comments on a post :param text: text to comment. :type text: str """ data = {'text': text, 'authenticity_token': repr(self._connection)} request = self._connection.post('posts/{0}/comments'.format(self.id), data=data, headers={'accept': 'application/json'}) if request.status_code != 201: raise Exception('{0}: Comment could not be posted.' .format(request.status_code)) return Comment(request.json()) def vote_poll(self, poll_answer_id): """This function votes on a post's poll :param poll_answer_id: id to poll vote. :type poll_answer_id: int """ poll_id = self._data['poll']['poll_id'] data = {'poll_answer_id': poll_answer_id, 'poll_id': poll_id, 'post_id': self.id, 'authenticity_token': repr(self._connection)} request = self._connection.post('posts/{0}/poll_participations'.format(self.id), data=data, headers={'accept': 'application/json'}) if request.status_code != 201: raise Exception('{0}: Vote on poll failed.' .format(request.status_code)) return request.json() def hide(self): """ -> PUT /share_visibilities/42 HTTP/1.1 post_id=123 <- HTTP/1.1 200 OK """ headers = {'x-csrf-token': repr(self._connection)} params = {'post_id': json.dumps(self.id)} request = self._connection.put('share_visibilities/42', params=params, headers=headers) if request.status_code != 200: raise Exception('{0}: Failed to hide post.' 
.format(request.status_code)) def mute(self): """ -> POST /blocks HTTP/1.1 {"block":{"person_id":123}} <- HTTP/1.1 204 No Content """ headers = {'content-type':'application/json', 'x-csrf-token': repr(self._connection)} data = json.dumps({ 'block': { 'person_id' : self._data['author']['id'] } }) request = self._connection.post('blocks', data=data, headers=headers) if request.status_code != 204: raise Exception('{0}: Failed to block person' .format(request.status_code)) def subscribe(self): """ -> POST /posts/123/participation HTTP/1.1 <- HTTP/1.1 201 Created """ headers = {'x-csrf-token': repr(self._connection)} data = {} request = self._connection.post('posts/{}/participation' .format( self.id ), data=data, headers=headers) if request.status_code != 201: raise Exception('{0}: Failed to subscribe to post' .format(request.status_code)) def unsubscribe(self): """ -> POST /posts/123/participation HTTP/1.1 _method=delete <- HTTP/1.1 200 OK """ headers = {'x-csrf-token': repr(self._connection)} data = { "_method": "delete" } request = self._connection.post('posts/{}/participation' .format( self.id ), headers=headers, data=data) if request.status_code != 200: raise Exception('{0}: Failed to unsubscribe to post' .format(request.status_code)) def report(self): """ TODO """ pass def delete(self): """ This function deletes this post """ data = {'authenticity_token': repr(self._connection)} request = self._connection.delete('posts/{0}'.format(self.id), data=data, headers={'accept': 'application/json'}) if request.status_code != 204: raise errors.PostError('{0}: Post could not be deleted'.format(request.status_code)) def delete_comment(self, comment_id): """This function removes a comment from a post :param comment_id: id of the comment to remove. :type comment_id: str """ data = {'authenticity_token': repr(self._connection)} request = self._connection.delete('posts/{0}/comments/{1}' .format(self.id, comment_id), data=data, headers={'accept': 'application/json'}) if request.status_code != 204: raise errors.PostError('{0}: Comment could not be deleted' .format(request.status_code)) def delete_like(self): """This function removes a like from a post """ data = {'authenticity_token': repr(self._connection)} url = 'posts/{0}/likes/{1}'.format(self.id, self._data['interactions']['likes'][0]['id']) request = self._connection.delete(url, data=data) if request.status_code != 204: raise errors.PostError('{0}: Like could not be removed.' .format(request.status_code)) def author(self, key='name'): """Returns author of the post. :param key: all keys available in data['author'] """ return self._data['author'][key]
mit
5,489,278,063,691,003,000
30.954294
146
0.653158
false
3.203416
false
false
false
matousc89/padasip
padasip/filters/nlmf.py
1
5444
""" .. versionadded:: 1.1.0 The least-mean-fourth (LMF) adaptive filter implemented according to the paper :cite:`zerguine2000convergence`. The NLMF is an extension of the LMF adaptive filter (:ref:`filter-lmf`). The NLMF filter can be created as follows >>> import padasip as pa >>> pa.filters.FilterNLMF(n) where `n` is the size (number of taps) of the filter. Content of this page: .. contents:: :local: :depth: 1 .. seealso:: :ref:`filters` Algorithm Explanation ====================================== The NLMF is extension of LMF filter. See :ref:`filter-lmf` for explanation of the algorithm behind. The extension is based on normalization of learning rate. The learning rage :math:`\mu` is replaced by learning rate :math:`\eta(k)` normalized with every new sample according to input power as follows :math:`\eta (k) = \\frac{\mu}{\epsilon + || \\textbf{x}(k) ||^2}`, where :math:`|| \\textbf{x}(k) ||^2` is norm of input vector and :math:`\epsilon` is a small positive constant (regularization term). This constant is introduced to preserve the stability in cases where the input is close to zero. Minimal Working Examples ====================================== If you have measured data you may filter it as follows .. code-block:: python import numpy as np import matplotlib.pylab as plt import padasip as pa # creation of data N = 500 x = np.random.normal(0, 1, (N, 4)) # input matrix v = np.random.normal(0, 0.1, N) # noise d = 2*x[:,0] + 0.1*x[:,1] - 0.3*x[:,2] + 0.5*x[:,3] + v # target # identification f = pa.filters.FilterNLMF(n=4, mu=0.005, w="random") y, e, w = f.run(d, x) # show results plt.figure(figsize=(15,9)) plt.subplot(211);plt.title("Adaptation");plt.xlabel("samples - k") plt.plot(d,"b", label="d - target") plt.plot(y,"g", label="y - output");plt.legend() plt.subplot(212);plt.title("Filter error");plt.xlabel("samples - k") plt.plot(10*np.log10(e**2),"r", label="e - error [dB]");plt.legend() plt.tight_layout() plt.show() References ====================================== .. bibliography:: lmf.bib :style: plain Code Explanation ====================================== """ import numpy as np from padasip.filters.base_filter import AdaptiveFilter class FilterNLMF(AdaptiveFilter): """ Adaptive NLMF filter. **Args:** * `n` : length of filter (integer) - how many input is input array (row of input matrix) **Kwargs:** * `mu` : learning rate (float). Also known as step size. If it is too slow, the filter may have bad performance. If it is too high, the filter will be unstable. The default value can be unstable for ill-conditioned input data. * `eps` : regularization term (float). It is introduced to preserve stability for close-to-zero input vectors * `w` : initial weights of filter. Possible values are: * array with initial weights (1 dimensional array) of filter size * "random" : create random weights * "zeros" : create zero value weights """ def __init__(self, n, mu=0.1, eps=1., w="random"): self.kind = "NLMF filter" if type(n) == int: self.n = n else: raise ValueError('The size of filter must be an integer') self.mu = self.check_float_param(mu, 0, 1000, "mu") self.eps = self.check_float_param(eps, 0, 1000, "eps") self.init_weights(w, self.n) self.w_history = False def adapt(self, d, x): """ Adapt weights according one desired value and its input. 
**Args:** * `d` : desired value (float) * `x` : input array (1-dimensional array) """ y = np.dot(self.w, x) e = d - y nu = self.mu / (self.eps + np.dot(x, x)) self.w += nu * x * e**3 def run(self, d, x): """ This function filters multiple samples in a row. **Args:** * `d` : desired value (1 dimensional array) * `x` : input matrix (2-dimensional array). Rows are samples, columns are input arrays. **Returns:** * `y` : output value (1 dimensional array). The size corresponds with the desired value. * `e` : filter error for every sample (1 dimensional array). The size corresponds with the desired value. * `w` : history of all weights (2 dimensional array). Every row is set of the weights for given sample. """ # measure the data and check if the dimmension agree N = len(x) if not len(d) == N: raise ValueError('The length of vector d and matrix x must agree.') self.n = len(x[0]) # prepare data try: x = np.array(x) d = np.array(d) except: raise ValueError('Impossible to convert x or d to a numpy array') # create empty arrays y = np.zeros(N) e = np.zeros(N) self.w_history = np.zeros((N,self.n)) # adaptation loop for k in range(N): self.w_history[k,:] = self.w y[k] = np.dot(self.w, x[k]) e[k] = d[k] - y[k] nu = self.mu / (self.eps + np.dot(x[k], x[k])) dw = nu * x[k] * e[k]**3 self.w += dw return y, e, self.w_history
mit
-1,379,382,755,198,736,400
28.586957
81
0.568883
false
3.55817
false
false
false
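The NLMF record above documents the update rule: the least-mean-fourth step (error cubed) scaled by the normalised learning rate eta(k) = mu / (eps + ||x(k)||^2). A minimal restatement of that single step outside the class, with illustrative names and values:

import numpy as np

def nlmf_step(w, x, d, mu=0.1, eps=1.0):
    """Return weights after one normalised least-mean-fourth update."""
    y = np.dot(w, x)                    # filter output
    e = d - y                           # instantaneous error
    nu = mu / (eps + np.dot(x, x))      # normalised learning rate
    return w + nu * x * e**3            # LMF uses the cubed error

w = np.zeros(4)
x = np.array([1.0, 0.5, -0.3, 0.2])
w = nlmf_step(w, x, d=1.0)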
tachijuan/python
myscripts/imap.py
1
1470
import os, sys, imaplib, rfc822, re, StringIO import RPi.GPIO as GPIO import time server ='mail.xxx.us' username='juan@xxx.us' password='xxx' GPIO.setmode(GPIO.BOARD) GREEN_LED = 22 RED_LED = 7 GPIO.setup(GREEN_LED, GPIO.OUT) GPIO.setup(RED_LED, GPIO.OUT) M = imaplib.IMAP4_SSL(server) M.login(username, password) M.select() try: while 1: print "checking email" typ, data = M.search(None, '(UNSEEN SUBJECT "PIFI MESSAGE")') for num in data[0].split(): typ, data = M.fetch(num, '(RFC822)') #print 'Message %s\n%s\n' % (num, data[0][1]) redon = re.search( "RED ON", data[0][1], re.MULTILINE|re.DOTALL ) greenon = re.search( "GREEN ON", data[0][1], re.MULTILINE|re.DOTALL ) redoff = re.search( "RED OFF", data[0][1], re.MULTILINE|re.DOTALL ) greenoff = re.search( "GREEN OFF", data[0][1], re.MULTILINE|re.DOTALL ) if redon: GPIO.output(RED_LED, True) print "red on" if greenon: GPIO.output(GREEN_LED, True) print "green on" if redoff: GPIO.output(RED_LED, False) print "red off" if greenoff: GPIO.output(GREEN_LED, False) print "green off" time.sleep(120) except KeyboardInterrupt: GPIO.cleanup() pass M.close() M.logout()
mit
-6,482,561,999,222,598,000
21.96875
65
0.535374
false
3.230769
false
false
false
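The script above polls an IMAP mailbox for unseen "PIFI MESSAGE" mails and drives GPIO pins from their bodies. A Python 3 sketch of just the polling half (imaplib only, no GPIO); server, credentials and the printed actions are placeholders:

import imaplib
import re
import time

def poll(server, username, password, interval=120):
    conn = imaplib.IMAP4_SSL(server)
    conn.login(username, password)
    conn.select()
    try:
        while True:
            _, data = conn.search(None, '(UNSEEN SUBJECT "PIFI MESSAGE")')
            for num in data[0].split():
                _, msg = conn.fetch(num, '(RFC822)')
                body = msg[0][1].decode(errors="replace")
                if re.search(r"RED ON", body):
                    print("would switch red LED on")    # GPIO call in the real script
                if re.search(r"RED OFF", body):
                    print("would switch red LED off")
            time.sleep(interval)
    finally:
        conn.close()
        conn.logout()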
LevinJ/Supply-demand-forecasting
implement/xgboostmodel.py
1
4070
import sys import os sys.path.insert(0, os.path.abspath('..')) from preprocess.preparedata import PrepareData import numpy as np from utility.runtype import RunType from utility.datafilepath import g_singletonDataFilePath from preprocess.splittrainvalidation import HoldoutSplitMethod import xgboost as xgb from evaluation.sklearnmape import mean_absolute_percentage_error_xgboost from evaluation.sklearnmape import mean_absolute_percentage_error from utility.modelframework import ModelFramework from utility.xgbbasemodel import XGBoostGridSearch from evaluation.sklearnmape import mean_absolute_percentage_error_xgboost_cv from utility.xgbbasemodel import XGBoostBase import logging import sys class DidiXGBoostModel(XGBoostBase, PrepareData, XGBoostGridSearch): def __init__(self): PrepareData.__init__(self) XGBoostGridSearch.__init__(self) XGBoostBase.__init__(self) self.best_score_colname_in_cv = 'test-mape-mean' self.do_cross_val = False self.train_validation_foldid = -2 if self.do_cross_val is None: root = logging.getLogger() root.setLevel(logging.DEBUG) root.addHandler(logging.StreamHandler(sys.stdout)) root.addHandler(logging.FileHandler('logs/finetune_parameters.log', mode='w')) return def set_xgb_parameters(self): early_stopping_rounds = 3 self.xgb_params = {'silent':1, 'colsample_bytree': 0.8, 'silent': 1, 'lambda ': 1, 'min_child_weight': 1, 'subsample': 0.8, 'eta': 0.01, 'objective': 'reg:linear', 'max_depth': 7} # self.xgb_params = {'silent':1 } self.xgb_learning_params = { 'num_boost_round': 200, 'callbacks':[xgb.callback.print_evaluation(show_stdv=True),xgb.callback.early_stop(early_stopping_rounds)], 'feval':mean_absolute_percentage_error_xgboost_cv} if self.do_cross_val == False: self.xgb_learning_params['feval'] = mean_absolute_percentage_error_xgboost return def get_paramgrid_1(self): """ This method must be overriden by derived class when its objective is not reg:linear """ param_grid = {'max_depth':[6], 'eta':[0.1], 'min_child_weight':[1],'silent':[1], 'objective':['reg:linear'],'colsample_bytree':[0.8],'subsample':[0.8], 'lambda ':[1]} return param_grid def get_paramgrid_2(self, param_grid): """ This method must be overriden by derived class if it intends to fine tune parameters """ self.ramdonized_search_enable = False self.randomized_search_n_iter = 150 self.grid_search_display_result = True param_grid['eta'] = [0.01] #train-mape:-0.448062+0.00334926 test-mape:-0.448402+0.00601761 # param_grid['max_depth'] = [7] #train-mape:-0.363007+0.00454276 test-mape:-0.452832+0.00321641 # param_grid['colsample_bytree'] = [0.8] param_grid['max_depth'] = range(5,8) #train-mape:-0.363007+0.00454276 test-mape:-0.452832+0.00321641 param_grid['colsample_bytree'] = [0.6,0.8,1.0] # param_grid['lambda'] = range(1,15) # param_grid['max_depth'] = [3,4] # param_grid['eta'] = [0.01,0.1] # 0.459426+0.00518875 # param_grid['subsample'] = [0.5] #0.458935+0.00522205 # param_grid['eta'] = [0.005] #0.457677+0.00526401 return param_grid def get_learning_params(self): """e This method must be overriden by derived class if it intends to fine tune parameters """ num_boost_round = 100 early_stopping_rounds = 5 kwargs = {'num_boost_round':num_boost_round, 'feval':mean_absolute_percentage_error_xgboost_cv, 'callbacks':[xgb.callback.print_evaluation(show_stdv=True),xgb.callback.early_stop(early_stopping_rounds)]} return kwargs if __name__ == "__main__": obj= DidiXGBoostModel() obj.run()
mit
3,712,276,902,335,813,600
43.736264
187
0.629975
false
3.327882
false
false
false
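The model above feeds a custom MAPE metric to xgb.cv / xgb.train through the feval hook of the older xgboost API it uses. A sketch of what such a metric looks like in that form (data and parameter values are illustrative):

import numpy as np
import xgboost as xgb

def mape_eval(preds, dtrain):
    """Return (name, value) as expected by the legacy `feval` hook."""
    labels = dtrain.get_label()
    return 'mape', float(np.mean(np.abs(labels - preds) / (labels + 1e-9)))

# Assumed usage with pre-built data:
# dtrain = xgb.DMatrix(X, label=y)
# xgb.cv({'eta': 0.01, 'max_depth': 7, 'objective': 'reg:linear'},
#        dtrain, num_boost_round=200, nfold=3, feval=mape_eval)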
Censio/filterpy
filterpy/common/tests/test_discretization.py
1
2566
# -*- coding: utf-8 -*- """Copyright 2015 Roger R Labbe Jr. FilterPy library. http://github.com/rlabbe/filterpy Documentation at: https://filterpy.readthedocs.org Supporting book at: https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python This is licensed under an MIT license. See the readme.MD file for more information. """ from __future__ import (absolute_import, division, print_function, unicode_literals) from filterpy.common import linear_ode_discretation, Q_discrete_white_noise from numpy import array def near_eq(x,y): return abs(x-y) < 1.e-17 def test_Q_discrete_white_noise(): Q = Q_discrete_white_noise (2) assert Q[0,0] == .25 assert Q[1,0] == .5 assert Q[0,1] == .5 assert Q[1,1] == 1 assert Q.shape == (2,2) def test_linear_ode(): F = array([[0,0,1,0,0,0], [0,0,0,1,0,0], [0,0,0,0,1,0], [0,0,0,0,0,1], [0,0,0,0,0,0], [0,0,0,0,0,0]], dtype=float) L = array ([[0,0], [0,0], [0,0], [0,0], [1,0], [0,1]], dtype=float) q = .2 Q = array([[q, 0],[0, q]]) dt = 0.5 A,Q = linear_ode_discretation(F, L, Q, dt) val = [1, 0, dt, 0, 0.5*dt**2, 0] for i in range(6): assert val[i] == A[0,i] for i in range(6): assert val[i-1] == A[1,i] if i > 0 else A[1,i] == 0 for i in range(6): assert val[i-2] == A[2,i] if i > 1 else A[2,i] == 0 for i in range(6): assert val[i-3] == A[3,i] if i > 2 else A[3,i] == 0 for i in range(6): assert val[i-4] == A[4,i] if i > 3 else A[4,i] == 0 for i in range(6): assert val[i-5] == A[5,i] if i > 4 else A[5,i] == 0 assert near_eq(Q[0,0], (1./20)*(dt**5)*q) assert near_eq(Q[0,1], 0) assert near_eq(Q[0,2], (1/8)*(dt**4)*q) assert near_eq(Q[0,3], 0) assert near_eq(Q[0,4], (1./6)*(dt**3)*q) assert near_eq(Q[0,5], 0) if __name__ == "__main__": test_linear_ode() test_Q_discrete_white_noise() F = array([[0,0,1,0,0,0], [0,0,0,1,0,0], [0,0,0,0,1,0], [0,0,0,0,0,1], [0,0,0,0,0,0], [0,0,0,0,0,0]], dtype=float) L = array ([[0,0], [0,0], [0,0], [0,0], [1,0], [0,1]], dtype=float) q = .2 Q = array([[q, 0],[0, q]]) dt = 1/30 A,Q = linear_ode_discretation(F, L, Q, dt) print(Q)
mit
-1,838,436,546,280,924,000
21.716814
75
0.464147
false
2.560878
false
false
false
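The values asserted in test_Q_discrete_white_noise follow from the dim=2 formula Q = var * [[dt**4/4, dt**3/2], [dt**3/2, dt**2]] with dt = var = 1. A short usage sketch, assuming the usual filterpy signature:

from filterpy.common import Q_discrete_white_noise

Q = Q_discrete_white_noise(dim=2, dt=1.0, var=1.0)
print(Q)    # [[0.25 0.5 ]
            #  [0.5  1.  ]]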
boada/planckClusters
MOSAICpipe/bpz-1.99.3/prior_full.py
1
3446
from __future__ import print_function from __future__ import division from past.utils import old_div from useful import match_resol import numpy import sys # Hacked to use numpy and avoid import * commands # FM Float = numpy.float less = numpy.less def function(z, m, nt): """HDFN prior for the main six types of Benitez 2000 Returns an array pi[z[:],:6] The input magnitude is F814W AB """ if nt != 6: print("Wrong number of template spectra!") sys.exit() global zt_at_a global zt_at_1p5 global zt_at_2 nz = len(z) momin_hdf = 20. if m <= 20.: xm = numpy.arange(12., 18.0) ft = numpy.array((0.55, 0.21, 0.21, .01, .01, .01)) zm0 = numpy.array([0.021, 0.034, 0.056, 0.0845, 0.1155, 0.127]) * ( old_div(2., 3.)) if len(ft) != nt: print("Wrong number of templates!") sys.exit() nz = len(z) m = numpy.array([m]) # match_resol works with arrays m = numpy.clip(m, xm[0], xm[-1]) zm = match_resol(xm, zm0, m) try: zt_2.shape except NameError: t2 = [2.] * nt zt_2 = numpy.power.outer(z, t2) try: zt_1p5.shape except NameError: t1p5 = [1.5] * nt zt_1p5 = numpy.power.outer(z, t1p5) zm_3 = numpy.power.outer(zm, 3) zm_1p5 = numpy.power.outer(zm, 1.5) p_i = 3. / 2. / zm_3 * zt_2[:, :] * numpy.exp(-numpy.clip( old_div(zt_1p5[:, :], zm_1p5), 0., 700.)) norm = numpy.add.reduce(p_i[:nz, :], 0) #Get rid of very low probability levels p_i[:nz, :] = numpy.where( numpy.less( old_div(p_i[:nz, :], norm[:]), old_div(1e-5, float(nz))), 0., old_div(p_i[:nz, :], norm[:])) norm = numpy.add.reduce(p_i[:nz, :], 0) return p_i[:nz, :] / norm[:] * ft[:] else: m = numpy.minimum(numpy.maximum(20., m), 32) a = numpy.array((2.465, 1.806, 1.806, 0.906, 0.906, 0.906)) zo = numpy.array((0.431, 0.390, 0.390, 0.0626, 0.0626, 0.0626)) km = numpy.array((0.0913, 0.0636, 0.0636, 0.123, 0.123, 0.123)) fo_t = numpy.array((0.35, 0.25, 0.25)) k_t = numpy.array((0.450, 0.147, 0.147)) dm = m - momin_hdf zmt = numpy.clip(zo + km * dm, 0.01, 15.) zmt_at_a = zmt**(a) #We define z**a as global to keep it #between function calls. That way it is # estimated only once try: zt_at_a.shape except NameError: zt_at_a = numpy.power.outer(z, a) #Morphological fractions f_t = numpy.zeros((len(a), ), Float) f_t[:3] = fo_t * numpy.exp(-k_t * dm) f_t[3:] = old_div((1. - numpy.add.reduce(f_t[:3])), 3.) #Formula: #zm=zo+km*(m_m_min) #p(z|T,m)=(z**a)*numpy.exp(-(z/zm)**a) p_i = zt_at_a[:nz, :6] * numpy.exp(-numpy.clip( old_div(zt_at_a[:nz, :6], zmt_at_a[:6]), 0., 700.)) #This eliminates the very low level tails of the priors norm = numpy.add.reduce(p_i[:nz, :6], 0) p_i[:nz, :6] = numpy.where( less( old_div(p_i[:nz, :6], norm[:6]), old_div(1e-2, float(nz))), 0., old_div(p_i[:nz, :6], norm[:6])) norm = numpy.add.reduce(p_i[:nz, :6], 0) p_i[:nz, :6] = p_i[:nz, :6] / norm[:6] * f_t[:6] return p_i
mit
-3,085,104,049,551,145,000
31.819048
79
0.492455
false
2.732752
false
false
false
selboo/starl-mangle
webvirtmgr/dashboard/views.py
1
5187
from django.shortcuts import render_to_response from django.http import HttpResponseRedirect from django.template import RequestContext from django.utils.datastructures import SortedDict from instance.models import Host from webvirtmgr.server import ConnServer from dashboard.forms import HostAddTcpForm, HostAddSshForm def sort_host(hosts): """ Sorts dictionary of hosts by key """ if hosts: sorted_hosts = [] for host in sorted(hosts.iterkeys()): sorted_hosts.append((host, hosts[host])) return SortedDict(sorted_hosts) def index(request): """ Index page. """ if not request.user.is_authenticated(): return HttpResponseRedirect('/login') else: return HttpResponseRedirect('/dashboard') def dashboard(request): """ Dashboard page. """ if not request.user.is_authenticated(): return HttpResponseRedirect('/login') def get_hosts_status(hosts): """ Function return all hosts all vds on host """ all_hosts = {} for host in hosts: try: import socket socket_host = socket.socket(socket.AF_INET, socket.SOCK_STREAM) socket_host.settimeout(1) if host.type == 'ssh': socket_host.connect((host.hostname, host.port)) else: socket_host.connect((host.hostname, 16509)) socket_host.close() status = 1 except Exception as err: status = err all_hosts[host.id] = (host.name, host.hostname, status) return all_hosts hosts = Host.objects.filter() hosts_info = get_hosts_status(hosts) form = None if request.method == 'POST': if 'host_del' in request.POST: del_host = Host.objects.get(id=request.POST.get('host_id', '')) del_host.delete() return HttpResponseRedirect(request.get_full_path()) if 'host_tcp_add' in request.POST: form = HostAddTcpForm(request.POST) if form.is_valid(): data = form.cleaned_data new_host = Host(name=data['name'], hostname=data['hostname'], type='tcp', login=data['login'], password=data['password1'] ) new_host.save() return HttpResponseRedirect(request.get_full_path()) if 'host_ssh_add' in request.POST: form = HostAddSshForm(request.POST) if form.is_valid(): data = form.cleaned_data new_host = Host(name=data['name'], hostname=data['hostname'], type='ssh', port=data['port'], login=data['login'] ) new_host.save() return HttpResponseRedirect(request.get_full_path()) hosts_info = sort_host(hosts_info) return render_to_response('dashboard.html', {'hosts_info': hosts_info, 'form': form, }, context_instance=RequestContext(request)) def infrastructure(request): """ Infrastructure page. """ if not request.user.is_authenticated(): return HttpResponseRedirect('/login') hosts = Host.objects.filter().order_by('id') hosts_vms = {} host_info = None host_mem = None for host in hosts: try: import socket socket_host = socket.socket(socket.AF_INET, socket.SOCK_STREAM) socket_host.settimeout(1) if host.type == 'ssh': socket_host.connect((host.hostname, host.port)) else: socket_host.connect((host.hostname, 16509)) socket_host.close() status = 1 except: status = 2 if status == 1: conn = ConnServer(host) host_info = conn.node_get_info() host_mem = conn.memory_get_usage() hosts_vms[host.id, host.name, status, host_info[2], host_mem[0], host_mem[2]] = conn.vds_on_cluster() else: hosts_vms[host.id, host.name, status, None, None, None] = None for host in hosts_vms: hosts_vms[host] = sort_host(hosts_vms[host]) hosts_vms = sort_host(hosts_vms) return render_to_response('infrastructure.html', {'hosts_info': host_info, 'host_mem': host_mem, 'hosts_vms': hosts_vms, 'hosts': hosts }, context_instance=RequestContext(request)) def page_setup(request): return render_to_response('setup.html', {}, context_instance=RequestContext(request))
apache-2.0
1,696,039,395,362,939,000
31.829114
113
0.508965
false
4.639535
false
false
false
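Both views above decide whether a host is up by opening a short-lived TCP socket to either the SSH port or libvirt's TCP port 16509. The same check as a standalone helper (default port and timeout are illustrative):

import socket

def host_is_reachable(hostname, port=16509, timeout=1):
    """Return True if a TCP connection to hostname:port succeeds."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(timeout)
    try:
        s.connect((hostname, port))
        return True
    except OSError:
        return False
    finally:
        s.close()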
anntzer/scikit-learn
sklearn/linear_model/_passive_aggressive.py
2
17363
# Authors: Rob Zinkov, Mathieu Blondel # License: BSD 3 clause from ..utils.validation import _deprecate_positional_args from ._stochastic_gradient import BaseSGDClassifier from ._stochastic_gradient import BaseSGDRegressor from ._stochastic_gradient import DEFAULT_EPSILON class PassiveAggressiveClassifier(BaseSGDClassifier): """Passive Aggressive Classifier Read more in the :ref:`User Guide <passive_aggressive>`. Parameters ---------- C : float, default=1.0 Maximum step size (regularization). Defaults to 1.0. fit_intercept : bool, default=True Whether the intercept should be estimated or not. If False, the data is assumed to be already centered. max_iter : int, default=1000 The maximum number of passes over the training data (aka epochs). It only impacts the behavior in the ``fit`` method, and not the :meth:`partial_fit` method. .. versionadded:: 0.19 tol : float or None, default=1e-3 The stopping criterion. If it is not None, the iterations will stop when (loss > previous_loss - tol). .. versionadded:: 0.19 early_stopping : bool, default=False Whether to use early stopping to terminate training when validation. score is not improving. If set to True, it will automatically set aside a stratified fraction of training data as validation and terminate training when validation score is not improving by at least tol for n_iter_no_change consecutive epochs. .. versionadded:: 0.20 validation_fraction : float, default=0.1 The proportion of training data to set aside as validation set for early stopping. Must be between 0 and 1. Only used if early_stopping is True. .. versionadded:: 0.20 n_iter_no_change : int, default=5 Number of iterations with no improvement to wait before early stopping. .. versionadded:: 0.20 shuffle : bool, default=True Whether or not the training data should be shuffled after each epoch. verbose : integer, default=0 The verbosity level loss : string, default="hinge" The loss function to be used: hinge: equivalent to PA-I in the reference paper. squared_hinge: equivalent to PA-II in the reference paper. n_jobs : int or None, default=None The number of CPUs to use to do the OVA (One Versus All, for multi-class problems) computation. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. See :term:`Glossary <n_jobs>` for more details. random_state : int, RandomState instance, default=None Used to shuffle the training data, when ``shuffle`` is set to ``True``. Pass an int for reproducible output across multiple function calls. See :term:`Glossary <random_state>`. warm_start : bool, default=False When set to True, reuse the solution of the previous call to fit as initialization, otherwise, just erase the previous solution. See :term:`the Glossary <warm_start>`. Repeatedly calling fit or partial_fit when warm_start is True can result in a different solution than when calling fit a single time because of the way the data is shuffled. class_weight : dict, {class_label: weight} or "balanced" or None, \ default=None Preset for the class_weight fit parameter. Weights associated with classes. If not given, all classes are supposed to have weight one. The "balanced" mode uses the values of y to automatically adjust weights inversely proportional to class frequencies in the input data as ``n_samples / (n_classes * np.bincount(y))`` .. versionadded:: 0.17 parameter *class_weight* to automatically weight samples. 
average : bool or int, default=False When set to True, computes the averaged SGD weights and stores the result in the ``coef_`` attribute. If set to an int greater than 1, averaging will begin once the total number of samples seen reaches average. So average=10 will begin averaging after seeing 10 samples. .. versionadded:: 0.19 parameter *average* to use weights averaging in SGD Attributes ---------- coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes,\ n_features] Weights assigned to the features. intercept_ : array, shape = [1] if n_classes == 2 else [n_classes] Constants in decision function. n_iter_ : int The actual number of iterations to reach the stopping criterion. For multiclass fits, it is the maximum over every binary fit. classes_ : array of shape (n_classes,) The unique classes labels. t_ : int Number of weight updates performed during training. Same as ``(n_iter_ * n_samples)``. loss_function_ : callable Loss function used by the algorithm. Examples -------- >>> from sklearn.linear_model import PassiveAggressiveClassifier >>> from sklearn.datasets import make_classification >>> X, y = make_classification(n_features=4, random_state=0) >>> clf = PassiveAggressiveClassifier(max_iter=1000, random_state=0, ... tol=1e-3) >>> clf.fit(X, y) PassiveAggressiveClassifier(random_state=0) >>> print(clf.coef_) [[0.26642044 0.45070924 0.67251877 0.64185414]] >>> print(clf.intercept_) [1.84127814] >>> print(clf.predict([[0, 0, 0, 0]])) [1] See Also -------- SGDClassifier Perceptron References ---------- Online Passive-Aggressive Algorithms <http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf> K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006) """ @_deprecate_positional_args def __init__(self, *, C=1.0, fit_intercept=True, max_iter=1000, tol=1e-3, early_stopping=False, validation_fraction=0.1, n_iter_no_change=5, shuffle=True, verbose=0, loss="hinge", n_jobs=None, random_state=None, warm_start=False, class_weight=None, average=False): super().__init__( penalty=None, fit_intercept=fit_intercept, max_iter=max_iter, tol=tol, early_stopping=early_stopping, validation_fraction=validation_fraction, n_iter_no_change=n_iter_no_change, shuffle=shuffle, verbose=verbose, random_state=random_state, eta0=1.0, warm_start=warm_start, class_weight=class_weight, average=average, n_jobs=n_jobs) self.C = C self.loss = loss def partial_fit(self, X, y, classes=None): """Fit linear model with Passive Aggressive algorithm. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Subset of the training data y : numpy array of shape [n_samples] Subset of the target values classes : array, shape = [n_classes] Classes across all calls to partial_fit. Can be obtained by via `np.unique(y_all)`, where y_all is the target vector of the entire dataset. This argument is required for the first call to partial_fit and can be omitted in the subsequent calls. Note that y doesn't need to contain all labels in `classes`. Returns ------- self : returns an instance of self. """ self._validate_params(for_partial_fit=True) if self.class_weight == 'balanced': raise ValueError("class_weight 'balanced' is not supported for " "partial_fit. For 'balanced' weights, use " "`sklearn.utils.compute_class_weight` with " "`class_weight='balanced'`. In place of y you " "can use a large enough subset of the full " "training set target to properly estimate the " "class frequency distributions. 
Pass the " "resulting weights as the class_weight " "parameter.") lr = "pa1" if self.loss == "hinge" else "pa2" return self._partial_fit(X, y, alpha=1.0, C=self.C, loss="hinge", learning_rate=lr, max_iter=1, classes=classes, sample_weight=None, coef_init=None, intercept_init=None) def fit(self, X, y, coef_init=None, intercept_init=None): """Fit linear model with Passive Aggressive algorithm. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Training data y : numpy array of shape [n_samples] Target values coef_init : array, shape = [n_classes,n_features] The initial coefficients to warm-start the optimization. intercept_init : array, shape = [n_classes] The initial intercept to warm-start the optimization. Returns ------- self : returns an instance of self. """ self._validate_params() lr = "pa1" if self.loss == "hinge" else "pa2" return self._fit(X, y, alpha=1.0, C=self.C, loss="hinge", learning_rate=lr, coef_init=coef_init, intercept_init=intercept_init) class PassiveAggressiveRegressor(BaseSGDRegressor): """Passive Aggressive Regressor Read more in the :ref:`User Guide <passive_aggressive>`. Parameters ---------- C : float, default=1.0 Maximum step size (regularization). Defaults to 1.0. fit_intercept : bool, default=True Whether the intercept should be estimated or not. If False, the data is assumed to be already centered. Defaults to True. max_iter : int, default=1000 The maximum number of passes over the training data (aka epochs). It only impacts the behavior in the ``fit`` method, and not the :meth:`partial_fit` method. .. versionadded:: 0.19 tol : float or None, default=1e-3 The stopping criterion. If it is not None, the iterations will stop when (loss > previous_loss - tol). .. versionadded:: 0.19 early_stopping : bool, default=False Whether to use early stopping to terminate training when validation. score is not improving. If set to True, it will automatically set aside a fraction of training data as validation and terminate training when validation score is not improving by at least tol for n_iter_no_change consecutive epochs. .. versionadded:: 0.20 validation_fraction : float, default=0.1 The proportion of training data to set aside as validation set for early stopping. Must be between 0 and 1. Only used if early_stopping is True. .. versionadded:: 0.20 n_iter_no_change : int, default=5 Number of iterations with no improvement to wait before early stopping. .. versionadded:: 0.20 shuffle : bool, default=True Whether or not the training data should be shuffled after each epoch. verbose : integer, default=0 The verbosity level loss : string, default="epsilon_insensitive" The loss function to be used: epsilon_insensitive: equivalent to PA-I in the reference paper. squared_epsilon_insensitive: equivalent to PA-II in the reference paper. epsilon : float, default=0.1 If the difference between the current prediction and the correct label is below this threshold, the model is not updated. random_state : int, RandomState instance, default=None Used to shuffle the training data, when ``shuffle`` is set to ``True``. Pass an int for reproducible output across multiple function calls. See :term:`Glossary <random_state>`. warm_start : bool, default=False When set to True, reuse the solution of the previous call to fit as initialization, otherwise, just erase the previous solution. See :term:`the Glossary <warm_start>`. 
Repeatedly calling fit or partial_fit when warm_start is True can result in a different solution than when calling fit a single time because of the way the data is shuffled. average : bool or int, default=False When set to True, computes the averaged SGD weights and stores the result in the ``coef_`` attribute. If set to an int greater than 1, averaging will begin once the total number of samples seen reaches average. So average=10 will begin averaging after seeing 10 samples. .. versionadded:: 0.19 parameter *average* to use weights averaging in SGD Attributes ---------- coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes,\ n_features] Weights assigned to the features. intercept_ : array, shape = [1] if n_classes == 2 else [n_classes] Constants in decision function. n_iter_ : int The actual number of iterations to reach the stopping criterion. t_ : int Number of weight updates performed during training. Same as ``(n_iter_ * n_samples)``. Examples -------- >>> from sklearn.linear_model import PassiveAggressiveRegressor >>> from sklearn.datasets import make_regression >>> X, y = make_regression(n_features=4, random_state=0) >>> regr = PassiveAggressiveRegressor(max_iter=100, random_state=0, ... tol=1e-3) >>> regr.fit(X, y) PassiveAggressiveRegressor(max_iter=100, random_state=0) >>> print(regr.coef_) [20.48736655 34.18818427 67.59122734 87.94731329] >>> print(regr.intercept_) [-0.02306214] >>> print(regr.predict([[0, 0, 0, 0]])) [-0.02306214] See Also -------- SGDRegressor References ---------- Online Passive-Aggressive Algorithms <http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf> K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006) """ @_deprecate_positional_args def __init__(self, *, C=1.0, fit_intercept=True, max_iter=1000, tol=1e-3, early_stopping=False, validation_fraction=0.1, n_iter_no_change=5, shuffle=True, verbose=0, loss="epsilon_insensitive", epsilon=DEFAULT_EPSILON, random_state=None, warm_start=False, average=False): super().__init__( penalty=None, l1_ratio=0, epsilon=epsilon, eta0=1.0, fit_intercept=fit_intercept, max_iter=max_iter, tol=tol, early_stopping=early_stopping, validation_fraction=validation_fraction, n_iter_no_change=n_iter_no_change, shuffle=shuffle, verbose=verbose, random_state=random_state, warm_start=warm_start, average=average) self.C = C self.loss = loss def partial_fit(self, X, y): """Fit linear model with Passive Aggressive algorithm. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Subset of training data y : numpy array of shape [n_samples] Subset of target values Returns ------- self : returns an instance of self. """ self._validate_params(for_partial_fit=True) lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2" return self._partial_fit(X, y, alpha=1.0, C=self.C, loss="epsilon_insensitive", learning_rate=lr, max_iter=1, sample_weight=None, coef_init=None, intercept_init=None) def fit(self, X, y, coef_init=None, intercept_init=None): """Fit linear model with Passive Aggressive algorithm. Parameters ---------- X : {array-like, sparse matrix} of shape (n_samples, n_features) Training data y : numpy array of shape [n_samples] Target values coef_init : array, shape = [n_features] The initial coefficients to warm-start the optimization. intercept_init : array, shape = [1] The initial intercept to warm-start the optimization. Returns ------- self : returns an instance of self. 
""" self._validate_params() lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2" return self._fit(X, y, alpha=1.0, C=self.C, loss="epsilon_insensitive", learning_rate=lr, coef_init=coef_init, intercept_init=intercept_init)
bsd-3-clause
-4,979,293,226,387,037,000
35.942553
79
0.607153
false
4.177815
false
false
false
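The docstrings above point to PA-I ('hinge') and PA-II ('squared_hinge') from Crammer et al. (2006). For orientation, a sketch of the PA-I update itself; this is the textbook rule, not scikit-learn's internal implementation:

import numpy as np

def pa1_step(w, x, y, C=1.0):
    """One PA-I update for an example x with label y in {-1, +1}."""
    loss = max(0.0, 1.0 - y * np.dot(w, x))      # hinge loss
    if loss == 0.0:
        return w                                  # passive: margin already satisfied
    tau = min(C, loss / np.dot(x, x))             # aggressive step, capped by C
    return w + tau * y * x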
vit-/telegram-uz-bot
uz/tests/interface/telegram/test_bot.py
1
5489
import time from datetime import datetime import mock import pytest from uz.tests import Awaitable from uz.interface.telegram import bot from uz.scanner import UknkownScanID CHAT_ID = 'chat_id' def tg_message(text): return { 'chat': { 'id': CHAT_ID, 'type': 'private', }, 'from': {'first_name': 'n/a', 'id': 'user_id'}, 'message_id': int(time.time()), 'text': text } def get_reply(send_message_mock): args, kwargs = send_message_mock.call_args_list[0] return args[1] @pytest.mark.asyncio async def test_list_trains(source_station, destination_station, train): bot.send_message = send_message = mock.MagicMock(return_value=Awaitable()) date = datetime(2016, 7, 21) command = '/trains {} {} {}'.format( date.strftime('%Y-%m-%d'), source_station.title, destination_station.title) with mock.patch('uz.interface.serializer.Deserializer.load', return_value=Awaitable((date, source_station, destination_station))) as load, \ mock.patch('uz.client.client.UZClient.list_trains', return_value=Awaitable([train])) as list_trains: await bot._process_message(tg_message(command)) load.assert_called_once_with({ 'date': date.strftime('%Y-%m-%d'), 'source': source_station.title, 'destination': destination_station.title}) list_trains.assert_called_once_with(date, source_station, destination_station) msg = get_reply(send_message) title = 'Trains from %s to %s on %s:' % ( source_station, destination_station, date.date()) assert msg.startswith(title) assert train.info() in msg @pytest.mark.asyncio @pytest.mark.parametrize('is_ok', [True, False]) async def test_status(is_ok): scan_id = 'id1234' scanner = mock.MagicMock() if is_ok: scanner.status.return_value = (attempts, error) = (10, 'i am error') else: scanner.status.side_effect = UknkownScanID() bot.send_message = send_message = mock.MagicMock(return_value=Awaitable()) bot.set_scanner(scanner) await bot._process_message(tg_message('/status_{}'.format(scan_id))) scanner.status.assert_called_once_with(scan_id) if is_ok: send_message.assert_called_once_with( CHAT_ID, 'No attempts: {}\nLast error message: {}'.format(attempts, error)) else: send_message.assert_called_once_with( CHAT_ID, 'Unknown scan id: {}'.format(scan_id)) @pytest.mark.asyncio @pytest.mark.parametrize('is_ok', [True, False]) async def test_abort_scan(is_ok): scan_id = 'id4321' scanner = mock.MagicMock() if is_ok: scanner.abort.return_value = True else: scanner.abort.side_effect = UknkownScanID() bot.send_message = send_message = mock.MagicMock(return_value=Awaitable()) bot.set_scanner(scanner) await bot._process_message(tg_message('/abort_{}'.format(scan_id))) scanner.abort.assert_called_once_with(scan_id) if is_ok: send_message.assert_called_once_with( CHAT_ID, 'OK') else: send_message.assert_called_once_with( CHAT_ID, 'Unknown scan id: {}'.format(scan_id)) @pytest.mark.asyncio @pytest.mark.parametrize('ct_letter', [None, 'C2']) async def test_scan(source_station, destination_station, ct_letter): scan_id = 'id1234' date = datetime(2016, 10, 7) train_num = '744K' firstname = 'username' lastname = 'surname' parts = [ '/scan', firstname, lastname, date.strftime('%Y-%m-%d'), source_station, destination_station, train_num] if ct_letter: parts.append(ct_letter) command = ' '.join(str(i) for i in parts) scanner = mock.MagicMock() scanner.add_item.return_value = scan_id bot.send_message = send_message = mock.MagicMock(return_value=Awaitable()) bot.set_scanner(scanner) with mock.patch('uz.interface.serializer.Deserializer.load', return_value=Awaitable((date, source_station, destination_station))) as load: await 
bot._process_message(tg_message(command)) load.assert_called_once_with({ 'firstname': firstname, 'lastname': lastname, 'date': date.strftime('%Y-%m-%d'), 'source': source_station.title, 'destination': destination_station.title, 'train_num': train_num, 'ct_letter': ct_letter}) scanner.add_item.assert_called_once_with( mock.ANY, firstname, lastname, date, source_station, destination_station, train_num, ct_letter) expected = ('Scanning tickets for train {train} from {src} to {dst} on {date}.\n' 'To monitor scan status: /status_{sid}\n' 'To abort scan: /abort_{sid}').format( train=train_num, src=source_station, dst=destination_station, date=date.date(), sid=scan_id) send_message.assert_called_once_with(CHAT_ID, expected) @pytest.mark.asyncio async def test_hello(): bot.send_message = send_message = mock.MagicMock(return_value=Awaitable()) await bot._process_message(tg_message('hi')) send_message.assert_called_once_with(CHAT_ID, mock.ANY) @pytest.mark.asyncio async def test_help_msg(): bot.send_message = send_message = mock.MagicMock(return_value=Awaitable()) await bot._process_message(tg_message('/help')) send_message.assert_called_once_with(CHAT_ID, mock.ANY)
mit
5,860,053,250,814,065,000
33.522013
99
0.635817
false
3.4072
true
false
false
CityofPittsburgh/pittsburgh-purchasing-suite
migrations/versions/31d29fbffe44_add_passwords_for_users.py
1
1988
"""add passwords for users Revision ID: 31d29fbffe44 Revises: 48c578b852fa Create Date: 2016-01-20 23:33:36.893832 """ # revision identifiers, used by Alembic. revision = '31d29fbffe44' down_revision = '48c578b852fa' import random from flask_security.utils import encrypt_password from alembic import op import sqlalchemy as sa ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" def rand_alphabet(): return encrypt_password(''.join(random.choice(ALPHABET) for i in range(16))) def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column(u'roles', sa.Column('description', sa.String(length=255), nullable=True)) op.add_column(u'users', sa.Column('confirmed_at', sa.DateTime(), nullable=True)) op.add_column(u'users', sa.Column('current_login_at', sa.DateTime(), nullable=True)) op.add_column(u'users', sa.Column('current_login_ip', sa.String(length=255), nullable=True)) op.add_column(u'users', sa.Column('last_login_at', sa.DateTime(), nullable=True)) op.add_column(u'users', sa.Column('last_login_ip', sa.String(length=255), nullable=True)) op.add_column(u'users', sa.Column('login_count', sa.Integer(), nullable=True)) op.add_column(u'users', sa.Column( 'password', sa.String(length=255), nullable=False, default=rand_alphabet(), server_default=rand_alphabet() )) ### end Alembic commands ### op.execute(sa.sql.text(''' UPDATE users SET confirmed_at = now() ''')) def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column(u'users', 'password') op.drop_column(u'users', 'login_count') op.drop_column(u'users', 'last_login_ip') op.drop_column(u'users', 'last_login_at') op.drop_column(u'users', 'current_login_ip') op.drop_column(u'users', 'current_login_at') op.drop_column(u'users', 'confirmed_at') op.drop_column(u'roles', 'description') ### end Alembic commands ###
bsd-3-clause
-1,728,439,714,103,357,000
36.509434
96
0.686117
false
3.23252
false
false
false
miracle2k/stgit
stgit/commands/delete.py
1
3073
__copyright__ = """ Copyright (C) 2005, Catalin Marinas <catalin.marinas@gmail.com> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """ from stgit.argparse import opt from stgit.commands import common from stgit.lib import transaction from stgit import argparse help = 'Delete patches' kind = 'patch' usage = ['[options] <patch1> [<patch2>] [<patch3>..<patch4>]'] description = """ Delete the patches passed as arguments.""" args = [argparse.patch_range(argparse.applied_patches, argparse.unapplied_patches)] options = [ opt('--spill', action = 'store_true', short = 'Spill patch contents to worktree and index', long = """ Delete the patches, but do not touch the index and worktree. This only works with applied patches at the top of the stack. The effect is to "spill" the patch contents into the index and worktree. This can be useful e.g. if you want to split a patch into several smaller pieces."""), opt('-b', '--branch', args = [argparse.stg_branches], short = 'Use BRANCH instead of the default branch')] directory = common.DirectoryHasRepositoryLib() def func(parser, options, args): """Delete one or more patches.""" stack = directory.repository.get_stack(options.branch) if options.branch: iw = None # can't use index/workdir to manipulate another branch else: iw = stack.repository.default_iw if args: patches = set(common.parse_patches(args, list(stack.patchorder.all), len(stack.patchorder.applied))) else: parser.error('No patches specified') if options.spill: if set(stack.patchorder.applied[-len(patches):]) != patches: parser.error('Can only spill topmost applied patches') iw = None # don't touch index+worktree def allow_conflicts(trans): # Allow conflicts if the topmost patch stays the same. if stack.patchorder.applied: return (trans.applied and trans.applied[-1] == stack.patchorder.applied[-1]) else: return not trans.applied trans = transaction.StackTransaction(stack, 'delete', allow_conflicts = allow_conflicts) try: to_push = trans.delete_patches(lambda pn: pn in patches) for pn in to_push: trans.push_patch(pn, iw) except transaction.TransactionHalted: pass return trans.run(iw)
gpl-2.0
-8,761,710,056,651,811,000
38.397436
76
0.664497
false
4.147099
false
false
false
FedoraScientific/salome-smesh
src/Tools/blocFissure/CasTests/fissure_Coude_4.py
1
3081
# -*- coding: utf-8 -*- from fissure_Coude import fissure_Coude class fissure_Coude_4(fissure_Coude): """ probleme de fissure du Coude : ASCOU09A adaptation maillage """ # --------------------------------------------------------------------------- def setParamGeometrieSaine(self): """ Paramètres géométriques du tuyau coudé sain: angleCoude r_cintr l_tube_p1 l_tube_p2 epais de """ self.geomParams = dict(angleCoude = 40, r_cintr = 654, l_tube_p1 = 1700, l_tube_p2 = 1700, epais = 62.5, de = 912.4) # --------------------------------------------------------------------------- def setParamMaillageSain(self): self.meshParams = dict(n_long_p1 = 16, n_ep = 5, n_long_coude = 30, n_circ_g = 50, n_circ_d = 20, n_long_p2 = 12) # --------------------------------------------------------------------------- def setParamShapeFissure(self): """ paramètres de la fissure profondeur : 0 < profondeur <= épaisseur azimut : entre 0 et 360° alpha : 0 < alpha < angleCoude longueur : <=2*profondeur ==> ellipse, >2*profondeur = fissure longue orientation : 0° : longitudinale, 90° : circonférentielle, autre : uniquement fissures elliptiques externe : True : fissure face externe, False : fissure face interne """ print "setParamShapeFissure", self.nomCas self.shapeFissureParams = dict(nomRep = '.', nomFicSain = self.nomCas, nomFicFissure = 'fissure_' + self.nomCas, profondeur = 10, azimut = 90, alpha = 20, longueur = 240, orientation = 90, lgInfluence = 30, elliptique = False, convexe = True, externe = True) # --------------------------------------------------------------------------- def setReferencesMaillageFissure(self): self.referencesMaillageFissure = dict(Entity_Quad_Pyramid = 948, Entity_Quad_Triangle = 1562, Entity_Quad_Edge = 1192, Entity_Quad_Penta = 732, Entity_Quad_Hexa = 22208, Entity_Node = 133418, Entity_Quad_Tetra = 18759, Entity_Quad_Quadrangle = 11852)
lgpl-2.1
-4,575,732,381,039,616,000
41.068493
102
0.370563
false
4.230028
false
false
false
davidwilson-85/easymap
graphic_output/Pillow-4.2.1/Tests/test_file_wmf.py
1
1215
from helper import unittest, PillowTestCase, hopper from PIL import Image class TestFileWmf(PillowTestCase): def test_load_raw(self): # Test basic EMF open and rendering im = Image.open('Tests/images/drawing.emf') if hasattr(Image.core, "drawwmf"): # Currently, support for WMF/EMF is Windows-only im.load() # Compare to reference rendering imref = Image.open('Tests/images/drawing_emf_ref.png') imref.load() self.assert_image_similar(im, imref, 0) # Test basic WMF open and rendering im = Image.open('Tests/images/drawing.wmf') if hasattr(Image.core, "drawwmf"): # Currently, support for WMF/EMF is Windows-only im.load() # Compare to reference rendering imref = Image.open('Tests/images/drawing_wmf_ref.png') imref.load() self.assert_image_similar(im, imref, 2.0) def test_save(self): im = hopper() for ext in [".wmf", ".emf"]: tmpfile = self.tempfile("temp"+ext) self.assertRaises(IOError, lambda: im.save(tmpfile)) if __name__ == '__main__': unittest.main()
gpl-3.0
-6,634,027,561,541,816,000
30.973684
66
0.584362
false
3.75
true
false
false
abrt/faf
src/pyfaf/storage/migrations/versions/a2b6d12819f9_drop_yum_type.py
1
2158
# Copyright (C) 2019 ABRT Team # Copyright (C) 2019 Red Hat, Inc. # # This file is part of faf. # # faf is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # faf is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with faf. If not, see <http://www.gnu.org/licenses/>. """ drop_yum_type Revision ID: a2b6d12819f9 Revises: e5d5cefb8ca4 Create Date: 2019-02-08 11:41:56.967881 """ from alembic.op import execute, get_bind import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'a2b6d12819f9' down_revision = 'e5d5cefb8ca4' new_values = ['dnf', 'koji', 'rpmmetadata'] old_values = new_values + ['yum'] old_type = sa.Enum(*old_values, name='repo_type') new_type = sa.Enum(*new_values, name='repo_type') tmp_type = sa.Enum(*new_values, name='_repo_type') def upgrade() -> None: bind = get_bind() execute('UPDATE repo SET type=\'dnf\' WHERE type=\'yum\'') tmp_type.create(bind, checkfirst=False) execute('ALTER TABLE repo ALTER COLUMN type TYPE _repo_type USING ' 'type::text::_repo_type') old_type.drop(bind, checkfirst=False) new_type.create(bind, checkfirst=False) execute('ALTER TABLE repo ALTER COLUMN type TYPE repo_type USING ' 'type::text::repo_type') tmp_type.drop(bind, checkfirst=False) def downgrade() -> None: bind = get_bind() tmp_type.create(bind, checkfirst=False) execute('ALTER TABLE repo ALTER COLUMN type TYPE _repo_type USING ' 'type::text::_repo_type') new_type.drop(bind, checkfirst=False) old_type.create(bind, checkfirst=False) execute('ALTER TABLE repo ALTER COLUMN type TYPE repo_type USING ' 'type::text::repo_type') tmp_type.drop(bind, checkfirst=False)
gpl-3.0
894,444,310,800,265,600
30.735294
71
0.696015
false
3.284627
false
false
false
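The migration above changes a PostgreSQL enum by routing the column through a temporary type, since PostgreSQL enums cannot drop a value in place. The same dance in generic form; table, column and type names are placeholders, not taken from the migration:

import sqlalchemy as sa
from alembic import op

def swap_enum(table, column, enum_name, old_values, new_values):
    bind = op.get_bind()
    tmp = sa.Enum(*new_values, name='_' + enum_name)
    tmp.create(bind, checkfirst=False)
    op.execute(f'ALTER TABLE {table} ALTER COLUMN {column} '
               f'TYPE _{enum_name} USING {column}::text::_{enum_name}')
    sa.Enum(*old_values, name=enum_name).drop(bind, checkfirst=False)
    sa.Enum(*new_values, name=enum_name).create(bind, checkfirst=False)
    op.execute(f'ALTER TABLE {table} ALTER COLUMN {column} '
               f'TYPE {enum_name} USING {column}::text::{enum_name}')
    tmp.drop(bind, checkfirst=False)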
lucius-feng/tg2
tests/test_middlewares.py
2
3091
from webtest import TestApp from tg.support.middlewares import StatusCodeRedirect from tg.support.middlewares import DBSessionRemoverMiddleware from tg.support.middlewares import MingSessionRemoverMiddleware def FakeApp(environ, start_response): if environ['PATH_INFO'].startswith('/error'): start_response('403 Forbidden', []) else: start_response('200 Success', []) if environ['PATH_INFO'] == '/error/document': yield b'ERROR!!!' else: yield b'HI' yield b'MORE' class TestStatusCodeRedirectMiddleware(object): def setup(self): self.app = TestApp(StatusCodeRedirect(FakeApp, [403])) def test_error_redirection(self): r = self.app.get('/error_test', status=403) assert 'ERROR!!!' in r, r def test_success_passthrough(self): r = self.app.get('/success_test') assert 'HI' in r, r class FakeDBSession(object): removed = False def remove(self): self.removed = True def close_all(self): self.remove() class FakeAppWithClose(object): closed = False step = 0 def __call__(self, environ, start_response): start_response('200 Success', []) if environ['PATH_INFO'] == '/crash': raise Exception('crashed') return self def __iter__(self): return self def next(self): self.step += 1 if self.step > 3: raise StopIteration() return str(self.step) def close(self): self.closed = True def __repr__(self): return '%s - %s' % (self.step, self.closed) class TestDBSessionRemoverMiddleware(object): def setup(self): self.app_with_close = FakeAppWithClose() self.session = FakeDBSession() self.app = TestApp(DBSessionRemoverMiddleware(self.session, self.app_with_close)) def test_close_is_called(self): r = self.app.get('/nonerror') assert self.app_with_close.closed == True, self.app_with_close def test_session_is_removed(self): r = self.app.get('/nonerror') assert self.session.removed == True, self.app_with_close def test_session_is_removed_on_crash(self): try: r = self.app.get('/crash') except: pass assert self.session.removed == True, self.app_with_close class TestMingSessionRemoverMiddlewaree(object): def setup(self): self.app_with_close = FakeAppWithClose() self.session = FakeDBSession() self.app = TestApp(MingSessionRemoverMiddleware(self.session, self.app_with_close)) def test_close_is_called(self): r = self.app.get('/nonerror') assert self.app_with_close.closed == True, self.app_with_close def test_session_is_removed(self): r = self.app.get('/nonerror') assert self.session.removed == True, self.app_with_close def test_session_is_removed_on_crash(self): try: r = self.app.get('/crash') except: pass assert self.session.removed == True, self.app_with_close
mit
-5,575,966,756,473,490,000
25.646552
91
0.619864
false
3.697368
true
false
false
cloud-fan/spark
python/pyspark/pandas/data_type_ops/base.py
1
12265
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import numbers from abc import ABCMeta from itertools import chain from typing import Any, Optional, TYPE_CHECKING, Union import numpy as np import pandas as pd from pandas.api.types import CategoricalDtype from pyspark.sql import functions as F from pyspark.sql.types import ( ArrayType, BinaryType, BooleanType, DataType, DateType, FractionalType, IntegralType, MapType, NullType, NumericType, StringType, StructType, TimestampType, UserDefinedType, ) from pyspark.pandas.typedef import Dtype, extension_dtypes from pyspark.pandas.typedef.typehints import extension_object_dtypes_available if extension_object_dtypes_available: from pandas import BooleanDtype if TYPE_CHECKING: from pyspark.pandas.indexes import Index # noqa: F401 (SPARK-34943) from pyspark.pandas.series import Series # noqa: F401 (SPARK-34943) def is_valid_operand_for_numeric_arithmetic(operand: Any, *, allow_bool: bool = True) -> bool: """Check whether the `operand` is valid for arithmetic operations against numerics.""" from pyspark.pandas.base import IndexOpsMixin if isinstance(operand, numbers.Number): return not isinstance(operand, bool) or allow_bool elif isinstance(operand, IndexOpsMixin): if isinstance(operand.dtype, CategoricalDtype): return False else: return isinstance(operand.spark.data_type, NumericType) or ( allow_bool and isinstance(operand.spark.data_type, BooleanType) ) else: return False def transform_boolean_operand_to_numeric( operand: Any, spark_type: Optional[DataType] = None ) -> Any: """Transform boolean operand to numeric. If the `operand` is: - a boolean IndexOpsMixin, transform the `operand` to the `spark_type`. - a boolean literal, transform to the int value. Otherwise, return the operand as it is. 
""" from pyspark.pandas.base import IndexOpsMixin if isinstance(operand, IndexOpsMixin) and isinstance(operand.spark.data_type, BooleanType): assert spark_type, "spark_type must be provided if the operand is a boolean IndexOpsMixin" return operand.spark.transform(lambda scol: scol.cast(spark_type)) elif isinstance(operand, bool): return int(operand) else: return operand def _as_categorical_type( index_ops: Union["Series", "Index"], dtype: CategoricalDtype, spark_type: DataType ) -> Union["Index", "Series"]: """Cast `index_ops` to categorical dtype, given `dtype` and `spark_type`.""" assert isinstance(dtype, CategoricalDtype) if dtype.categories is None: codes, uniques = index_ops.factorize() return codes._with_new_scol( codes.spark.column, field=codes._internal.data_fields[0].copy(dtype=CategoricalDtype(categories=uniques)), ) else: categories = dtype.categories if len(categories) == 0: scol = F.lit(-1) else: kvs = chain( *[(F.lit(category), F.lit(code)) for code, category in enumerate(categories)] ) map_scol = F.create_map(*kvs) scol = F.coalesce(map_scol.getItem(index_ops.spark.column), F.lit(-1)) return index_ops._with_new_scol( scol.cast(spark_type).alias(index_ops._internal.data_fields[0].name), field=index_ops._internal.data_fields[0].copy( dtype=dtype, spark_type=spark_type, nullable=False ), ) def _as_bool_type( index_ops: Union["Series", "Index"], dtype: Union[str, type, Dtype] ) -> Union["Index", "Series"]: """Cast `index_ops` to BooleanType Spark type, given `dtype`.""" from pyspark.pandas.internal import InternalField if isinstance(dtype, extension_dtypes): scol = index_ops.spark.column.cast(BooleanType()) else: scol = F.when(index_ops.spark.column.isNull(), F.lit(False)).otherwise( index_ops.spark.column.cast(BooleanType()) ) return index_ops._with_new_scol( scol.alias(index_ops._internal.data_spark_column_names[0]), field=InternalField(dtype=dtype), ) def _as_string_type( index_ops: Union["Series", "Index"], dtype: Union[str, type, Dtype], *, null_str: str = str(None) ) -> Union["Index", "Series"]: """Cast `index_ops` to StringType Spark type, given `dtype` and `null_str`, representing null Spark column. """ from pyspark.pandas.internal import InternalField if isinstance(dtype, extension_dtypes): scol = index_ops.spark.column.cast(StringType()) else: casted = index_ops.spark.column.cast(StringType()) scol = F.when(index_ops.spark.column.isNull(), null_str).otherwise(casted) return index_ops._with_new_scol( scol.alias(index_ops._internal.data_spark_column_names[0]), field=InternalField(dtype=dtype), ) def _as_other_type( index_ops: Union["Series", "Index"], dtype: Union[str, type, Dtype], spark_type: DataType ) -> Union["Index", "Series"]: """Cast `index_ops` to a `dtype` (`spark_type`) that needs no pre-processing. Destination types that need pre-processing: CategoricalDtype, BooleanType, and StringType. """ from pyspark.pandas.internal import InternalField need_pre_process = ( isinstance(dtype, CategoricalDtype) or isinstance(spark_type, BooleanType) or isinstance(spark_type, StringType) ) assert not need_pre_process, "Pre-processing is needed before the type casting." 
scol = index_ops.spark.column.cast(spark_type) return index_ops._with_new_scol( scol.alias(index_ops._internal.data_spark_column_names[0]), field=InternalField(dtype=dtype), ) class DataTypeOps(object, metaclass=ABCMeta): """The base class for binary operations of pandas-on-Spark objects (of different data types).""" def __new__(cls, dtype: Dtype, spark_type: DataType): from pyspark.pandas.data_type_ops.binary_ops import BinaryOps from pyspark.pandas.data_type_ops.boolean_ops import BooleanOps, BooleanExtensionOps from pyspark.pandas.data_type_ops.categorical_ops import CategoricalOps from pyspark.pandas.data_type_ops.complex_ops import ArrayOps, MapOps, StructOps from pyspark.pandas.data_type_ops.date_ops import DateOps from pyspark.pandas.data_type_ops.datetime_ops import DatetimeOps from pyspark.pandas.data_type_ops.null_ops import NullOps from pyspark.pandas.data_type_ops.num_ops import IntegralOps, FractionalOps from pyspark.pandas.data_type_ops.string_ops import StringOps from pyspark.pandas.data_type_ops.udt_ops import UDTOps if isinstance(dtype, CategoricalDtype): return object.__new__(CategoricalOps) elif isinstance(spark_type, FractionalType): return object.__new__(FractionalOps) elif isinstance(spark_type, IntegralType): return object.__new__(IntegralOps) elif isinstance(spark_type, StringType): return object.__new__(StringOps) elif isinstance(spark_type, BooleanType): if extension_object_dtypes_available and isinstance(dtype, BooleanDtype): return object.__new__(BooleanExtensionOps) else: return object.__new__(BooleanOps) elif isinstance(spark_type, TimestampType): return object.__new__(DatetimeOps) elif isinstance(spark_type, DateType): return object.__new__(DateOps) elif isinstance(spark_type, BinaryType): return object.__new__(BinaryOps) elif isinstance(spark_type, ArrayType): return object.__new__(ArrayOps) elif isinstance(spark_type, MapType): return object.__new__(MapOps) elif isinstance(spark_type, StructType): return object.__new__(StructOps) elif isinstance(spark_type, NullType): return object.__new__(NullOps) elif isinstance(spark_type, UserDefinedType): return object.__new__(UDTOps) else: raise TypeError("Type %s was not understood." % dtype) def __init__(self, dtype: Dtype, spark_type: DataType): self.dtype = dtype self.spark_type = spark_type @property def pretty_name(self) -> str: raise NotImplementedError() def add(self, left, right) -> Union["Series", "Index"]: raise TypeError("Addition can not be applied to %s." % self.pretty_name) def sub(self, left, right) -> Union["Series", "Index"]: raise TypeError("Subtraction can not be applied to %s." % self.pretty_name) def mul(self, left, right) -> Union["Series", "Index"]: raise TypeError("Multiplication can not be applied to %s." % self.pretty_name) def truediv(self, left, right) -> Union["Series", "Index"]: raise TypeError("True division can not be applied to %s." % self.pretty_name) def floordiv(self, left, right) -> Union["Series", "Index"]: raise TypeError("Floor division can not be applied to %s." % self.pretty_name) def mod(self, left, right) -> Union["Series", "Index"]: raise TypeError("Modulo can not be applied to %s." % self.pretty_name) def pow(self, left, right) -> Union["Series", "Index"]: raise TypeError("Exponentiation can not be applied to %s." % self.pretty_name) def radd(self, left, right) -> Union["Series", "Index"]: raise TypeError("Addition can not be applied to %s." % self.pretty_name) def rsub(self, left, right) -> Union["Series", "Index"]: raise TypeError("Subtraction can not be applied to %s." 
% self.pretty_name) def rmul(self, left, right) -> Union["Series", "Index"]: raise TypeError("Multiplication can not be applied to %s." % self.pretty_name) def rtruediv(self, left, right) -> Union["Series", "Index"]: raise TypeError("True division can not be applied to %s." % self.pretty_name) def rfloordiv(self, left, right) -> Union["Series", "Index"]: raise TypeError("Floor division can not be applied to %s." % self.pretty_name) def rmod(self, left, right) -> Union["Series", "Index"]: raise TypeError("Modulo can not be applied to %s." % self.pretty_name) def rpow(self, left, right) -> Union["Series", "Index"]: raise TypeError("Exponentiation can not be applied to %s." % self.pretty_name) def __and__(self, left, right) -> Union["Series", "Index"]: raise TypeError("Bitwise and can not be applied to %s." % self.pretty_name) def __or__(self, left, right) -> Union["Series", "Index"]: raise TypeError("Bitwise or can not be applied to %s." % self.pretty_name) def rand(self, left, right) -> Union["Series", "Index"]: return left.__and__(right) def ror(self, left, right) -> Union["Series", "Index"]: return left.__or__(right) def restore(self, col: pd.Series) -> pd.Series: """Restore column when to_pandas.""" return col def prepare(self, col: pd.Series) -> pd.Series: """Prepare column when from_pandas.""" return col.replace({np.nan: None}) def astype( self, index_ops: Union["Index", "Series"], dtype: Union[str, type, Dtype] ) -> Union["Index", "Series"]: raise TypeError("astype can not be applied to %s." % self.pretty_name)
apache-2.0
8,822,871,887,076,614,000
39.081699
100
0.658296
false
3.882558
false
false
false
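The DataTypeOps class in the record above picks a concrete subclass inside __new__ based on the pandas dtype and Spark data type. The following is a minimal, self-contained sketch of that dispatch pattern only; the class names and the dispatch keys are invented for illustration and are not the pyspark ones.

from abc import ABCMeta

class Ops(metaclass=ABCMeta):
    """Base class that returns a concrete subclass from __new__ based on the value's type."""

    def __new__(cls, value):
        # Dispatch purely on the Python type of `value`; the pyspark version
        # dispatches on pandas dtype / Spark DataType instead.
        if isinstance(value, bool):      # check bool before int: bool is a subclass of int
            return object.__new__(BooleanOps)
        elif isinstance(value, int):
            return object.__new__(IntegralOps)
        elif isinstance(value, str):
            return object.__new__(StringOps)
        raise TypeError("Type %s was not understood." % type(value).__name__)

    def __init__(self, value):
        self.value = value

class BooleanOps(Ops): pass
class IntegralOps(Ops): pass
class StringOps(Ops): pass

assert isinstance(Ops(42), IntegralOps)
assert isinstance(Ops(True), BooleanOps)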
MTK6580/walkie-talkie
ALPS.L1.MP6.V2_HEXING6580_WE_L/alps/build/tools/releasetools/img_from_target_files.py
1
4926
#!/usr/bin/env python # # Copyright (C) 2008 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Given a target-files zipfile, produces an image zipfile suitable for use with 'fastboot update'. Usage: img_from_target_files [flags] input_target_files output_image_zip -z (--bootable_zip) Include only the bootable images (eg 'boot' and 'recovery') in the output. """ import sys if sys.hexversion < 0x02070000: print >> sys.stderr, "Python 2.7 or newer is required." sys.exit(1) import errno import os import re import shutil import subprocess import tempfile import zipfile # missing in Python 2.4 and before if not hasattr(os, "SEEK_SET"): os.SEEK_SET = 0 import common OPTIONS = common.OPTIONS def CopyInfo(output_zip): """Copy the android-info.txt file from the input to the output.""" output_zip.write(os.path.join(OPTIONS.input_tmp, "OTA", "android-info.txt"), "android-info.txt") def main(argv): bootable_only = [False] def option_handler(o, a): if o in ("-z", "--bootable_zip"): bootable_only[0] = True else: return False return True args = common.ParseOptions(argv, __doc__, extra_opts="z", extra_long_opts=["bootable_zip"], extra_option_handler=option_handler) bootable_only = bootable_only[0] if len(args) != 2: common.Usage(__doc__) sys.exit(1) OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0]) output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED) CopyInfo(output_zip) try: done = False images_path = os.path.join(OPTIONS.input_tmp, "IMAGES") if os.path.exists(images_path): # If this is a new target-files, it already contains the images, # and all we have to do is copy them to the output zip. images = os.listdir(images_path) if images: for i in images: if bootable_only and i not in ("boot.img", "recovery.img"): continue if not i.endswith(".img"): continue with open(os.path.join(images_path, i), "r") as f: common.ZipWriteStr(output_zip, i, f.read()) done = True if not done: # We have an old target-files that doesn't already contain the # images, so build them. import add_img_to_target_files OPTIONS.info_dict = common.LoadInfoDict(input_zip) # If this image was originally labelled with SELinux contexts, # make sure we also apply the labels in our new image. During # building, the "file_contexts" is in the out/ directory tree, # but for repacking from target-files.zip it's in the root # directory of the ramdisk. 
if "selinux_fc" in OPTIONS.info_dict: OPTIONS.info_dict["selinux_fc"] = os.path.join( OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts") boot_image = common.GetBootableImage( "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT") if boot_image: boot_image.AddToZip(output_zip) recovery_image = common.GetBootableImage( "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY") if recovery_image: recovery_image.AddToZip(output_zip) def banner(s): print "\n\n++++ " + s + " ++++\n\n" if not bootable_only: banner("AddSystem") add_img_to_target_files.AddSystem(output_zip, prefix="") try: input_zip.getinfo("VENDOR/") banner("AddVendor") add_img_to_target_files.AddVendor(output_zip, prefix="") except KeyError: pass # no vendor partition for this device try: input_zip.getinfo("CUSTOM/") banner("AddCustom") add_img_to_target_files.AddCustom(output_zip, prefix="") except KeyError: pass # no custom partition for this device banner("AddUserdata") add_img_to_target_files.AddUserdata(output_zip, prefix="") banner("AddCache") add_img_to_target_files.AddCache(output_zip, prefix="") finally: print "cleaning up..." output_zip.close() shutil.rmtree(OPTIONS.input_tmp) print "done." if __name__ == '__main__': try: common.CloseInheritedPipes() main(sys.argv[1:]) except common.ExternalError, e: print print " ERROR: %s" % (e,) print sys.exit(1)
gpl-3.0
3,824,091,379,033,798,000
29.407407
78
0.634592
false
3.603511
false
false
false
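The script in the preceding record spends most of its simple path copying prebuilt *.img files from an unpacked target-files directory into an output zip. A stripped-down sketch of that copy loop follows; the directory layout, function name, and the bootable-only filter are illustrative assumptions, not the AOSP tool itself.

import os
import zipfile

def copy_images(images_dir, output_path, bootable_only=False):
    # Write every .img under images_dir into a new deflated zip,
    # optionally keeping just the bootable images.
    with zipfile.ZipFile(output_path, "w", compression=zipfile.ZIP_DEFLATED) as output_zip:
        for name in sorted(os.listdir(images_dir)):
            if not name.endswith(".img"):
                continue
            if bootable_only and name not in ("boot.img", "recovery.img"):
                continue
            output_zip.write(os.path.join(images_dir, name), arcname=name)

# copy_images("target_files/IMAGES", "update.zip", bootable_only=True)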
JKarathiya/Lean
Algorithm.Python/FutureOptionShortPutOTMExpiryRegressionAlgorithm.py
1
5416
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. # Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License from datetime import datetime, timedelta import clr from System import * from System.Reflection import * from QuantConnect import * from QuantConnect.Algorithm import * from QuantConnect.Data import * from QuantConnect.Data.Market import * from QuantConnect.Orders import * from QuantConnect.Securities import * from QuantConnect.Securities.Future import * from QuantConnect import Market ### <summary> ### This regression algorithm tests Out of The Money (OTM) future option expiry for short puts. ### We expect 2 orders from the algorithm, which are: ### ### * Initial entry, sell ES Put Option (expiring OTM) ### - Profit the option premium, since the option was not assigned. ### ### * Liquidation of ES put OTM contract on the last trade date ### ### Additionally, we test delistings for future options and assert that our ### portfolio holdings reflect the orders the algorithm has submitted. ### </summary> class FutureOptionShortPutOTMExpiryRegressionAlgorithm(QCAlgorithm): def Initialize(self): self.SetStartDate(2020, 1, 5) self.SetEndDate(2020, 6, 30) self.es19m20 = self.AddFutureContract( Symbol.CreateFuture( Futures.Indices.SP500EMini, Market.CME, datetime(2020, 6, 19)), Resolution.Minute).Symbol # Select a future option expiring ITM, and adds it to the algorithm. self.esOption = self.AddFutureOptionContract( list( sorted( [x for x in self.OptionChainProvider.GetOptionContractList(self.es19m20, self.Time) if x.ID.StrikePrice <= 3000.0 and x.ID.OptionRight == OptionRight.Put], key=lambda x: x.ID.StrikePrice, reverse=True ) )[0], Resolution.Minute).Symbol self.expectedContract = Symbol.CreateOption(self.es19m20, Market.CME, OptionStyle.American, OptionRight.Put, 3000.0, datetime(2020, 6, 19)) if self.esOption != self.expectedContract: raise AssertionError(f"Contract {self.expectedContract} was not found in the chain"); self.Schedule.On(self.DateRules.Tomorrow, self.TimeRules.AfterMarketOpen(self.es19m20, 1), self.ScheduledMarketOrder) def ScheduledMarketOrder(self): self.MarketOrder(self.esOption, -1) def OnData(self, data: Slice): # Assert delistings, so that we can make sure that we receive the delisting warnings at # the expected time. These assertions detect bug #4872 for delisting in data.Delistings.Values: if delisting.Type == DelistingType.Warning: if delisting.Time != datetime(2020, 6, 19): raise AssertionError(f"Delisting warning issued at unexpected date: {delisting.Time}"); if delisting.Type == DelistingType.Delisted: if delisting.Time != datetime(2020, 6, 20): raise AssertionError(f"Delisting happened at unexpected date: {delisting.Time}"); def OnOrderEvent(self, orderEvent: OrderEvent): if orderEvent.Status != OrderStatus.Filled: # There's lots of noise with OnOrderEvent, but we're only interested in fills. 
return if not self.Securities.ContainsKey(orderEvent.Symbol): raise AssertionError(f"Order event Symbol not found in Securities collection: {orderEvent.Symbol}") security = self.Securities[orderEvent.Symbol] if security.Symbol == self.es19m20: raise AssertionError(f"Expected no order events for underlying Symbol {security.Symbol}") if security.Symbol == self.expectedContract: self.AssertFutureOptionContractOrder(orderEvent, security) else: raise AssertionError(f"Received order event for unknown Symbol: {orderEvent.Symbol}") self.Log(f"{orderEvent}"); def AssertFutureOptionContractOrder(self, orderEvent: OrderEvent, optionContract: Security): if orderEvent.Direction == OrderDirection.Sell and optionContract.Holdings.Quantity != -1: raise AssertionError(f"No holdings were created for option contract {optionContract.Symbol}") if orderEvent.Direction == OrderDirection.Buy and optionContract.Holdings.Quantity != 0: raise AssertionError("Expected no options holdings after closing position") if orderEvent.IsAssignment: raise AssertionError(f"Assignment was not expected for {orderEvent.Symbol}") def OnEndOfAlgorithm(self): if self.Portfolio.Invested: raise AssertionError(f"Expected no holdings at end of algorithm, but are invested in: {', '.join([str(i.ID) for i in self.Portfolio.Keys])}")
apache-2.0
1,143,300,330,185,993,500
45.299145
175
0.694978
false
4.069121
false
false
false
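The regression algorithm above selects its contract by filtering the option chain to puts with strikes at or below 3000 and taking the highest remaining strike. The same selection expressed over plain tuples, with dummy data standing in for LEAN option symbols:

# (strike, right) pairs standing in for option contract symbols.
chain = [(2975.0, "Put"), (3000.0, "Put"), (3050.0, "Put"), (3000.0, "Call")]

candidates = [c for c in chain if c[1] == "Put" and c[0] <= 3000.0]
selected = sorted(candidates, key=lambda c: c[0], reverse=True)[0]
assert selected == (3000.0, "Put")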
Johnzero/erp
openerp/addons/clivia_analysis/report/analysis_report.py
1
2399
# -*- encoding: utf-8 -*- import tools from osv import fields, osv class common_report(osv.osv): _name = "clivia_analysis.production_report" _description = "报表视图" _auto = False _rec_name = 'date' _columns = { 'year': fields.char('年份', size=4, readonly=True), 'month': fields.selection([('01', '一月'), ('02', '二月'), ('03', '三月'), ('04', '四月'), ('05', '五月'), ('06', '六月'), ('07', '七月'), ('08', '八月'), ('09', '九月'), ('10', '十月'), ('11', '十一月'), ('12', '十二月')], '月份', readonly=True), 'date': fields.date('上报时间', required=True, readonly=True), 'product_id': fields.many2one('clivia_analysis.stocked_product', '产品', readonly=True), 'produced': fields.integer('生产', readonly=True), 'sent': fields.float('发出', readonly=True), 'sold': fields.integer('销售', readonly=True), 'hefei_today_inventory':fields.integer('君子兰结存', readonly=True), 'sanhe_last_inventory':fields.integer('三河实际库存', readonly=True), } _order = 'date desc' def init(self, cr): tools.drop_view_if_exists(cr, 'clivia_analysis_production_report') cr.execute(""" CREATE OR REPLACE VIEW clivia_analysis_production_report AS SELECT DISTINCT ON (product.id) product.id, product.id AS product_id, mpl.production AS produced, mpl.hefei_warning_level, mpl.sanhe_warning_level, drl.hefei_today_inventory AS hefei_today_inventory, drl.sanhe_real_inventory AS sanhe_real_inventory, dr.date_created date, to_char(dr.date_created::timestamp with time zone, 'YYYY'::text) AS year, to_char(dr.date_created::timestamp with time zone, 'MM'::text) AS month, drl.sent, drl.sold FROM clivia_analysis_stocked_product product JOIN clivia_analysis_daily_report_line drl ON product.id = drl.product_id JOIN clivia_analysis_daily_report dr ON dr.id = drl.report_id JOIN clivia_analysis_monthly_plan_line mpl ON mpl.product_id = product.id WHERE dr.state::text = 'review'::text ORDER BY product.id, dr.date_created DESC; """)
agpl-3.0
-2,405,913,437,995,879,000
43.803922
96
0.570241
false
3.204769
false
false
false
CN-UPB/OpenBarista
components/decaf-masta/decaf_masta/components/database/datacenter.py
1
1976
## # Copyright 2016 DECaF Project Group, University of Paderborn # This file is part of the decaf orchestration framework # All Rights Reserved. # # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. ## __author__ = 'Kristian Hinnenthal' __date__ = '$13-okt-2015 14:15:27$' from sqlalchemy import Column, Integer, String, ForeignKey from sqlalchemy.orm import relationship from .mastadatabase import Base from .keystone import Keystone import json class Datacenter(Base): __tablename__ = 'datacenters' datacenter_id = Column(Integer, primary_key=True,autoincrement=True) datacenter_name = Column(String(250), nullable=False) keystone_id = Column(Integer, ForeignKey('keystone_credentials.keystone_id'), nullable=False) keystone_region = Column(String(250), nullable=False) flavors = relationship('Flavor', backref='datacenters') images = relationship('Image', backref='datacenters') monitoring_alarms = relationship('MonitoringAlarm', backref='datacenters') management_networks = relationship('ManagementNetwork', backref='datacenters') public_networks = relationship('PublicNetwork', backref='datacenters') vm_instances = relationship('VMInstance', backref='datacenters') internal_edges = relationship('InternalEdge', backref='datacenters') public_ports = relationship('PublicPort', backref='datacenters') keypairs = relationship('KeyPair', backref='datacenter') def to_json(self): return json.dumps(self.to_dict()) def to_dict(self): return_dict = { "datacenter" : { "datacenter_id": self.datacenter_id, "datacenter_name": self.datacenter_name, "keystone_id": self.keystone_id, "keystone_region": self.keystone_region } } return return_dict
mpl-2.0
-113,731,610,094,640,820
41.06383
97
0.695344
false
4.074227
false
false
false
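The Datacenter model in the record above is an ordinary SQLAlchemy declarative class whose to_dict/to_json methods simply serialize its own columns. A compact sketch of that idea against an in-memory SQLite database follows; it uses SQLAlchemy 1.4+ style imports and keeps only two of the columns, so treat it as an assumption-laden illustration rather than the decaf-masta schema.

import json
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Datacenter(Base):
    __tablename__ = "datacenters"
    datacenter_id = Column(Integer, primary_key=True, autoincrement=True)
    datacenter_name = Column(String(250), nullable=False)

    def to_dict(self):
        return {"datacenter": {"datacenter_id": self.datacenter_id,
                               "datacenter_name": self.datacenter_name}}

    def to_json(self):
        return json.dumps(self.to_dict())

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Datacenter(datacenter_name="paderborn-1"))
    session.commit()
    print(session.query(Datacenter).first().to_json())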
airodactyl/qutebrowser
tests/helpers/stubs.py
1
16414
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et: # Copyright 2014-2018 Florian Bruhin (The Compiler) <mail@qutebrowser.org> # # This file is part of qutebrowser. # # qutebrowser is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # qutebrowser is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with qutebrowser. If not, see <http://www.gnu.org/licenses/>. # pylint: disable=invalid-name,abstract-method """Fake objects/stubs.""" from unittest import mock import contextlib import shutil import attr from PyQt5.QtCore import pyqtSignal, QPoint, QProcess, QObject, QUrl from PyQt5.QtGui import QIcon from PyQt5.QtNetwork import (QNetworkRequest, QAbstractNetworkCache, QNetworkCacheMetaData) from PyQt5.QtWidgets import QCommonStyle, QLineEdit, QWidget, QTabBar from qutebrowser.browser import browsertab, downloads from qutebrowser.utils import usertypes from qutebrowser.mainwindow import mainwindow class FakeNetworkCache(QAbstractNetworkCache): """Fake cache with no data.""" def cacheSize(self): return 0 def data(self, _url): return None def insert(self, _dev): pass def metaData(self, _url): return QNetworkCacheMetaData() def prepare(self, _metadata): return None def remove(self, _url): return False def updateMetaData(self, _url): pass class FakeKeyEvent: """Fake QKeyPressEvent stub.""" def __init__(self, key, modifiers=0, text=''): self.key = mock.Mock(return_value=key) self.text = mock.Mock(return_value=text) self.modifiers = mock.Mock(return_value=modifiers) class FakeWebFrame: """A stub for QWebFrame.""" def __init__(self, geometry=None, *, scroll=None, plaintext=None, html=None, parent=None, zoom=1.0): """Constructor. Args: geometry: The geometry of the frame as QRect. scroll: The scroll position as QPoint. plaintext: Return value of toPlainText html: Return value of tohtml. zoom: The zoom factor. parent: The parent frame. 
""" if scroll is None: scroll = QPoint(0, 0) self.geometry = mock.Mock(return_value=geometry) self.scrollPosition = mock.Mock(return_value=scroll) self.parentFrame = mock.Mock(return_value=parent) self.toPlainText = mock.Mock(return_value=plaintext) self.toHtml = mock.Mock(return_value=html) self.zoomFactor = mock.Mock(return_value=zoom) class FakeChildrenFrame: """A stub for QWebFrame to test get_child_frames.""" def __init__(self, children=None): if children is None: children = [] self.childFrames = mock.Mock(return_value=children) class FakeQApplication: """Stub to insert as QApplication module.""" UNSET = object() def __init__(self, style=None, all_widgets=None, active_window=None, instance=UNSET): if instance is self.UNSET: self.instance = mock.Mock(return_value=self) else: self.instance = mock.Mock(return_value=instance) self.style = mock.Mock(spec=QCommonStyle) self.style().metaObject().className.return_value = style self.allWidgets = lambda: all_widgets self.activeWindow = lambda: active_window class FakeNetworkReply: """QNetworkReply stub which provides a Content-Disposition header.""" KNOWN_HEADERS = { QNetworkRequest.ContentTypeHeader: 'Content-Type', } def __init__(self, headers=None, url=None): if url is None: url = QUrl() if headers is None: self.headers = {} else: self.headers = headers self.url = mock.Mock(return_value=url) def hasRawHeader(self, name): """Check if the reply has a certain header. Args: name: The name of the header as ISO-8859-1 encoded bytes object. Return: True if the header is present, False if not. """ return name.decode('iso-8859-1') in self.headers def rawHeader(self, name): """Get the raw header data of a header. Args: name: The name of the header as ISO-8859-1 encoded bytes object. Return: The header data, as ISO-8859-1 encoded bytes() object. """ name = name.decode('iso-8859-1') return self.headers[name].encode('iso-8859-1') def header(self, known_header): """Get a known header. Args: known_header: A QNetworkRequest::KnownHeaders member. """ key = self.KNOWN_HEADERS[known_header] try: return self.headers[key] except KeyError: return None def setHeader(self, known_header, value): """Set a known header. Args: known_header: A QNetworkRequest::KnownHeaders member. value: The value to set. 
""" key = self.KNOWN_HEADERS[known_header] self.headers[key] = value def fake_qprocess(): """Factory for a QProcess mock which has the QProcess enum values.""" m = mock.Mock(spec=QProcess) for name in ['NormalExit', 'CrashExit', 'FailedToStart', 'Crashed', 'Timedout', 'WriteError', 'ReadError', 'UnknownError']: setattr(m, name, getattr(QProcess, name)) return m class FakeWebTabScroller(browsertab.AbstractScroller): """Fake AbstractScroller to use in tests.""" def __init__(self, tab, pos_perc): super().__init__(tab) self._pos_perc = pos_perc def pos_perc(self): return self._pos_perc class FakeWebTabHistory(browsertab.AbstractHistory): """Fake for Web{Kit,Engine}History.""" def __init__(self, tab, *, can_go_back, can_go_forward): super().__init__(tab) self._can_go_back = can_go_back self._can_go_forward = can_go_forward def can_go_back(self): assert self._can_go_back is not None return self._can_go_back def can_go_forward(self): assert self._can_go_forward is not None return self._can_go_forward class FakeWebTabAudio(browsertab.AbstractAudio): def is_muted(self): return False def is_recently_audible(self): return False class FakeWebTab(browsertab.AbstractTab): """Fake AbstractTab to use in tests.""" def __init__(self, url=QUrl(), title='', tab_id=0, *, scroll_pos_perc=(0, 0), load_status=usertypes.LoadStatus.success, progress=0, can_go_back=None, can_go_forward=None): super().__init__(win_id=0, mode_manager=None, private=False) self._load_status = load_status self._title = title self._url = url self._progress = progress self.history = FakeWebTabHistory(self, can_go_back=can_go_back, can_go_forward=can_go_forward) self.scroller = FakeWebTabScroller(self, scroll_pos_perc) self.audio = FakeWebTabAudio() wrapped = QWidget() self._layout.wrap(self, wrapped) def url(self, requested=False): assert not requested return self._url def title(self): return self._title def progress(self): return self._progress def load_status(self): return self._load_status def shutdown(self): pass def icon(self): return QIcon() class FakeSignal: """Fake pyqtSignal stub which does nothing. Attributes: signal: The name of the signal, like pyqtSignal. _func: The function to be invoked when the signal gets called. """ def __init__(self, name='fake', func=None): self.signal = '2{}(int, int)'.format(name) self._func = func def __call__(self): if self._func is None: raise TypeError("'FakeSignal' object is not callable") else: return self._func() def connect(self, slot): """Connect the signal to a slot. Currently does nothing, but could be improved to do some sanity checking on the slot. """ pass def disconnect(self, slot=None): """Disconnect the signal from a slot. Currently does nothing, but could be improved to do some sanity checking on the slot and see if it actually got connected. """ pass def emit(self, *args): """Emit the signal. Currently does nothing, but could be improved to do type checking based on a signature given to __init__. 
""" pass @attr.s class FakeCmdUtils: """Stub for cmdutils which provides a cmd_dict.""" cmd_dict = attr.ib() @attr.s(frozen=True) class FakeCommand: """A simple command stub which has a description.""" name = attr.ib('') desc = attr.ib('') hide = attr.ib(False) debug = attr.ib(False) deprecated = attr.ib(False) completion = attr.ib(None) maxsplit = attr.ib(None) takes_count = attr.ib(lambda: False) modes = attr.ib((usertypes.KeyMode.normal, )) class FakeTimer(QObject): """Stub for a usertypes.Timer.""" timeout_signal = pyqtSignal() def __init__(self, parent=None, name=None): super().__init__(parent) self.timeout = mock.Mock(spec=['connect', 'disconnect', 'emit']) self.timeout.connect.side_effect = self.timeout_signal.connect self.timeout.disconnect.side_effect = self.timeout_signal.disconnect self.timeout.emit.side_effect = self._emit self._started = False self._singleshot = False self._interval = 0 self._name = name def __repr__(self): return '<{} name={!r}>'.format(self.__class__.__name__, self._name) def _emit(self): """Called when the timeout "signal" gets emitted.""" if self._singleshot: self._started = False self.timeout_signal.emit() def setInterval(self, interval): self._interval = interval def interval(self): return self._interval def setSingleShot(self, singleshot): self._singleshot = singleshot def isSingleShot(self): return self._singleshot def start(self, interval=None): if interval: self._interval = interval self._started = True def stop(self): self._started = False def isActive(self): return self._started class InstaTimer(QObject): """Stub for a QTimer that fires instantly on start(). Useful to test a time-based event without inserting an artificial delay. """ timeout = pyqtSignal() def start(self, interval=None): self.timeout.emit() def setSingleShot(self, yes): pass def setInterval(self, interval): pass @staticmethod def singleShot(_interval, fun): fun() class StatusBarCommandStub(QLineEdit): """Stub for the statusbar command prompt.""" got_cmd = pyqtSignal(str) clear_completion_selection = pyqtSignal() hide_completion = pyqtSignal() update_completion = pyqtSignal() show_cmd = pyqtSignal() hide_cmd = pyqtSignal() def prefix(self): return self.text()[0] class UrlMarkManagerStub(QObject): """Stub for the quickmark-manager or bookmark-manager object.""" added = pyqtSignal(str, str) removed = pyqtSignal(str) def __init__(self, parent=None): super().__init__(parent) self.marks = {} def delete(self, key): del self.marks[key] self.removed.emit(key) class BookmarkManagerStub(UrlMarkManagerStub): """Stub for the bookmark-manager object.""" pass class QuickmarkManagerStub(UrlMarkManagerStub): """Stub for the quickmark-manager object.""" def quickmark_del(self, key): self.delete(key) class HostBlockerStub: """Stub for the host-blocker object.""" def __init__(self): self.blocked_hosts = set() class SessionManagerStub: """Stub for the session-manager object.""" def __init__(self): self.sessions = [] def list_sessions(self): return self.sessions def save_autosave(self): pass class TabbedBrowserStub(QObject): """Stub for the tabbed-browser object.""" def __init__(self, parent=None): super().__init__(parent) self.widget = TabWidgetStub() self.shutting_down = False self.opened_url = None def on_tab_close_requested(self, idx): del self.widget.tabs[idx] def widgets(self): return self.widget.tabs def tabopen(self, url): self.opened_url = url def openurl(self, url, *, newtab): self.opened_url = url class TabWidgetStub(QObject): """Stub for the tab-widget object.""" new_tab = 
pyqtSignal(browsertab.AbstractTab, int) def __init__(self, parent=None): super().__init__(parent) self.tabs = [] self._qtabbar = QTabBar() self.index_of = None self.current_index = None def count(self): return len(self.tabs) def widget(self, i): return self.tabs[i] def page_title(self, i): return self.tabs[i].title() def tabBar(self): return self._qtabbar def indexOf(self, _tab): if self.index_of is None: raise ValueError("indexOf got called with index_of None!") elif self.index_of is RuntimeError: raise RuntimeError else: return self.index_of def currentIndex(self): if self.current_index is None: raise ValueError("currentIndex got called with current_index " "None!") return self.current_index def currentWidget(self): idx = self.currentIndex() if idx == -1: return None return self.tabs[idx - 1] class ApplicationStub(QObject): """Stub to insert as the app object in objreg.""" new_window = pyqtSignal(mainwindow.MainWindow) class HTTPPostStub(QObject): """A stub class for HTTPClient. Attributes: url: the last url send by post() data: the last data send by post() """ success = pyqtSignal(str) error = pyqtSignal(str) def __init__(self, parent=None): super().__init__(parent) self.url = None self.data = None def post(self, url, data=None): self.url = url self.data = data class FakeDownloadItem(QObject): """Mock browser.downloads.DownloadItem.""" finished = pyqtSignal() def __init__(self, fileobj, name, parent=None): super().__init__(parent) self.fileobj = fileobj self.name = name self.successful = False class FakeDownloadManager: """Mock browser.downloads.DownloadManager.""" def __init__(self, tmpdir): self._tmpdir = tmpdir self.downloads = [] @contextlib.contextmanager def _open_fileobj(self, target): """Ensure a DownloadTarget's fileobj attribute is available.""" if isinstance(target, downloads.FileDownloadTarget): target.fileobj = open(target.filename, 'wb') try: yield target.fileobj finally: target.fileobj.close() else: yield target.fileobj def get(self, url, target, **kwargs): """Return a FakeDownloadItem instance with a fileobj. The content is copied from the file the given url links to. """ with self._open_fileobj(target): download_item = FakeDownloadItem(target.fileobj, name=url.path()) with (self._tmpdir / url.path()).open('rb') as fake_url_file: shutil.copyfileobj(fake_url_file, download_item.fileobj) self.downloads.append(download_item) return download_item
gpl-3.0
5,947,849,121,296,085,000
24.88959
79
0.61277
false
4.021068
false
false
false
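Most of the stubs in the record above follow one recipe: keep the production object's interface, but make its behaviour synchronous and observable. A Qt-free sketch of the InstaTimer idea, a timer double whose start() fires its callbacks immediately so tests need no event loop; the class and method names here are illustrative, not qutebrowser's.

class InstantTimer:
    """Test double for a timer: start() invokes every connected callback at once."""

    def __init__(self):
        self._callbacks = []

    def connect(self, func):
        self._callbacks.append(func)

    def start(self, interval=None):
        # Ignore the interval and fire synchronously, like InstaTimer.start().
        for func in self._callbacks:
            func()

def test_timeout_handler_runs():
    fired = []
    timer = InstantTimer()
    timer.connect(lambda: fired.append(True))
    timer.start(1000)
    assert fired == [True]

test_timeout_handler_runs()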
googleapis/python-essential-contacts
google/cloud/essential_contacts_v1/services/essential_contacts_service/client.py
1
36542
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from collections import OrderedDict from distutils import util import os import re from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.essential_contacts_v1.services.essential_contacts_service import ( pagers, ) from google.cloud.essential_contacts_v1.types import enums from google.cloud.essential_contacts_v1.types import service from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EssentialContactsServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import EssentialContactsServiceGrpcTransport from .transports.grpc_asyncio import EssentialContactsServiceGrpcAsyncIOTransport class EssentialContactsServiceClientMeta(type): """Metaclass for the EssentialContactsService client. This provides class-level methods for building and retrieving support objects (e.g. transport) without polluting the client instance objects. """ _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[EssentialContactsServiceTransport]] _transport_registry["grpc"] = EssentialContactsServiceGrpcTransport _transport_registry["grpc_asyncio"] = EssentialContactsServiceGrpcAsyncIOTransport def get_transport_class( cls, label: str = None, ) -> Type[EssentialContactsServiceTransport]: """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is provided, then the first transport in the registry is used. Returns: The transport class to use. """ # If a specific transport is requested, return that one. if label: return cls._transport_registry[label] # No transport is requested; return the default (that is, the first one # in the dictionary). return next(iter(cls._transport_registry.values())) class EssentialContactsServiceClient(metaclass=EssentialContactsServiceClientMeta): """Manages contacts for important Google Cloud notifications.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): """Converts api endpoint to mTLS endpoint. Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: api_endpoint (Optional[str]): the api endpoint to convert. Returns: str: converted mTLS api endpoint. 
""" if not api_endpoint: return api_endpoint mtls_endpoint_re = re.compile( r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" ) m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() if mtls or not googledomain: return api_endpoint if sandbox: return api_endpoint.replace( "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = "essentialcontacts.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: EssentialContactsServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials return cls(*args, **kwargs) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: EssentialContactsServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @property def transport(self) -> EssentialContactsServiceTransport: """Returns the transport used by the client instance. Returns: EssentialContactsServiceTransport: The transport used by the client instance. 
""" return self._transport @staticmethod def contact_path(project: str, contact: str,) -> str: """Returns a fully-qualified contact string.""" return "projects/{project}/contacts/{contact}".format( project=project, contact=contact, ) @staticmethod def parse_contact_path(path: str) -> Dict[str, str]: """Parses a contact path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/contacts/(?P<contact>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @staticmethod def parse_common_billing_account_path(path: str) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_folder_path(folder: str,) -> str: """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_organization_path(organization: str,) -> str: """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_project_path(project: str,) -> str: """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_location_path(project: str, location: str,) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @staticmethod def parse_common_location_path(path: str) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, EssentialContactsServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the essential contacts service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. transport (Union[str, EssentialContactsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always use the default regular endpoint) and "auto" (auto switch to the default mTLS endpoint if client certificate is present, this is the default value). However, the ``api_endpoint`` property takes precedence if provided. (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used to provide client certificate for mutual TLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ if isinstance(client_options, dict): client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: is_mtls = True client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() if is_mtls: client_cert_source_func = mtls.default_client_cert_source() else: client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: api_endpoint = client_options.api_endpoint else: use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": if is_mtls: api_endpoint = self.DEFAULT_MTLS_ENDPOINT else: api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " "values: never, auto, always" ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, EssentialContactsServiceTransport): # transport is a EssentialContactsServiceTransport instance. if credentials or client_options.credentials_file: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) if client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
) self._transport = transport else: Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) def create_contact( self, request: service.CreateContactRequest = None, *, parent: str = None, contact: service.Contact = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> service.Contact: r"""Adds a new contact for a resource. Args: request (google.cloud.essential_contacts_v1.types.CreateContactRequest): The request object. Request message for the CreateContact method. parent (str): Required. The resource to save this contact for. Format: organizations/{organization_id}, folders/{folder_id} or projects/{project_id} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. contact (google.cloud.essential_contacts_v1.types.Contact): Required. The contact to create. Must specify an email address and language tag. This corresponds to the ``contact`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.essential_contacts_v1.types.Contact: A contact that will receive notifications from Google Cloud. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, contact]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) # Minor optimization to avoid making a copy if the user passes # in a service.CreateContactRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, service.CreateContactRequest): request = service.CreateContactRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent if contact is not None: request.contact = contact # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_contact] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response def update_contact( self, request: service.UpdateContactRequest = None, *, contact: service.Contact = None, update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> service.Contact: r"""Updates a contact. Note: A contact's email address cannot be changed. Args: request (google.cloud.essential_contacts_v1.types.UpdateContactRequest): The request object. 
Request message for the UpdateContact method. contact (google.cloud.essential_contacts_v1.types.Contact): Required. The contact resource to replace the existing saved contact. Note: the email address of the contact cannot be modified. This corresponds to the ``contact`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. The update mask applied to the resource. For the ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.essential_contacts_v1.types.Contact: A contact that will receive notifications from Google Cloud. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([contact, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) # Minor optimization to avoid making a copy if the user passes # in a service.UpdateContactRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, service.UpdateContactRequest): request = service.UpdateContactRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if contact is not None: request.contact = contact if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_contact] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( (("contact.name", request.contact.name),) ), ) # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response def list_contacts( self, request: service.ListContactsRequest = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListContactsPager: r"""Lists the contacts that have been set on a resource. Args: request (google.cloud.essential_contacts_v1.types.ListContactsRequest): The request object. Request message for the ListContacts method. parent (str): Required. The parent resource name. Format: organizations/{organization_id}, folders/{folder_id} or projects/{project_id} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
Returns: google.cloud.essential_contacts_v1.services.essential_contacts_service.pagers.ListContactsPager: Response message for the ListContacts method. Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) # Minor optimization to avoid making a copy if the user passes # in a service.ListContactsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, service.ListContactsRequest): request = service.ListContactsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_contacts] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListContactsPager( method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. return response def get_contact( self, request: service.GetContactRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> service.Contact: r"""Gets a single contact. Args: request (google.cloud.essential_contacts_v1.types.GetContactRequest): The request object. Request message for the GetContact method. name (str): Required. The name of the contact to retrieve. Format: organizations/{organization_id}/contacts/{contact_id}, folders/{folder_id}/contacts/{contact_id} or projects/{project_id}/contacts/{contact_id} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.essential_contacts_v1.types.Contact: A contact that will receive notifications from Google Cloud. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) # Minor optimization to avoid making a copy if the user passes # in a service.GetContactRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
if not isinstance(request, service.GetContactRequest): request = service.GetContactRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_contact] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response def delete_contact( self, request: service.DeleteContactRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a contact. Args: request (google.cloud.essential_contacts_v1.types.DeleteContactRequest): The request object. Request message for the DeleteContact method. name (str): Required. The name of the contact to delete. Format: organizations/{organization_id}/contacts/{contact_id}, folders/{folder_id}/contacts/{contact_id} or projects/{project_id}/contacts/{contact_id} This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) # Minor optimization to avoid making a copy if the user passes # in a service.DeleteContactRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, service.DeleteContactRequest): request = service.DeleteContactRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_contact] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) def compute_contacts( self, request: service.ComputeContactsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ComputeContactsPager: r"""Lists all contacts for the resource that are subscribed to the specified notification categories, including contacts inherited from any parent resources. Args: request (google.cloud.essential_contacts_v1.types.ComputeContactsRequest): The request object. Request message for the ComputeContacts method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.essential_contacts_v1.services.essential_contacts_service.pagers.ComputeContactsPager: Response message for the ComputeContacts method. Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes # in a service.ComputeContactsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, service.ComputeContactsRequest): request = service.ComputeContactsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.compute_contacts] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ComputeContactsPager( method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. return response def send_test_message( self, request: service.SendTestMessageRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Allows a contact admin to send a test message to contact to verify that it has been configured correctly. Args: request (google.cloud.essential_contacts_v1.types.SendTestMessageRequest): The request object. Request message for the SendTestMessage method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes # in a service.SendTestMessageRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, service.SendTestMessageRequest): request = service.SendTestMessageRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.send_test_message] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) # Send the request. rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-essential-contacts", ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("EssentialContactsServiceClient",)
apache-2.0
6,529,256,094,567,324,000
41.540163
111
0.617946
false
4.636721
false
false
false
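As a usage sketch for the generated client above: instantiate it with application default credentials and create a contact through the flattened parent/contact arguments. The project ID and contact values are placeholders, and the Contact field names (email, language_tag) are taken from the request docstrings in the record rather than verified against the published package, so treat them as assumptions.

from google.cloud.essential_contacts_v1.services.essential_contacts_service import (
    EssentialContactsServiceClient,
)
from google.cloud.essential_contacts_v1.types import service

client = EssentialContactsServiceClient()  # picks up application default credentials

# Assumed field names on Contact; the CreateContact docstring only states that an
# email address and language tag must be specified.
contact = service.Contact(email="oncall@example.com", language_tag="en-US")

created = client.create_contact(parent="projects/my-project", contact=contact)
print(created.name)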
Dev-Cloud-Platform/Dev-Cloud
dev_cloud/cc1/src/wi/views/user/user.py
1
7236
# -*- coding: utf-8 -*- # @COPYRIGHT_begin # # Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # @COPYRIGHT_end """@package src.wi.views.user.user @author Piotr Wójcik @date 31.01.2014 """ from django.contrib import messages from django.shortcuts import render_to_response, redirect from django.template import RequestContext from django.template.loader import render_to_string from django.utils.translation import ugettext as _ from django.views.decorators.csrf import csrf_protect from wi.commontags.templatetags.templatetags import filesizeformatmb from wi.forms.user import CMAuthenticationForm, HelpForm, PasswordChangeForm, \ AccountDataEdit from wi.utils import get_dict_from_list, messages_ajax from wi.utils.decorators import django_view, user_permission from wi.utils.exceptions import RestErrorException from wi.utils.messages_ajax import ajax_request from wi.utils.messages_codes import get_message from wi.utils.states import message_levels_reversed from wi.utils.views import prep_data @django_view @user_permission def change_cm(request, cm_id, success_url='mai_main'): """ View changing used CM. """ request.session['user'].cm_id = int(cm_id) request.session.modified = True messages.success(request, _('Cluster Manager changed.')) return redirect(request.META['HTTP_REFERER'] or success_url) @django_view @ajax_request @user_permission def get_messages(request): """ Ajax view fetching user messages. """ if request.method == 'POST': response = prep_data('user/message/get_list/', request.session) for item in response: item['text'] = get_message(item['code'], item['params']) item['level'] = message_levels_reversed[item['level']] return messages_ajax.success(response) @django_view @ajax_request @user_permission def acc_ajax_get_user_data(request): """ Ajax view. Returns user account data. """ if request.method == 'GET': rest_data = prep_data({'user': 'user/user/get_my_data/', 'cms': 'guest/cluster/list_names/' }, request.session) user_data = rest_data['user'] users_cm = get_dict_from_list(rest_data['cms'], user_data['default_cluster_id'], key='cluster_id') if users_cm is None: raise Exception('User\'s default_cluster_id=%d is not a valid CM id.' % user_data['default_cluster_id']) user_data['default_cluster_id'] = users_cm['name'] return messages_ajax.success(user_data) @django_view @ajax_request @user_permission @csrf_protect def acc_ajax_account_data_edit(request, template_name='generic/form.html', form_class=AccountDataEdit): """ Ajax view for user account data editing. 
""" rest_data = prep_data({'cms': 'guest/cluster/list_names/'}, request.session) if request.method == 'POST': form = form_class(data=request.POST, rest_data=rest_data) if form.is_valid(): prep_data({'user': ('user/user/edit/', form.cleaned_data)}, request.session) request.session['user'].email = form.cleaned_data['email'] request.session['user'].default_cluster_id = form.cleaned_data['default_cluster_id'] request.session.modified = True return messages_ajax.success(_('Account data edited.')) else: form = form_class(data={'email': request.session['user'].email, 'default_cluster_id': request.session['user'].default_cluster_id}, rest_data=rest_data) return messages_ajax.success(render_to_string(template_name, {'form': form, 'text': '', 'confirmation': _('Save')}, context_instance=RequestContext(request)), status=1) @django_view @ajax_request @user_permission def acc_ajax_get_user_quotas(request): """ Ajax view for fetching users' quotas. """ if request.method == 'GET': quota = prep_data('user/user/check_quota/', request.session) quota['memory'] = filesizeformatmb(quota['memory']) quota['used_memory'] = filesizeformatmb(quota['used_memory']) quota['storage'] = filesizeformatmb(quota['storage']) quota['used_storage'] = filesizeformatmb(quota['used_storage']) return messages_ajax.success(quota) @django_view @csrf_protect @user_permission def acc_password_change(request, template_name='account/password_change_form.html', password_change_form=PasswordChangeForm): """ View for password changing (for logged users). """ if request.method == "POST": form = password_change_form(user=request.session['user'], data=request.POST) if form.is_valid(): new_password = form.cleaned_data['new_password1'] try: prep_data(('user/user/set_password/', {'new_password': new_password}), request.session) except RestErrorException as ex: messages.error(request, ex.value) request.session['user'].set_password(new_password) request.session.modified = True return redirect('acc_password_change_done') else: form = password_change_form(user=request.session['user']) return render_to_response(template_name, {'form': form}, context_instance=RequestContext(request)) @django_view @user_permission def hlp_form(request, form_class=HelpForm, template_name='help/form.html'): """ View handling help form. """ if request.method == 'POST': form = form_class(data=request.POST) if form.is_valid(): topic, issue, email = form.cleaned_data['topic'], form.cleaned_data['issue'], form.cleaned_data['email'] name = str(request.session.get('user', form.cleaned_data['firstlast'])) topic += _(' from user:') + name + ', email: ' + email dictionary = {'issue': issue, 'topic': topic} try: prep_data(('user/user/send_issue/', dictionary), request.session) except Exception: return redirect('hlp_issue_error') return redirect('hlp_issue_sent') else: form = form_class() rest_data = prep_data('guest/user/is_mailer_active/', request.session) return render_to_response(template_name, dict({'form': form}.items() + rest_data.items()), context_instance=RequestContext(request))
apache-2.0
6,534,912,669,814,899,000
35.913265
119
0.634969
false
3.942779
false
false
false
mahim97/zulip
zerver/webhooks/github_webhook/tests.py
5
21928
from typing import Dict, Optional, Text import ujson from mock import MagicMock, patch from zerver.lib.test_classes import WebhookTestCase from zerver.lib.webhooks.git import COMMITS_LIMIT from zerver.models import Message class GithubWebhookTest(WebhookTestCase): STREAM_NAME = 'github' URL_TEMPLATE = "/api/v1/external/github?stream={stream}&api_key={api_key}" FIXTURE_DIR_NAME = 'github_webhook' EXPECTED_SUBJECT_REPO_EVENTS = u"public-repo" EXPECTED_SUBJECT_ISSUE_EVENTS = u"public-repo / Issue #2 Spelling error in the README file" EXPECTED_SUBJECT_PR_EVENTS = u"public-repo / PR #1 Update the README with new information" EXPECTED_SUBJECT_DEPLOYMENT_EVENTS = u"public-repo / Deployment on production" EXPECTED_SUBJECT_ORGANIZATION_EVENTS = u"baxterandthehackers organization" EXPECTED_SUBJECT_BRANCH_EVENTS = u"public-repo / changes" EXPECTED_SUBJECT_WIKI_EVENTS = u"public-repo / Wiki Pages" def test_ping_event(self) -> None: expected_message = u"GitHub webhook has been successfully configured by TomaszKolek" self.send_and_test_stream_message('ping', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='ping') def test_ping_organization_event(self) -> None: expected_message = u"GitHub webhook has been successfully configured by eeshangarg" self.send_and_test_stream_message('ping_organization', 'zulip-test-org', expected_message, HTTP_X_GITHUB_EVENT='ping') def test_push_delete_branch(self) -> None: expected_message = u"eeshangarg [deleted](https://github.com/eeshangarg/public-repo/compare/2e8cf535fb38...000000000000) the branch feature." self.send_and_test_stream_message('push_delete_branch', u"public-repo / feature", expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_local_branch_without_commits(self) -> None: expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/feature) the branch feature." self.send_and_test_stream_message('push_local_branch_without_commits', u"public-repo / feature", expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_1_commit(self) -> None: expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))" self.send_and_test_stream_message('push_1_commit', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_1_commit_without_username(self) -> None: expected_message = u"eeshangarg [pushed](https://github.com/eeshangarg/public-repo/compare/0383613da871...2e8cf535fb38) 1 commit to branch changes. 
Commits by John Snow (1).\n\n* Update the README ([2e8cf53](https://github.com/eeshangarg/public-repo/commit/2e8cf535fb38a3dab2476cdf856efda904ad4c94))" self.send_and_test_stream_message('push_1_commit_without_username', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_1_commit_filtered_by_branches(self) -> None: self.url = self.build_webhook_url('master,changes') expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 1 commit to branch changes.\n\n* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))" self.send_and_test_stream_message('push_1_commit', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_multiple_comitters(self) -> None: commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n' expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5) self.send_and_test_stream_message('push_multiple_committers', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_multiple_comitters_with_others(self) -> None: commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n' expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9) self.send_and_test_stream_message('push_multiple_committers_with_others', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_multiple_comitters_filtered_by_branches(self) -> None: self.url = self.build_webhook_url('master,changes') commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n' expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 6 commits to branch changes. Commits by Tomasz (3), Ben (2) and baxterthehacker (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 5) self.send_and_test_stream_message('push_multiple_committers', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_multiple_comitters_with_others_filtered_by_branches(self) -> None: self.url = self.build_webhook_url('master,changes') commits_info = u'* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n' expected_message = u"""baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 10 commits to branch changes. 
Commits by Tomasz (4), Ben (3), James (2) and others (1).\n\n{}* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))""".format(commits_info * 9) self.send_and_test_stream_message('push_multiple_committers_with_others', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_50_commits(self) -> None: commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n" expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format( commit_info * COMMITS_LIMIT ) self.send_and_test_stream_message('push_50_commits', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_push_50_commits_filtered_by_branches(self) -> None: self.url = self.build_webhook_url(branches='master,changes') commit_info = "* Update README.md ([0d1a26e](https://github.com/baxterthehacker/public-repo/commit/0d1a26e67d8f5eaf1f6ba5c57fc3c7d91ac0fd1c))\n" expected_message = u"baxterthehacker [pushed](https://github.com/baxterthehacker/public-repo/compare/9049f1265b7d...0d1a26e67d8f) 50 commits to branch changes.\n\n{}[and 30 more commit(s)]".format( commit_info * COMMITS_LIMIT ) self.send_and_test_stream_message('push_50_commits', self.EXPECTED_SUBJECT_BRANCH_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_commit_comment_msg(self) -> None: expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b#commitcomment-11056394) on [9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b)\n~~~ quote\nThis is a really good change! 
:+1:\n~~~" self.send_and_test_stream_message('commit_comment', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='commit_comment') def test_create_msg(self) -> None: expected_message = u"baxterthehacker created tag 0.0.1" self.send_and_test_stream_message('create', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='create') def test_delete_msg(self) -> None: expected_message = u"baxterthehacker deleted tag simple-tag" self.send_and_test_stream_message('delete', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='delete') def test_deployment_msg(self) -> None: expected_message = u"baxterthehacker created new deployment" self.send_and_test_stream_message('deployment', self.EXPECTED_SUBJECT_DEPLOYMENT_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='deployment') def test_deployment_status_msg(self) -> None: expected_message = u"Deployment changed status to success" self.send_and_test_stream_message('deployment_status', self.EXPECTED_SUBJECT_DEPLOYMENT_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='deployment_status') def test_fork_msg(self) -> None: expected_message = u"baxterandthehackers forked [public-repo](https://github.com/baxterandthehackers/public-repo)" self.send_and_test_stream_message('fork', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='fork') def test_issue_comment_msg(self) -> None: expected_message = u"baxterthehacker [commented](https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140) on [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2)\n\n~~~ quote\nYou are totally right! I'll get this fixed right away.\n~~~" self.send_and_test_stream_message('issue_comment', self.EXPECTED_SUBJECT_ISSUE_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='issue_comment') def test_issue_msg(self) -> None: expected_message = u"baxterthehacker opened [Issue #2](https://github.com/baxterthehacker/public-repo/issues/2)\n\n~~~ quote\nIt looks like you accidently spelled 'commit' with two 't's.\n~~~" self.send_and_test_stream_message('issue', self.EXPECTED_SUBJECT_ISSUE_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='issues') def test_membership_msg(self) -> None: expected_message = u"baxterthehacker added [kdaigle](https://github.com/kdaigle) to Contractors team" self.send_and_test_stream_message('membership', self.EXPECTED_SUBJECT_ORGANIZATION_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='membership') def test_member_msg(self) -> None: expected_message = u"baxterthehacker added [octocat](https://github.com/octocat) to [public-repo](https://github.com/baxterthehacker/public-repo)" self.send_and_test_stream_message('member', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='member') def test_pull_request_opened_msg(self) -> None: expected_message = u"baxterthehacker opened [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`\n\n~~~ quote\nThis is a pretty simple change that we need to pull into master.\n~~~" self.send_and_test_stream_message('opened_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request') def test_pull_request_synchronized_msg(self) -> None: expected_message = u"baxterthehacker updated [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`" self.send_and_test_stream_message('synchronized_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request') def 
test_pull_request_closed_msg(self) -> None: expected_message = u"baxterthehacker closed without merge [PR](https://github.com/baxterthehacker/public-repo/pull/1)" self.send_and_test_stream_message('closed_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request') def test_pull_request_merged_msg(self) -> None: expected_message = u"baxterthehacker merged [PR](https://github.com/baxterthehacker/public-repo/pull/1)" self.send_and_test_stream_message('merged_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request') def test_public_msg(self) -> None: expected_message = u"baxterthehacker made [the repository](https://github.com/baxterthehacker/public-repo) public" self.send_and_test_stream_message('public', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='public') def test_wiki_pages_msg(self) -> None: expected_message = u"jasonrudolph:\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)\n* created [Home](https://github.com/baxterthehacker/public-repo/wiki/Home)" self.send_and_test_stream_message('wiki_pages', self.EXPECTED_SUBJECT_WIKI_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='gollum') def test_watch_msg(self) -> None: expected_message = u"baxterthehacker starred [the repository](https://github.com/baxterthehacker/public-repo)" self.send_and_test_stream_message('watch_repository', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='watch') def test_repository_msg(self) -> None: expected_message = u"baxterthehacker created [the repository](https://github.com/baxterandthehackers/public-repo)" self.send_and_test_stream_message('repository', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='repository') def test_team_add_msg(self) -> None: expected_message = u"[The repository](https://github.com/baxterandthehackers/public-repo) was added to team github" self.send_and_test_stream_message('team_add', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='team_add') def test_release_msg(self) -> None: expected_message = u"baxterthehacker published [the release](https://github.com/baxterthehacker/public-repo/releases/tag/0.0.1)" self.send_and_test_stream_message('release', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='release') def test_page_build_msg(self) -> None: expected_message = u"Github Pages build, trigerred by baxterthehacker, is built" self.send_and_test_stream_message('page_build', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='page_build') def test_status_msg(self) -> None: expected_message = u"[9049f12](https://github.com/baxterthehacker/public-repo/commit/9049f1265b7d61be4a8904a9a27120d2064dab3b) changed its status to success" self.send_and_test_stream_message('status', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='status') def test_pull_request_review_msg(self) -> None: expected_message = u"baxterthehacker submitted [PR Review](https://github.com/baxterthehacker/public-repo/pull/1#pullrequestreview-2626884)" self.send_and_test_stream_message('pull_request_review', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request_review') def test_pull_request_review_comment_msg(self) -> None: expected_message = u"baxterthehacker created [PR Review Comment](https://github.com/baxterthehacker/public-repo/pull/1#discussion_r29724692)\n\n~~~ quote\nMaybe you should use more emojji on this 
line.\n~~~" self.send_and_test_stream_message('pull_request_review_comment', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request_review_comment') def test_push_tag_msg(self) -> None: expected_message = u"baxterthehacker pushed tag abc" self.send_and_test_stream_message('push_tag', self.EXPECTED_SUBJECT_REPO_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='push') def test_pull_request_edited_msg(self) -> None: expected_message = u"baxterthehacker edited [PR](https://github.com/baxterthehacker/public-repo/pull/1)\nfrom `changes` to `master`" self.send_and_test_stream_message('edited_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request') def test_pull_request_assigned_msg(self) -> None: expected_message = u"baxterthehacker assigned [PR](https://github.com/baxterthehacker/public-repo/pull/1) to baxterthehacker" self.send_and_test_stream_message('assigned_pull_request', self.EXPECTED_SUBJECT_PR_EVENTS, expected_message, HTTP_X_GITHUB_EVENT='pull_request') def test_pull_request_unassigned_msg(self) -> None: expected_message = u"eeshangarg unassigned [PR](https://github.com/zulip-test-org/helloworld/pull/1)" self.send_and_test_stream_message( 'unassigned_pull_request', 'helloworld / PR #1 Mention that Zulip rocks!', expected_message, HTTP_X_GITHUB_EVENT='pull_request' ) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_pull_request_labeled_ignore( self, check_send_stream_message_mock: MagicMock) -> None: payload = self.get_body('labeled_pull_request') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_pull_request_unlabeled_ignore( self, check_send_stream_message_mock: MagicMock) -> None: payload = self.get_body('unlabeled_pull_request') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_pull_request_request_review_ignore( self, check_send_stream_message_mock: MagicMock) -> None: payload = self.get_body('request_review_pull_request') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_pull_request_request_review_remove_ignore( self, check_send_stream_message_mock: MagicMock) -> None: payload = self.get_body('request_review_removed_pull_request') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='pull_request', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_push_1_commit_filtered_by_branches_ignore( self, check_send_stream_message_mock: MagicMock) -> None: self.url = self.build_webhook_url(branches='master,development') payload = self.get_body('push_1_commit') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) 
self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_push_50_commits_filtered_by_branches_ignore( self, check_send_stream_message_mock: MagicMock) -> None: self.url = self.build_webhook_url(branches='master,development') payload = self.get_body('push_50_commits') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_push_multiple_comitters_filtered_by_branches_ignore( self, check_send_stream_message_mock: MagicMock) -> None: self.url = self.build_webhook_url(branches='master,development') payload = self.get_body('push_multiple_committers') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result) @patch('zerver.webhooks.github_webhook.view.check_send_stream_message') def test_push_multiple_comitters_with_others_filtered_by_branches_ignore( self, check_send_stream_message_mock: MagicMock) -> None: self.url = self.build_webhook_url(branches='master,development') payload = self.get_body('push_multiple_committers_with_others') result = self.client_post(self.url, payload, HTTP_X_GITHUB_EVENT='push', content_type="application/json") self.assertFalse(check_send_stream_message_mock.called) self.assert_json_success(result)
apache-2.0
8,828,503,967,933,125,000
78.162455
387
0.727107
false
3.066853
true
false
false
atztogo/spglib
python/test/test_collinear_spin.py
1
1335
import unittest import numpy as np from spglib import get_symmetry class TestGetSymmetry(unittest.TestCase): def setUp(self): lattice = [[4, 0, 0], [0, 4, 0], [0, 0, 4]] positions = [[0, 0, 0], [0.5, 0.5, 0.5]] numbers = [1, 1] magmoms = [0, 0] self._cell = (lattice, positions, numbers, magmoms) def tearDown(self): pass def test_get_symmetry_ferro(self): self._cell[3][0] = 1 self._cell[3][1] = 1 sym = get_symmetry(self._cell) self.assertEqual(96, len(sym['rotations'])) np.testing.assert_equal(sym['equivalent_atoms'], [0, 0]) def test_get_symmetry_anti_ferro(self): self._cell[3][0] = 1 self._cell[3][1] = -1 sym = get_symmetry(self._cell) self.assertEqual(96, len(sym['rotations'])) np.testing.assert_equal(sym['equivalent_atoms'], [0, 0]) def test_get_symmetry_broken_magmoms(self): self._cell[3][0] = 1 self._cell[3][1] = 2 sym = get_symmetry(self._cell) self.assertEqual(48, len(sym['rotations'])) np.testing.assert_equal(sym['equivalent_atoms'], [0, 1]) if __name__ == '__main__': suite = unittest.TestLoader().loadTestsFromTestCase(TestGetSymmetry) unittest.TextTestRunner(verbosity=2).run(suite) # unittest.main()
bsd-3-clause
1,407,334,135,078,816,500
30.046512
72
0.580524
false
3.186158
true
false
false
aalien/subtitle2spu
parsesrt.py
1
1661
# Copyright (C) 2008 Antti Laine <antti.a.laine@tut.fi> # # This file is part of subtitle2spu. # # subtitle2spu is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # subtitle2spu is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with subtitle2spu. If not, see <http://www.gnu.org/licenses/>. import sys # States READNUMBER = 1 READTIME = 2 READTEXT = 3 def parse( file, writer ): state = READNUMBER linecount = 0 lines = "" for buf in file: if not buf: continue if state == READNUMBER: number = buf.split()[0] state = READTIME continue if state == READTIME: starttime = buf.split()[0] endtime = buf.split()[2] state = READTEXT continue if state == READTEXT: if buf[0] not in ("\n", "\r"): linecount += 1 lines += buf else: print "Writing subtitle %s" %(number) if not writer.write( number, starttime, endtime, lines ): return False state = READNUMBER linecount = 0 lines = "" return True
mit
7,590,266,957,127,908,000
29.759259
73
0.584588
false
4.081081
false
false
false
soscpd/bee
root/tests/zguide/examples/Python/mdcliapi.py
1
3030
"""Majordomo Protocol Client API, Python version. Implements the MDP/Worker spec at http:#rfc.zeromq.org/spec:7. Author: Min RK <benjaminrk@gmail.com> Based on Java example by Arkadiusz Orzechowski """ import logging import zmq import MDP from zhelpers import dump class MajorDomoClient(object): """Majordomo Protocol Client API, Python version. Implements the MDP/Worker spec at http:#rfc.zeromq.org/spec:7. """ broker = None ctx = None client = None poller = None timeout = 2500 retries = 3 verbose = False def __init__(self, broker, verbose=False): self.broker = broker self.verbose = verbose self.ctx = zmq.Context() self.poller = zmq.Poller() logging.basicConfig(format="%(asctime)s %(message)s", datefmt="%Y-%m-%d %H:%M:%S", level=logging.INFO) self.reconnect_to_broker() def reconnect_to_broker(self): """Connect or reconnect to broker""" if self.client: self.poller.unregister(self.client) self.client.close() self.client = self.ctx.socket(zmq.REQ) self.client.linger = 0 self.client.connect(self.broker) self.poller.register(self.client, zmq.POLLIN) if self.verbose: logging.info("I: connecting to broker at %s...", self.broker) def send(self, service, request): """Send request to broker and get reply by hook or crook. Takes ownership of request message and destroys it when sent. Returns the reply message or None if there was no reply. """ if not isinstance(request, list): request = [request] request = [MDP.C_CLIENT, service] + request if self.verbose: logging.warn("I: send request to '%s' service: ", service) dump(request) reply = None retries = self.retries while retries > 0: self.client.send_multipart(request) try: items = self.poller.poll(self.timeout) except KeyboardInterrupt: break # interrupted if items: msg = self.client.recv_multipart() if self.verbose: logging.info("I: received reply:") dump(msg) # Don't try to handle errors, just assert noisily assert len(msg) >= 3 header = msg.pop(0) assert MDP.C_CLIENT == header reply_service = msg.pop(0) assert service == reply_service reply = msg break else: if retries: logging.warn("W: no reply, reconnecting...") self.reconnect_to_broker() else: logging.warn("W: permanent error, abandoning") break retries -= 1 return reply def destroy(self): self.context.destroy()
mit
8,947,509,223,582,141,000
28.705882
90
0.553465
false
4.297872
false
false
false
scholer/cadnano2.5
cadnano/strand/modscmd.py
2
1922
from cadnano.proxies.cnproxy import UndoCommand from cadnano.cntypes import ( DocT, StrandT ) class AddModsCommand(UndoCommand): def __init__(self, document: DocT, strand: StrandT, idx: int, mod_id: str): super(AddModsCommand, self).__init__() self._strand = strand self._id_num = strand.idNum() self._idx = idx self._mod_id = mod_id self.document = document # end def def redo(self): strand = self._strand mid = self._mod_id part = strand.part() idx = self._idx part.addModStrandInstance(strand, idx, mid) strand.strandModsAddedSignal.emit(strand, self.document, mid, idx) # end def def undo(self): strand = self._strand mid = self._mod_id part = strand.part() idx = self._idx part.removeModStrandInstance(strand, idx, mid) strand.strandModsRemovedSignal.emit(strand, self.document, mid, idx) # end def # end class class RemoveModsCommand(UndoCommand): def __init__(self, document, strand, idx, mod_id): super(RemoveModsCommand, self).__init__() self._strand = strand self._id_num = strand.idNum() self._idx = idx self._mod_id = mod_id self.document = document # end def def redo(self): strand = self._strand strand.isStaple() mid = self._mod_id part = strand.part() idx = self._idx part.removeModStrandInstance(strand, idx, mid) strand.strandModsRemovedSignal.emit(strand, self.document, mid, idx) # end def def undo(self): strand = self._strand strand.isStaple() mid = self._mod_id part = strand.part() idx = self._idx part.addModStrandInstance(strand, idx, mid) strand.strandModsAddedSignal.emit(strand, self.document, mid, idx) # end def # end class
mit
6,883,323,475,434,581,000
28.121212
79
0.596254
false
3.469314
false
false
false
maxlikely/scikit-learn
sklearn/datasets/svmlight_format.py
1
13250
"""This module implements a loader and dumper for the svmlight format This format is a text-based format, with one sample per line. It does not store zero valued features hence is suitable for sparse dataset. The first element of each line can be used to store a target variable to predict. This format is used as the default format for both svmlight and the libsvm command line programs. """ # Authors: Mathieu Blondel <mathieu@mblondel.org> # Lars Buitinck <L.J.Buitinck@uva.nl> # Olivier Grisel <olivier.grisel@ensta.org> # License: Simple BSD. from bz2 import BZ2File from contextlib import closing import gzip import io import os.path import numpy as np import scipy.sparse as sp from ._svmlight_format import _load_svmlight_file from .. import __version__ from ..externals import six from ..utils import atleast2d_or_csr def load_svmlight_file(f, n_features=None, dtype=np.float64, multilabel=False, zero_based="auto", query_id=False): """Load datasets in the svmlight / libsvm format into sparse CSR matrix This format is a text-based format, with one sample per line. It does not store zero valued features hence is suitable for sparse dataset. The first element of each line can be used to store a target variable to predict. This format is used as the default format for both svmlight and the libsvm command line programs. Parsing a text based source can be expensive. When working on repeatedly on the same dataset, it is recommended to wrap this loader with joblib.Memory.cache to store a memmapped backup of the CSR results of the first call and benefit from the near instantaneous loading of memmapped structures for the subsequent calls. This implementation is naive: it does allocate too much memory and is slow since written in python. On large datasets it is recommended to use an optimized loader such as: https://github.com/mblondel/svmlight-loader In case the file contains a pairwise preference constraint (known as "qid" in the svmlight format) these are ignored unless the query_id parameter is set to True. These pairwise preference constraints can be used to contraint the combination of samples when using pairwise loss functions (as is the case in some learning to rank problems) so that only pairs with the same query_id value are considered. Parameters ---------- f: {str, file-like, int} (Path to) a file to load. If a path ends in ".gz" or ".bz2", it will be uncompressed on the fly. If an integer is passed, it is assumed to be a file descriptor. A file-like or file descriptor will not be closed by this function. A file-like object must be opened in binary mode. n_features: int or None The number of features to use. If None, it will be inferred. This argument is useful to load several files that are subsets of a bigger sliced dataset: each subset might not have example of every feature, hence the inferred shape might vary from one slice to another. multilabel: boolean, optional Samples may have several labels each (see http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html) zero_based: boolean or "auto", optional Whether column indices in f are zero-based (True) or one-based (False). If column indices are one-based, they are transformed to zero-based to match Python/NumPy conventions. If set to "auto", a heuristic check is applied to determine this from the file contents. Both kinds of files occur "in the wild", but they are unfortunately not self-identifying. Using "auto" or True should always be safe. 
query_id: boolean, defaults to False If True, will return the query_id array for each file. Returns ------- X: scipy.sparse matrix of shape (n_samples, n_features) y: ndarray of shape (n_samples,), or, in the multilabel a list of tuples of length n_samples. query_id: array of shape (n_samples,) query_id for each sample. Only returned when query_id is set to True. See also -------- load_svmlight_files: similar function for loading multiple files in this format, enforcing the same number of features/columns on all of them. """ return tuple(load_svmlight_files([f], n_features, dtype, multilabel, zero_based, query_id)) def _gen_open(f): if isinstance(f, int): # file descriptor return io.open(f, "rb", closefd=False) elif not isinstance(f, six.string_types): raise TypeError("expected {str, int, file-like}, got %s" % type(f)) _, ext = os.path.splitext(f) if ext == ".gz": return gzip.open(f, "rb") elif ext == ".bz2": return BZ2File(f, "rb") else: return open(f, "rb") def _open_and_load(f, dtype, multilabel, zero_based, query_id): if hasattr(f, "read"): return _load_svmlight_file(f, dtype, multilabel, zero_based, query_id) # XXX remove closing when Python 2.7+/3.1+ required with closing(_gen_open(f)) as f: return _load_svmlight_file(f, dtype, multilabel, zero_based, query_id) def load_svmlight_files(files, n_features=None, dtype=np.float64, multilabel=False, zero_based="auto", query_id=False): """Load dataset from multiple files in SVMlight format This function is equivalent to mapping load_svmlight_file over a list of files, except that the results are concatenated into a single, flat list and the samples vectors are constrained to all have the same number of features. In case the file contains a pairwise preference constraint (known as "qid" in the svmlight format) these are ignored unless the query_id parameter is set to True. These pairwise preference constraints can be used to constraint the combination of samples when using pairwise loss functions (as is the case in some learning to rank problems) so that only pairs with the same query_id value are considered. Parameters ---------- files : iterable over {str, file-like, int} (Paths of) files to load. If a path ends in ".gz" or ".bz2", it will be uncompressed on the fly. If an integer is passed, it is assumed to be a file descriptor. File-likes and file descriptors will not be closed by this function. File-like objects must be opened in binary mode. n_features: int or None The number of features to use. If None, it will be inferred from the maximum column index occurring in any of the files. multilabel: boolean, optional Samples may have several labels each (see http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html) zero_based: boolean or "auto", optional Whether column indices in f are zero-based (True) or one-based (False). If column indices are one-based, they are transformed to zero-based to match Python/NumPy conventions. If set to "auto", a heuristic check is applied to determine this from the file contents. Both kinds of files occur "in the wild", but they are unfortunately not self-identifying. Using "auto" or True should always be safe. query_id: boolean, defaults to False If True, will return the query_id array for each file. Returns ------- [X1, y1, ..., Xn, yn] where each (Xi, yi) pair is the result from load_svmlight_file(files[i]). 
If query_id is set to True, this will return instead [X1, y1, q1, ..., Xn, yn, qn] where (Xi, yi, qi) is the result from load_svmlight_file(files[i]) Rationale --------- When fitting a model to a matrix X_train and evaluating it against a matrix X_test, it is essential that X_train and X_test have the same number of features (X_train.shape[1] == X_test.shape[1]). This may not be the case if you load the files individually with load_svmlight_file. See also -------- load_svmlight_file """ r = [_open_and_load(f, dtype, multilabel, bool(zero_based), bool(query_id)) for f in files] if (zero_based is False or zero_based == "auto" and all(np.min(tmp[1]) > 0 for tmp in r)): for ind in r: indices = ind[1] indices -= 1 if n_features is None: n_features = max(ind[1].max() for ind in r) + 1 result = [] for data, indices, indptr, y, query_values in r: shape = (indptr.shape[0] - 1, n_features) X = sp.csr_matrix((data, indices, indptr), shape) X.sort_indices() result += X, y if query_id: result.append(query_values) return result def _dump_svmlight(X, y, f, one_based, comment, query_id): is_sp = int(hasattr(X, "tocsr")) if X.dtype == np.float64: value_pattern = u"%d:%0.16e" else: value_pattern = u"%d:%f" if y.dtype.kind == 'i': line_pattern = u"%d" else: line_pattern = u"%f" if query_id is not None: line_pattern += u" qid:%d" line_pattern += u" %s\n" if comment: f.write("# Generated by dump_svmlight_file from scikit-learn %s\n" % __version__) f.write("# Column indices are %s-based\n" % ["zero", "one"][one_based]) f.write("#\n") f.writelines("# %s\n" % line for line in comment.splitlines()) for i in range(X.shape[0]): s = " ".join([value_pattern % (j + one_based, X[i, j]) for j in X[i].nonzero()[is_sp]]) if query_id is not None: feat = (y[i], query_id[i], s) else: feat = (y[i], s) f.write((line_pattern % feat).encode('ascii')) def dump_svmlight_file(X, y, f, zero_based=True, comment=None, query_id=None): """Dump the dataset in svmlight / libsvm file format. This format is a text-based format, with one sample per line. It does not store zero valued features hence is suitable for sparse dataset. The first element of each line can be used to store a target variable to predict. Parameters ---------- X : {array-like, sparse matrix}, shape = [n_samples, n_features] Training vectors, where n_samples is the number of samples and n_features is the number of features. y : array-like, shape = [n_samples] Target values. f : string or file-like in binary mode If string, specifies the path that will contain the data. If file-like, data will be written to f. f should be opened in binary mode. zero_based : boolean, optional Whether column indices should be written zero-based (True) or one-based (False). comment : string, optional Comment to insert at the top of the file. This should be either a Unicode string, which will be encoded as UTF-8, or an ASCII byte string. If a comment is given, then it will be preceded by one that identifies the file as having been dumped by scikit-learn. Note that not all tools grok comments in SVMlight files. query_id : array-like, shape = [n_samples] Array containing pairwise preference constraints (qid in svmlight format). """ if comment is not None: # Convert comment string to list of lines in UTF-8. # If a byte string is passed, then check whether it's ASCII; # if a user wants to get fancy, they'll have to decode themselves. # Avoid mention of str and unicode types for Python 3.x compat. 
if isinstance(comment, bytes): comment.decode("ascii") # just for the exception else: comment = comment.encode("utf-8") if "\0" in comment: raise ValueError("comment string contains NUL byte") y = np.asarray(y) if y.ndim != 1: raise ValueError("expected y of shape (n_samples,), got %r" % (y.shape,)) Xval = atleast2d_or_csr(X) if Xval.shape[0] != y.shape[0]: raise ValueError("X.shape[0] and y.shape[0] should be the same, got" " %r and %r instead." % (Xval.shape[0], y.shape[0])) # We had some issues with CSR matrices with unsorted indices (e.g. #1501), # so sort them here, but first make sure we don't modify the user's X. # TODO We can do this cheaper; sorted_indices copies the whole matrix. if Xval is X and hasattr(Xval, "sorted_indices"): X = Xval.sorted_indices() else: X = Xval if hasattr(X, "sort_indices"): X.sort_indices() if query_id is not None: query_id = np.asarray(query_id) if query_id.shape[0] != y.shape[0]: raise ValueError("expected query_id of shape (n_samples,), got %r" % (query_id.shape,)) one_based = not zero_based if hasattr(f, "write"): _dump_svmlight(X, y, f, one_based, comment, query_id) else: with open(f, "wb") as f: _dump_svmlight(X, y, f, one_based, comment, query_id)
bsd-3-clause
8,683,822,834,947,362,000
37.184438
79
0.644604
false
3.855106
false
false
false
jorgenkg/python-neural-network
nimblenet/cost_functions.py
1
1632
import numpy as np import math def sum_squared_error( outputs, targets, derivative=False ): if derivative: return outputs - targets else: return 0.5 * np.mean(np.sum( np.power(outputs - targets,2), axis = 1 )) #end cost function def hellinger_distance( outputs, targets, derivative=False ): """ The output signals should be in the range [0, 1] """ root_difference = np.sqrt( outputs ) - np.sqrt( targets ) if derivative: return root_difference/( np.sqrt(2) * np.sqrt( outputs )) else: return np.mean(np.sum( np.power(root_difference, 2 ), axis=1) / math.sqrt( 2 )) #end cost function def binary_cross_entropy_cost( outputs, targets, derivative=False, epsilon=1e-11 ): """ The output signals should be in the range [0, 1] """ # Prevent overflow outputs = np.clip(outputs, epsilon, 1 - epsilon) divisor = np.maximum(outputs * (1 - outputs), epsilon) if derivative: return (outputs - targets) / divisor else: return np.mean(-np.sum(targets * np.log( outputs ) + (1 - targets) * np.log(1 - outputs), axis=1)) #end cost function cross_entropy_cost = binary_cross_entropy_cost def softmax_categorical_cross_entropy_cost( outputs, targets, derivative=False, epsilon=1e-11 ): """ The output signals should be in the range [0, 1] """ outputs = np.clip(outputs, epsilon, 1 - epsilon) if derivative: return outputs - targets else: return np.mean(-np.sum(targets * np.log( outputs ), axis=1)) #end cost function softmax_neg_loss = softmax_categorical_cross_entropy_cost
bsd-2-clause
-7,239,579,203,085,241,000
30.403846
106
0.645221
false
3.602649
false
false
false
googleapis/googleapis-gen
google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/negative_geo_target_type.py
1
1192
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore __protobuf__ = proto.module( package='google.ads.googleads.v7.enums', marshal='google.ads.googleads.v7', manifest={ 'NegativeGeoTargetTypeEnum', }, ) class NegativeGeoTargetTypeEnum(proto.Message): r"""Container for enum describing possible negative geo target types. """ class NegativeGeoTargetType(proto.Enum): r"""The possible negative geo target types.""" UNSPECIFIED = 0 UNKNOWN = 1 PRESENCE_OR_INTEREST = 4 PRESENCE = 5 __all__ = tuple(sorted(__protobuf__.manifest))
apache-2.0
-6,885,534,629,699,815,000
28.8
74
0.692953
false
4
false
false
false
mvaled/sentry
src/sentry/south_migrations/0326_auto__add_field_groupsnooze_count__add_field_groupsnooze_window__add_f.py
1
116733
# -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'GroupSnooze.count' db.add_column( 'sentry_groupsnooze', 'count', self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True), keep_default=False ) # Adding field 'GroupSnooze.window' db.add_column( 'sentry_groupsnooze', 'window', self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True), keep_default=False ) # Adding field 'GroupSnooze.user_count' db.add_column( 'sentry_groupsnooze', 'user_count', self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True), keep_default=False ) # Adding field 'GroupSnooze.user_window' db.add_column( 'sentry_groupsnooze', 'user_window', self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True), keep_default=False ) # Adding field 'GroupSnooze.state' db.add_column( 'sentry_groupsnooze', 'state', self.gf('sentry.db.models.fields.jsonfield.JSONField')(null=True), keep_default=False ) # Changing field 'GroupSnooze.until' db.alter_column( 'sentry_groupsnooze', 'until', self.gf('django.db.models.fields.DateTimeField')(null=True) ) def backwards(self, orm): raise RuntimeError( "Cannot reverse this migration. 'GroupSnooze.until' and its values cannot be restored." ) models = { 'sentry.activity': { 'Meta': { 'object_name': 'Activity' }, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], { 'null': 'True' }), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True' } ) }, 'sentry.apiapplication': { 'Meta': { 'object_name': 'ApiApplication' }, 'allowed_origins': ('django.db.models.fields.TextField', [], { 'null': 'True', 'blank': 'True' }), 'client_id': ( 'django.db.models.fields.CharField', [], { 'default': "'1fe2246606cd41688e14b95ae1bdc14c6b7652dea035446fa2dc8bcacf21afd6'", 'unique': 'True', 'max_length': '64' } ), 'client_secret': ( 'sentry.db.models.fields.encrypted.EncryptedTextField', [], { 'default': "'7f918820281a421d991389c5fad78a41551739601ae745e8a24e9cb56ee8ffaa'" } ), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'homepage_url': ('django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ( 'django.db.models.fields.CharField', [], { 'default': "'Trusting Weasel'", 'max_length': '64', 'blank': 'True' } ), 'owner': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ), 'privacy_url': ('django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True' }), 'redirect_uris': ('django.db.models.fields.TextField', [], {}), 'status': ( 
'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'terms_url': ('django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True' }) }, 'sentry.apiauthorization': { 'Meta': { 'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization' }, 'application': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ApiApplication']", 'null': 'True' } ), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'scope_list': ( 'sentry.db.models.fields.array.ArrayField', [], { 'of': ('django.db.models.fields.TextField', [], {}) } ), 'scopes': ('django.db.models.fields.BigIntegerField', [], { 'default': 'None' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.apigrant': { 'Meta': { 'object_name': 'ApiGrant' }, 'application': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ApiApplication']" } ), 'code': ( 'django.db.models.fields.CharField', [], { 'default': "'d959d133f88c4292a581081e6190b949'", 'max_length': '64', 'db_index': 'True' } ), 'expires_at': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime(2017, 6, 1, 0, 0)', 'db_index': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'redirect_uri': ('django.db.models.fields.CharField', [], { 'max_length': '255' }), 'scope_list': ( 'sentry.db.models.fields.array.ArrayField', [], { 'of': ('django.db.models.fields.TextField', [], {}) } ), 'scopes': ('django.db.models.fields.BigIntegerField', [], { 'default': 'None' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.apikey': { 'Meta': { 'object_name': 'ApiKey' }, 'allowed_origins': ('django.db.models.fields.TextField', [], { 'null': 'True', 'blank': 'True' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '32' }), 'label': ( 'django.db.models.fields.CharField', [], { 'default': "'Default'", 'max_length': '64', 'blank': 'True' } ), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'key_set'", 'to': "orm['sentry.Organization']" } ), 'scope_list': ( 'sentry.db.models.fields.array.ArrayField', [], { 'of': ('django.db.models.fields.TextField', [], {}) } ), 'scopes': ('django.db.models.fields.BigIntegerField', [], { 'default': 'None' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ) }, 'sentry.apitoken': { 'Meta': { 'object_name': 'ApiToken' }, 'application': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ApiApplication']", 'null': 'True' } ), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'expires_at': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime(2017, 7, 1, 0, 0)', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'refresh_token': ( 'django.db.models.fields.CharField', 
[], { 'default': "'6c4fadd19de34e39ac0859f3f896065cd8c3cd19c56c453287ab9f199c539138'", 'max_length': '64', 'unique': 'True', 'null': 'True' } ), 'scope_list': ( 'sentry.db.models.fields.array.ArrayField', [], { 'of': ('django.db.models.fields.TextField', [], {}) } ), 'scopes': ('django.db.models.fields.BigIntegerField', [], { 'default': 'None' }), 'token': ( 'django.db.models.fields.CharField', [], { 'default': "'94b568466766407cad05e6e2a630f6561a04ecb269c047c381f78c857d84422a'", 'unique': 'True', 'max_length': '64' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.auditlogentry': { 'Meta': { 'object_name': 'AuditLogEntry' }, 'actor': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']" } ), 'actor_key': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True' } ), 'actor_label': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ip_address': ( 'django.db.models.fields.GenericIPAddressField', [], { 'max_length': '39', 'null': 'True' } ), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'target_user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']" } ) }, 'sentry.authenticator': { 'Meta': { 'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'" }, 'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}), 'created_at': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], { 'primary_key': 'True' }), 'last_used_at': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.authidentity': { 'Meta': { 'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity' }, 'auth_provider': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.AuthProvider']" } ), 'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'last_synced': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'last_verified': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'user': ( 
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.authprovider': { 'Meta': { 'object_name': 'AuthProvider' }, 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'default_global_access': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '50' }), 'default_teams': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True' } ), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '0' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_sync': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']", 'unique': 'True' } ), 'provider': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }) }, 'sentry.broadcast': { 'Meta': { 'object_name': 'Broadcast' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'date_expires': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime(2017, 6, 8, 0, 0)', 'null': 'True', 'blank': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True', 'db_index': 'True' }), 'link': ( 'django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True', 'blank': 'True' } ), 'message': ('django.db.models.fields.CharField', [], { 'max_length': '256' }), 'title': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'upstream_id': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True', 'blank': 'True' } ) }, 'sentry.broadcastseen': { 'Meta': { 'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen' }, 'broadcast': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Broadcast']" } ), 'date_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.commit': { 'Meta': { 'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)" }, 'author': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.CommitAuthor']", 'null': 'True' } ), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'message': ('django.db.models.fields.TextField', [], { 'null': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 
'sentry.commitauthor': { 'Meta': { 'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor' }, 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'external_id': ('django.db.models.fields.CharField', [], { 'max_length': '164', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128', 'null': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ) }, 'sentry.commitfilechange': { 'Meta': { 'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange' }, 'commit': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Commit']" } ), 'filename': ('django.db.models.fields.CharField', [], { 'max_length': '255' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'type': ('django.db.models.fields.CharField', [], { 'max_length': '1' }) }, 'sentry.counter': { 'Meta': { 'object_name': 'Counter', 'db_table': "'sentry_projectcounter'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'unique': 'True' } ), 'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.deploy': { 'Meta': { 'object_name': 'Deploy' }, 'date_finished': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'date_started': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True' }), 'environment_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'notified': ( 'django.db.models.fields.NullBooleanField', [], { 'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True' } ), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ), 'url': ( 'django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True', 'blank': 'True' } ) }, 'sentry.distribution': { 'Meta': { 'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ) }, 'sentry.dsymapp': { 'Meta': { 'unique_together': "(('project', 'platform', 'app_id'),)", 'object_name': 'DSymApp' }, 'app_id': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 
'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_synced': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'platform': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'sync_id': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }) }, 'sentry.dsymbundle': { 'Meta': { 'object_name': 'DSymBundle' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.DSymObject']" } ), 'sdk': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.DSymSDK']" } ) }, 'sentry.dsymobject': { 'Meta': { 'object_name': 'DSymObject' }, 'cpu_name': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object_path': ('django.db.models.fields.TextField', [], { 'db_index': 'True' }), 'uuid': ('django.db.models.fields.CharField', [], { 'max_length': '36', 'db_index': 'True' }), 'vmaddr': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'vmsize': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }) }, 'sentry.dsymsdk': { 'Meta': { 'object_name': 'DSymSDK', 'index_together': "[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]" }, 'dsym_type': ('django.db.models.fields.CharField', [], { 'max_length': '20', 'db_index': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'sdk_name': ('django.db.models.fields.CharField', [], { 'max_length': '20' }), 'version_build': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'version_major': ('django.db.models.fields.IntegerField', [], {}), 'version_minor': ('django.db.models.fields.IntegerField', [], {}), 'version_patchlevel': ('django.db.models.fields.IntegerField', [], {}) }, 'sentry.dsymsymbol': { 'Meta': { 'unique_together': "[('object', 'address')]", 'object_name': 'DSymSymbol' }, 'address': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'db_index': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.DSymObject']" } ), 'symbol': ('django.db.models.fields.TextField', [], {}) }, 'sentry.environment': { 'Meta': { 'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))", 'object_name': 'Environment' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'projects': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.Project']", 'through': 
"orm['sentry.EnvironmentProject']", 'symmetrical': 'False' } ) }, 'sentry.environmentproject': { 'Meta': { 'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject' }, 'environment': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Environment']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.event': { 'Meta': { 'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)" }, 'data': ('sentry.db.models.fields.node.NodeField', [], { 'null': 'True', 'blank': 'True' }), 'datetime': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'event_id': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True', 'db_column': "'message_id'" } ), 'group_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'message': ('django.db.models.fields.TextField', [], {}), 'platform': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'null': 'True' }) }, 'sentry.eventmapping': { 'Meta': { 'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventprocessingissue': { 'Meta': { 'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'processing_issue': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ProcessingIssue']" } ), 'raw_event': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.RawEvent']" } ) }, 'sentry.eventtag': { 'Meta': { 'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))" }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventuser': { 'Meta': { 
'unique_together': "(('project', 'ident'), ('project', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))" }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75', 'null': 'True' }), 'hash': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '128', 'null': 'True' }), 'ip_address': ( 'django.db.models.fields.GenericIPAddressField', [], { 'max_length': '39', 'null': 'True' } ), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'username': ('django.db.models.fields.CharField', [], { 'max_length': '128', 'null': 'True' }) }, 'sentry.file': { 'Meta': { 'object_name': 'File' }, 'blob': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']" } ), 'blobs': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False' } ), 'checksum': ('django.db.models.fields.CharField', [], { 'max_length': '40', 'null': 'True' }), 'headers': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'path': ('django.db.models.fields.TextField', [], { 'null': 'True' }), 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'timestamp': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'type': ('django.db.models.fields.CharField', [], { 'max_length': '64' }) }, 'sentry.fileblob': { 'Meta': { 'object_name': 'FileBlob' }, 'checksum': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '40' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'path': ('django.db.models.fields.TextField', [], { 'null': 'True' }), 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'timestamp': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ) }, 'sentry.fileblobindex': { 'Meta': { 'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex' }, 'blob': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.FileBlob']" } ), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.globaldsymfile': { 'Meta': { 'object_name': 'GlobalDSymFile' }, 'cpu_name': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', 
[], { 'primary_key': 'True' }), 'object_name': ('django.db.models.fields.TextField', [], {}), 'uuid': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '36' }) }, 'sentry.group': { 'Meta': { 'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)" }, 'active_at': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'db_index': 'True' }), 'culprit': ( 'django.db.models.fields.CharField', [], { 'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True' } ), 'data': ( 'sentry.db.models.fields.gzippeddict.GzippedDictField', [], { 'null': 'True', 'blank': 'True' } ), 'first_release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT' } ), 'first_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_public': ( 'django.db.models.fields.NullBooleanField', [], { 'default': 'False', 'null': 'True', 'blank': 'True' } ), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'level': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '40', 'db_index': 'True', 'blank': 'True' } ), 'logger': ( 'django.db.models.fields.CharField', [], { 'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True' } ), 'message': ('django.db.models.fields.TextField', [], {}), 'num_comments': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'null': 'True' } ), 'platform': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'resolved_at': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'db_index': 'True' }), 'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'default': '0' }), 'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'default': '0' }), 'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'default': '0' }), 'times_seen': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '1', 'db_index': 'True' } ) }, 'sentry.groupassignee': { 'Meta': { 'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'" }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'assignee_set'", 'to': "orm['sentry.Project']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']" } ) }, 'sentry.groupbookmark': { 'Meta': 
{ 'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']" } ) }, 'sentry.groupcommitresolution': { 'Meta': { 'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution' }, 'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'datetime': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }) }, 'sentry.groupemailthread': { 'Meta': { 'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread' }, 'date': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'msgid': ('django.db.models.fields.CharField', [], { 'max_length': '100' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']" } ) }, 'sentry.grouphash': { 'Meta': { 'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'null': 'True' } ), 'hash': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ) }, 'sentry.groupmeta': { 'Meta': { 'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'value': ('django.db.models.fields.TextField', [], {}) }, 'sentry.groupredirect': { 'Meta': { 'object_name': 'GroupRedirect' }, 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'db_index': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'unique': 'True' }) }, 'sentry.grouprelease': { 'Meta': { 'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease' }, 'environment': 
('django.db.models.fields.CharField', [], { 'default': "''", 'max_length': '64' }), 'first_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'release_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ) }, 'sentry.groupresolution': { 'Meta': { 'object_name': 'GroupResolution' }, 'datetime': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'unique': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.grouprulestatus': { 'Meta': { 'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_active': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'rule': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Rule']" } ), 'status': ('django.db.models.fields.PositiveSmallIntegerField', [], { 'default': '0' }) }, 'sentry.groupseen': { 'Meta': { 'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'db_index': 'False' } ) }, 'sentry.groupsnooze': { 'Meta': { 'object_name': 'GroupSnooze' }, 'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'unique': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'state': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'null': 'True' }), 'until': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'window': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }) }, 'sentry.groupsubscription': { 'Meta': { 'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'subscription_set'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'subscription_set'", 'to': "orm['sentry.Project']" } ), 'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.grouptagkey': { 'Meta': { 'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.grouptagvalue': { 'Meta': { 'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)" }, 'first_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'value': ('django.db.models.fields.CharField', [], { 'max_length': '200' }) }, 'sentry.lostpasswordhash': { 'Meta': { 'object_name': 'LostPasswordHash' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'hash': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'unique': 'True' } ) }, 'sentry.option': { 'Meta': { 'object_name': 'Option' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '64' }), 'last_updated': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'value': 
('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.organization': { 'Meta': { 'object_name': 'Organization' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'default_role': ('django.db.models.fields.CharField', [], { 'default': "'member'", 'max_length': '32' }), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '1' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'members': ( 'django.db.models.fields.related.ManyToManyField', [], { 'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']" } ), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'slug': ('django.db.models.fields.SlugField', [], { 'unique': 'True', 'max_length': '50' }), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.organizationaccessrequest': { 'Meta': { 'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'member': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.OrganizationMember']" } ), 'team': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Team']" } ) }, 'sentry.organizationavatar': { 'Meta': { 'object_name': 'OrganizationAvatar' }, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], { 'default': '0' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ( 'django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '32', 'db_index': 'True' } ), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']" } ) }, 'sentry.organizationmember': { 'Meta': { 'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ( 'django.db.models.fields.EmailField', [], { 'max_length': '75', 'null': 'True', 'blank': 'True' } ), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '0' }), 'has_global_access': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'member_set'", 'to': "orm['sentry.Organization']" } ), 'role': ('django.db.models.fields.CharField', [], { 'default': "'member'", 'max_length': '32' }), 'teams': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True' } ), 'token': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True' } ), 'type': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '50', 'blank': 'True' } ), 'user': ( 
'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']" } ) }, 'sentry.organizationmemberteam': { 'Meta': { 'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'" }, 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'organizationmember': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.OrganizationMember']" } ), 'team': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Team']" } ) }, 'sentry.organizationonboardingtask': { 'Meta': { 'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask' }, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_completed': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True' } ) }, 'sentry.organizationoption': { 'Meta': { 'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.processingissue': { 'Meta': { 'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue' }, 'checksum': ('django.db.models.fields.CharField', [], { 'max_length': '40', 'db_index': 'True' }), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'type': ('django.db.models.fields.CharField', [], { 'max_length': '30' }) }, 'sentry.project': { 'Meta': { 'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'first_event': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '0', 'null': 'True' }), 'forced_color': ( 'django.db.models.fields.CharField', [], { 'max_length': '6', 'null': 'True', 'blank': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 
'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '200' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'public': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'slug': ('django.db.models.fields.SlugField', [], { 'max_length': '50', 'null': 'True' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'team': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Team']" } ) }, 'sentry.projectbookmark': { 'Meta': { 'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.projectdsymfile': { 'Meta': { 'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile' }, 'cpu_name': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object_name': ('django.db.models.fields.TextField', [], {}), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'uuid': ('django.db.models.fields.CharField', [], { 'max_length': '36' }) }, 'sentry.projectkey': { 'Meta': { 'object_name': 'ProjectKey' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'label': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'key_set'", 'to': "orm['sentry.Project']" } ), 'public_key': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'unique': 'True', 'null': 'True' } ), 'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'roles': ('django.db.models.fields.BigIntegerField', [], { 'default': '1' }), 'secret_key': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'unique': 'True', 'null': 'True' } ), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ) }, 'sentry.projectoption': { 'Meta': { 'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 
'sentry.projectplatform': { 'Meta': { 'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'platform': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.rawevent': { 'Meta': { 'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent' }, 'data': ('sentry.db.models.fields.node.NodeField', [], { 'null': 'True', 'blank': 'True' }), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.release': { 'Meta': { 'unique_together': "(('organization', 'version'),)", 'object_name': 'Release' }, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'date_released': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True' }), 'date_started': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'owner': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True' } ), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'projects': ( 'django.db.models.fields.related.ManyToManyField', [], { 'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']" } ), 'ref': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'url': ( 'django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True', 'blank': 'True' } ), 'version': ('django.db.models.fields.CharField', [], { 'max_length': '64' }) }, 'sentry.releasecommit': { 'Meta': { 'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit' }, 'commit': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Commit']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True', 'db_index': 'True' } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ) }, 'sentry.releaseenvironment': { 
'Meta': { 'unique_together': "(('project_id', 'release_id', 'environment_id'), ('organization_id', 'release_id', 'environment_id'))", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'" }, 'environment_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'first_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True', 'db_index': 'True' } ), 'release_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ) }, 'sentry.releasefile': { 'Meta': { 'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile' }, 'dist': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Distribution']", 'null': 'True' } ), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'name': ('django.db.models.fields.TextField', [], {}), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ) }, 'sentry.releaseheadcommit': { 'Meta': { 'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit' }, 'commit': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Commit']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ), 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.releaseproject': { 'Meta': { 'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'new_groups': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'null': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ) }, 'sentry.repository': { 'Meta': { 'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository' }, 'config': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'external_id': 
('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '200' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'provider': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'url': ('django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True' }) }, 'sentry.reprocessingreport': { 'Meta': { 'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport' }, 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.rule': { 'Meta': { 'object_name': 'Rule' }, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'label': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ) }, 'sentry.savedsearch': { 'Meta': { 'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_default': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'owner': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'query': ('django.db.models.fields.TextField', [], {}) }, 'sentry.savedsearchuserdefault': { 'Meta': { 'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'savedsearch': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.SavedSearch']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.scheduleddeletion': { 'Meta': { 'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion' }, 'aborted': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'app_label': 
('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'date_scheduled': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime(2017, 7, 1, 0, 0)' } ), 'guid': ( 'django.db.models.fields.CharField', [], { 'default': "'7dcd5c1ace824812b6cc232360d975f7'", 'unique': 'True', 'max_length': '32' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'in_progress': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'model_name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.tagkey': { 'Meta': { 'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'label': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.tagvalue': { 'Meta': { 'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'" }, 'data': ( 'sentry.db.models.fields.gzippeddict.GzippedDictField', [], { 'null': 'True', 'blank': 'True' } ), 'first_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'value': ('django.db.models.fields.CharField', [], { 'max_length': '200' }) }, 'sentry.team': { 'Meta': { 'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'slug': ('django.db.models.fields.SlugField', [], { 'max_length': '50' }), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.user': { 'Meta': { 'object_name': 'User', 'db_table': "'auth_user'" }, 'date_joined': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75', 'blank': 'True' }), 'id': 
('sentry.db.models.fields.bounded.BoundedAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'is_managed': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'is_password_expired': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'is_staff': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'is_superuser': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'last_login': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'last_password_change': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'name': ( 'django.db.models.fields.CharField', [], { 'max_length': '200', 'db_column': "'first_name'", 'blank': 'True' } ), 'password': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'session_nonce': ('django.db.models.fields.CharField', [], { 'max_length': '12', 'null': 'True' }), 'username': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '128' }) }, 'sentry.useravatar': { 'Meta': { 'object_name': 'UserAvatar' }, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], { 'default': '0' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ( 'django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '32', 'db_index': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']" } ) }, 'sentry.useremail': { 'Meta': { 'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail' }, 'date_hash_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_verified': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'emails'", 'to': "orm['sentry.User']" } ), 'validation_hash': ( 'django.db.models.fields.CharField', [], { 'default': "u'UgLIAnDusbhZ8E66pCx3Af5EoUtzEmSA'", 'max_length': '32' } ) }, 'sentry.useroption': { 'Meta': { 'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']", 'null': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ), 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}) }, 'sentry.userreport': { 'Meta': { 'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))" }, 'comments': ('django.db.models.fields.TextField', [], 
{}), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.versiondsymfile': { 'Meta': { 'unique_together': "(('dsym_file', 'version', 'build'),)", 'object_name': 'VersionDSymFile' }, 'build': ('django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'dsym_app': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.DSymApp']" } ), 'dsym_file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ProjectDSymFile']", 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'version': ('django.db.models.fields.CharField', [], { 'max_length': '32' }) } } complete_apps = ['sentry']
bsd-3-clause
-7,866,482,563,248,466,000
35.812677
120
0.398756
false
4.709444
false
false
false
tazo90/lux
setup.py
1
1994
import os
import json

from setuptools import setup, find_packages

package_name = 'lux'


def read(name):
    root_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(root_dir, name), 'r') as f:
        return f.read()


def run():
    install_requires = []
    dependency_links = []
    pkg = json.loads(read('package.json'))
    for line in read('requirements.txt').split('\n'):
        if line.startswith('-e '):
            link = line[3:].strip()
            if link == '.':
                continue
            dependency_links.append(link)
            line = link.split('=')[1]
        line = line.strip()
        if line:
            install_requires.append(line)
    packages = find_packages(exclude=['tests', 'tests.*'])

    setup(name=package_name,
          version=pkg['version'],
          author=pkg['author']['name'],
          author_email=pkg['author']['email'],
          url=pkg['homepage'],
          license=pkg['licenses'][0]['type'],
          description=pkg['description'],
          long_description=read('README.rst'),
          packages=packages,
          include_package_data=True,
          zip_safe=False,
          install_requires=install_requires,
          dependency_links=dependency_links,
          scripts=['bin/luxmake.py'],
          classifiers=['Development Status :: 2 - Pre-Alpha',
                       'Environment :: Web Environment',
                       'Intended Audience :: Developers',
                       'License :: OSI Approved :: BSD License',
                       'Operating System :: OS Independent',
                       'Programming Language :: JavaScript',
                       'Programming Language :: Python',
                       'Programming Language :: Python :: 2.7',
                       'Programming Language :: Python :: 3.3',
                       'Programming Language :: Python :: 3.4',
                       'Topic :: Utilities'])


if __name__ == '__main__':
    run()
bsd-3-clause
-8,769,033,512,469,859,000
31.16129
64
0.516048
false
4.573394
false
false
false
Robbie1977/TGscripts
plJHwarpToTemplate.py
1
8574
import os, re, shutil, subprocess, datetime, socket ba = '/groups/sciserv/flyolympiad/vnc_align/toolkit/JBA/brainaligner' cmtkdir = '/usr/local/cmtk/bin/' fiji = '/usr/local/Fiji/ImageJ-linux64' Rawconv = '~/script/raw2nrrdCrop.ijm' Nrrdconv = '~/script/nrrd2rawUncrop.ijm' Tfile = '~/template/flyVNCtemplate20xDaC.nrrd' TfileR = '~/template/flyVNCtemplate20xDa.raw' TfileM = '~/template/flyVNCtemplate20xDa.marker' Qual = '~/script/Quality.py' outdir = os.getcwd() + '/' fo = open("PLwarp.txt",'r') filelist = fo.readlines() fo.close() hostn = socket.gethostname() runid = os.getpid() procid = '[' + hostn + ';' + str(runid) + ']' for fname in filelist: fo = open("stop.txt",'r') stoplist = fo.readlines() if (hostn + '\n') in stoplist: print 'Stop requested!' else: fname = fname.replace('\n','').replace('/disk/data/VFB/IMAGE_DATA/Janelia2012/TG/logs/',outdir) try: if os.path.exists(fname): os.rename(fname,fname.replace('.lsm','~.lsm').replace('.raw','~.raw')) basename = fname.replace(outdir,'').replace('.lsm','').replace('20130404_s/','').replace('.raw','').replace('Rigid/','').replace('/groups/sciserv/flyolympiad/vnc_align/20130404_lsms/','') with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking myfile.write(basename + ', Started JH warp, ' + procid + ', ' + str(datetime.datetime.now()) + '\n') FloatFile = fname.replace('.lsm','~.lsm').replace('.raw','~.raw') GxDF = outdir + basename + '-global.raw' Goutput = basename + '-rigid.raw' Axform = outdir + basename + '-rigid-affine.xform' Foutput = Goutput.replace('-rigid.raw', '-rigid_C2.nrrd') SigFile = Goutput.replace('-rigid.raw', '-rigid_C1.nrrd') W5xform = outdir + basename + '-rigid-fastwarp.xform' W5output = outdir + basename + '-rigid-BGwarp.nrrd' Wsigout = outdir + basename + '-rigid-SGwarp.nrrd' Routput = basename + '-rigid-warp.raw' Loutput = basename + '-rigid-warp-local' print 'Warping file %s...' % fname #check for complete skip if os.path.exists(W5xform): print 'Warp5 output already exists - skipping.' else: #Generate the Initial Transform if os.path.exists(Goutput): print 'Global alignment already exists - skipping.' else: return_code = subprocess.call('nice ' + ba + ' -t %s -s %s -o %s -F %s -w 0 -C 0 -c 1 -B 1024 -Y' % (TfileR, FloatFile, Goutput, GxDF), shell=True) print 'Brain Aligner Global alignment returned: %d' % return_code #Convert raw to nrrd return_code = subprocess.call('nice xvfb-run ' + fiji + ' -macro %s %s' % (Rawconv, Goutput), shell=True) print 'Fiji/ImageJ conversion returned: %d' % return_code #Generate the Affine Transform if os.path.exists(Axform): print 'Affine xform already exists - skipping.' else: FloatFile = Foutput return_code = subprocess.call('nice ' + cmtkdir + 'registration --dofs 6,9 --auto-multi-levels 4 --match-histograms -o %s %s %s' % (Axform + '_part', Tfile, FloatFile), shell=True) os.rename(Axform + '_part', Axform) print 'registration returned: %d' % return_code #Generate the Warped Transform if os.path.exists(W5xform): print 'Warp5 xform already exists - skipping.' else: return_code = subprocess.call('nice ' + cmtkdir + 'warp -o %s --grid-spacing 80 --exploration 30 --coarsest 4 --match-histograms --accuracy 0.2 --refine 4 --energy-weight 1e-1 --initial %s %s %s' % (W5xform + '_part', Axform, Tfile, FloatFile), shell=True) #coarsest adjusted from 8 to 4 as per greg sug. 
os.rename(W5xform + '_part', W5xform) print 'warp (5) returned: %d' % return_code #Output a file to show the Warped Transform if os.path.exists(W5output): print 'Warp5 output already exists - skipping.' else: return_code = subprocess.call('nice ' + cmtkdir + 'reformatx -o %s --floating %s %s %s' % (W5output, FloatFile, Tfile, W5xform), shell=True) print 'reformatx returned: %d' % return_code print 'Completed background warpimg for %s.' % basename if os.path.exists(Wsigout): print 'Signal warp output already exists - skipping.' else: return_code = subprocess.call('nice ' + cmtkdir + 'reformatx -o %s --floating %s %s %s' % (Wsigout, SigFile, Tfile, W5xform), shell=True) print 'reformatx returned: %d' % return_code print 'Completed signal warpimg for %s.' % basename if os.path.exists(Routput): print 'RAW warp output already exists - skipping.' else: return_code = subprocess.call('nice xvfb-run ' + fiji + ' -macro %s %s' % (Nrrdconv, Routput), shell=True) print 'Fiji returned: %d' % return_code print 'Completed generating RAW warp for %s.' % basename # if os.path.exists(Loutput + '.raw'): # print 'Brianaligner local output already exists - skipping.' # else: # return_code = subprocess.call('nice ' + ba + ' -t %s -s %s -L %s -o %s -w 10 -C 0 -c 0 -H 2 -B 1024' % (TfileR, Routput, TfileM, Loutput + '.raw'), shell=True) # # print 'Brainaligner returned: %d' % return_code # print 'Completed generating RAW warp for %s.' % basename if os.path.exists(Routput + '_qual.csv'): print 'Quality measure already exists - skipping.' else: return_code = subprocess.call('nice python %s %s %s %s_qual.csv' % (Qual, W5output, Tfile, Routput), shell=True) print 'Qual returned: %d' % return_code print 'Completed generating Qual measure for %s.' % basename if os.path.exists(W5output): #os.remove(fname.replace('_blue','')) #shutil.move(fname.replace('_blue',''),fname.replace('logs/','logs/nrrds/')) #os.remove(Goutput) #os.remove(Ioutput) Add if used #shutil.rmtree(Axform, ignore_errors=True) #os.remove(Aoutput) #os.remove(W5xform) #Needed for Signal Channel Warp with open("PLdone.txt", "a") as myfile: myfile.write(Routput + '\n') #os.remove(W5output) #Needed for checking only print 'Clean-up for %s done.' % basename with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking myfile.write(basename + ', Finished JH warp, ' + procid + ', ' + str(datetime.datetime.now()) + '\n') else: print 'Failed warpimg for %s.' % basename os.rename(fname.replace('_blue',''),fname.replace('_blue','_blue_error')) with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking myfile.write(basename + ', Failed JH warp, ' + procid + ', ' + str(datetime.datetime.now()) + '\n') except OSError as e: print 'Skiping file' with open("PLwarp.log", "a") as myfile: # Log entry for process time and error checking myfile.write(basename + ', Error during JH warp: ' + e.strerror + ', ' + procid + ', ' + str(datetime.datetime.now()) + '\n') print 'All Done.'
mit
-6,054,312,207,964,567,000
52.265823
328
0.515512
false
3.831099
false
false
false
tartopum/Lactum
setup.py
1
1422
import os
import sys

from setuptools import setup
from setuptools.command.test import test as TestCommand

import lactum


class PyTest(TestCommand):
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        import pytest
        errcode = pytest.main(self.test_args)
        sys.exit(errcode)


with open("README.md", "r") as f:
    readme = f.read()


def reqs(*f):
    def strip_comments(l):
        return l.split("#", 1)[0].strip()
    return list(filter(None, [strip_comments(l) for l in
                              open(os.path.join(os.getcwd(), *f)).readlines()]))


requirements = reqs("requirements.txt")
test_requirements = reqs("requirements-dev.txt")
test_requirements = requirements + test_requirements[1:]

setup(
    name="lactum",
    description="",
    long_description=readme,
    author="Vayel",
    author_email="vincent.lefoulon@free.fr",
    url="https://github.com/tartopum/Lactum",
    packages=["lactum"],
    package_dir={"lactum": "lactum"},
    include_package_data=True,
    install_requires=requirements,
    license="MIT",
    zip_safe=False,
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3.5"
    ],
    cmdclass={"test": PyTest},
    tests_require=test_requirements
)
mit
268,804,563,701,314,200
24.392857
107
0.631505
false
3.618321
true
false
false
funkring/fdoo
addons-funkring/at_sale_layout_ext/sale.py
1
1573
# -*- coding: utf-8 -*-
#############################################################################
#
# Copyright (c) 2007 Martin Reisenhofer <martin.reisenhofer@funkring.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv


class SaleLayoutCategory(osv.Model):
    _inherit = "sale_layout.category"

    _columns = {
        "order_id" : fields.many2one("sale.order", "Order", ondelete="cascade")
    }


class sale_order(osv.Model):
    _inherit = "sale.order"

    _columns = {
        "layout_categ_ids" : fields.one2many("sale_layout.category", "order_id", "Layout Categories")
    }


class sale_order_line(osv.Model):
    _inherit = "sale.order.line"

    _columns = {
        "prod_categ_id" : fields.related("product_id", "categ_id", string="Category", type="many2one", relation="product.category", readonly=True)
    }
agpl-3.0
-4,435,612,682,991,795,700
37.365854
146
0.613477
false
3.893564
false
false
false
kmpf/uap
tools/segemehl_2017_reformatCigar.py
1
4801
#!/bin/bash "exec" "`dirname $0`/../python_env/bin/python" "$0" "$@" #"exec" "python" "$0" "$@" # ^^^ # the cmd above ensures that the correct python environment is # selected to execute this script. # The correct environment is the one belonging to uap, since all # neccessary python modules are installed there. # filename: segemehl_2017_reformatCigar.py # author: Jana Hertel # date: 2017/06/07 # version: 1.0 # description: Reformat the cigar string such that htseq-count is able to process # the according SAM files. Consecutive values for 'ix', 'j=' and 'kM' # are summed up and replaced by nM with n being the sum of i, j and k. import argparse import sys import re from multiprocessing import Pool import itertools parser = argparse.ArgumentParser( description='Python script to process a large file ' 'using multi-processing.') parser.add_argument('--version', action='version', version='%(prog)s 1.0') parser.add_argument( '--in-file', dest='my_file_in', required=True, type=argparse.FileType('r'), help='A large file whose lines are independent from each other and ' 'can be processed separately.') parser.add_argument('--threads', dest='my_cores', default=1, type=int, help='Number of CPUs 2B used. Default: 1') parser.add_argument( '--blocksize', dest='my_bufsize', default=2, type=int, help='Size of buffer to read the input file (in MB). Default: 2') args = parser.parse_args() ########################################################################## # my_range(start, end, step) # # This function creates a range with a user defined step to walk through. # returns: the respective new start values def my_range(start, end, step): while start <= end: yield start start += step ########################################################################## ########################################################################## # process_line(line) # # function that does something with the line: # in this case: # - split the line into columns by tab # - returns the columns separated by tab def process_line(lines): newlines = list() c = 0 for line in lines: c += 1 columns = line.strip().split('\t') # don't process header lines if(columns[0][:1] == "@"): newlines.append(line.strip()) continue cigar = columns[5] x = re.split(r'(\D)', cigar) # split cigar string and sum up consecutive values # for '=' and 'X' (match and mismatch) # leave values as they are for 'I','D' and 'N' (del, insertion, split) M = 0 cigar_new = '' for j in range(1, len(x) - 1, 2): # match or mismatch if x[j] == '=' or x[j] == 'X' or x[j] == 'M': M = M + int(x[j - 1]) else: # del or ins if M > 0: # print the previous match/mismatch cigar_new += str(M) + "M" M = 0 # anything else but '=', 'X' or 'M' cigar_new += x[j - 1] + x[j] if M > 0: cigar_new += str(M) + "M" if cigar_new == "0M*": cigar_new = "*" # print the sam line with the new cigar string to stdout new_line = "" for k in range(0, 5): new_line += "%s\t" % columns[k] new_line += "%s\t" % cigar_new for k in range(6, len(columns)): new_line += "%s" % columns[k] if(not k == len(columns)): new_line += "\t" newlines.append(new_line) return newlines # END: process_line(line) ########################################################################## if __name__ == '__main__': # create my_cores -1 pools, 1 control + the remaining for processing the # lines p = Pool(args.my_cores) a = list() eof_reached = False # bufsize needs to be provided in bytes # argument provided megabytes bufsize = args.my_bufsize * 1000000 while not eof_reached: for i in range(args.my_cores - 1): linelist = 
args.my_file_in.readlines(bufsize) if len(linelist) == 0: eof_reached = True else: a.append(linelist) # ~ 2MB chunks l = p.map(process_line, a) for j in l: print('\n'.join(j)) a[:] = [] # delete processed lines from the list # this works in principle.. too much i/o # for line in p.imap(process_line, args.my_file_in): # print line, # the coma prevents printing an additional new line # idea for mp: # read file in chunks of the size 1/args.my_cores # --> each chunk in one process
gpl-3.0
6,380,944,447,513,988,000
27.076023
82
0.531764
false
3.762539
false
false
false
elewis33/doorstop
doorstop/server/utilities.py
1
1176
"""Shared functions for the `doorstop.server` package.""" from doorstop import common from doorstop import settings log = common.logger(__name__) class StripPathMiddleware(object): # pylint: disable=R0903 """WSGI middleware that strips trailing slashes from all URLs.""" def __init__(self, app): self.app = app def __call__(self, e, h): # pragma: no cover (integration test) e['PATH_INFO'] = e['PATH_INFO'].rstrip('/') return self.app(e, h) def build_url(host=None, port=None, path=None): """Build the server's URL with optional path.""" host = host or settings.SERVER_HOST port = port or settings.SERVER_PORT log.debug("building URL: {} + {} + {}".format(host, port, path)) if not host: return None url = 'http://{}'.format(host) if port != 80: url += ':{}'.format(port) if path: url += path return url def json_response(request): # pragma: no cover (integration test) """Determine if the request's response should be JSON.""" if request.query.get('format') == 'json': return True else: return request.content_type == 'application/json'
lgpl-3.0
-7,028,435,530,194,350,000
27.682927
69
0.617347
false
3.78135
false
false
false
mtholder/taxalotl
taxalotl/parsing/col.py
1
5000
from __future__ import print_function import io import logging from peyutil import shorter_fp_form from taxalotl.resource_wrapper import TaxonomyWrapper from taxalotl.parsing.darwin_core import normalize_darwin_core_taxonomy _LOG = logging.getLogger(__name__) COL_PARTMAP = { 'Archaea': frozenset([52435722]), 'Bacteria': frozenset([52433432]), 'Eukaryota': frozenset([52433499, 52435027, 52433974, 52433370]), 'Archaeplastida': frozenset([52433499]), 'Fungi': frozenset([52433393]), 'Metazoa': frozenset([52433370]), 'Viruses': frozenset([52433426]), 'Glaucophyta': frozenset([52444130]), 'Rhodophyta': frozenset([52444134]), 'Chloroplastida': frozenset([52442327, 52442210, 52442148, 52434330, 52434201, 52433500, ]), 'Annelida': frozenset([52433489]), 'Arthropoda': frozenset([52433375]), 'Malacostraca': frozenset([52433389]), 'Arachnida': frozenset([52433402]), 'Insecta': frozenset([52433376]), 'Diptera': frozenset([52433521]), 'Coleoptera': frozenset([52433486]), 'Lepidoptera': frozenset([52433663]), 'Hymenoptera': frozenset([52433621]), 'Bryozoa': frozenset([52442814]), 'Chordata': frozenset([52433371]), 'Cnidaria': frozenset([52433398]), 'Ctenophora': frozenset([52443092]), 'Mollusca': frozenset([52440786]), 'Nematoda': frozenset([52436787]), 'Platyhelminthes': frozenset([52443117]), 'Porifera': frozenset([52442836]), } # noinspection PyUnreachableCode def partition_col_by_root_id(tax_part): # type (TaxonPartition) -> None """Reads the serialized taxonomy of the parent, adds the easy lines to their partition element, and returns dicts needed to finish the assignments. Signature for partition functions. Takes: 1. abs path of taxonomy file for parent taxon 2. list of PartitionElements whose roots are sets that specify IDs that are the roots of the subtrees that are to go in each partition elemen. Returns a tuple: 0. par_id ->[child_id] dict, 1. id -> partition_element dict for already assigned IDs, 2. id -> line dict - may only have unassigned IDs in it, 3. synonym id -> [(accepted_id, line), ] for any synonyms 4. roots_set - a frozen set of the union of the partition element roots 5. the rootless partition element ("garbage_bin" for all unassigned IDs) 6. header for taxon file 7. header for synonyms file (or None) """ assert False complete_taxon_fp = tax_part.tax_fp syn_fp = tax_part.input_synonyms_filepath assert not syn_fp syn_by_id = tax_part._syn_by_id ptp = shorter_fp_form(complete_taxon_fp) with io.open(complete_taxon_fp, 'rU', encoding='utf-8') as inp: iinp = iter(inp) tax_part.taxon_header = next(iinp) prev_line = None # vt = unicode('\x0b') # Do some lines have vertical tabs? Of course they do.... 
# istwo = unicode('\x1e') for n, line in enumerate(iinp): if not line.endswith('\n'): if prev_line: prev_line = prev_line + line[:-1] else: prev_line = line[:-1] continue elif prev_line: line = prev_line + line prev_line = '' ls = line.split('\t') if n % 1000 == 0: _LOG.info(' read taxon {} from {}'.format(n, ptp)) try: col_id, accept_id, par_id = ls[0], ls[4], ls[5] col_id = int(col_id) if accept_id: try: accept_id = int(accept_id) except: if n == 0: continue syn_by_id.setdefault(accept_id, []).append((col_id, line)) else: tax_part.read_taxon_line(col_id, par_id, line) except Exception: _LOG.exception("Exception parsing line {}:\n{}".format(1 + n, line)) raise # noinspection PyAbstractClass class CoLTaxonomyWrapper(TaxonomyWrapper): taxon_filename = 'taxonomy.tsv' # synonyms_filename = None # partition_parsing_fn = staticmethod(partition_col_by_root_id) schema = {"http://rs.tdwg.org/dwc/"} def __init__(self, obj, parent=None, refs=None): TaxonomyWrapper.__init__(self, obj, parent=parent, refs=refs) @property def partition_source_dir(self): return self.normalized_filedir def get_primary_partition_map(self): return COL_PARTMAP def normalize(self): normalize_darwin_core_taxonomy(self.unpacked_filepath, self.normalized_filedir, self) def _post_process_tree(self, tree): self.collapse_incertae_sedis_by_name_prefix(tree, 'not assigned') def post_process_interim_tax_data(self, interim_tax_data): self.collapse_as_incertae_sedis_interim_tax_data(interim_tax_data, 'not assigned')
bsd-2-clause
700,893,670,152,985,700
36.313433
99
0.61
false
3.531073
false
false
false
roglew/pappy-proxy
pappyproxy/interface/decode.py
1
10668
import html import base64 import datetime import gzip import shlex import string import urllib from ..util import hexdump, printable_data, copy_to_clipboard, clipboard_contents, encode_basic_auth, parse_basic_auth from ..console import CommandError from io import StringIO def print_maybe_bin(s): binary = False for c in s: if chr(c) not in string.printable: binary = True break if binary: print(hexdump(s)) else: print(s.decode()) def asciihex_encode_helper(s): return ''.join('{0:x}'.format(c) for c in s).encode() def asciihex_decode_helper(s): ret = [] try: for a, b in zip(s[0::2], s[1::2]): c = chr(a)+chr(b) ret.append(chr(int(c, 16))) return ''.join(ret).encode() except Exception as e: raise CommandError(e) def gzip_encode_helper(s): out = StringIO.StringIO() with gzip.GzipFile(fileobj=out, mode="w") as f: f.write(s) return out.getvalue() def gzip_decode_helper(s): dec_data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(s)) dec_data = dec_data.read() return dec_data def base64_decode_helper(s): try: return base64.b64decode(s) except TypeError: for i in range(1, 5): try: s_padded = base64.b64decode(s + '='*i) return s_padded except: pass raise CommandError("Unable to base64 decode string") def url_decode_helper(s): bs = s.decode() return urllib.parse.unquote(bs).encode() def url_encode_helper(s): bs = s.decode() return urllib.parse.quote_plus(bs).encode() def html_encode_helper(s): return ''.join(['&#x{0:x};'.format(c) for c in s]).encode() def html_decode_helper(s): return html.unescape(s.decode()).encode() def _code_helper(args, func, copy=True): if len(args) == 0: s = clipboard_contents().encode() print('Will decode:') print(printable_data(s)) s = func(s) if copy: try: copy_to_clipboard(s) except Exception as e: print('Result cannot be copied to the clipboard. Result not copied.') raise e return s else: s = func(args[0].encode()) if copy: try: copy_to_clipboard(s) except Exception as e: print('Result cannot be copied to the clipboard. Result not copied.') raise e return s def base64_decode(client, args): """ Base64 decode a string. If no string is given, will decode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, base64_decode_helper)) def base64_encode(client, args): """ Base64 encode a string. If no string is given, will encode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, base64.b64encode)) def url_decode(client, args): """ URL decode a string. If no string is given, will decode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, url_decode_helper)) def url_encode(client, args): """ URL encode special characters in a string. If no string is given, will encode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, url_encode_helper)) def asciihex_decode(client, args): """ Decode an ascii hex string. If no string is given, will decode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, asciihex_decode_helper)) def asciihex_encode(client, args): """ Convert all the characters in a line to hex and combine them. If no string is given, will encode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, asciihex_encode_helper)) def html_decode(client, args): """ Decode an html encoded string. If no string is given, will decode the contents of the clipboard. 
Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, html_decode_helper)) def html_encode(client, args): """ Encode a string and escape html control characters. If no string is given, will encode the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, html_encode_helper)) def gzip_decode(client, args): """ Un-gzip a string. If no string is given, will decompress the contents of the clipboard. Results are copied to the clipboard. """ print_maybe_bin(_code_helper(args, gzip_decode_helper)) def gzip_encode(client, args): """ Gzip a string. If no string is given, will decompress the contents of the clipboard. Results are NOT copied to the clipboard. """ print_maybe_bin(_code_helper(args, gzip_encode_helper, copy=False)) def base64_decode_raw(client, args): """ Same as base64_decode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, base64_decode_helper, copy=False)) def base64_encode_raw(client, args): """ Same as base64_encode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, base64.b64encode, copy=False)) def url_decode_raw(client, args): """ Same as url_decode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, url_decode_helper, copy=False)) def url_encode_raw(client, args): """ Same as url_encode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, url_encode_helper, copy=False)) def asciihex_decode_raw(client, args): """ Same as asciihex_decode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, asciihex_decode_helper, copy=False)) def asciihex_encode_raw(client, args): """ Same as asciihex_encode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, asciihex_encode_helper, copy=False)) def html_decode_raw(client, args): """ Same as html_decode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, html_decode_helper, copy=False)) def html_encode_raw(client, args): """ Same as html_encode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, html_encode_helper, copy=False)) def gzip_decode_raw(client, args): """ Same as gzip_decode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. """ print(_code_helper(args, gzip_decode_helper, copy=False)) def gzip_encode_raw(client, args): """ Same as gzip_encode but the output will never be printed as a hex dump and results will not be copied. It is suggested you redirect the output to a file. 
""" print(_code_helper(args, gzip_encode_helper, copy=False)) def unix_time_decode_helper(line): unix_time = int(line.strip()) dtime = datetime.datetime.fromtimestamp(unix_time) return dtime.strftime('%Y-%m-%d %H:%M:%S') def unix_time_decode(client, args): print(_code_helper(args, unix_time_decode_helper)) def http_auth_encode(client, args): if len(args) != 2: raise CommandError('Usage: http_auth_encode <username> <password>') username, password = args print(encode_basic_auth(username, password)) def http_auth_decode(client, args): username, password = decode_basic_auth(args[0]) print(username) print(password) def load_cmds(cmd): cmd.set_cmds({ 'base64_decode': (base64_decode, None), 'base64_encode': (base64_encode, None), 'asciihex_decode': (asciihex_decode, None), 'asciihex_encode': (asciihex_encode, None), 'url_decode': (url_decode, None), 'url_encode': (url_encode, None), 'html_decode': (html_decode, None), 'html_encode': (html_encode, None), 'gzip_decode': (gzip_decode, None), 'gzip_encode': (gzip_encode, None), 'base64_decode_raw': (base64_decode_raw, None), 'base64_encode_raw': (base64_encode_raw, None), 'asciihex_decode_raw': (asciihex_decode_raw, None), 'asciihex_encode_raw': (asciihex_encode_raw, None), 'url_decode_raw': (url_decode_raw, None), 'url_encode_raw': (url_encode_raw, None), 'html_decode_raw': (html_decode_raw, None), 'html_encode_raw': (html_encode_raw, None), 'gzip_decode_raw': (gzip_decode_raw, None), 'gzip_encode_raw': (gzip_encode_raw, None), 'unixtime_decode': (unix_time_decode, None), 'httpauth_encode': (http_auth_encode, None), 'httpauth_decode': (http_auth_decode, None) }) cmd.add_aliases([ ('base64_decode', 'b64d'), ('base64_encode', 'b64e'), ('asciihex_decode', 'ahd'), ('asciihex_encode', 'ahe'), ('url_decode', 'urld'), ('url_encode', 'urle'), ('html_decode', 'htmld'), ('html_encode', 'htmle'), ('gzip_decode', 'gzd'), ('gzip_encode', 'gze'), ('base64_decode_raw', 'b64dr'), ('base64_encode_raw', 'b64er'), ('asciihex_decode_raw', 'ahdr'), ('asciihex_encode_raw', 'aher'), ('url_decode_raw', 'urldr'), ('url_encode_raw', 'urler'), ('html_decode_raw', 'htmldr'), ('html_encode_raw', 'htmler'), ('gzip_decode_raw', 'gzdr'), ('gzip_encode_raw', 'gzer'), ('unixtime_decode', 'uxtd'), ('httpauth_encode', 'hae'), ('httpauth_decode', 'had'), ])
mit
-4,319,996,795,967,323,600
31.723926
118
0.624953
false
3.587088
false
false
false
pycontw/pycontw2016
src/proposals/migrations/0038_add_new_conference.py
1
1404
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2019-07-10 07:36
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('proposals', '0037_auto_20180305_1339'),
    ]

    operations = [
        migrations.AlterField(
            model_name='additionalspeaker',
            name='conference',
            field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018'), ('pycontw-2019', 'PyCon Taiwan 2019')], default='pycontw-2019', verbose_name='conference'),
        ),
        migrations.AlterField(
            model_name='talkproposal',
            name='conference',
            field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018'), ('pycontw-2019', 'PyCon Taiwan 2019')], default='pycontw-2019', verbose_name='conference'),
        ),
        migrations.AlterField(
            model_name='tutorialproposal',
            name='conference',
            field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018'), ('pycontw-2019', 'PyCon Taiwan 2019')], default='pycontw-2019', verbose_name='conference'),
        ),
    ]
mit
-1,369,078,986,699,272,400
45.8
252
0.625356
false
3.572519
false
false
false
zzh8829/RevOctane
bstream.py
1
3007
import io import struct little_endian_types = { 'int8': 'b', 'uint8': 'B', 'int16': 'h', 'uint16': 'H', 'int32': 'i', 'uint32': 'I', 'int64': 'q', 'uint64': 'Q', 'float': 'f', 'float32': 'f', 'double': 'd', 'char': 'c', 'bool': '?', 'pad': 'x', 'void*': 'P', } big_endian_types = { k:">"+v for k,v in little_endian_types.items()} special_types = { 'int12': 'read_int12', 'uint12': 'read_int12', 'float16': 'read_float16', } class BStream: def __init__(self, **kwargs): if "file" in kwargs: self.stream = open(kwargs["file"], "rb") elif "stream" in kwargs: self.stream = kwargs["stream"] elif "bytes" in kwargs: self.stream = io.BytesIO(kwargs["bytes"]) else: raise Exception("unknown stream source") self.endianness = kwargs.get("endianness","little") if self.endianness == "little": self.normal_types = little_endian_types elif self.endianness == "big": self.normal_types = big_endian_types def read(self, type_name='char'): if isinstance(type_name,int): return self.unpack('%ds'%type_name)[0] type_name = type_name.lower() if type_name.endswith('_t'): type_name = type_name[:-2] if type_name in special_types: return getattr(self, special_types[type_name])() if type_name in self.normal_types: return self.unpack(self.normal_types[type_name])[0] raise Exception("unknown type") def unpack(self, fmt): return struct.unpack(fmt, self.stream.read(struct.calcsize(fmt))) def read_cstring(self): string = "" while True: char = self.read('char') if ord(char) == 0: break string += char.decode("utf-8") return string def read_string(self): return self.unpack('%ds'%self.read('uint32_t'))[0].decode('utf-8') def read_all(self): return self.read(self.size() - self.get_position()) def read_int12(self): return int.from_bytes(self.read(3),byteorder=self.endianness) def read_float16(self): data = self.read('uint16_t') s = int((data >> 15) & 0x00000001) # sign e = int((data >> 10) & 0x0000001f) # exponent f = int(data & 0x000003ff) # fraction if e == 0: if f == 0: return int(s << 31) else: while not (f & 0x00000400): f = f << 1 e -= 1 e += 1 f &= ~0x00000400 #print(s,e,f) elif e == 31: if f == 0: return int((s << 31) | 0x7f800000) else: return int((s << 31) | 0x7f800000 | (f << 13)) e = e + (127 -15) f = f << 13 buf = struct.pack('I',int((s << 31) | (e << 23) | f)) return struct.unpack('f',buf)[0] def tell(self): return self.stream.tell() def seek(self, pos, whence): return self.stream.seek(pos, whence) def get_position(self): return self.tell() def set_position(self, pos, whence=0): return self.seek(pos, whence) def size(self): pos = self.get_position() self.set_position(0,2) end = self.get_position() self.set_position(pos,0) return end def align(self, alignment=4): self.set_position((self.get_position() + alignment - 1) // alignment * alignment)
mit
-8,549,833,761,040,858,000
21.954198
84
0.60858
false
2.628497
false
false
false
avedaee/DIRAC
DataManagementSystem/Client/ReplicaContainers.py
1
4513
# $HeadURL$ __RCSID__ = "$Id$" """ This module contains three classes associated to Replicas. The Replica class contains simply three member elements: SE, PFN and Status and provides access methods for each (inluding type checking). The CatalogReplica class inherits the Replica class. The PhysicalReplica class inherits the Replica class and adds the 'size','checksum','online' and 'migrated' members. In this context Replica refers to any copy of a file. This can be the first or an additional copy. OBSOLETE? K.C. """ import types from DIRAC import S_OK, S_ERROR from DIRAC.Core.Utilities.CFG import CFG class Replica: def __init__(self,pfn='',storageElement='',status=''): # These are the possible attributes for a replica if not type(pfn) in types.StringTypes: raise AttributeError, "pfn should be string type" self.pfn = str(pfn) if not type(storageElement) in types.StringTypes: raise AttributeError, "storageElement should be string type" self.se = str(storageElement) if not type(status) in types.StringTypes: raise AttributeError, "status should be string type" self.status = str(status) def setPFN(self,pfn): if not type(pfn) in types.StringTypes: return S_ERROR("PFN should be %s and not %s" % (types.StringType,type(pfn))) self.pfn = str(pfn) return S_OK() def setSE(self,se): if not type(se) in types.StringTypes: return S_ERROR("SE should be %s and not %s" % (types.StringType,type(se))) self.se = str(se) return S_OK() def setStatus(self,status): if not type(status) in types.StringTypes: return S_ERROR("Status should be %s and not %s" % (types.StringType,type(status))) self.status = str(status) return S_OK() def getPFN(self): return S_OK(self.pfn) def getSE(self): return S_OK(self.se) def getStatus(self): return S_OK(self.status) def digest(self): """ Get short description string of replica and status """ return S_OK("%s:%s:%s" % (self.se,self.pfn,self.status)) def toCFG(self): oCFG = CFG() oCFG.createNewSection(self.se) oCFG.setOption('%s/Status' % (self.se), self.status) oCFG.setOption('%s/PFN' % (self.se), self.pfn) return S_OK(str(oCFG)) class CatalogReplica(Replica): def __init__(self,pfn='',storageElement='',status='U'): Replica.__init__(self,pfn,storageElement,status) class PhysicalReplica(Replica): def __init__(self,pfn='',storageElement='',status='',size=0,checksum='',online=False,migrated=False): # These are the possible attributes for a physical replica Replica.__init__(self,pfn,storageElement,status) try: self.size = int(size) except: raise AttributeError, "size should be integer type" if not type(checksum) in types.StringTypes: raise AttributeError, "checksum should be string type" self.checksum = str(checksum) if not type(online) == types.BooleanType: raise AttributeError, "online should be bool type" self.online = online if not type(migrated) == types.BooleanType: raise AttributeError, "migrated should be bool type" self.migrated = migrated def setSize(self,size): try: self.size = int(size) return S_OK() except: return S_ERROR("Size should be %s and not %s" % (types.IntType,type(size))) def setChecksum(self,checksum): if not type(checksum) in types.StringTypes: return S_ERROR("Checksum should be %s and not %s" % (types.StringType,type(checksum))) self.checksum = str(checksum) return S_OK() def setOnline(self,online): if not type(online) == types.BooleanType: return S_ERROR("online should be %s and not %s" % (types.BooleanType,type(online))) self.online = online return S_OK() def setMigrated(self,migrated): if not type(migrated) == types.BooleanType: return S_ERROR("migrated should 
be %s and not %s" % (types.BooleanType,type(migrated))) self.migrated = migrated return S_OK() def getSize(self): return S_OK(self.size) def getChecksum(self): return S_OK(self.checksum) def getOnline(self): return S_OK(self.online) def getMigrated(self): return S_OK(self.migrated) def digest(self): online = 'NotOnline' if self.online: online = 'Online' migrated = 'NotMigrated' if self.migrated: migrated = 'Migrated' return S_OK("%s:%s:%d:%s:%s:%s" % (self.se,self.pfn,self.size,self.status,online,migrated))
gpl-3.0
2,198,887,395,661,303,000
31.007092
143
0.670286
false
3.484942
false
false
false
erikrose/oedipus
oedipus/results.py
1
4275
class SearchResults(object): """Results in the order in which they came out of Sphinx Since Sphinx stores no non-numerical attributes, we have to reach into the DB to pull them out. """ def __init__(self, type, ids, fields): self.type = type # Sphinx may return IDs of objects since deleted from the DB. self.ids = ids self.fields = fields # tuple self.objects = dict(self._objects()) # {id: obj/tuple/dict, ...} def _queryset(self): """Return a QuerySet of the objects parallel to the found docs.""" return self.type.objects.filter(id__in=self.ids) def __iter__(self): """Iterate over results in the same order they came out of Sphinx.""" # Ripped off from elasticutils return (self.objects[id] for id in self.ids if id in self.objects) class DictResults(SearchResults): """Results as an iterable of dictionaries""" def _dicts_with_ids(self): """Return an iterable of dicts with ``id`` attrs, each representing a matched DB object.""" fields = self.fields # Append ID to the requested fields so we can keep track of object # identity to sort by weight (or whatever Sphinx sorted by). We could # optimize slightly by not prepending ID if the user already # specifically asked for it, but then we'd have to keep track of its # offset. if fields and 'id' not in fields: fields += ('id',) # Get values rather than values_list, because we need to be able to # find the ID afterward, and we don't want to have to go rooting around # in the Django model to figure out what order the fields were declared # in in the case that no fields were passed in. return self._queryset().values(*fields) def _objects(self): """Return an iterable of (document ID, dict) pairs.""" should_strip_ids = self.fields and 'id' not in self.fields for d in self._dicts_with_ids(): id = d.pop('id') if should_strip_ids else d['id'] yield id, d @classmethod def content_for_fields(klass, result, fields, highlight_fields): """Returns a tuple with content values for highlight_fields. :param result: A result generated by this class. :param fields: Iterable of fields for a result from this class. :param highlight_fields: Iterable of the fields to highlight. This should be a subset of ``fields``. :returns: Tuple with content in the field indexes specified by ``highlight_fields``. :raises KeyError: If there is a field in ``highlight_fields`` that doesn't exist in ``fields``. """ return tuple(result[field] for field in highlight_fields) class TupleResults(DictResults): """Results as an iterable of tuples, like Django's values_list()""" def _objects(self): """Return an iterable of (document ID, tuple) pairs.""" for d in self._dicts_with_ids(): yield d['id'], tuple(d[k] for k in self.fields) @classmethod def content_for_fields(klass, result, fields, highlight_fields): """See ``DictResults.content_for_fields``. :raises ValueError: If there is a field in ``highlight_fields`` that doesn't exist in ``fields``. """ return tuple(result[fields.index(field)] for field in highlight_fields) class ObjectResults(SearchResults): """Results as an iterable of Django model-like objects""" def _objects(self): """Return an iterable of (document ID, model object) pairs.""" # Assuming the document ID is called "id" lets us depend on fewer # Djangoisms than assuming it's the pk; we'd have to get # self.type._meta to get the name of the pk. return ((o.id, o) for o in self._queryset()) @classmethod def content_for_fields(klass, result, fields, highlight_fields): """See ``DictResults.content_for_fields``. 
:raises AttributeError: If there is a field in ``highlight_fields`` that doesn't exist in ``fields``. """ return tuple(getattr(result, field) for field in highlight_fields)
bsd-3-clause
2,535,138,606,245,989
39.330189
99
0.629474
false
4.309476
false
false
false
arjunasuresh3/Mypykoans
python 2/koans/about_monkey_patching.py
1
1451
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Related to AboutOpenClasses in the Ruby Koans
#

from runner.koan import *


class AboutMonkeyPatching(Koan):
    class Dog(object):
        def bark(self):
            return "WOOF"

    def test_as_defined_dogs_do_bark(self):
        fido = self.Dog()
        self.assertEqual("WOOF", fido.bark())

    # ------------------------------------------------------------------

    # Add a new method to an existing class.

    def test_after_patching_dogs_can_both_wag_and_bark(self):
        def wag(self):
            return "HAPPY"
        self.Dog.wag = wag

        fido = self.Dog()
        self.assertEqual("HAPPY", fido.wag())
        self.assertEqual("WOOF", fido.bark())

    # ------------------------------------------------------------------

    def test_most_built_in_classes_cannot_be_monkey_patched(self):
        try:
            int.is_even = lambda self: (self % 2) == 0
        except StandardError as ex:
            self.assertMatch("can't set attributes of built-in/extension type 'int'", ex[0])

    # ------------------------------------------------------------------

    class MyInt(int):
        pass

    def test_subclasses_of_built_in_classes_can_be_be_monkey_patched(self):
        self.MyInt.is_even = lambda self: (self % 2) == 0

        self.assertEqual(False, self.MyInt(1).is_even())
        self.assertEqual(True, self.MyInt(2).is_even())
mit
6,103,702,846,106,463,000
29.229167
92
0.500345
false
3.710997
false
false
false
tedlaz/pyted
misthodosia/m13a/fmy.py
1
2802
# -*- coding: utf-8 -*- ''' Created on 16 Ιαν 2013 @author: tedlaz ''' from utils import dec as d def f13(poso): poso = d(poso) ekp = d(0) if poso < d(21500): ekp = d(2100) elif poso < d(22500): ekp = d(2000) elif poso < d(23500): ekp = d(1900) elif poso < d(24500): ekp = d(1800) elif poso < d(25500): ekp = d(1700) elif poso < d(26500): ekp = d(1600) elif poso < d(27500): ekp = d(1500) elif poso < d(28500): ekp = d(1400) elif poso < d(29500): ekp = d(1300) elif poso < d(30500): ekp = d(1200) elif poso < d(31500): ekp = d(1100) elif poso < d(32500): ekp = d(1000) elif poso < d(33500): ekp = d(900) elif poso < d(34500): ekp = d(800) elif poso < d(35500): ekp = d(700) elif poso < d(36500): ekp = d(600) elif poso < d(37500): ekp = d(500) elif poso < d(38500): ekp = d(400) elif poso < d(39500): ekp = d(300) elif poso < d(40500): ekp = d(200) elif poso < d(41500): ekp = d(100) else: ekp = d(0) #print 'ekptosi',poso,ekp foros = d(0) if poso <= d(25000): foros = d(poso * d(22) / d(100)) else: foros = d(5500) poso = poso - d(25000) if poso <= d(17000): foros += d(poso * d(32) / d(100)) else: foros += d(5440) poso = poso - d(17000) foros += d(poso * d(42) / d(100)) foros = foros - ekp if foros < d(0) : foros = d(0) return foros def eea(poso): poso = d(poso) if poso <= d(12000): synt = d(0) elif poso <= d(20000): synt = d(1) elif poso <= d(50000): synt = d(2) elif poso <= d(100000): synt = d(3) else: synt = d(4) return d(poso * synt / d(100)) def eeap(poso,bar=1): #bar : 1 εάν ολόκληρη περίοδος 2 εάν μισή (πχ.επίδομα αδείας) poso = d(poso) tb = d(14) * d(bar) eis = poso * tb ee = eea(eis) return d(ee / tb) def fp13(poso,bar=1): poso = poso tb = 14 * bar eis = poso * tb f = f13(eis) #pf = d(f - d(0.015,3) * f) return f / tb def fpXrisis(poso,bar=1,xrisi=2013): if xrisi == 2013: return fp13(poso,bar) else: return 0 def eeaXrisis(poso,bar=1,xrisi=2013): if xrisi == 2012 or xrisi == 2013: return eeap(poso,bar) else: return d(0) if __name__ == '__main__': p = 2035.72 print fpXrisis(p,1,2013) print eeaXrisis(p,1,2013)
gpl-3.0
-9,046,916,456,819,823,000
21.389831
83
0.451414
false
2.532599
false
false
false
Sorsly/subtle
google-cloud-sdk/platform/gsutil/third_party/apitools/run_pylint.py
3
8173
# # Copyright 2015 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Custom script to run PyLint on apitools codebase. "Inspired" by the similar script in gcloud-python. This runs pylint as a script via subprocess in two different subprocesses. The first lints the production/library code using the default rc file (PRODUCTION_RC). The second lints the demo/test code using an rc file (TEST_RC) which allows more style violations (hence it has a reduced number of style checks). """ import ConfigParser import copy import os import subprocess import sys IGNORED_DIRECTORIES = [ 'apitools/gen/testdata', 'samples/storage_sample/storage', 'venv', ] IGNORED_FILES = [ 'ez_setup.py', 'run_pylint.py', 'setup.py', ] PRODUCTION_RC = 'default.pylintrc' TEST_RC = 'reduced.pylintrc' TEST_DISABLED_MESSAGES = [ 'exec-used', 'invalid-name', 'missing-docstring', 'protected-access', ] TEST_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': ',\n'.join(TEST_DISABLED_MESSAGES), }, } def read_config(filename): """Reads pylintrc config onto native ConfigParser object.""" config = ConfigParser.ConfigParser() with open(filename, 'r') as file_obj: config.readfp(file_obj) return config def make_test_rc(base_rc_filename, additions_dict, target_filename): """Combines a base rc and test additions into single file.""" main_cfg = read_config(base_rc_filename) # Create fresh config for test, which must extend production. test_cfg = ConfigParser.ConfigParser() test_cfg._sections = copy.deepcopy(main_cfg._sections) for section, opts in additions_dict.items(): curr_section = test_cfg._sections.setdefault( section, test_cfg._dict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: raise KeyError('Expected to be adding to existing option.') curr_section[opt] = '%s\n%s' % (curr_val, opt_val) with open(target_filename, 'w') as file_obj: test_cfg.write(file_obj) def valid_filename(filename): """Checks if a file is a Python file and is not ignored.""" for directory in IGNORED_DIRECTORIES: if filename.startswith(directory): return False return (filename.endswith('.py') and filename not in IGNORED_FILES) def is_production_filename(filename): """Checks if the file contains production code. :rtype: boolean :returns: Boolean indicating production status. """ return not ('demo' in filename or 'test' in filename or filename.startswith('regression')) def get_files_for_linting(allow_limited=True, diff_base=None): """Gets a list of files in the repository. By default, returns all files via ``git ls-files``. However, in some cases uses a specific commit or branch (a so-called diff base) to compare against for changed files. (This requires ``allow_limited=True``.) To speed up linting on Travis pull requests against master, we manually set the diff base to origin/master. We don't do this on non-pull requests since origin/master will be equivalent to the currently checked out code. One could potentially use ${TRAVIS_COMMIT_RANGE} to find a diff base but this value is not dependable. 
:type allow_limited: boolean :param allow_limited: Boolean indicating if a reduced set of files can be used. :rtype: pair :returns: Tuple of the diff base using the the list of filenames to be linted. """ if os.getenv('TRAVIS') == 'true': # In travis, don't default to master. diff_base = None if (os.getenv('TRAVIS_BRANCH') == 'master' and os.getenv('TRAVIS_PULL_REQUEST') != 'false'): # In the case of a pull request into master, we want to # diff against HEAD in master. diff_base = 'origin/master' if diff_base is not None and allow_limited: result = subprocess.check_output(['git', 'diff', '--name-only', diff_base]) print 'Using files changed relative to %s:' % (diff_base,) print '-' * 60 print result.rstrip('\n') # Don't print trailing newlines. print '-' * 60 else: print 'Diff base not specified, listing all files in repository.' result = subprocess.check_output(['git', 'ls-files']) return result.rstrip('\n').split('\n'), diff_base def get_python_files(all_files=None, diff_base=None): """Gets a list of all Python files in the repository that need linting. Relies on :func:`get_files_for_linting()` to determine which files should be considered. NOTE: This requires ``git`` to be installed and requires that this is run within the ``git`` repository. :type all_files: list or ``NoneType`` :param all_files: Optional list of files to be linted. :rtype: tuple :returns: A tuple containing two lists and a boolean. The first list contains all production files, the next all test/demo files and the boolean indicates if a restricted fileset was used. """ using_restricted = False if all_files is None: all_files, diff_base = get_files_for_linting(diff_base=diff_base) using_restricted = diff_base is not None library_files = [] non_library_files = [] for filename in all_files: if valid_filename(filename): if is_production_filename(filename): library_files.append(filename) else: non_library_files.append(filename) return library_files, non_library_files, using_restricted def lint_fileset(filenames, rcfile, description): """Lints a group of files using a given rcfile.""" # Only lint filenames that exist. For example, 'git diff --name-only' # could spit out deleted / renamed files. Another alternative could # be to use 'git diff --name-status' and filter out files with a # status of 'D'. filenames = [filename for filename in filenames if os.path.exists(filename)] if filenames: rc_flag = '--rcfile=%s' % (rcfile,) pylint_shell_command = ['pylint', rc_flag] + filenames status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = ('Pylint failed on %s with ' 'status %d.' % (description, status_code)) print >> sys.stderr, error_message sys.exit(status_code) else: print 'Skipping %s, no files to lint.' % (description,) def main(argv): """Script entry point. Lints both sets of files.""" diff_base = argv[1] if len(argv) > 1 else None make_test_rc(PRODUCTION_RC, TEST_RC_ADDITIONS, TEST_RC) library_files, non_library_files, using_restricted = get_python_files( diff_base=diff_base) try: lint_fileset(library_files, PRODUCTION_RC, 'library code') lint_fileset(non_library_files, TEST_RC, 'test and demo code') except SystemExit: if not using_restricted: raise message = 'Restricted lint failed, expanding to full fileset.' 
print >> sys.stderr, message all_files, _ = get_files_for_linting(allow_limited=False) library_files, non_library_files, _ = get_python_files( all_files=all_files) lint_fileset(library_files, PRODUCTION_RC, 'library code') lint_fileset(non_library_files, TEST_RC, 'test and demo code') if __name__ == '__main__': main(sys.argv)
mit
-4,517,023,543,270,012,400
34.534783
78
0.650679
false
3.916148
true
false
false