hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
353f3b12a8667591e7017e2e585ef6b1e4c68782 | 4,843 | py | Python | pysptools/eea/inval.py | ctherien/pysptools | fbcd3ecaa7ab27f0158b28b4327537c3e75db160 | [
"Apache-2.0"
] | 35 | 2016-03-20T15:25:07.000Z | 2022-03-29T04:05:56.000Z | pysptools/eea/inval.py | ctherien/pysptools | fbcd3ecaa7ab27f0158b28b4327537c3e75db160 | [
"Apache-2.0"
] | 12 | 2016-03-24T13:38:52.000Z | 2021-04-06T07:11:19.000Z | pysptools/eea/inval.py | ctherien/pysptools | fbcd3ecaa7ab27f0158b28b4327537c3e75db160 | [
"Apache-2.0"
] | 14 | 2016-03-21T17:26:46.000Z | 2022-01-18T08:39:27.000Z | #
#------------------------------------------------------------------------------
# Copyright (c) 2013-2014, Christian Therien
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#------------------------------------------------------------------------------
#
# inval.py - This file is part of the PySptools package.
#
"""
"""
import pysptools.util as util
# PPI
# NFINDR
# ATGP
# FIPPI
| 42.858407 | 125 | 0.641751 | #
#------------------------------------------------------------------------------
# Copyright (c) 2013-2014, Christian Therien
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#------------------------------------------------------------------------------
#
# inval.py - This file is part of the PySptools package.
#
"""
"""
import pysptools.util as util
# PPI
def ExtractInputValidation1(class_name):
    """Decorator factory validating the arguments of a PPI-style extract method.

    Verifies that M is a data cube, q and numSkewers are ints, normalize is a
    bool and mask (if given) is a valid mask, then delegates to the wrapped
    method unchanged.
    """
    @util.simple_decorator
    def decorate(method):
        def validated(self, M, q, numSkewers=10000, normalize=False, mask=None):
            validator = util.InputValidation(class_name)
            name = method.__name__
            validator.dispatch(validator.cube_type, name, M, 'M')
            validator.dispatch(validator.int_type, name, q, 'q')
            validator.dispatch(validator.int_type, name, numSkewers, 'numSkewers')
            validator.dispatch(validator.bool_type, name, normalize, 'normalize')
            validator.dispatch(validator.mask_type, name, mask)
            return method(self, M, q, numSkewers=numSkewers, normalize=normalize, mask=mask)
        return validated
    return decorate
# NFINDR
def ExtractInputValidation2(class_name):
    """Decorator factory validating the arguments of a NFINDR-style extract method."""
    @util.simple_decorator
    def wrap(method):
        def checker(self, M, q, transform=None, maxit=None, normalize=False, ATGP_init=False, mask=None):
            check = util.InputValidation(class_name)
            check.dispatch(check.cube_type, method.__name__, M, 'M')
            check.dispatch(check.int_type, method.__name__, q, 'q')
            # NOTE(review): unlike the sibling dispatch calls, this passes q
            # followed by the transform object (no parameter-name string);
            # confirm transform_type's expected argument order against
            # util.InputValidation before changing anything.
            check.dispatch(check.transform_type, method.__name__, q, transform)
            check.dispatch(check.int_type2, method.__name__, maxit, 'maxit')
            check.dispatch(check.bool_type, method.__name__, normalize, 'normalize')
            check.dispatch(check.bool_type, method.__name__, ATGP_init, 'ATGP_init')
            check.dispatch(check.mask_type, method.__name__, mask)
            return method(self, M, q, transform=transform, maxit=maxit, normalize=normalize, ATGP_init=ATGP_init, mask=mask)
        return checker
    return wrap
# ATGP
def ExtractInputValidation3(class_name):
    """Decorator factory validating the arguments of an ATGP-style extract method.

    Verifies that M is a data cube, q an int, normalize a bool and mask (if
    given) a valid mask before delegating to the wrapped method.
    """
    @util.simple_decorator
    def decorate(method):
        def validated(self, M, q, normalize=False, mask=None):
            validator = util.InputValidation(class_name)
            name = method.__name__
            validator.dispatch(validator.cube_type, name, M, 'M')
            validator.dispatch(validator.int_type, name, q, 'q')
            validator.dispatch(validator.bool_type, name, normalize, 'normalize')
            validator.dispatch(validator.mask_type, name, mask)
            return method(self, M, q, normalize=normalize, mask=mask)
        return validated
    return decorate
# FIPPI
def ExtractInputValidation4(class_name):
    """Decorator factory validating the arguments of a FIPPI-style extract method.

    Verifies that M is a data cube, q an int, maxit an int (or None),
    normalize a bool and mask (if given) a valid mask before delegating
    to the wrapped method.
    """
    @util.simple_decorator
    def decorate(method):
        def validated(self, M, q=None, maxit=None, normalize=False, mask=None):
            validator = util.InputValidation(class_name)
            name = method.__name__
            validator.dispatch(validator.cube_type, name, M, 'M')
            validator.dispatch(validator.int_type, name, q, 'q')
            validator.dispatch(validator.int_type2, name, maxit, 'maxit')
            validator.dispatch(validator.bool_type, name, normalize, 'normalize')
            validator.dispatch(validator.mask_type, name, mask)
            return method(self, M, q=q, maxit=maxit, normalize=normalize, mask=mask)
        return validated
    return decorate
def PlotInputValidation(class_name):
    """Decorator factory validating the arguments of a plot(path, ...) method.

    Checks the axes and suffix keyword arguments, then calls the wrapped
    method for its side effect (no value is returned).
    """
    @util.simple_decorator
    def decorate(method):
        def validated(self, path, axes=None, suffix=None):
            validator = util.InputValidation(class_name)
            name = method.__name__
            validator.dispatch(validator.axes_type, name, axes)
            validator.dispatch(validator.suffix_type, name, suffix)
            method(self, path, axes=axes, suffix=suffix)
        return validated
    return decorate
def DisplayInputValidation(class_name):
    """Decorator factory validating the arguments of a display(...) method.

    Checks the axes and suffix keyword arguments, then calls the wrapped
    method for its side effect (no value is returned).
    """
    @util.simple_decorator
    def decorate(method):
        def validated(self, axes=None, suffix=None):
            validator = util.InputValidation(class_name)
            name = method.__name__
            validator.dispatch(validator.axes_type, name, axes)
            validator.dispatch(validator.suffix_type, name, suffix)
            method(self, axes=axes, suffix=suffix)
        return validated
    return decorate
| 3,772 | 0 | 142 |
a2f2c4a35d3dece5fb6a9a94a0d9725d505188b6 | 24,806 | py | Python | FilterUniqueBAM.py | hw538/cfDNA | 779c290586fac73529a5fbe4d01a6d7767dbb23c | [
"MIT"
] | 38 | 2016-03-11T23:21:44.000Z | 2022-03-10T14:45:03.000Z | FilterUniqueBAM.py | hw538/cfDNA | 779c290586fac73529a5fbe4d01a6d7767dbb23c | [
"MIT"
] | 6 | 2016-04-14T06:13:40.000Z | 2020-10-26T10:47:58.000Z | FilterUniqueBAM.py | hw538/cfDNA | 779c290586fac73529a5fbe4d01a6d7767dbb23c | [
"MIT"
] | 30 | 2016-03-12T01:37:47.000Z | 2022-02-25T22:54:29.000Z | #!/usr/bin/env python
# -*- coding: ASCII -*-
"""
Merge/Adapter trim reads stored in BAM
:Author: Martin Kircher
:Contact: mkircher@uw.edu
"""
import sys
import os
import math
import pysam
from optparse import OptionParser,OptionGroup
import string
table = string.maketrans('TGCAMRWSYKVHDBtgcamrwsykvhdb','ACGTKYWSRMBDHVacgtkywsrmbdhv') # COMPLEMENT DNA
quality_offset = 33
parser = OptionParser("%prog [options] BAMfile")
parser.add_option("-p","--PIPE",dest="pipe",help="Read BAM from and write it to PIPE",default=False,action="store_true")
parser.add_option("-o", "--outdir", dest="outdir", help="Create output files in another directory.")
parser.add_option("-c", "--consensus", dest="consensus", help="Report PCR duplicate consensus instead of sequence with highest sum of base qualities.",default=False,action="store_true")
parser.add_option("", "--outprefix", dest="outprefix", help="Prefix for output files (default 'PCRconsensus').",default="PCRconsensus")
parser.add_option("", "--outnewconsensus", dest="outnewconsensus", help="Save reads with new consensus sequence for realignment to separate FastQ output files with this prefix (default OFF).",default=None)
parser.add_option("", "--SAM", dest="SAM", help="Input/Output SAM not BAM.",default=False,action="store_true")
parser.add_option("", "--ignore_RG", dest="ignore_RG", help="Ignore the RG when looking for PCR duplicates. The consensus reads gets the RG of the template used.",default=False,action="store_true")
parser.add_option("", "--fixID", dest="fixID", help="Fix read ID, take only first part up to first / character",default=False,action="store_true")
parser.add_option("--library", dest="library", help="Use library name from RG read header rather than the readgroup ID",default=False,action="store_true")
parser.add_option("--UMI", dest="UMI", help="Use unique molecule identifier (UMI, in second index field) for grouping ",default=False,action="store_true")
parser.add_option("-v", "--verbose", dest="verbose", help="Turn all messages on",default=False,action="store_true")
group = OptionGroup(parser, "Filter options")
group.add_option("--include_qcfail",dest="include_qcfail",help="Consider reads that have the QC fail flag",default=False,action='store_true')
group.add_option("--rescue_qcfail",dest="rescue_qcfail",help="Remove fail quality flag of reads if PCR duplicates are observed",default=False,action='store_true')
group.add_option("--frequency_cutoff",dest="frequency",help="Keep only sequences with at least X PCR duplicates (default X = None)",default=None,type="int")
group.add_option("-f","--5p",dest="fivePrime",help="Cluster reads on five prime coordinate",default=False,action='store_true')
group.add_option("--max_length",dest="MaxLength",help="Longest possible read length stored in the input BAM (only relevant for SR 5' clustering and PE reads spanning multiple contigs, def 800)",default=800,type="int")
group.add_option("-k","--keep",dest="keep",help="Keep unmapped sequences",default=False,action='store_true')
group.add_option("--buffer",dest="rbuffer",help="Lowest number of PE reads buffered before write (def 5000)",default=5000,type="int")
group.add_option("-m","--merged",dest="merged",help="Keep only SR reads that are merged",default=False,action='store_true')
parser.add_option_group(group)
(options, args) = parser.parse_args()
#------------------------------
# PE data
#------------------------------
#The --5p parameter does not trigger any special code in the PE handling. The outer coordinates of PE reads are defined by chromosome, coordinate forward read and coordinate reverse read.
#(1) chromosome is a string or can be a tuple of strings for PEs mapped across contigs/chromosomes
#(2) Coordinate of a read aligned without reversal: reported five prime position in the BAM
#(3) Coordinate of a read aligned as reverse complement: five prime position + alignment length
#When PE reads are collected, they are first collected incomplete pairs and then as complete pairs. Complete pairs are stored until a buffer limit is reached, incomplete pairs until the end of the script -- where they are essentially forgotten, ehh, removed from the BAM file ;-) . So if the buffer is full, a PE read cluster is processed if:
#(1) The chromosome is a string and we are already on a different chromosome
#(2) or, we are on the same chromosome, but more than molecule length away from the largest 5' coordinate of the cluster
#(3) or, chromosome is a tuple and none of the strings in the tuple matches the current chromosome
#(4) or, one of the strings is the current chromosome (implicating that we have finished the other chromosome since we have complete PE read clusters at hand) and we are more than 5p_max_length bases away from the 5' position of that read.
#------------------------------
#Merged parameter
#------------------------------
#Removes reads that are not flagged "paired in sequencing" and where the read ID does not start in "M_". If your incomplete PE reads do not have the paired in sequencing flag, this would remove them. If they are marked as paired in sequencing, but the second read is just missing from the file that would of course not help -- but those are by default removed (see above).
if options.outprefix == "":
sys.stderr.write("Outprefix can not be empty!\n")
sys.exit()
if options.outdir != None and not os.path.isdir(options.outdir):
sys.stderr.write("Output folder does not exist!\n")
sys.exit()
elif options.outdir == None:
options.outdir = ""
else:
options.outdir = options.outdir.rstrip('/')+'/'
cfastq_SR, cfastq_PE = 0,0
if options.outnewconsensus != None:
outfilename = options.outdir+options.outnewconsensus+"_SR.fastq"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
fastq_SR = open(outfilename,'w')
outfilename = options.outdir+options.outnewconsensus+"_r1.fastq"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
fastq_r1 = open(outfilename,'w')
outfilename = options.outdir+options.outnewconsensus+"_r2.fastq"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
fastq_r2 = open(outfilename,'w')
## CREATE OUTPUT FILE(S)/STREAM
fileflags = 'wb'
if options.SAM: fileflags = 'w'
files = args
if options.pipe: files = [None]
if len(files) > 1:
files=files[:1]
sys.stderr.write("This script supports only one input file! Limiting processing to first filename.\n")
outfile = None
sys.stderr.write("WARNING: This script will 'cluster' reads based on both outer coordinates and pick a representative sequence of the cluster as the one with the dominant CIGAR string. Other sequences are lost and will not be considered in consensus calling.\n")
for filename in files:
if filename == None and not options.SAM:
infile = pysam.Samfile( "-", 'rb' )
elif filename == None and options.SAM:
infile = pysam.Samfile( "-", 'r' )
else:
infile = pysam.Samfile( filename, 'rb' )
id2lib = {}
if options.library and 'RG' in infile.header:
for rgroup in infile.header['RG']:
if 'LB' in rgroup and 'ID' in rgroup:
id2lib[rgroup['ID']] = rgroup['LB']
if outfile == None:
if options.verbose: sys.stderr.write("Creating output files/streams...\n")
if options.pipe:
outfile = pysam.Samfile( "-", fileflags, template = infile)
if options.verbose: sys.stderr.write("BAM/SAM output on stdout...\n")
else:
outfilename = options.outdir+options.outprefix+".bam"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
outfile = pysam.Samfile( outfilename , fileflags, template = infile)
if ('HD' in outfile.header) and ('SO' in outfile.header['HD']):
outfile.header['HD']['SO'] = 'unsorted'
else:
outfile.header['HD'] = {'VN': '1.4','SO':'unsorted'}
lcheck = None,None
variants = {}
incomplete_variants = {}
curpos = None
curvariants = {}
total_reads = 0
out_reads = 0
out_reads_SR = 0
out_reads_kept = 0
for read in infile:
if read.qual == None: continue
if options.fixID: read.qname = read.qname.split('/')[0]
total_reads += 1
if options.verbose and total_reads % 100000 == 0:
sys.stderr.write("Reads in %d / PCR dups out %d PE | %d SR / Unmapped out %d / FastQ realignment %d PE | %d SR ( %.2f%% ; current pos: %s)\n"%(total_reads,out_reads,out_reads_SR,out_reads_kept,cfastq_PE,cfastq_SR,(out_reads*2+out_reads_SR+out_reads_kept+cfastq_SR+cfastq_PE*2)/float(total_reads)*100,str(curpos)))
if read.is_qcfail and not options.include_qcfail and not options.rescue_qcfail:
#if options.verbose: sys.stderr.write("QC FAIL\n")
continue
if read.is_unmapped and not options.keep:
#if options.verbose: sys.stderr.write("UNMAPPED\n")
continue
elif read.is_unmapped and options.keep:
if not read.is_qcfail:
outfile.write(read)
out_reads_kept += 1
continue
if not read.is_paired and options.merged and not read.qname.startswith("M_"):
#if options.verbose: sys.stderr.write("MERGED\n")
continue
RG = None
if not options.ignore_RG:
if read.tags != None:
for key,value in read.tags:
if key == "RG":
if value in id2lib: RG = id2lib[value]
else: RG = value
break
if options.UMI:
if read.tags != None:
for key,value in read.tags:
if key == "XJ":
RG = value
break
if RG not in variants: variants[RG] = {}
if RG not in incomplete_variants: incomplete_variants[RG] = {}
if RG not in curvariants: curvariants[RG] = {}
if sum(map(len,variants.itervalues())) > options.rbuffer and ((lcheck[0] != curpos[0]) or (lcheck[1]+options.MaxLength < curpos[1])):
lcheck = (curpos[0],curpos[1])
if options.verbose: sys.stderr.write("Full buffer (%d)"%sum(map(len,variants.itervalues()))+str(curpos)+" \n")
for cRG in variants.keys():
hvariants = {}
for (hchr,outpos,outpos_r2),reads in variants[cRG].iteritems():
if ((type(hchr) != type(()) and # SINGLE CHROM MAPPING PE
((hchr != curpos[0]) or
((hchr == curpos[0]) and (max(outpos[1],outpos_r2[1])+options.MaxLength < curpos[1])))) or
(type(hchr) == type(()) and # CROSS CONTIG MAPPING PE
(((hchr[0] != curpos[0]) and (hchr[1] != curpos[0])) or
((hchr[0] == curpos[0]) and (outpos[1]+options.MaxLength < curpos[1])) or
((hchr[1] == curpos[0]) and (outpos_r2[1]+options.MaxLength < curpos[1]))))):
forward,reverse = get_consensus(reads)
if forward == None or reverse == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif (forward.is_unmapped or reverse.is_unmapped) and options.outnewconsensus != None:
cfastq_PE+=1
seq = forward.seq
qual = forward.qual
if forward.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r1.write("@%s/1\n%s\n+\n%s\n"%(forward.qname,seq,qual))
seq = reverse.seq
qual = reverse.qual
if reverse.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r2.write("@%s/2\n%s\n+\n%s\n"%(reverse.qname,seq,qual))
else:
outfile.write(forward)
outfile.write(reverse)
out_reads += 1
else:
hvariants[(hchr,outpos,outpos_r2)]=reads
if (len(hvariants) > 0) or (RG == cRG): variants[cRG] = hvariants
else: del variants[cRG]
if options.verbose: sys.stderr.write("- Full buffer (%d)"%sum(map(len,variants.itervalues()))+str(curpos)+" \n")
if read.is_paired: # PE DATA
if read.mate_is_unmapped and options.keep:
outfile.write(read)
out_reads_kept += 1
continue
#else:
#if options.verbose: sys.stderr.write("UNMAPPED\n")
curpos = (read.tid,read.pos)
hchr = read.tid
outpos = (read.tid,read.pos)
if read.is_reverse: outpos = (read.tid,read.pos+aln_length(read.cigar))
if read.is_read1: #FORWARD READ
if read.qname not in incomplete_variants[RG]:
incomplete_variants[RG][read.qname] = [read,outpos]
else:
read_r2,outpos_r2 = incomplete_variants[RG][read.qname]
del incomplete_variants[RG][read.qname]
if outpos_r2[0] != hchr: hchr = hchr,outpos_r2[0]
if (hchr,outpos,outpos_r2) not in variants[RG]:
variants[RG][(hchr,outpos,outpos_r2)] = [(read,read_r2)]
else:
variants[RG][(hchr,outpos,outpos_r2)].append((read,read_r2))
elif read.is_read2: #REVERSE READ
if read.qname not in incomplete_variants[RG]:
incomplete_variants[RG][read.qname] = [read,outpos]
else:
read_r1,outpos_r1 = incomplete_variants[RG][read.qname]
del incomplete_variants[RG][read.qname]
if outpos_r1[0] != hchr: hchr = outpos_r1[0],hchr
if (hchr,outpos_r1,outpos) not in variants[RG]:
variants[RG][(hchr,outpos_r1,outpos)] = [(read_r1,read)]
else:
variants[RG][(hchr,outpos_r1,outpos)].append((read_r1,read))
else:
sys.stderr.write("Should not happen!")
else: # SR DATA
if (curpos != None) and ((read.tid,read.pos) != curpos):
if options.fivePrime and (read.tid == curpos[0]):
hpos = read.pos-options.MaxLength
hvariants = {}
# Flush SR duplicate clusters whose 5' coordinate lies more than MaxLength
# behind the current read; keep the remainder buffered for more duplicates.
for key,value in curvariants[RG].iteritems():
    if (key[1] < hpos):
        cread = get_consensus_SR(value[0])
        if cread == None:
            # Consensus was filtered out (QC fail / frequency cutoff).
            #if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
            continue
        elif cread.is_unmapped and options.outnewconsensus != None:
            # BUGFIX: original read `cread.is_unmpapped` (typo), which raised
            # AttributeError whenever this branch was reached; the parallel
            # flush branches below spell it `is_unmapped`.
            cfastq_SR+=1
            seq = cread.seq
            qual = cread.qual
            if cread.is_reverse:
                seq = seq.translate(table)[::-1]
                qual = qual[::-1]
            fastq_SR.write("@%s\n%s\n+\n%s\n"%(cread.qname,seq,qual))
        else:
            outfile.write(cread)
            out_reads_SR += 1
    else:
        hvariants[key]=value
curvariants[RG] = hvariants
else:
for key,value in curvariants[RG].iteritems():
cread = get_consensus_SR(value[0])
if cread == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif cread.is_unmapped and options.outnewconsensus != None:
cfastq_SR+=1
seq = cread.seq
qual = cread.qual
if cread.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_SR.write("@%s\n%s\n+\n%s\n"%(cread.qname,seq,qual))
else:
outfile.write(cread)
out_reads_SR += 1
curvariants[RG] = {}
curpos = (read.tid,read.pos)
strand = read.is_reverse
outpos = curpos[1]
if strand and options.fivePrime: outpos+=aln_length(read.cigar)
nkey = (strand,outpos)
if not options.fivePrime: nkey = (strand,outpos,aln_length(read.cigar))
if nkey in curvariants[RG]:
curvariants[RG][nkey][0].append(read)
curvariants[RG][nkey][1]+=1
else:
curvariants[RG][nkey] = [[read],1]
for RG in curvariants.keys():
for key,value in curvariants[RG].iteritems():
read = get_consensus_SR(value[0])
if read == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif read.is_unmapped and options.outnewconsensus != None:
cfastq_SR+=1
seq = read.seq
qual = read.qual
if read.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_SR.write("@%s\n%s\n+\n%s\n"%(read.qname,seq,qual))
else:
outfile.write(read)
out_reads_SR += 1
del curvariants[RG]
for RG in variants.keys():
for key,value in variants[RG].iteritems():
read1,read2 = get_consensus(value)
if read1 == None or read2 == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif (read1.is_unmapped or read2.is_unmapped) and options.outnewconsensus != None:
cfastq_PE+=1
seq = read1.seq
qual = read1.qual
if read1.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r1.write("@%s/1\n%s\n+\n%s\n"%(read1.qname,seq,qual))
seq = read2.seq
qual = read2.qual
if read2.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r2.write("@%s/2\n%s\n+\n%s\n"%(read2.qname,seq,qual))
else:
outfile.write(read1)
outfile.write(read2)
out_reads += 1
del variants[RG]
if options.outnewconsensus != None:
fastq_SR.close()
fastq_r1.close()
fastq_r2.close()
if cfastq_PE == 0:
outfilename = options.outdir+options.outnewconsensus+"_r1.fastq"
if options.verbose: sys.stderr.write("Removing empty file: %s\n"%outfilename)
os.remove(outfilename)
outfilename = options.outdir+options.outnewconsensus+"_r2.fastq"
if options.verbose: sys.stderr.write("Removing empty file: %s\n"%outfilename)
os.remove(outfilename)
if cfastq_SR == 0:
outfilename = options.outdir+options.outnewconsensus+"_SR.fastq"
if options.verbose: sys.stderr.write("Removing empty file: %s\n"%outfilename)
os.remove(outfilename)
if options.verbose:
sys.stderr.write("Total reads in %d / PCR dups out %d PE | %d SR / Unmapped out %d / FastQ realignment %d PE | %d SR ( %.2f%%)\n"%(total_reads,out_reads,out_reads_SR,out_reads_kept,cfastq_PE,cfastq_SR,(out_reads*2+out_reads_SR+out_reads_kept+cfastq_SR+cfastq_PE*2)/float(total_reads)*100))
| 42.476027 | 372 | 0.642143 | #!/usr/bin/env python
# -*- coding: ASCII -*-
"""
Merge/Adapter trim reads stored in BAM
:Author: Martin Kircher
:Contact: mkircher@uw.edu
"""
import sys
import os
import math
import pysam
from optparse import OptionParser,OptionGroup
import string
table = string.maketrans('TGCAMRWSYKVHDBtgcamrwsykvhdb','ACGTKYWSRMBDHVacgtkywsrmbdhv') # COMPLEMENT DNA
quality_offset = 33
parser = OptionParser("%prog [options] BAMfile")
parser.add_option("-p","--PIPE",dest="pipe",help="Read BAM from and write it to PIPE",default=False,action="store_true")
parser.add_option("-o", "--outdir", dest="outdir", help="Create output files in another directory.")
parser.add_option("-c", "--consensus", dest="consensus", help="Report PCR duplicate consensus instead of sequence with highest sum of base qualities.",default=False,action="store_true")
parser.add_option("", "--outprefix", dest="outprefix", help="Prefix for output files (default 'PCRconsensus').",default="PCRconsensus")
parser.add_option("", "--outnewconsensus", dest="outnewconsensus", help="Save reads with new consensus sequence for realignment to separate FastQ output files with this prefix (default OFF).",default=None)
parser.add_option("", "--SAM", dest="SAM", help="Input/Output SAM not BAM.",default=False,action="store_true")
parser.add_option("", "--ignore_RG", dest="ignore_RG", help="Ignore the RG when looking for PCR duplicates. The consensus reads gets the RG of the template used.",default=False,action="store_true")
parser.add_option("", "--fixID", dest="fixID", help="Fix read ID, take only first part up to first / character",default=False,action="store_true")
parser.add_option("--library", dest="library", help="Use library name from RG read header rather than the readgroup ID",default=False,action="store_true")
parser.add_option("--UMI", dest="UMI", help="Use unique molecule identifier (UMI, in second index field) for grouping ",default=False,action="store_true")
parser.add_option("-v", "--verbose", dest="verbose", help="Turn all messages on",default=False,action="store_true")
group = OptionGroup(parser, "Filter options")
group.add_option("--include_qcfail",dest="include_qcfail",help="Consider reads that have the QC fail flag",default=False,action='store_true')
group.add_option("--rescue_qcfail",dest="rescue_qcfail",help="Remove fail quality flag of reads if PCR duplicates are observed",default=False,action='store_true')
group.add_option("--frequency_cutoff",dest="frequency",help="Keep only sequences with at least X PCR duplicates (default X = None)",default=None,type="int")
group.add_option("-f","--5p",dest="fivePrime",help="Cluster reads on five prime coordinate",default=False,action='store_true')
group.add_option("--max_length",dest="MaxLength",help="Longest possible read length stored in the input BAM (only relevant for SR 5' clustering and PE reads spanning multiple contigs, def 800)",default=800,type="int")
group.add_option("-k","--keep",dest="keep",help="Keep unmapped sequences",default=False,action='store_true')
group.add_option("--buffer",dest="rbuffer",help="Lowest number of PE reads buffered before write (def 5000)",default=5000,type="int")
group.add_option("-m","--merged",dest="merged",help="Keep only SR reads that are merged",default=False,action='store_true')
parser.add_option_group(group)
(options, args) = parser.parse_args()
#------------------------------
# PE data
#------------------------------
#The --5p parameter does not trigger any special code in the PE handling. The outer coordinates of PE reads are defined by chromosome, coordinate forward read and coordinate reverse read.
#(1) chromosome is a string or can be a tuple of strings for PEs mapped across contigs/chromosomes
#(2) Coordinate of a read aligned without reversal: reported five prime position in the BAM
#(3) Coordinate of a read aligned as reverse complement: five prime position + alignment length
#When PE reads are collected, they are first collected incomplete pairs and then as complete pairs. Complete pairs are stored until a buffer limit is reached, incomplete pairs until the end of the script -- where they are essentially forgotten, ehh, removed from the BAM file ;-) . So if the buffer is full, a PE read cluster is processed if:
#(1) The chromosome is a string and we are already on a different chromosome
#(2) or, we are on the same chromosome, but more than molecule length away from the largest 5' coordinate of the cluster
#(3) or, chromosome is a tuple and none of the strings in the tuple matches the current chromosome
#(4) or, one of the strings is the current chromosome (implicating that we have finished the other chromosome since we have complete PE read clusters at hand) and we are more than 5p_max_length bases away from the 5' position of that read.
#------------------------------
#Merged parameter
#------------------------------
#Removes reads that are not flagged "paired in sequencing" and where the read ID does not start in "M_". If your incomplete PE reads do not have the paired in sequencing flag, this would remove them. If they are marked as paired in sequencing, but the second read is just missing from the file that would of course not help -- but those are by default removed (see above).
def extractLengthCigar(cigarlist):
    """Return the read-based (query) length implied by a CIGAR operation list.

    Sums the lengths of the operations that consume read bases:
    M (0) alignment match, I (1) insertion, S (4) soft clip,
    = (7) sequence match and X (8) sequence mismatch.
    D (2), N (3), H (5) and P (6) consume no read bases and are skipped.
    """
    read_consuming_ops = (0, 1, 4, 7, 8)
    return sum(length for operation, length in cigarlist
               if operation in read_consuming_ops)
def aln_length(cigarlist):
    """Return the reference-based alignment span of a CIGAR operation list.

    Counts operations that advance the reference position: M (0), D (2),
    N (3) and every opcode >= 6 (P/=/X), exactly as the original accounting.
    Insertions (1) and clips (4, 5) are ignored.
    """
    return sum(length for operation, length in cigarlist
               if operation in (0, 2, 3) or operation >= 6)
def calc_consensus(reads):
    """Collapse a list of PCR-duplicate reads into one representative read.

    With --consensus a per-column, quality-weighted consensus sequence is
    computed in log10 probability space; otherwise the read with the highest
    sum of base-quality bytes is reused.  The total duplicate count
    (including previous XP tag values) is written back into the XP tag.
    Returns None when --frequency_cutoff is set and the count is below it.
    Relies on module-level globals: options, quality_offset.
    """
    if options.consensus:
        # DETERMINE CONSENSUS SEQUENCE FOR THIS VARIANT
        if len(reads) > 1:
            seqstring,qualstring = "",""
            for pos in xrange(len(reads[0].seq)):
                # Accumulated log10 likelihood for A, C, G, T at this column.
                bases = [0,0,0,0]
                count = 0
                base,qualchr = None,None
                for elem in reads:
                    base = elem.seq[pos]
                    qualchr = elem.qual[pos]
                    if base == 'N': continue
                    count += 1
                    # Phred score -> log10 of the error probability (negative).
                    qual = (ord(elem.qual[pos])-quality_offset)/-10.0
                    if qual == 0: qual = -0.1
                    # log10 probability mass of each of the 3 non-called bases.
                    rev_qual = math.log10(1.0-10**qual)-math.log10(3.0)
                    for i,b in enumerate('ACGT'):
                        if b == base: bases[i]+=qual
                        else: bases[i]+=rev_qual
                if count > 0:
                    # Normalize so the four base likelihoods sum to 1.
                    total_prob = math.log10(max(0.000000001,sum(map(lambda x:10**x,bases))))
                    max_base,max_qual,min_val = 'N',chr(quality_offset),0
                    for i,b in enumerate('ACGT'):
                        cval = bases[i]-total_prob
                        if cval < min_val:
                            min_val = cval
                            # Consensus base quality is capped at Q60.
                            max_base,max_qual = b, chr(int(round(min(60,-10.0*(cval))))+quality_offset)
                    seqstring+=max_base
                    qualstring+=max_qual
                else:
                    # Every read had N here: keep the last read's base/quality.
                    seqstring+=base
                    qualstring+=qualchr
        else:
            seqstring = reads[0].seq
            qualstring = reads[0].qual
        count = len(reads)
        outread = None
        minmapq = 255
        for read in reads: # REUSE READ OBJECT FROM ENTRY WITH IDENTICAL SEQUENCE BUT ALWAYS REPORT ID OF FIRST READ
            ofields = None
            if read.mapq < minmapq: minmapq = read.mapq
            if read.tags != None:
                ofields = []
                for key,value in read.tags: # LOOK FOR PREVIOUS PCR DUPLICATE COUNTS
                    if key == "XP": count += value
                    else: ofields.append((key,value))
            #sys.stderr.write('%s %s %s\n'%(read.seq==seqstring,read.seq,seqstring))
            if read.seq == seqstring:
                outread = read
                outread.qname = reads[0].qname
                outread.qual = qualstring
                if ofields != None: outread.tags = ofields
        if outread == None: # CONSENSUS SEQUENCE DOES NOT MATCH ONE OF THE ORIGINAL READS: WE DO NOT KNOW THAT READ IS STILL ALIGNED CORRECTLY!
            #if options.verbose:
            #sys.stderr.write('Consensus sequence does not match one of the original %d reads.\n'%(len(reads)))
            #sys.stderr.write('%s %s\n'%(seqstring,qualstring))
            #for read in reads:
            #sys.stderr.write('%s %s\n'%(read.seq,read.qual))
            # Flag as unmapped so the caller can route it to FastQ realignment.
            outread = reads[0]
            outread.is_unmapped = True
            outread.mapq = minmapq
            outread.seq = seqstring
            outread.qual = qualstring
    else: # REUSE READ OBJECT WITH HIGHEST SUM OF QUALITIES BUT ALWAYS REPORT ID OF FIRST READ
        count = len(reads)
        outread = None
        maxsumqual = 0
        for read in reads:
            nsum = sum(map(ord,read.qual))
            if nsum > maxsumqual:
                outread = read
                maxsumqual = nsum
            if read.tags != None:
                ofields = []
                for key,value in read.tags: # LOOK FOR PREVIOUS PCR DUPLICATE COUNTS
                    if key == "XP": count += value
                    else: ofields.append((key,value))
                # NOTE(review): this assigns the *current* read's stripped tags
                # to whichever read is currently selected as outread -- confirm
                # this cross-assignment is intended.
                outread.tags = ofields
        outread.qname = reads[0].qname
    outread.is_duplicate = False
    # Record the total number of collapsed PCR duplicates in the XP tag.
    if outread.tags == None: outread.tags = [("XP",count)]
    else: outread.tags = outread.tags+[("XP",count)]
    if options.frequency == None or count >= options.frequency:
        return outread
    else:
        return None
def get_consensus(reads):
    """Collapse a cluster of duplicate read pairs into one consensus pair.

    ``reads`` is a list of ``(read1, read2)`` tuples sharing outer mapping
    coordinates.  Pairs are grouped by their exact CIGAR-string pair, the
    most frequent CIGAR pair wins (ties broken toward the shorter CIGAR
    description), and ``calc_consensus`` is run on each mate of the winning
    group.  Returns ``(forward, reverse)``, or ``(None, None)`` when the
    consensus fails QC and cannot (or may not) be rescued.
    """
    by_cigar = {}
    cigar_count = {}
    # DETERMINE MOST FREQUENT CIGAR LINE PAIR
    for (read1,read2) in reads:
        cigars = (tuple(read1.cigar),tuple(read2.cigar))
        if cigars in by_cigar:
            cigar_count[cigars]+=1
            by_cigar[cigars][0].append(read1)
            by_cigar[cigars][1].append(read2)
        else:
            cigar_count[cigars]=1
            by_cigar[cigars]=([read1],[read2])
    # Sort keys as (count, -len(str(cigars)), cigars) and take the last,
    # i.e. the most frequent pair.  Python 2 only: tuple-unpacking lambda
    # and dict.iteritems().
    to_sort = map(lambda (x,y): (y,-len(str(x)),x),cigar_count.iteritems())
    to_sort.sort()
    selcigar = to_sort[-1][-1]
    reads = by_cigar[selcigar]
    del by_cigar
    del cigar_count
    del to_sort
    forward,reverse = calc_consensus(reads[0]),calc_consensus(reads[1])
    # NOTE(review): `reads` is a ([read1s], [read2s]) 2-tuple here, so
    # len(reads) > 1 is always True -- len(reads[0]) was possibly intended;
    # confirm before relying on this condition.
    if len(reads) > 1 and (forward.is_qcfail or reverse.is_qcfail) and options.rescue_qcfail:
        # QC rescue: strip the ZQ:Q (quality-fail) tag from both mates and
        # only keep the pair if that clears the qcfail flag on both.
        new_tags1 = []
        for key,value in forward.tags:
            if key == "ZQ" and value == "Q":
                forward.is_qcfail = False
            else:
                new_tags1.append((key,value))
        new_tags2 = []
        for key,value in reverse.tags:
            if key == "ZQ" and value == "Q":
                reverse.is_qcfail = False
            else:
                new_tags2.append((key,value))
        if not forward.is_qcfail and not reverse.is_qcfail:
            forward.tags = new_tags1
            reverse.tags = new_tags2
            return forward,reverse
        else: return None,None
    elif (forward.is_qcfail or reverse.is_qcfail) and not options.include_qcfail: return None,None
    else: return forward,reverse
def get_consensus_SR(reads):
    """Collapse a cluster of duplicate single reads into one consensus read.

    Reads are grouped by their exact CIGAR string; the most frequent CIGAR
    wins and ``calc_consensus`` is run on that group.  Returns the consensus
    read, or ``None`` when it fails QC and cannot (or may not) be rescued.
    """
    global options
    # DETERMINE MOST FREQUENT CIGAR LINE
    by_cigar = {}
    cigar_count = {}
    for read in reads:
        tcigar = tuple(read.cigar)
        if tcigar in by_cigar:
            cigar_count[tcigar]+=1
            by_cigar[tcigar].append(read)
        else:
            cigar_count[tcigar]=1
            by_cigar[tcigar]=[read]
    # Sort keys as (count, -len(str(cigar)), cigar) and take the last,
    # i.e. the most frequent CIGAR.  Python 2 only constructs.
    to_sort = map(lambda (x,y): (y,-len(str(x)),x),cigar_count.iteritems())
    to_sort.sort()
    selcigar = to_sort[-1][-1]
    reads = by_cigar[selcigar]
    del by_cigar
    del cigar_count
    del to_sort
    consensus = calc_consensus(reads)
    if len(reads) > 1 and consensus.is_qcfail and options.rescue_qcfail:
        # QC rescue: strip the ZQ:Q (quality-fail) tag; keep the read only
        # if that clears the qcfail flag.
        new_tags = []
        for key,value in consensus.tags:
            if key == "ZQ" and value == "Q":
                consensus.is_qcfail = False
            else:
                new_tags.append((key,value))
        if not consensus.is_qcfail:
            consensus.tags = new_tags
            return consensus
        else: return None
    elif consensus.is_qcfail and not options.include_qcfail: return None
    else: return consensus
if options.outprefix == "":
sys.stderr.write("Outprefix can not be empty!\n")
sys.exit()
if options.outdir != None and not os.path.isdir(options.outdir):
sys.stderr.write("Output folder does not exist!\n")
sys.exit()
elif options.outdir == None:
options.outdir = ""
else:
options.outdir = options.outdir.rstrip('/')+'/'
cfastq_SR, cfastq_PE = 0,0
if options.outnewconsensus != None:
outfilename = options.outdir+options.outnewconsensus+"_SR.fastq"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
fastq_SR = open(outfilename,'w')
outfilename = options.outdir+options.outnewconsensus+"_r1.fastq"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
fastq_r1 = open(outfilename,'w')
outfilename = options.outdir+options.outnewconsensus+"_r2.fastq"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
fastq_r2 = open(outfilename,'w')
## CREATE OUTPUT FILE(S)/STREAM
fileflags = 'wb'
if options.SAM: fileflags = 'w'
files = args
if options.pipe: files = [None]
if len(files) > 1:
files=files[:1]
sys.stderr.write("This script supports only one input file! Limiting processing to first filename.\n")
outfile = None
sys.stderr.write("WARNING: This script will 'cluster' reads based on both outer coordinates and pick a representative sequence of the cluster as the one with the dominant CIGAR string. Other sequences are lost and will not be considered in consensus calling.\n")
for filename in files:
if filename == None and not options.SAM:
infile = pysam.Samfile( "-", 'rb' )
elif filename == None and options.SAM:
infile = pysam.Samfile( "-", 'r' )
else:
infile = pysam.Samfile( filename, 'rb' )
id2lib = {}
if options.library and 'RG' in infile.header:
for rgroup in infile.header['RG']:
if 'LB' in rgroup and 'ID' in rgroup:
id2lib[rgroup['ID']] = rgroup['LB']
if outfile == None:
if options.verbose: sys.stderr.write("Creating output files/streams...\n")
if options.pipe:
outfile = pysam.Samfile( "-", fileflags, template = infile)
if options.verbose: sys.stderr.write("BAM/SAM output on stdout...\n")
else:
outfilename = options.outdir+options.outprefix+".bam"
if options.verbose: sys.stderr.write("Creating: %s\n"%outfilename)
outfile = pysam.Samfile( outfilename , fileflags, template = infile)
if ('HD' in outfile.header) and ('SO' in outfile.header['HD']):
outfile.header['HD']['SO'] = 'unsorted'
else:
outfile.header['HD'] = {'VN': '1.4','SO':'unsorted'}
lcheck = None,None
variants = {}
incomplete_variants = {}
curpos = None
curvariants = {}
total_reads = 0
out_reads = 0
out_reads_SR = 0
out_reads_kept = 0
for read in infile:
if read.qual == None: continue
if options.fixID: read.qname = read.qname.split('/')[0]
total_reads += 1
if options.verbose and total_reads % 100000 == 0:
sys.stderr.write("Reads in %d / PCR dups out %d PE | %d SR / Unmapped out %d / FastQ realignment %d PE | %d SR ( %.2f%% ; current pos: %s)\n"%(total_reads,out_reads,out_reads_SR,out_reads_kept,cfastq_PE,cfastq_SR,(out_reads*2+out_reads_SR+out_reads_kept+cfastq_SR+cfastq_PE*2)/float(total_reads)*100,str(curpos)))
if read.is_qcfail and not options.include_qcfail and not options.rescue_qcfail:
#if options.verbose: sys.stderr.write("QC FAIL\n")
continue
if read.is_unmapped and not options.keep:
#if options.verbose: sys.stderr.write("UNMAPPED\n")
continue
elif read.is_unmapped and options.keep:
if not read.is_qcfail:
outfile.write(read)
out_reads_kept += 1
continue
if not read.is_paired and options.merged and not read.qname.startswith("M_"):
#if options.verbose: sys.stderr.write("MERGED\n")
continue
RG = None
if not options.ignore_RG:
if read.tags != None:
for key,value in read.tags:
if key == "RG":
if value in id2lib: RG = id2lib[value]
else: RG = value
break
if options.UMI:
if read.tags != None:
for key,value in read.tags:
if key == "XJ":
RG = value
break
if RG not in variants: variants[RG] = {}
if RG not in incomplete_variants: incomplete_variants[RG] = {}
if RG not in curvariants: curvariants[RG] = {}
if sum(map(len,variants.itervalues())) > options.rbuffer and ((lcheck[0] != curpos[0]) or (lcheck[1]+options.MaxLength < curpos[1])):
lcheck = (curpos[0],curpos[1])
if options.verbose: sys.stderr.write("Full buffer (%d)"%sum(map(len,variants.itervalues()))+str(curpos)+" \n")
for cRG in variants.keys():
hvariants = {}
for (hchr,outpos,outpos_r2),reads in variants[cRG].iteritems():
if ((type(hchr) != type(()) and # SINGLE CHROM MAPPING PE
((hchr != curpos[0]) or
((hchr == curpos[0]) and (max(outpos[1],outpos_r2[1])+options.MaxLength < curpos[1])))) or
(type(hchr) == type(()) and # CROSS CONTIG MAPPING PE
(((hchr[0] != curpos[0]) and (hchr[1] != curpos[0])) or
((hchr[0] == curpos[0]) and (outpos[1]+options.MaxLength < curpos[1])) or
((hchr[1] == curpos[0]) and (outpos_r2[1]+options.MaxLength < curpos[1]))))):
forward,reverse = get_consensus(reads)
if forward == None or reverse == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif (forward.is_unmapped or reverse.is_unmapped) and options.outnewconsensus != None:
cfastq_PE+=1
seq = forward.seq
qual = forward.qual
if forward.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r1.write("@%s/1\n%s\n+\n%s\n"%(forward.qname,seq,qual))
seq = reverse.seq
qual = reverse.qual
if reverse.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r2.write("@%s/2\n%s\n+\n%s\n"%(reverse.qname,seq,qual))
else:
outfile.write(forward)
outfile.write(reverse)
out_reads += 1
else:
hvariants[(hchr,outpos,outpos_r2)]=reads
if (len(hvariants) > 0) or (RG == cRG): variants[cRG] = hvariants
else: del variants[cRG]
if options.verbose: sys.stderr.write("- Full buffer (%d)"%sum(map(len,variants.itervalues()))+str(curpos)+" \n")
if read.is_paired: # PE DATA
if read.mate_is_unmapped and options.keep:
outfile.write(read)
out_reads_kept += 1
continue
#else:
#if options.verbose: sys.stderr.write("UNMAPPED\n")
curpos = (read.tid,read.pos)
hchr = read.tid
outpos = (read.tid,read.pos)
if read.is_reverse: outpos = (read.tid,read.pos+aln_length(read.cigar))
if read.is_read1: #FORWARD READ
if read.qname not in incomplete_variants[RG]:
incomplete_variants[RG][read.qname] = [read,outpos]
else:
read_r2,outpos_r2 = incomplete_variants[RG][read.qname]
del incomplete_variants[RG][read.qname]
if outpos_r2[0] != hchr: hchr = hchr,outpos_r2[0]
if (hchr,outpos,outpos_r2) not in variants[RG]:
variants[RG][(hchr,outpos,outpos_r2)] = [(read,read_r2)]
else:
variants[RG][(hchr,outpos,outpos_r2)].append((read,read_r2))
elif read.is_read2: #REVERSE READ
if read.qname not in incomplete_variants[RG]:
incomplete_variants[RG][read.qname] = [read,outpos]
else:
read_r1,outpos_r1 = incomplete_variants[RG][read.qname]
del incomplete_variants[RG][read.qname]
if outpos_r1[0] != hchr: hchr = outpos_r1[0],hchr
if (hchr,outpos_r1,outpos) not in variants[RG]:
variants[RG][(hchr,outpos_r1,outpos)] = [(read_r1,read)]
else:
variants[RG][(hchr,outpos_r1,outpos)].append((read_r1,read))
else:
sys.stderr.write("Should not happen!")
else: # SR DATA
if (curpos != None) and ((read.tid,read.pos) != curpos):
if options.fivePrime and (read.tid == curpos[0]):
hpos = read.pos-options.MaxLength
hvariants = {}
for key,value in curvariants[RG].iteritems():
if (key[1] < hpos):
cread = get_consensus_SR(value[0])
if cread == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif cread.is_unmpapped and options.outnewconsensus != None:
cfastq_SR+=1
seq = cread.seq
qual = cread.qual
if cread.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_SR.write("@%s\n%s\n+\n%s\n"%(cread.qname,seq,qual))
else:
outfile.write(cread)
out_reads_SR += 1
else:
hvariants[key]=value
curvariants[RG] = hvariants
else:
for key,value in curvariants[RG].iteritems():
cread = get_consensus_SR(value[0])
if cread == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif cread.is_unmapped and options.outnewconsensus != None:
cfastq_SR+=1
seq = cread.seq
qual = cread.qual
if cread.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_SR.write("@%s\n%s\n+\n%s\n"%(cread.qname,seq,qual))
else:
outfile.write(cread)
out_reads_SR += 1
curvariants[RG] = {}
curpos = (read.tid,read.pos)
strand = read.is_reverse
outpos = curpos[1]
if strand and options.fivePrime: outpos+=aln_length(read.cigar)
nkey = (strand,outpos)
if not options.fivePrime: nkey = (strand,outpos,aln_length(read.cigar))
if nkey in curvariants[RG]:
curvariants[RG][nkey][0].append(read)
curvariants[RG][nkey][1]+=1
else:
curvariants[RG][nkey] = [[read],1]
for RG in curvariants.keys():
for key,value in curvariants[RG].iteritems():
read = get_consensus_SR(value[0])
if read == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif read.is_unmapped and options.outnewconsensus != None:
cfastq_SR+=1
seq = read.seq
qual = read.qual
if read.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_SR.write("@%s\n%s\n+\n%s\n"%(read.qname,seq,qual))
else:
outfile.write(read)
out_reads_SR += 1
del curvariants[RG]
for RG in variants.keys():
for key,value in variants[RG].iteritems():
read1,read2 = get_consensus(value)
if read1 == None or read2 == None:
#if options.verbose: sys.stderr.write("FAILED CONSENSUS\n")
continue
elif (read1.is_unmapped or read2.is_unmapped) and options.outnewconsensus != None:
cfastq_PE+=1
seq = read1.seq
qual = read1.qual
if read1.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r1.write("@%s/1\n%s\n+\n%s\n"%(read1.qname,seq,qual))
seq = read2.seq
qual = read2.qual
if read2.is_reverse:
seq = seq.translate(table)[::-1]
qual = qual[::-1]
fastq_r2.write("@%s/2\n%s\n+\n%s\n"%(read2.qname,seq,qual))
else:
outfile.write(read1)
outfile.write(read2)
out_reads += 1
del variants[RG]
if options.outnewconsensus != None:
fastq_SR.close()
fastq_r1.close()
fastq_r2.close()
if cfastq_PE == 0:
outfilename = options.outdir+options.outnewconsensus+"_r1.fastq"
if options.verbose: sys.stderr.write("Removing empty file: %s\n"%outfilename)
os.remove(outfilename)
outfilename = options.outdir+options.outnewconsensus+"_r2.fastq"
if options.verbose: sys.stderr.write("Removing empty file: %s\n"%outfilename)
os.remove(outfilename)
if cfastq_SR == 0:
outfilename = options.outdir+options.outnewconsensus+"_SR.fastq"
if options.verbose: sys.stderr.write("Removing empty file: %s\n"%outfilename)
os.remove(outfilename)
if options.verbose:
sys.stderr.write("Total reads in %d / PCR dups out %d PE | %d SR / Unmapped out %d / FastQ realignment %d PE | %d SR ( %.2f%%)\n"%(total_reads,out_reads,out_reads_SR,out_reads_kept,cfastq_PE,cfastq_SR,(out_reads*2+out_reads_SR+out_reads_kept+cfastq_SR+cfastq_PE*2)/float(total_reads)*100))
| 6,557 | 0 | 115 |
3950362cd5843e8921595707be523c7bbf81ec83 | 798 | py | Python | Hard/295. Find Median from Data Stream/solution (2).py | czs108/LeetCode-Solutions | 889f5b6a573769ad077a6283c058ed925d52c9ec | [
"MIT"
] | 3 | 2020-05-09T12:55:09.000Z | 2022-03-11T18:56:05.000Z | Hard/295. Find Median from Data Stream/solution (2).py | czs108/LeetCode-Solutions | 889f5b6a573769ad077a6283c058ed925d52c9ec | [
"MIT"
] | null | null | null | Hard/295. Find Median from Data Stream/solution (2).py | czs108/LeetCode-Solutions | 889f5b6a573769ad077a6283c058ed925d52c9ec | [
"MIT"
] | 1 | 2022-03-11T18:56:16.000Z | 2022-03-11T18:56:16.000Z | # 295. Find Median from Data Stream
# Runtime: 2710 ms, faster than 5.93% of Python3 online submissions for Find Median from Data Stream.
# Memory Usage: 35.4 MB, less than 94.61% of Python3 online submissions for Find Median from Data Stream.
import bisect
# Insertion Sort
# Your MedianFinder object will be instantiated and called as such:
# obj = MedianFinder()
# obj.addNum(num)
# param_2 = obj.findMedian() | 27.517241 | 105 | 0.631579 | # 295. Find Median from Data Stream
# Runtime: 2710 ms, faster than 5.93% of Python3 online submissions for Find Median from Data Stream.
# Memory Usage: 35.4 MB, less than 94.61% of Python3 online submissions for Find Median from Data Stream.
import bisect
import heapq
class MedianFinder:
    """Maintain a running median with two heaps.

    ``_lo`` is a max-heap (values stored negated) holding the smaller half
    and ``_hi`` is a min-heap holding the larger half; their sizes differ
    by at most one, with ``_lo`` taking the extra element.  This makes
    ``addNum`` O(log n) instead of the O(n) per-insert cost of the previous
    ``bisect.insort`` (insertion sort) approach; ``findMedian`` stays O(1).
    """

    def __init__(self) -> None:
        self._lo: list[int] = []  # max-heap (negated): smaller half
        self._hi: list[int] = []  # min-heap: larger half

    def addNum(self, num: int) -> None:
        """Insert *num*, rebalancing so len(_lo) - len(_hi) is 0 or 1."""
        heapq.heappush(self._lo, -num)
        # Route the largest of the small half over so every _lo <= every _hi.
        heapq.heappush(self._hi, -heapq.heappop(self._lo))
        if len(self._hi) > len(self._lo):
            heapq.heappush(self._lo, -heapq.heappop(self._hi))

    def findMedian(self) -> float:
        """Return the median of all values added so far."""
        if len(self._lo) > len(self._hi):
            return -self._lo[0]
        return (-self._lo[0] + self._hi[0]) / 2
# Your MedianFinder object will be instantiated and called as such:
# obj = MedianFinder()
# obj.addNum(num)
# param_2 = obj.findMedian() | 275 | -2 | 103 |
6a081facb1824db16756c87c283db728e46053c0 | 148 | py | Python | my/src/Base64Test.py | qq57694878/pychonws | cfad3d94e251db35c0f3485bc7231cc9de999913 | [
"Apache-2.0"
] | null | null | null | my/src/Base64Test.py | qq57694878/pychonws | cfad3d94e251db35c0f3485bc7231cc9de999913 | [
"Apache-2.0"
] | null | null | null | my/src/Base64Test.py | qq57694878/pychonws | cfad3d94e251db35c0f3485bc7231cc9de999913 | [
"Apache-2.0"
] | null | null | null | import base64
from io import StringIO
with open('f:/a/a.txt','r') as fi:
fo = StringIO()
base64.encode(fi,fo)
print(fo.getvalue()) | 24.666667 | 34 | 0.621622 | import base64
# base64 operates on bytes: read the input in binary mode and decode the
# MIME-wrapped result for printing.  (The original opened the file in text
# mode and used base64.encode(), which fails on Python 3 text streams and
# was removed from the stdlib in Python 3.9.)
with open('f:/a/a.txt', 'rb') as fi:
    encoded = base64.encodebytes(fi.read())
print(encoded.decode('ascii'))
99087fc603045ebddc2b6d222daf87c2c4ff56ac | 5,957 | py | Python | satchmo/apps/l10n/utils.py | jtslade/satchmo-svn | a9d791342ac6c5712de55c26ea4780057e27d452 | [
"BSD-3-Clause"
] | 1 | 2016-05-09T08:15:33.000Z | 2016-05-09T08:15:33.000Z | satchmo/apps/l10n/utils.py | jtslade/satchmo-svn | a9d791342ac6c5712de55c26ea4780057e27d452 | [
"BSD-3-Clause"
] | null | null | null | satchmo/apps/l10n/utils.py | jtslade/satchmo-svn | a9d791342ac6c5712de55c26ea4780057e27d452 | [
"BSD-3-Clause"
] | null | null | null | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import get_language, to_locale
from livesettings import config_value
from satchmo_utils.numbers import trunc_decimal
import locale
import logging
log = logging.getLogger('l10n.utils')
#backport from python2.5
### Number formatting APIs
# Author: Martin von Loewis
# improved by Georg Brandl
#perform the grouping from right to left
#backport from python2.5
def format(percent, value, loc_conv, grouping=False, monetary=False, *additional):
"""Returns the locale-aware substitution of a %? specifier
(percent).
additional is for format strings which contain one or more
'*' modifiers."""
# this is only for one-percent-specifier strings and this should be checked
if percent[0] != '%':
raise ValueError("format() must be given exactly one %char "
"format specifier")
if additional:
formatted = percent % ((value,) + additional)
else:
formatted = percent % value
# floats and decimal ints need special action!
if percent[-1] in 'eEfFgG':
seps = 0
parts = formatted.split('.')
if grouping:
parts[0], seps = _group(parts[0], loc_conv, monetary=monetary)
decimal_point = loc_conv[monetary and 'mon_decimal_point'
or 'decimal_point']
formatted = decimal_point.join(parts)
while seps:
sp = formatted.find(' ')
if sp == -1: break
formatted = formatted[:sp] + formatted[sp+1:]
seps -= 1
elif percent[-1] in 'diu':
if grouping:
formatted = _group(formatted, monetary=monetary)[0]
return formatted
def moneyfmt(val, curr=None, places=-1, grouping=True, wrapcents='', current_locale=None):
"""Formats val according to the currency settings in the current locale.
Ported-and-modified from Python 2.5
"""
conv = get_locale_conv(current_locale)
if places < 0:
places = conv['int_frac_digits']
val = trunc_decimal(val, places)
try: # Required because Python < 2.5 does not have monetary arg
s = format('%%.%if' % places, abs(val), conv, grouping, monetary=True)
except TypeError:
s = format('%%.%if' % places, abs(val), conv, grouping)
# '<' and '>' are markers if the sign must be inserted between symbol and value
s = '<' + s + '>'
if curr is None:
curr = config_value('LANGUAGE','CURRENCY')
curr = curr.replace("_", " ")
precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes']
separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space']
if precedes:
s = curr + (separated and ' ' or '') + s
else:
s = s + (separated and ' ' or '') + curr
sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
sign = conv[val<0 and 'negative_sign' or 'positive_sign']
if sign_pos == 0:
s = '(' + s + ')'
elif sign_pos == 1:
s = sign + s
elif sign_pos == 2:
s = s + sign
elif sign_pos == 3:
s = s.replace('<', sign)
elif sign_pos == 4:
s = s.replace('>', sign)
else:
# the default if nothing specified;
# this should be the most fitting sign position
s = sign + s
val = s.replace('<', '').replace('>', '')
if wrapcents:
pos = s.rfind(conv['decimal_point'])
if pos>-1:
pos +=1
val = u"%s<%s>%s</%s>" % val[:pos], wrapcents, val[pos:], wrapcents
return val
| 32.375 | 99 | 0.578143 | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import get_language, to_locale
from livesettings import config_value
from satchmo_utils.numbers import trunc_decimal
import locale
import logging
log = logging.getLogger('l10n.utils')
def get_locale_conv(loc=None, tried=None, possibles=None):
    """Return ``locale.localeconv()`` for *loc*, falling back progressively.

    Candidate locales are tried in order: (loc, 'utf-8'), loc, the language
    part before '_', then the same for ``settings.LANGUAGE_CODE``.  On
    failure the function recurses, remembering failures in *tried*.  Raises
    ``ImproperlyConfigured`` when no candidate can be set.

    BUG FIX: the defaults were mutable lists (``tried=[]``), so locales
    that failed once stayed in ``tried`` for every later top-level call and
    could be skipped permanently.  Fresh lists are now created per call;
    the recursion still threads them through explicitly.
    """
    if tried is None:
        tried = []
    if possibles is None:
        possibles = []
    if loc is None:
        loc = to_locale(get_language())
    else:
        if loc.find('-') > -1:
            loc = to_locale(loc)
    if not possibles:
        possibles = [(loc, 'utf-8'), loc]
        pos = loc.find('_')
        if pos > -1:
            possibles.append((loc[:pos], 'utf-8'))
            possibles.append(loc[:pos])
        loc = to_locale(settings.LANGUAGE_CODE)
        possibles.append((loc, 'utf-8'))
        possibles.append(loc)
    loc = None
    for possible in possibles:
        if not possible in tried:
            loc = possible
            break
    if loc:
        try:
            log.debug('setting locale: %s', str(loc).encode('utf-8'))
            locale.setlocale(locale.LC_ALL, loc)
            return locale.localeconv()
        except (locale.Error, ValueError):
            tried.append(loc)
            # NOTE(review): when `loc` is a plain string, loc[0] is its
            # first character; harmless because `possibles` drives the
            # retry, but confirm the intent.
            return get_locale_conv(loc=loc[0], tried=tried, possibles=possibles)
    locs = ", ".join([str(x).encode('utf-8') for x in tried])
    log.fatal(u"Cannot set locale to any of these locales [%s]. Something is misconfigured.", locs)
    raise ImproperlyConfigured("bad locale")
#backport from python2.5
### Number formatting APIs
# Author: Martin von Loewis
# improved by Georg Brandl
#perform the grouping from right to left
def _group(s, conv, monetary=False):
    """Insert the locale thousands separator into the digit string *s*.

    *conv* is a ``locale.localeconv()``-style dict.  Returns
    ``(grouped_string, separator_count)``; the caller (``format``) later
    drops one leading space per inserted separator to keep field widths
    stable.  Backported from the Python 2.5 ``locale`` module.
    """
    thousands_sep = conv[monetary and 'mon_thousands_sep' or 'thousands_sep']
    grouping = conv[monetary and 'mon_grouping' or 'grouping']
    if not grouping:
        return (s, 0)
    result = ""
    seps = 0
    spaces = ""
    if s[-1] == ' ':
        sp = s.find(' ')
        spaces = s[sp:]
        s = s[:sp]
    while s and grouping:
        # if grouping is -1, we are done
        if grouping[0] == locale.CHAR_MAX:
            break
        # 0: re-use last group ad infinitum
        elif grouping[0] != 0:
            # consume the next group size from the grouping list
            group = grouping[0]
            grouping = grouping[1:]
        # BUG FIX: this consume/emit step was indented under the elif
        # above, so a trailing 0 ("repeat last group", e.g. grouping
        # [3, 3, 0]) made the loop spin forever on numbers with more
        # digits than the explicit groups cover.  It must run on every
        # iteration, reusing the previous `group` when grouping[0] == 0,
        # exactly as in the upstream Python 2.5 locale._group.
        if result:
            result = s[-group:] + thousands_sep + result
            seps += 1
        else:
            result = s[-group:]
        s = s[:-group]
        if s and s[-1] not in "0123456789":
            # the leading string is only spaces and signs
            return s + result + spaces, seps
    if not result:
        return s + spaces, seps
    if s:
        result = s + thousands_sep + result
        seps += 1
    return result + spaces, seps
#backport from python2.5
def format(percent, value, loc_conv, grouping=False, monetary=False, *additional):
    """Return the locale-aware substitution of a single %-specifier.

    (Backported from Python 2.5 ``locale.format``; note this module-level
    name deliberately shadows the ``format`` builtin.)  *loc_conv* is a
    ``locale.localeconv()``-style dict; *additional* supplies values for
    any ``*`` width/precision modifiers in *percent*.
    """
    # this is only for one-percent-specifier strings and this should be checked
    if percent[0] != '%':
        raise ValueError("format() must be given exactly one %char "
                         "format specifier")
    if additional:
        formatted = percent % ((value,) + additional)
    else:
        formatted = percent % value
    # floats and decimal ints need special action!
    if percent[-1] in 'eEfFgG':
        seps = 0
        parts = formatted.split('.')
        if grouping:
            parts[0], seps = _group(parts[0], loc_conv, monetary=monetary)
        decimal_point = loc_conv[monetary and 'mon_decimal_point'
                                 or 'decimal_point']
        formatted = decimal_point.join(parts)
        # _group widens the string by `seps` separators; trim one leading
        # space per separator so padded field widths stay stable.
        while seps:
            sp = formatted.find(' ')
            if sp == -1: break
            formatted = formatted[:sp] + formatted[sp+1:]
            seps -= 1
    elif percent[-1] in 'diu':
        if grouping:
            # BUG FIX: _group() requires the conventions dict as its second
            # argument; the original call omitted loc_conv, so grouped
            # integer formatting always raised TypeError.
            formatted = _group(formatted, loc_conv, monetary=monetary)[0]
    return formatted
def moneyfmt(val, curr=None, places=-1, grouping=True, wrapcents='', current_locale=None):
    """Format the Decimal *val* as a currency string per the locale settings.

    *curr* overrides the configured currency symbol; *places* < 0 uses the
    locale's ``int_frac_digits``; *wrapcents*, when given, is an HTML tag
    name used to wrap the fractional part (e.g. 'sup').
    Ported-and-modified from Python 2.5.
    """
    conv = get_locale_conv(current_locale)
    if places < 0:
        places = conv['int_frac_digits']
    val = trunc_decimal(val, places)
    try: # Required because Python < 2.5 does not have monetary arg
        s = format('%%.%if' % places, abs(val), conv, grouping, monetary=True)
    except TypeError:
        s = format('%%.%if' % places, abs(val), conv, grouping)
    # '<' and '>' are markers if the sign must be inserted between symbol and value
    s = '<' + s + '>'
    if curr is None:
        curr = config_value('LANGUAGE','CURRENCY')
    curr = curr.replace("_", " ")
    precedes = conv[val<0 and 'n_cs_precedes' or 'p_cs_precedes']
    separated = conv[val<0 and 'n_sep_by_space' or 'p_sep_by_space']
    if precedes:
        s = curr + (separated and ' ' or '') + s
    else:
        s = s + (separated and ' ' or '') + curr
    sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
    sign = conv[val<0 and 'negative_sign' or 'positive_sign']
    if sign_pos == 0:
        s = '(' + s + ')'
    elif sign_pos == 1:
        s = sign + s
    elif sign_pos == 2:
        s = s + sign
    elif sign_pos == 3:
        s = s.replace('<', sign)
    elif sign_pos == 4:
        s = s.replace('>', sign)
    else:
        # the default if nothing specified;
        # this should be the most fitting sign position
        s = sign + s
    val = s.replace('<', '').replace('>', '')
    if wrapcents:
        # BUG FIX 1: search the marker-free string `val`, not `s` -- the
        # '<'/'>' markers still present in `s` shifted the index.
        pos = val.rfind(conv['decimal_point'])
        if pos>-1:
            pos +=1
            # BUG FIX 2: the original lacked parentheses around the %
            # operands ("..." % val[:pos], wrapcents, ...), which built a
            # 4-tuple and raised TypeError at runtime.
            val = u"%s<%s>%s</%s>" % (val[:pos], wrapcents, val[pos:], wrapcents)
    return val
| 2,271 | 0 | 45 |
67a1ccec321679db60ea96ac87e090f303e904c5 | 413 | py | Python | 1point3.py | ishah011/CTCI | 41fb337c03aba98edb6a619c878c618a1113f231 | [
"MIT"
] | null | null | null | 1point3.py | ishah011/CTCI | 41fb337c03aba98edb6a619c878c618a1113f231 | [
"MIT"
] | null | null | null | 1point3.py | ishah011/CTCI | 41fb337c03aba98edb6a619c878c618a1113f231 | [
"MIT"
] | null | null | null | #
#1.3: Design an algorithm and write code to remove the duplicate characters in a string without using any additional buffer.
#NOTE: one or two additional variables are fine. An extra copy of the array is not.
#
myString = "hubba bubba bubble tape"
noDuplicates = []
for letter in myString:
if letter not in noDuplicates:
noDuplicates.append(letter)
myString = ''.join(noDuplicates)
print myString
| 29.5 | 124 | 0.750605 | #
#1.3: Design an algorithm and write code to remove the duplicate characters in a string without using any additional buffer.
#NOTE: one or two additional variables are fine. An extra copy of the array is not.
#
# Keep the first occurrence of each character, in order, then rebuild the
# string.  (Python 2: note the statement-form `print` below.)
myString = "hubba bubba bubble tape"
noDuplicates = []
for letter in myString:
    if letter not in noDuplicates:
        noDuplicates.append(letter)
# Join the surviving characters back into the de-duplicated string.
myString = ''.join(noDuplicates)
print myString
| 0 | 0 | 0 |
a97ad18f8057b8debe854abbf7d714cc94f77026 | 4,608 | py | Python | src/data/bpeace_beacon_oop.py | intelligent-environments-lab/utx000 | af60a6162d21e38f8cfa5cdebc0f14e717205f12 | [
"MIT"
] | null | null | null | src/data/bpeace_beacon_oop.py | intelligent-environments-lab/utx000 | af60a6162d21e38f8cfa5cdebc0f14e717205f12 | [
"MIT"
] | 95 | 2020-06-08T17:29:13.000Z | 2021-11-04T02:03:22.000Z | src/data/bpeace_beacon_oop.py | intelligent-environments-lab/utx000 | af60a6162d21e38f8cfa5cdebc0f14e717205f12 | [
"MIT"
] | 1 | 2022-02-17T17:14:03.000Z | 2022-02-17T17:14:03.000Z | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 26 10:56:23 2021
@author: linca
"""
import time
import os
import numpy as np
import pandas as pd
if __name__=='__main__':
beacon_list = [1,5,6,7,10,11,15,16,17,19,21,22,23,24,25,26,28,29,30,32,34,36,38,40,41,44,46,48]
bpeace = BPeace(beacon_list)
start = time.perf_counter()
bpeace.process_beacon()
end = time.perf_counter()
print(f'{end-start} seconds')
| 38.722689 | 182 | 0.578125 | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 26 10:56:23 2021
@author: linca
"""
import time
import os
import numpy as np
import pandas as pd
class Beacon:
def __init__(self, path):
self.path = path
self.number = path[path.rfind('/')+2:].lstrip('0')
self.filepaths = {'adafruit':[f'{self.path}/adafruit/{file}' for file in os.listdir(f'{self.path}/adafruit')],
'sensirion':[f'{self.path}/sensirion/{file}' for file in os.listdir(f'{self.path}/sensirion')]}
self.columns = {'adafruit':['Timestamp', 'TVOC', 'eCO2', 'Lux', 'Visible', 'Infrared', 'NO2',
'T_NO2', 'RH_NO2', 'CO', 'T_CO', 'RH_CO'],
'sensirion':['Timestamp','Temperature [C]','Relative Humidity','CO2','PM_N_0p5','PM_N_1','PM_N_2p5','PM_N_4','PM_N_10','PM_C_1','PM_C_2p5','PM_C_4','PM_C_10']
}
def read_csv(self):
def _read_csv(path):
print(self.number+"csv")
columns=None
if 'adafruit' in path:
columns=self.columns['adafruit']
elif 'sensirion' in path:
columns=self.columns['sensirion']
try:
return pd.read_csv(path, index_col='Timestamp',usecols=columns,parse_dates=True,infer_datetime_format=True)
except ValueError:
return pd.DataFrame()
self.sensirion = pd.concat((_read_csv(file) for file in self.filepaths['sensirion']), copy=False).resample('5T').mean()
# print("don one")
self.adafruit = pd.concat((_read_csv(file) for file in self.filepaths['adafruit']), copy=False).resample('5T').mean()
def preprocess(self):
number = self.number
adafruit = self.adafruit
sensirion = self.sensirion
def mislabeled_NO2(df):
# Mis-wiring NO2 sensor doesn't actually exist
df[['CO','T_CO','RH_CO']] = df[['NO2','T_NO2','RH_NO2']]
df[['NO2','T_NO2','RH_NO2']] = np.nan
return df
if number in [28,29]:
adafruit = mislabeled_NO2(adafruit)
adafruit['CO'] /= 1000 #ppb to ppm
beacon_df = adafruit.merge(right=sensirion,left_index=True,right_index=True,how='outer')
def nonnegative(df):
for var in ['CO2','T_NO2','T_CO','Temperature [C]','RH_NO2','RH_CO','Relative Humidity']:
df[var].mask(df[var] < 0, np.nan, inplace=True)
return df
def lower_bound(df):
for var, threshold in zip(['CO2','Lux'],[100,-1]):
df[var].mask(df[var] < threshold, np.nan, inplace=True)
return df
beacon_df = lower_bound(nonnegative(beacon_df))
beacon_df['Beacon'] = self.number
beacon_df = beacon_df.reset_index().set_index(['Beacon','Timestamp'])
self.data = beacon_df
@property
def empty(self):
return len(self.filepaths['adafruit']+self.filepaths['sensirion'])<1
def __str__(self):
return f'Beacon object at {self.path}'
def __repr__(self):
return f'Beacon object at {self.path}'
class BPeace:
    """Pipeline driver: load, clean, combine, and persist a set of beacons.

    *beacon_list* is an iterable of beacon numbers (ints); it is sorted and
    stored.  ``process_beacon`` prints per-stage timings to stdout.
    """
    def __init__(self, beacon_list):
        self.beacons_folder = '../../data/raw/utx000/beacon'
        self.beacon_list = np.sort(beacon_list).tolist()
    def process_beacon(self):
        """Read, preprocess, concatenate, and write all beacon data."""
        beacons = [Beacon(f'{self.beacons_folder}/B{beacon:02}') for beacon in self.beacon_list]
        # Skip beacons whose data directories contain no files at all.
        beacons = [beacon for beacon in beacons if not beacon.empty]
        start = time.perf_counter()
        for beacon in beacons:
            beacon.read_csv()
        print(f'{time.perf_counter()-start} seconds')
        start = time.perf_counter()
        for beacon in beacons:
            beacon.preprocess()
        print(f'{time.perf_counter()-start} seconds')
        start = time.perf_counter()
        self.beacon_data = pd.concat([beacon.data for beacon in beacons])
        print(f'{time.perf_counter()-start} seconds')
        self.beacons=beacons
        # TODO: give real filename to this
        start = time.perf_counter()
        # BUG FIX: the original backslash path ('..\..\data\interim\utx000...')
        # contains '\u', an invalid unicode escape that is a SyntaxError on
        # Python 3; forward slashes match the style used elsewhere.
        self.beacon_data.to_parquet('../../data/interim/utx000_beacon.parquet')
        print(f'{time.perf_counter()-start} seconds')
if __name__=='__main__':
    # Beacon numbers deployed during the study; run the full pipeline and
    # report total wall-clock time.
    beacon_list = [1,5,6,7,10,11,15,16,17,19,21,22,23,24,25,26,28,29,30,32,34,36,38,40,41,44,46,48]
    bpeace = BPeace(beacon_list)
    start = time.perf_counter()
    bpeace.process_beacon()
    end = time.perf_counter()
    print(f'{end-start} seconds')
| 3,869 | 179 | 113 |
fbb6664e47cbad821a97b21e06b8385ddf7caaf7 | 2,436 | py | Python | womeninreddrafts.py | mmiyer/galobot | 363b29949f8b3fde1cbe7fe5b12b2305ba16d89e | [
"MIT"
] | null | null | null | womeninreddrafts.py | mmiyer/galobot | 363b29949f8b3fde1cbe7fe5b12b2305ba16d89e | [
"MIT"
] | null | null | null | womeninreddrafts.py | mmiyer/galobot | 363b29949f8b3fde1cbe7fe5b12b2305ba16d89e | [
"MIT"
] | null | null | null | '''
Parts from https://en.wikipedia.org/wiki/User:Ritchie333/afcbios.py, licensed CC-BY-SA-3.0
'''
import re
from botbase import *
titles = []
page_to_update = "Wikipedia:WikiProject Women in Red/Drafts"
reMarker = re.compile("<ref.*\/ref>|{{.*}}|<!--.*-->|\'\'\'|----")
reTitle = re.compile( '\(.*\)' )
header_new = "New Additions"
header_old = "Existing Pages"
wikitext = "{{/Header}}\n"
wikitext_header_2 = "== {} ==\n"
wikitext_header_3 = "=== {} - {} ===\n"
wikitext_entry = "* [[{}]]\n::<small><nowiki>{}</nowiki></small>\n:::<small><nowiki>{} - {}</nowiki></small>\n"
search_query = 'incategory:"{}" "{}"'
categories = [ "AfC submissions declined as a non-notable biography", "AfC submissions declined as a non-notable academic topic" ]
keywords = [ "she was", "she is", "her book", "her work" ]
for category in categories:
for keyword in keywords:
titles += run_search(category, keyword)
titles = set(titles)
with open('last_titles.txt', 'r') as last_titles_file:
last_titles = set(last_titles_file.read().split("|"))
with open('last_titles.txt', 'w') as last_titles_file:
last_titles_file.write("|".join(titles))
new_titles = titles - last_titles
old_titles = titles & last_titles
wikitext += (generate_entries(new_titles, header_new) + generate_entries(old_titles, header_old))
page = p.Page(site, page_to_update)
page.text = wikitext
page.savewithshutoff(summary = 'Update "Women in Red drafts" report', minor = False)
| 34.309859 | 130 | 0.692529 | '''
Parts from https://en.wikipedia.org/wiki/User:Ritchie333/afcbios.py, licensed CC-BY-SA-3.0
'''
import re
from botbase import *
titles = []
page_to_update = "Wikipedia:WikiProject Women in Red/Drafts"
reMarker = re.compile("<ref.*\/ref>|{{.*}}|<!--.*-->|\'\'\'|----")
reTitle = re.compile( '\(.*\)' )
header_new = "New Additions"
header_old = "Existing Pages"
wikitext = "{{/Header}}\n"
wikitext_header_2 = "== {} ==\n"
wikitext_header_3 = "=== {} - {} ===\n"
wikitext_entry = "* [[{}]]\n::<small><nowiki>{}</nowiki></small>\n:::<small><nowiki>{} - {}</nowiki></small>\n"
search_query = 'incategory:"{}" "{}"'
categories = [ "AfC submissions declined as a non-notable biography", "AfC submissions declined as a non-notable academic topic" ]
keywords = [ "she was", "she is", "her book", "her work" ]
def run_search(category, keyword):
    """Search the Draft namespace (118) for pages in *category* whose text
    contains *keyword*; return the list of matching page titles."""
    results = p.data.api.ListGenerator(
        "search",
        srnamespace=118,
        srsearch=search_query.format(category, keyword),
        srprop="",
        site=site,
    )
    found_titles = []
    for hit in results:
        found_titles.append(hit["title"])
    return found_titles
def generate_entries(titles, header):
    """Build a wikitext report section listing the given draft titles.

    Emits a level-2 heading for *header*, then one bullet per title showing
    the page's first sentence, last-edit date and edit summary.  A level-3
    sub-heading ("1 - 50", "51 - 100", ...) is inserted every 50 entries.
    """
    section_wikitext = wikitext_header_2.format(header)
    for num, title in enumerate(titles):
        if num % 50 == 0:
            # Start a new "N - N+49" sub-section every 50 entries.
            section_wikitext += wikitext_header_3.format(num + 1, num + 50)
        page = p.Page(site, title)
        # First 10 characters of the ISO timestamp -> just the date part.
        timestamp = str(page.latest_revision["timestamp"])[0:10]
        editsummary = page.latest_revision["comment"]
        # Strip refs/templates/HTML comments/bold markers before searching.
        shortText = reMarker.sub( '', page.text )
        # title[6:] presumably drops the "Draft:" namespace prefix — confirm
        # all titles carry it; reTitle then removes "(...)" disambiguators.
        shortTitle = reTitle.sub( '', title[6:] )
        # First sentence: from the (cleaned) title up to the first period.
        sentences = re.search( shortTitle + '.*\.', shortText )
        if sentences is not None:
            firstsentence = sentences.group().partition( '.' )[0]
        else:
            firstsentence = ""
        section_wikitext += wikitext_entry.format(
            title, firstsentence, timestamp, editsummary
        )
    return section_wikitext
# Collect every matching draft title across all category/keyword pairs.
for category in categories:
    for keyword in keywords:
        titles += run_search(category, keyword)
titles = set(titles)
# Diff against the snapshot from the previous run to split the report into
# "new" and "existing" entries, then persist the current snapshot.
with open('last_titles.txt', 'r') as last_titles_file:
    last_titles = set(last_titles_file.read().split("|"))
with open('last_titles.txt', 'w') as last_titles_file:
    last_titles_file.write("|".join(titles))
new_titles = titles - last_titles
old_titles = titles & last_titles
wikitext += (generate_entries(new_titles, header_new) + generate_entries(old_titles, header_old))
# Publish the assembled report page.
page = p.Page(site, page_to_update)
page.text = wikitext
page.savewithshutoff(summary = 'Update "Women in Red drafts" report', minor = False)
| 945 | 0 | 46 |
fb650ad8b13864b11e7e7d51e3a8529e09d17d26 | 5,793 | py | Python | alipay/aop/api/domain/MybankCreditUserBankcardBindModel.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/domain/MybankCreditUserBankcardBindModel.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/domain/MybankCreditUserBankcardBindModel.py | snowxmas/alipay-sdk-python-all | 96870ced60facd96c5bce18d19371720cbda3317 | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
| 30.329843 | 77 | 0.578457 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class MybankCreditUserBankcardBindModel(object):
    """Request model for binding a bank card to a MYbank credit user.

    Every field in ``FIELDS`` is exposed as a read/write property backed by a
    private ``_<name>`` attribute.  The properties are generated in
    ``_install_properties`` below instead of hand-writing eleven identical
    getter/setter pairs; serialisation is likewise driven by ``FIELDS``.
    """

    # Ordered list of all request fields; drives __init__, the generated
    # properties, to_alipay_dict and from_alipay_dict.
    FIELDS = (
        "account_name",
        "admin_type",
        "admin_user_id",
        "bankcard_no",
        "cert_no",
        "cert_type",
        "fip_code",
        "ip_id",
        "ip_role_id",
        "purpose",
        "request_id",
    )

    def __init__(self):
        # Initialise every backing attribute to None.
        for field in self.FIELDS:
            setattr(self, "_" + field, None)

    def to_alipay_dict(self):
        """Serialise the populated (truthy) fields into a plain dict.

        Nested values exposing their own ``to_alipay_dict`` are serialised
        recursively, matching the behaviour of the other alipay models.
        """
        params = dict()
        for field in self.FIELDS:
            value = getattr(self, field)
            if value:
                if hasattr(value, 'to_alipay_dict'):
                    params[field] = value.to_alipay_dict()
                else:
                    params[field] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from an API dict; return None for empty/missing input."""
        if not d:
            return None
        o = MybankCreditUserBankcardBindModel()
        for field in MybankCreditUserBankcardBindModel.FIELDS:
            if field in d:
                setattr(o, field, d[field])
        return o


def _install_properties():
    # Generate one property per field so the public attribute interface
    # (model.account_name, model.request_id, ...) is unchanged.
    for field in MybankCreditUserBankcardBindModel.FIELDS:
        def _get(self, _name="_" + field):
            return getattr(self, _name)

        def _set(self, value, _name="_" + field):
            setattr(self, _name, value)

        setattr(MybankCreditUserBankcardBindModel, field, property(_get, _set))


_install_properties()
| 4,545 | 1,110 | 23 |
2d90dc69d13eaf8d653ebcd3ec7a719ba0d6e4bb | 1,929 | py | Python | runway/_cli/commands/_gen_sample/_k8s_flux_repo.py | troyready/runway | 4fd299961a4b73df39e14f4f19a7236f7be17dd8 | [
"Apache-2.0"
] | null | null | null | runway/_cli/commands/_gen_sample/_k8s_flux_repo.py | troyready/runway | 4fd299961a4b73df39e14f4f19a7236f7be17dd8 | [
"Apache-2.0"
] | null | null | null | runway/_cli/commands/_gen_sample/_k8s_flux_repo.py | troyready/runway | 4fd299961a4b73df39e14f4f19a7236f7be17dd8 | [
"Apache-2.0"
] | null | null | null | """``runway gen-sample k8s-flux-repo`` command."""
import logging
import shutil
import sys
from typing import Any # pylint: disable=W
import click
from ... import options
from .utils import TEMPLATES, convert_gitignore, copy_sample, write_tfstate_template
if sys.version_info.major > 2:
from pathlib import Path # pylint: disable=E
else:
from pathlib2 import Path # pylint: disable=E
LOGGER = logging.getLogger(__name__.replace("._", "."))
@click.command("k8s-flux-repo", short_help="k8s + flux + tf (k8s-tf-infrastructure)")
@options.debug
@options.no_color
@options.verbose
@click.pass_context
def k8s_flux_repo(ctx, **_):
# type: (click.Context, Any) -> None
"""Generate a sample Kubernetes cluster with Flux CD managed via Terraform."""
src = TEMPLATES / "k8s-flux-repo"
dest = Path.cwd() / "k8s-tf-infrastructure"
src_awscli = TEMPLATES / "k8s-cfn-repo/k8s-master.cfn/k8s_hooks/awscli.py"
dest_awscli = dest / "gen-kubeconfig.cfn/k8s_hooks/awscli.py"
copy_sample(ctx, src, dest)
tf_eks_base = TEMPLATES / "k8s-tf-repo" / "eks-base.tf"
copy_sample(ctx, tf_eks_base, dest / tf_eks_base.parts[-1])
convert_gitignore(dest / "_gitignore")
gen_kubeconfig_src_dir = TEMPLATES / "k8s-tf-repo" / "gen-kubeconfig.cfn"
copy_sample(ctx, gen_kubeconfig_src_dir, dest / gen_kubeconfig_src_dir.parts[-1])
LOGGER.debug('copying "%s" to "%s"', src_awscli, dest_awscli)
shutil.copyfile(str(src_awscli), str(dest_awscli))
tfstate_src_dir = TEMPLATES / "k8s-tf-repo" / "tfstate.cfn"
copy_sample(ctx, tfstate_src_dir, dest / tfstate_src_dir.parts[-1])
tfstate_templates_dir = dest / "tfstate.cfn/templates"
tfstate_templates_dir.mkdir()
write_tfstate_template(tfstate_templates_dir / "tf_state.yml")
LOGGER.success("Sample k8s infrastructure repo created at %s", dest)
LOGGER.notice("See the README for setup and deployment instructions.")
| 37.823529 | 85 | 0.723691 | """``runway gen-sample k8s-flux-repo`` command."""
import logging
import shutil
import sys
from typing import Any # pylint: disable=W
import click
from ... import options
from .utils import TEMPLATES, convert_gitignore, copy_sample, write_tfstate_template
if sys.version_info.major > 2:
from pathlib import Path # pylint: disable=E
else:
from pathlib2 import Path # pylint: disable=E
LOGGER = logging.getLogger(__name__.replace("._", "."))
@click.command("k8s-flux-repo", short_help="k8s + flux + tf (k8s-tf-infrastructure)")
@options.debug
@options.no_color
@options.verbose
@click.pass_context
def k8s_flux_repo(ctx, **_):
    # type: (click.Context, Any) -> None
    """Generate a sample Kubernetes cluster with Flux CD managed via Terraform."""
    src = TEMPLATES / "k8s-flux-repo"
    dest = Path.cwd() / "k8s-tf-infrastructure"
    # The awscli hook is reused from the CloudFormation sample repo.
    src_awscli = TEMPLATES / "k8s-cfn-repo/k8s-master.cfn/k8s_hooks/awscli.py"
    dest_awscli = dest / "gen-kubeconfig.cfn/k8s_hooks/awscli.py"
    copy_sample(ctx, src, dest)
    # Borrow the EKS base Terraform config from the plain k8s-tf sample.
    tf_eks_base = TEMPLATES / "k8s-tf-repo" / "eks-base.tf"
    copy_sample(ctx, tf_eks_base, dest / tf_eks_base.parts[-1])
    # NOTE(review): presumably turns the template's "_gitignore" into a real
    # ".gitignore" — confirm against convert_gitignore's implementation.
    convert_gitignore(dest / "_gitignore")
    gen_kubeconfig_src_dir = TEMPLATES / "k8s-tf-repo" / "gen-kubeconfig.cfn"
    copy_sample(ctx, gen_kubeconfig_src_dir, dest / gen_kubeconfig_src_dir.parts[-1])
    LOGGER.debug('copying "%s" to "%s"', src_awscli, dest_awscli)
    shutil.copyfile(str(src_awscli), str(dest_awscli))
    # Copy the Terraform-state sample module and generate its template.
    tfstate_src_dir = TEMPLATES / "k8s-tf-repo" / "tfstate.cfn"
    copy_sample(ctx, tfstate_src_dir, dest / tfstate_src_dir.parts[-1])
    tfstate_templates_dir = dest / "tfstate.cfn/templates"
    tfstate_templates_dir.mkdir()
    write_tfstate_template(tfstate_templates_dir / "tf_state.yml")
    LOGGER.success("Sample k8s infrastructure repo created at %s", dest)
    LOGGER.notice("See the README for setup and deployment instructions.")
| 0 | 0 | 0 |
a7c775b83b3badde5a47aed95ba249e465034755 | 1,339 | py | Python | Werewolf/agents/TrainablePlayer.py | GeorgeVelikov/Werewolf-Framework | 6a4501cc98cab92111eec2551b9a3d2464adad7f | [
"MIT"
] | 1 | 2021-11-14T16:51:16.000Z | 2021-11-14T16:51:16.000Z | Werewolf/agents/TrainablePlayer.py | GeorgeVelikov/Werewolf-Framework | 6a4501cc98cab92111eec2551b9a3d2464adad7f | [
"MIT"
] | null | null | null | Werewolf/agents/TrainablePlayer.py | GeorgeVelikov/Werewolf-Framework | 6a4501cc98cab92111eec2551b9a3d2464adad7f | [
"MIT"
] | null | null | null | from Werewolf.agents.AgentPlayer import AgentPlayer;
from Werewolf.game.actions.Vote import Vote;
from Shared.enums.AgentTypeEnum import AgentTypeEnum;
import random;
| 21.95082 | 70 | 0.633308 | from Werewolf.agents.AgentPlayer import AgentPlayer;
from Werewolf.game.actions.Vote import Vote;
from Shared.enums.AgentTypeEnum import AgentTypeEnum;
import random;
class TrainablePlayer(AgentPlayer):
    """Placeholder trainable agent: for now it simply votes at random.

    Day and night actions are identical; roughly half the time the agent
    abstains (votes for no one) to mimic "waiting".
    """

    def __init__(self, name, game):
        super().__init__(name, game)

    @property
    def AgentType(self):
        return AgentTypeEnum.TrainableAgent

    # Overrides from the base Agent class, needed by the client-server
    # game implementation to force a play from the user.
    def ActDay(self):
        return self.Act()

    def ActNight(self):
        return self.Act()

    def Act(self):
        """Return a Vote action, or None when the agent cannot act."""
        if not (self.IsAlive and self.Game.HasStarted):
            return None
        # Currently random, we'll add metrics later.
        candidates = list(self.Game.Players)
        if not candidates:
            return Vote(self, None)
        # Default to waiting: 50/50 between abstaining and a random target.
        target = random.choice(candidates) if random.random() < 0.5 else None
        return Vote(self, target)

    def PreGameSetup(self):
        return

    def PostGameSetup(self):
        return

    #region Communication
    def Talk(self):
        return

    def Sway(self):
        return
    #endregion
| 707 | 440 | 23 |
d3dc3aa290de068346b689be4570c19ea4648693 | 179 | py | Python | ddtrace/contrib/__init__.py | p7g/dd-trace-py | 141ac0ab6e9962e3b3bafc9de172076075289a19 | [
"Apache-2.0",
"BSD-3-Clause"
] | 308 | 2016-12-07T16:49:27.000Z | 2022-03-15T10:06:45.000Z | ddtrace/contrib/__init__.py | p7g/dd-trace-py | 141ac0ab6e9962e3b3bafc9de172076075289a19 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1,928 | 2016-11-28T17:13:18.000Z | 2022-03-31T21:43:19.000Z | ddtrace/contrib/__init__.py | p7g/dd-trace-py | 141ac0ab6e9962e3b3bafc9de172076075289a19 | [
"Apache-2.0",
"BSD-3-Clause"
] | 311 | 2016-11-27T03:01:49.000Z | 2022-03-18T21:34:03.000Z | from ..internal.utils.importlib import func_name # noqa
from ..internal.utils.importlib import module_name # noqa
from ..internal.utils.importlib import require_modules # noqa
| 44.75 | 62 | 0.798883 | from ..internal.utils.importlib import func_name # noqa
from ..internal.utils.importlib import module_name # noqa
from ..internal.utils.importlib import require_modules # noqa
| 0 | 0 | 0 |
52372e902d1b694b2b7e55ae985ee6bac682ffc7 | 651 | py | Python | sample.py | nanato12/getCryptocurrencyRate | 82cf4f7dde6ddf8d787c89ff30d0cca54a77bd50 | [
"MIT"
] | 1 | 2021-11-10T07:53:49.000Z | 2021-11-10T07:53:49.000Z | sample.py | nanato12/getCryptocurrencyRate | 82cf4f7dde6ddf8d787c89ff30d0cca54a77bd50 | [
"MIT"
] | null | null | null | sample.py | nanato12/getCryptocurrencyRate | 82cf4f7dde6ddf8d787c89ff30d0cca54a77bd50 | [
"MIT"
] | 1 | 2021-02-27T19:59:33.000Z | 2021-02-27T19:59:33.000Z | from datetime import datetime
from getCryptocurrencyRate import CryptoCurrencyRate
product = "BTC_JPY"
# ["BTC_JPY", "XRP_JPY", "ETH_JPY", "XTZ_JPY", "XLM_JPY", "XEM_JPY", "BAT_JPY", "ETC_JPY", "LTC_JPY", "BCH_JPY", "MONA_JPY", "LSK_JPY"]
scale = "hour"
# ["hour","day","week","month","year"]
res = CryptoCurrencyRate(product, scale).get()
print("\n***情報***")
print("リクエストステータス " + str(res.status))
print("現在 " + res.price_info_list[-1].price_str + "JPY")
print("推移 " + res.change_str + "%")
print("\n***一覧***")
for price_info in res.price_info_list:
print(datetime.fromtimestamp(price_info.timestamp))
print(price_info.price_str + "JPY")
# Sample usage of CryptoCurrencyRate: fetch hourly BTC/JPY rates and print
# a summary followed by the full price history.
from datetime import datetime
from getCryptocurrencyRate import CryptoCurrencyRate
product = "BTC_JPY"
# ["BTC_JPY", "XRP_JPY", "ETH_JPY", "XTZ_JPY", "XLM_JPY", "XEM_JPY", "BAT_JPY", "ETC_JPY", "LTC_JPY", "BCH_JPY", "MONA_JPY", "LSK_JPY"]
scale = "hour"
# ["hour","day","week","month","year"]
res = CryptoCurrencyRate(product, scale).get()
# Summary: request status, latest price and percentage change.
print("\n***情報***")
print("リクエストステータス " + str(res.status))
print("現在 " + res.price_info_list[-1].price_str + "JPY")
print("推移 " + res.change_str + "%")
# Full list: one timestamp/price pair per interval.
print("\n***一覧***")
for price_info in res.price_info_list:
    print(datetime.fromtimestamp(price_info.timestamp))
    print(price_info.price_str + "JPY")
| 0 | 0 | 0 |
e12262bdab851c1c662182f5d1041d745812f5e1 | 661 | py | Python | Ago-Dic-2018/Orlando Martinez/practica 2/UnchangedMagicians.py | angelicardz/DAS_Sistemas | e2a69fec358f0fad4fe05c39ea6168c89eed41ac | [
"MIT"
] | 41 | 2017-09-26T09:36:32.000Z | 2022-03-19T18:05:25.000Z | Ago-Dic-2018/Orlando Martinez/practica 2/UnchangedMagicians.py | angelicardz/DAS_Sistemas | e2a69fec358f0fad4fe05c39ea6168c89eed41ac | [
"MIT"
] | 67 | 2017-09-11T05:06:12.000Z | 2022-02-14T04:44:04.000Z | Ago-Dic-2018/Orlando Martinez/practica 2/UnchangedMagicians.py | angelicardz/DAS_Sistemas | e2a69fec358f0fad4fe05c39ea6168c89eed41ac | [
"MIT"
] | 210 | 2017-09-01T00:10:08.000Z | 2022-03-19T18:05:12.000Z |
nombres_magos = ['Luis', 'Pedro', 'Antonio']
show_magicians(nombres_magos)
print("\nGrandes Magos:")
great_magicians = make_great(nombres_magos[:])
show_magicians(great_magicians)
print("\nMagos Originales:")
show_magicians(nombres_magos)
def show_magicians(nombres_magos):
    """Print each magician's name on its own line."""
    for name in nombres_magos:
        print(name)
def make_great(nombres_magos):
    """Append ' the Great' to every name, modifying the list in place.

    Note: the list order ends up reversed, matching the historical behaviour
    of popping names off the end while building the new list.  The same list
    object is mutated and returned.
    """
    honoured = [name + ' the Great' for name in reversed(nombres_magos)]
    nombres_magos[:] = honoured
    return nombres_magos
nombres_magos = ['Luis', 'Pedro', 'Antonio']
show_magicians(nombres_magos)
print("\nGrandes Magos:")
# Pass a slice copy so make_great's in-place mutation leaves the original intact.
great_magicians = make_great(nombres_magos[:])
show_magicians(great_magicians)
print("\nMagos Originales:")
show_magicians(nombres_magos)
| 374 | 0 | 45 |
533c2032d802d13e7292ae751703bdc33fd8bed6 | 6,609 | py | Python | hummingbot/connector/exchange/altmarkets/altmarkets_in_flight_order.py | BGTCapital/hummingbot | 2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242 | [
"Apache-2.0"
] | 1 | 2022-03-20T01:27:33.000Z | 2022-03-20T01:27:33.000Z | hummingbot/connector/exchange/altmarkets/altmarkets_in_flight_order.py | BGTCapital/hummingbot | 2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242 | [
"Apache-2.0"
] | 6 | 2022-01-31T15:44:54.000Z | 2022-03-06T04:27:12.000Z | hummingbot/connector/exchange/altmarkets/altmarkets_in_flight_order.py | BGTCapital/hummingbot | 2c50f50d67cedccf0ef4d8e3f4c8cdce3dc87242 | [
"Apache-2.0"
] | 1 | 2022-03-09T18:33:43.000Z | 2022-03-09T18:33:43.000Z | from decimal import Decimal
from typing import (
Any,
Dict,
Optional,
)
import asyncio
from hummingbot.core.event.events import (
OrderType,
TradeType
)
from hummingbot.connector.in_flight_order_base import InFlightOrderBase
from .altmarkets_constants import Constants
s_decimal_0 = Decimal(0)
| 36.513812 | 112 | 0.588137 | from decimal import Decimal
from typing import (
Any,
Dict,
Optional,
)
import asyncio
from hummingbot.core.event.events import (
OrderType,
TradeType
)
from hummingbot.connector.in_flight_order_base import InFlightOrderBase
from .altmarkets_constants import Constants
s_decimal_0 = Decimal(0)
class AltmarketsInFlightOrder(InFlightOrderBase):
    """Altmarkets-specific in-flight order tracker.

    Maps Altmarkets order/trade payloads onto hummingbot's generic
    ``InFlightOrderBase`` state.  Trade updates are de-duplicated through
    ``trade_id_set``; cancellation is signalled via ``cancelled_event``.
    """

    def __init__(self,
                 client_order_id: str,
                 exchange_order_id: Optional[str],
                 trading_pair: str,
                 order_type: OrderType,
                 trade_type: TradeType,
                 price: Decimal,
                 amount: Decimal,
                 initial_state: str = "local"):
        super().__init__(
            client_order_id,
            exchange_order_id,
            trading_pair,
            order_type,
            trade_type,
            price,
            amount,
            initial_state,
        )
        # Trade ids already applied, used to skip duplicate updates.
        self.trade_id_set = set()
        # Set once the exchange confirms cancellation.
        self.cancelled_event = asyncio.Event()

    @property
    def is_done(self) -> bool:
        # Terminal states (filled/cancelled/rejected) per exchange constants.
        return self.last_state in Constants.ORDER_STATES['DONE']

    @property
    def is_failure(self) -> bool:
        return self.last_state in Constants.ORDER_STATES['FAIL']

    @property
    def is_cancelled(self) -> bool:
        return self.last_state in Constants.ORDER_STATES['CANCEL']

    @property
    def is_local(self) -> bool:
        # "local" means the order has not yet been acknowledged by the exchange.
        return self.last_state == "local"

    def update_exchange_order_id(self, exchange_id: str):
        """Record the exchange id; a local order becomes 'submitted'."""
        super().update_exchange_order_id(exchange_id)
        if self.is_local:
            self.last_state = "submitted"

    # @property
    # def order_type_description(self) -> str:
    #     """
    #     :return: Order description string . One of ["limit buy" / "limit sell" / "market buy" / "market sell"]
    #     """
    #     order_type = "market" if self.order_type is OrderType.MARKET else "limit"
    #     side = "buy" if self.trade_type == TradeType.BUY else "sell"
    #     return f"{order_type} {side}"

    @classmethod
    def from_json(cls, data: Dict[str, Any]) -> InFlightOrderBase:
        """
        :param data: json data from API
        :return: formatted InFlightOrder
        """
        retval = AltmarketsInFlightOrder(
            data["client_order_id"],
            data["exchange_order_id"],
            data["trading_pair"],
            getattr(OrderType, data["order_type"]),
            getattr(TradeType, data["trade_type"]),
            Decimal(data["price"]),
            Decimal(data["amount"]),
            data["last_state"]
        )
        retval.executed_amount_base = Decimal(data["executed_amount_base"])
        retval.executed_amount_quote = Decimal(data["executed_amount_quote"])
        retval.fee_asset = data["fee_asset"]
        retval.fee_paid = Decimal(data["fee_paid"])
        retval.last_state = data["last_state"]
        return retval

    def update_with_order_update(self, order_update: Dict[str, Any]) -> bool:
        """
        Updates the in flight order with trade update (from private/get-order-detail end point)
        return: True if the order gets updated otherwise False
        Example Order:
        {
            "id": 9401,
            "market": "rogerbtc",
            "kind": "ask",
            "side": "sell",
            "ord_type": "limit",
            "price": "0.00000099",
            "avg_price": "0.00000099",
            "state": "wait",
            "origin_volume": "7000.0",
            "remaining_volume": "2810.1",
            "executed_volume": "4189.9",
            "at": 1596481983,
            "created_at": 1596481983,
            "updated_at": 1596553643,
            "trades_count": 272
        }
        """
        # Update order execution status
        self.last_state = order_update["state"]
        # Update order — prefer the explicit "price", falling back to "avg_price".
        executed_price = Decimal(str(order_update.get("price")
                                     if order_update.get("price") is not None
                                     else order_update.get("avg_price", "0")))
        self.executed_amount_base = Decimal(str(order_update["executed_volume"]))
        self.executed_amount_quote = (executed_price * self.executed_amount_base) \
            if self.executed_amount_base > s_decimal_0 else s_decimal_0
        if self.executed_amount_base <= s_decimal_0:
            # No trades executed yet.
            return False
        # De-duplicate on (order id, update time) — same pair means already applied.
        trade_id = f"{order_update['id']}-{order_update['updated_at']}"
        if trade_id in self.trade_id_set:
            # trade already recorded
            return False
        self.trade_id_set.add(trade_id)
        # Check if trade fee has been sent; "maker_fee" is a percentage rate.
        # NOTE(review): fee is computed against the base amount — confirm the
        # exchange quotes maker_fee/trade_fee relative to base, not quote.
        reported_fee_pct = order_update.get("maker_fee")
        if reported_fee_pct:
            self.fee_paid = Decimal(str(reported_fee_pct)) * self.executed_amount_base
        else:
            self.fee_paid = order_update.get("trade_fee") * self.executed_amount_base
        if not self.fee_asset:
            self.fee_asset = self.quote_asset
        return True

    def update_with_trade_update(self, trade_update: Dict[str, Any]) -> bool:
        """
        Updates the in flight order with trade update (from private/get-order-detail end point)
        return: True if the order gets updated otherwise False
        Example Trade:
        {
            "amount":"1.0",
            "created_at":1615978645,
            "id":9618578,
            "market":"rogerbtc",
            "order_id":2324774,
            "price":"0.00000004",
            "side":"sell",
            "taker_type":"sell",
            "total":"0.00000004"
        }
        """
        self.executed_amount_base = Decimal(str(trade_update.get("amount", "0")))
        self.executed_amount_quote = Decimal(str(trade_update.get("total", "0")))
        if self.executed_amount_base <= s_decimal_0:
            # No trades executed yet.
            return False
        # De-duplicate on (order id, creation time).
        trade_id = f"{trade_update['order_id']}-{trade_update['created_at']}"
        if trade_id in self.trade_id_set:
            # trade already recorded
            return False
        trade_update["exchange_trade_id"] = trade_update["id"]
        self.trade_id_set.add(trade_id)
        # Check if trade fee has been sent; "fee" is a percentage rate.
        reported_fee_pct = trade_update.get("fee")
        if reported_fee_pct:
            self.fee_paid = Decimal(str(reported_fee_pct)) * self.executed_amount_base
        else:
            self.fee_paid = trade_update.get("trade_fee") * self.executed_amount_base
        if not self.fee_asset:
            self.fee_asset = self.quote_asset
        return True
| 1,057 | 5,213 | 23 |
bcd809e3a05c26a17baed03cebd37406ac23e5f9 | 2,192 | py | Python | resources/websiteCrawler.py | RubenMeeuw/Webgrabber | 083bbd1425e455f2e039979ad6648fd66b67280e | [
"MIT"
] | null | null | null | resources/websiteCrawler.py | RubenMeeuw/Webgrabber | 083bbd1425e455f2e039979ad6648fd66b67280e | [
"MIT"
] | null | null | null | resources/websiteCrawler.py | RubenMeeuw/Webgrabber | 083bbd1425e455f2e039979ad6648fd66b67280e | [
"MIT"
] | null | null | null | from subprocess import call
import shutil
import os
from Converter import ( UrlConverter, LinkConverter )
# import config
import json
import logging
| 31.314286 | 120 | 0.728102 | from subprocess import call
import shutil
import os
from Converter import ( UrlConverter, LinkConverter )
# import config
import json
import logging
class Grabber:
    """Mirrors a list of websites to disk with wget and optionally rewrites
    their links so the mirrored copies point at each other.

    Configuration keys read: WEB.CRAWL_LIST (file of URLs, one per line),
    WEB.WEB_OUTPUT (destination root), WEB.WEB_INSTALL_FILE, and IP_PREFIX.
    """

    def __init__(self, config, linkparser):
        """Initialise a web grabber."""
        self.config = config
        # Load the necessary variables from the config file
        self.website_list = self.config['WEB']['CRAWL_LIST']
        self.output = self.config['WEB']['WEB_OUTPUT']
        self.output_grabbed_websites = self.output + '/grabbedWebsites'
        self.install_web = self.config['WEB']['WEB_INSTALL_FILE']
        self.ip_prefix = self.config["IP_PREFIX"]
        self.urlConverter = UrlConverter(self.config)
        self.linkConverter = LinkConverter(self.config)
        # Whether to run the link-conversion pass after grabbing.
        self.convertLinks = linkparser
        # NOTE(review): return value is discarded — presumably meant to
        # initialise/configure the root logger; confirm intent.
        logging.getLogger()

    def getWebsite(self, url):
        """Use wget command to retrieve recursively all files of website"""
        logging.debug("Crawling {} from the web...".format(url))
        # Each site is mirrored into its own folder named by the converted URL.
        call(['wget', url, '--recursive', '--convert-links',
              '--page-requisites', '--continue', '--tries=5',
              '--directory-prefix=' + self.output_grabbed_websites + "/" + self.urlConverter.convertUrl(url), '--retry-connrefused',
              '--quiet', '-nH', '--no-parent'])

    def getWebsiteFromFile(self):
        """Read all lines from the website list and execute the getWebsite method"""
        with open(self.website_list, "r") as fp:
            for line in fp:
                self.getWebsite(line.rstrip('\n'))

    def clearGrabbedFolder(self):
        """Clear all previous grabbed websites from the folder"""
        logging.debug("Clearing grabbed website folder")
        if os.path.exists(self.output_grabbed_websites):
            shutil.rmtree(self.output_grabbed_websites)

    def execute(self):
        """Start the grabbing process"""
        # Remove all old files
        self.clearGrabbedFolder()
        # Create new url names for the websites
        self.urlConverter.convertUrlsInFile()
        # Get all website sources from WEBSITES_FILE
        self.getWebsiteFromFile()
        # Optionally convert the links in the grabbed website to point to each otherself.
        if (self.convertLinks):
            logging.debug('Converting links')
            for f in os.listdir(self.output_grabbed_websites):
                self.linkConverter.convertFolder(os.path.join(self.output_grabbed_websites, f))
| 0 | 2,020 | 23 |
28df323f5da4b06632cc76970159e6dd2203664c | 2,804 | py | Python | harvester/oai.py | JPrevost/oai-pmh-harvester | e60f9f375d44ea79e78e8c4add46830ad4253ea7 | [
"Apache-2.0"
] | null | null | null | harvester/oai.py | JPrevost/oai-pmh-harvester | e60f9f375d44ea79e78e8c4add46830ad4253ea7 | [
"Apache-2.0"
] | null | null | null | harvester/oai.py | JPrevost/oai-pmh-harvester | e60f9f375d44ea79e78e8c4add46830ad4253ea7 | [
"Apache-2.0"
] | null | null | null | """oai.py module."""
import json
import logging
from typing import Iterator, Optional
import smart_open
from sickle import Sickle
from sickle.models import Record
logger = logging.getLogger(__name__)
| 31.155556 | 88 | 0.584879 | """oai.py module."""
import json
import logging
from typing import Iterator, Optional
import smart_open
from sickle import Sickle
from sickle.models import Record
logger = logging.getLogger(__name__)
class OAIClient:
    """Thin wrapper around a Sickle OAI-PMH client with pre-bound request
    parameters (metadata prefix, date range, set)."""

    def __init__(
        self,
        source_url: str,
        metadata_format: Optional[str] = None,
        from_date: Optional[str] = None,
        until_date: Optional[str] = None,
        set_spec: Optional[str] = None,
    ) -> None:
        self.source_url = source_url
        self.client = Sickle(self.source_url)
        self.metadata_format = metadata_format
        self._set_params(metadata_format, from_date, until_date, set_spec)

    def _set_params(
        self,
        metadata_format: Optional[str],
        from_date: Optional[str],
        until_date: Optional[str],
        set_spec: Optional[str],
    ) -> None:
        # Only include OAI-PMH arguments that were actually supplied.
        candidates = {
            "metadataPrefix": metadata_format,
            "from": from_date,
            "until": until_date,
            "set": set_spec,
        }
        self.params = {key: value for key, value in candidates.items() if value}

    def get_identifiers(self) -> list[str]:
        """List the record identifiers matching the configured parameters."""
        return [
            record.identifier for record in self.client.ListIdentifiers(**self.params)
        ]

    def get_records(
        self, identifiers: list[str], exclude_deleted: bool
    ) -> Iterator[Record]:
        """Yield each identifier's full record, optionally skipping deleted ones."""
        for oai_id in identifiers:
            record = self.client.GetRecord(
                identifier=oai_id, metadataPrefix=self.metadata_format
            )
            logger.debug(
                "Record retrieved:\n  Deleted:%s\n  Header:%s\n  Raw:%s\n",
                record.deleted,
                record.header,
                record.raw,
            )
            if exclude_deleted is True and record.deleted is True:
                continue
            yield record

    def get_sets(self):
        """Return name/spec dicts for every set the repository advertises."""
        return [
            {"Set name": oai_set.setName, "Set spec": oai_set.setSpec}
            for oai_set in self.client.ListSets()
        ]
def write_records(records: Iterator, filepath: str) -> int:
    """Write raw OAI records to *filepath* inside a <records> wrapper element.

    Returns the number of records written; progress is logged every 1000.
    """
    count = 0
    # smart_open supports local paths and remote schemes (e.g. s3://) alike.
    with smart_open.open(filepath, "wb") as file:
        file.write("<records>\n".encode())
        for record in records:
            # Each record's raw XML is indented and written on its own line.
            file.write("  ".encode() + record.raw.encode() + "\n".encode())
            count += 1
            if count % 1000 == 0:
                logger.info(
                    "Status update: %s records written to output file so far!", count
                )
        file.write("</records>".encode())
    return count
def write_sets(sets: list[dict[str, str]], filepath: str) -> None:
    """Serialise the list of set dicts to *filepath* as pretty-printed JSON."""
    serialized = json.dumps(sets, indent=2)
    with open(filepath, "w") as output_file:
        output_file.write(serialized)
| 2,401 | -5 | 203 |
f98b11f5fda04a9642e0ab7aa8717c7c6cad90e6 | 266 | py | Python | tool/def_to_file.py | wuulong/RodeRunner | fdce9e466ca0b61ba3826a30a3cf6aa6f0fcdedf | [
"MIT"
] | 2 | 2021-03-08T02:04:14.000Z | 2021-06-22T20:09:23.000Z | tool/def_to_file.py | wuulong/RodeRunner | fdce9e466ca0b61ba3826a30a3cf6aa6f0fcdedf | [
"MIT"
] | null | null | null | tool/def_to_file.py | wuulong/RodeRunner | fdce9e466ca0b61ba3826a30a3cf6aa6f0fcdedf | [
"MIT"
] | null | null | null | f=open("./level_def.txt","r")
lines = f.readlines()
idx=0
level_id=1
for line in lines:
if idx % 16 == 0:
fo = open("level/level-%i.txt" % level_id, "w")
fo.write("%s" %(line))
if idx % 16 == 15:
fo.close()
level_id+=1
idx+=1
# Split level_def.txt into per-level files of 16 lines each
# (level/level-1.txt, level/level-2.txt, ...).
LINES_PER_LEVEL = 16

# Original code leaked both the source handle and the final output handle
# when the last chunk had fewer than 16 lines; close everything explicitly.
with open("./level_def.txt", "r") as source:
    lines = source.readlines()

fo = None
level_id = 1
try:
    for idx, line in enumerate(lines):
        if idx % LINES_PER_LEVEL == 0:
            # Start a new level file at each 16-line boundary.
            fo = open("level/level-%i.txt" % level_id, "w")
        fo.write("%s" % (line))
        if idx % LINES_PER_LEVEL == LINES_PER_LEVEL - 1:
            fo.close()
            fo = None
            level_id += 1
finally:
    # Close a trailing partial chunk that never hit the 16-line boundary.
    if fo is not None:
        fo.close()
| 0 | 0 | 0 |
c301dc2ea2e0c56b7701f116e06fb5ca33fab978 | 9,262 | py | Python | python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_retry_execution.py | boltsource/dagster | 30bc3b3aebc412e0f87fac0d77b08358f186bb7d | [
"Apache-2.0"
] | null | null | null | python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_retry_execution.py | boltsource/dagster | 30bc3b3aebc412e0f87fac0d77b08358f186bb7d | [
"Apache-2.0"
] | null | null | null | python_modules/dagster-graphql/dagster_graphql_tests/graphql/test_retry_execution.py | boltsource/dagster | 30bc3b3aebc412e0f87fac0d77b08358f186bb7d | [
"Apache-2.0"
] | null | null | null | from dagster_graphql.test.utils import execute_dagster_graphql
from dagster.core.instance import DagsterInstance
from .execution_queries import START_PIPELINE_EXECUTION_QUERY
from .setup import define_test_context, get_retry_multi_execution_params, retry_config
NON_PERSISTENT_INTERMEDIATES_ERROR = (
'Cannot perform reexecution with non persistent intermediates manager'
)
| 35.48659 | 100 | 0.654826 | from dagster_graphql.test.utils import execute_dagster_graphql
from dagster.core.instance import DagsterInstance
from .execution_queries import START_PIPELINE_EXECUTION_QUERY
from .setup import define_test_context, get_retry_multi_execution_params, retry_config
NON_PERSISTENT_INTERMEDIATES_ERROR = (
'Cannot perform reexecution with non persistent intermediates manager'
)
def step_did_not_run(logs, step_key):
    """True when *logs* has no success/skip/failure event for *step_key*."""
    terminal_events = (
        'ExecutionStepSuccessEvent',
        'ExecutionStepSkippedEvent',
        'ExecutionStepFailureEvent',
    )
    for entry in logs:
        if entry['__typename'] in terminal_events and entry['step']['key'] == step_key:
            return False
    return True
def step_did_succeed(logs, step_key):
    """True when *logs* contains a success event for *step_key*."""
    for entry in logs:
        if entry['__typename'] == 'ExecutionStepSuccessEvent':
            if entry['step']['key'] == step_key:
                return True
    return False
def step_did_skip(logs, step_key):
    """True when *logs* contains a skipped event for *step_key*."""
    for entry in logs:
        if entry['__typename'] == 'ExecutionStepSkippedEvent':
            if entry['step']['key'] == step_key:
                return True
    return False
def step_did_fail(logs, step_key):
    """True when *logs* contains a failure event for *step_key*."""
    for entry in logs:
        if entry['__typename'] == 'ExecutionStepFailureEvent':
            if entry['step']['key'] == step_key:
                return True
    return False
def test_retry_requires_intermediates():
    """A retry against a run without persisted intermediates must fail.

    The initial run uses the default (non-persistent) context, so the
    follow-up execution with retryRunId must come back as a PythonError
    carrying NON_PERSISTENT_INTERMEDIATES_ERROR.
    """
    context = define_test_context()
    # Initial run: 'spawn' succeeds, 'fail' fails, everything downstream skips.
    result = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {'mode': 'default', 'selector': {'name': 'eventually_successful'}}
        },
    )
    assert not result.errors
    assert result.data
    run_id = result.data['startPipelineExecution']['run']['runId']
    assert run_id
    logs = result.data['startPipelineExecution']['run']['logs']['nodes']
    assert isinstance(logs, list)
    assert step_did_succeed(logs, 'spawn.compute')
    assert step_did_fail(logs, 'fail.compute')
    assert step_did_skip(logs, 'fail_2.compute')
    assert step_did_skip(logs, 'fail_3.compute')
    assert step_did_skip(logs, 'reset.compute')
    # Retry: no persistent intermediates manager was configured, so this
    # must be rejected rather than executed.
    retry_one = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'eventually_successful'},
                'retryRunId': run_id,
            }
        },
    )
    assert not retry_one.errors
    assert retry_one.data
    assert retry_one.data['startPipelineExecution']['__typename'] == 'PythonError'
    assert NON_PERSISTENT_INTERMEDIATES_ERROR in retry_one.data['startPipelineExecution']['message']
def test_retry_pipeline_execution():
    """Successive retries of 'eventually_successful' advance one step per run.

    Each retry re-executes only the previously failed step and its
    downstream steps; already-successful steps must not run again.
    retry_config(n) presumably controls at which attempt each solid stops
    failing — TODO confirm against .setup.
    """
    context = define_test_context(instance=DagsterInstance.local_temp())
    # Run 0: 'spawn' succeeds, 'fail' fails, everything after is skipped.
    result = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'eventually_successful'},
                'environmentConfigData': retry_config(0),
            }
        },
    )
    run_id = result.data['startPipelineExecution']['run']['runId']
    logs = result.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_succeed(logs, 'spawn.compute')
    assert step_did_fail(logs, 'fail.compute')
    assert step_did_skip(logs, 'fail_2.compute')
    assert step_did_skip(logs, 'fail_3.compute')
    assert step_did_skip(logs, 'reset.compute')
    # Retry 1: 'fail' now succeeds; execution halts one step later at 'fail_2'.
    retry_one = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'eventually_successful'},
                'environmentConfigData': retry_config(1),
                'retryRunId': run_id,
            }
        },
    )
    run_id = retry_one.data['startPipelineExecution']['run']['runId']
    logs = retry_one.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_not_run(logs, 'spawn.compute')
    assert step_did_succeed(logs, 'fail.compute')
    assert step_did_fail(logs, 'fail_2.compute')
    assert step_did_skip(logs, 'fail_3.compute')
    assert step_did_skip(logs, 'reset.compute')
    # Retry 2: 'fail_2' succeeds; 'fail_3' becomes the new failure point.
    retry_two = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'eventually_successful'},
                'environmentConfigData': retry_config(2),
                'retryRunId': run_id,
            }
        },
    )
    run_id = retry_two.data['startPipelineExecution']['run']['runId']
    logs = retry_two.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_not_run(logs, 'spawn.compute')
    assert step_did_not_run(logs, 'fail.compute')
    assert step_did_succeed(logs, 'fail_2.compute')
    assert step_did_fail(logs, 'fail_3.compute')
    assert step_did_skip(logs, 'reset.compute')
    # Retry 3: the whole pipeline finally completes.
    retry_three = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'eventually_successful'},
                'environmentConfigData': retry_config(3),
                'retryRunId': run_id,
            }
        },
    )
    run_id = retry_three.data['startPipelineExecution']['run']['runId']
    logs = retry_three.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_not_run(logs, 'spawn.compute')
    assert step_did_not_run(logs, 'fail.compute')
    assert step_did_not_run(logs, 'fail_2.compute')
    assert step_did_succeed(logs, 'fail_3.compute')
    assert step_did_succeed(logs, 'reset.compute')
def test_retry_resource_pipeline():
    """Retrying a resource-backed pipeline skips already-successful steps.

    With filesystem storage the retry re-executes only the failed
    'will_fail' step; 'start' is carried over from the first run.
    """
    context = define_test_context(instance=DagsterInstance.local_temp())
    # Initial run: 'start' succeeds, 'will_fail' fails.
    result = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'retry_resource_pipeline'},
                'environmentConfigData': {'storage': {'filesystem': {}}},
            }
        },
    )
    run_id = result.data['startPipelineExecution']['run']['runId']
    logs = result.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_succeed(logs, 'start.compute')
    assert step_did_fail(logs, 'will_fail.compute')
    # Retry: 'start' must not re-run; 'will_fail' fails again.
    retry_one = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': {
                'mode': 'default',
                'selector': {'name': 'retry_resource_pipeline'},
                'environmentConfigData': {'storage': {'filesystem': {}}},
                'retryRunId': run_id,
            }
        },
    )
    run_id = retry_one.data['startPipelineExecution']['run']['runId']
    logs = retry_one.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_not_run(logs, 'start.compute')
    assert step_did_fail(logs, 'will_fail.compute')
def test_retry_multi_output():
    """Retries of a multi-output solid graph re-execute only the failed subtree.

    Run 1 fails at 'can_fail'; retry 1 (still failing) re-runs only that
    subtree; retry 2 with should_fail=False completes it while 'child_skip'
    stays skipped throughout.
    """
    context = define_test_context(instance=DagsterInstance.local_temp())
    # Initial run: 'can_fail' raises, so its downstream steps are skipped.
    result = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={'executionParams': get_retry_multi_execution_params(should_fail=True)},
    )
    # (Removed leftover debug print of the raw GraphQL response.)
    run_id = result.data['startPipelineExecution']['run']['runId']
    logs = result.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_succeed(logs, 'multi.compute')
    assert step_did_skip(logs, 'child_multi_skip.compute')
    assert step_did_fail(logs, 'can_fail.compute')
    assert step_did_skip(logs, 'child_fail.compute')
    assert step_did_skip(logs, 'child_skip.compute')
    assert step_did_skip(logs, 'grandchild_fail.compute')
    # Retry 1, still configured to fail: earlier successes are not re-run.
    retry_one = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': get_retry_multi_execution_params(should_fail=True, retry_id=run_id)
        },
    )
    run_id = retry_one.data['startPipelineExecution']['run']['runId']
    logs = retry_one.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_not_run(logs, 'multi.compute')
    assert step_did_not_run(logs, 'child_multi_skip.compute')
    assert step_did_fail(logs, 'can_fail.compute')
    assert step_did_skip(logs, 'child_fail.compute')
    assert step_did_skip(logs, 'child_skip.compute')
    assert step_did_skip(logs, 'grandchild_fail.compute')
    # Retry 2 with should_fail=False: the failed subtree now completes.
    retry_two = execute_dagster_graphql(
        context,
        START_PIPELINE_EXECUTION_QUERY,
        variables={
            'executionParams': get_retry_multi_execution_params(should_fail=False, retry_id=run_id)
        },
    )
    run_id = retry_two.data['startPipelineExecution']['run']['runId']
    logs = retry_two.data['startPipelineExecution']['run']['logs']['nodes']
    assert step_did_not_run(logs, 'multi.compute')
    assert step_did_not_run(logs, 'child_multi_skip.compute')
    assert step_did_succeed(logs, 'can_fail.compute')
    assert step_did_succeed(logs, 'child_fail.compute')
    assert step_did_skip(logs, 'child_skip.compute')
    assert step_did_succeed(logs, 'grandchild_fail.compute')
066b1b7b5f1b227c644687561a9f04a00cd150e2 | 1,009 | py | Python | src/db_test.py | Pecneb/RaspberryPI | da067a6b2b8cda5510d4c2562559a3adf58902ef | [
"MIT"
] | null | null | null | src/db_test.py | Pecneb/RaspberryPI | da067a6b2b8cda5510d4c2562559a3adf58902ef | [
"MIT"
] | null | null | null | src/db_test.py | Pecneb/RaspberryPI | da067a6b2b8cda5510d4c2562559a3adf58902ef | [
"MIT"
] | null | null | null | import db_operations
from user import User
# from event import Event
# def add_test_events():
# for i in range(5):
# tmp_event = Event()
# db_operations.add_event(tmp_event._date, tmp_event.getAuthInBit())
# def list_test_events():
# events = db_operations.get_events()
# for event in events:
# print(event)
if __name__ == "__main__":
main() | 28.828571 | 118 | 0.665015 | import db_operations
from user import User
# from event import Event
def add_test_users():
    """Insert one admin user and four regular test users into the database."""
    users = [User("Pecneb", "ecneb2000@gmail.com", "testpass", 1)]
    users.extend(
        User(f"Pecneb{i}", f"ecneb{i}@gmail.com", f"testpass{i}", 0) for i in range(1, 5)
    )
    for user in users:
        db_operations.add_user(
            user.getName(), user.getEmail(), user.getPassword(), user.getIsadmin()
        )
# def add_test_events():
# for i in range(5):
# tmp_event = Event()
# db_operations.add_event(tmp_event._date, tmp_event.getAuthInBit())
def list_test_users():
    """Print every user row currently stored in the database."""
    for row in db_operations.get_data():
        print(row)
# def list_test_events():
# events = db_operations.get_events()
# for event in events:
# print(event)
def main():
    """Seed the database with test users, then list them back.

    The event helpers are kept commented out pending the Event module.
    """
    add_test_users()
    # add_test_events()
    list_test_users()
    #list_test_events()
# Script entry point: run the seed-and-list smoke test.
if __name__ == "__main__":
    main()
d4f897f2d98e5d0d2edca8018400ab91e2ff9eaf | 16,117 | py | Python | tests/test_fieldset.py | rabernat/parcels | b1c3d097c8c8d8a8398b6ab1080c8c2d23350ee8 | [
"MIT"
] | 1 | 2019-01-12T15:33:58.000Z | 2019-01-12T15:33:58.000Z | tests/test_fieldset.py | rabernat/parcels | b1c3d097c8c8d8a8398b6ab1080c8c2d23350ee8 | [
"MIT"
] | null | null | null | tests/test_fieldset.py | rabernat/parcels | b1c3d097c8c8d8a8398b6ab1080c8c2d23350ee8 | [
"MIT"
] | null | null | null | from parcels import FieldSet, ParticleSet, ScipyParticle, JITParticle, Variable, AdvectionRK4, AdvectionRK4_3D, RectilinearZGrid, ErrorCode
from parcels.field import Field, VectorField
from parcels.tools.converters import TimeConverter
from datetime import timedelta as delta
import datetime
import numpy as np
import math
import pytest
from os import path
ptype = {'scipy': ScipyParticle, 'jit': JITParticle}
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 200])
def test_fieldset_from_data(xdim, ydim):
""" Simple test for fieldset initialisation from data. """
data, dimensions = generate_fieldset(xdim, ydim)
fieldset = FieldSet.from_data(data, dimensions)
assert len(fieldset.U.data.shape) == 3
assert len(fieldset.V.data.shape) == 3
assert np.allclose(fieldset.U.data[0, :], data['U'], rtol=1e-12)
assert np.allclose(fieldset.V.data[0, :], data['V'], rtol=1e-12)
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 50])
def test_fieldset_from_data_different_dimensions(xdim, ydim, zdim=4, tdim=2):
""" Test for fieldset initialisation from data using
dict-of-dict for dimensions. """
lon = np.linspace(0., 1., xdim, dtype=np.float32)
lat = np.linspace(0., 1., ydim, dtype=np.float32)
depth = np.zeros(zdim, dtype=np.float32)
time = np.zeros(tdim, dtype=np.float64)
U = np.zeros((xdim, ydim), dtype=np.float32)
V = np.ones((xdim, ydim), dtype=np.float32)
P = 2 * np.ones((int(xdim/2), int(ydim/2), zdim, tdim), dtype=np.float32)
data = {'U': U, 'V': V, 'P': P}
dimensions = {'U': {'lat': lat, 'lon': lon},
'V': {'lat': lat, 'lon': lon},
'P': {'lat': lat[0::2], 'lon': lon[0::2], 'depth': depth, 'time': time}}
fieldset = FieldSet.from_data(data, dimensions, transpose=True)
assert len(fieldset.U.data.shape) == 3
assert len(fieldset.V.data.shape) == 3
assert len(fieldset.P.data.shape) == 4
assert fieldset.P.data.shape == (tdim, zdim, ydim/2, xdim/2)
assert np.allclose(fieldset.U.data, 0., rtol=1e-12)
assert np.allclose(fieldset.V.data, 1., rtol=1e-12)
assert np.allclose(fieldset.P.data, 2., rtol=1e-12)
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 200])
def test_fieldset_from_parcels(xdim, ydim, tmpdir, filename='test_parcels'):
""" Simple test for fieldset initialisation from Parcels FieldSet file format. """
filepath = tmpdir.join(filename)
data, dimensions = generate_fieldset(xdim, ydim)
fieldset_out = FieldSet.from_data(data, dimensions)
fieldset_out.write(filepath)
fieldset = FieldSet.from_parcels(filepath)
assert len(fieldset.U.data.shape) == 3 # Will be 4 once we use depth
assert len(fieldset.V.data.shape) == 3
assert np.allclose(fieldset.U.data[0, :], data['U'], rtol=1e-12)
assert np.allclose(fieldset.V.data[0, :], data['V'], rtol=1e-12)
@pytest.mark.parametrize('indslon', [range(10, 20), [1]])
@pytest.mark.parametrize('indslat', [range(30, 60), [22]])
def test_fieldset_from_file_subsets(indslon, indslat, tmpdir, filename='test_subsets'):
""" Test for subsetting fieldset from file using indices dict. """
data, dimensions = generate_fieldset(100, 100)
filepath = tmpdir.join(filename)
fieldsetfull = FieldSet.from_data(data, dimensions)
fieldsetfull.write(filepath)
indices = {'lon': indslon, 'lat': indslat}
indices_back = indices.copy()
fieldsetsub = FieldSet.from_parcels(filepath, indices=indices)
assert indices == indices_back
assert np.allclose(fieldsetsub.U.lon, fieldsetfull.U.grid.lon[indices['lon']])
assert np.allclose(fieldsetsub.U.lat, fieldsetfull.U.grid.lat[indices['lat']])
assert np.allclose(fieldsetsub.V.lon, fieldsetfull.V.grid.lon[indices['lon']])
assert np.allclose(fieldsetsub.V.lat, fieldsetfull.V.grid.lat[indices['lat']])
ixgrid = np.ix_([0], indices['lat'], indices['lon'])
assert np.allclose(fieldsetsub.U.data, fieldsetfull.U.data[ixgrid])
assert np.allclose(fieldsetsub.V.data, fieldsetfull.V.data[ixgrid])
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 200])
@pytest.mark.parametrize('mesh', ['flat', 'spherical'])
@pytest.mark.parametrize('dx, dy', [('e1u', 'e2u'), ('e1v', 'e2v')])
@pytest.mark.parametrize('mesh', ['flat', 'spherical'])
@pytest.mark.parametrize('mesh', ['flat', 'spherical'])
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
@pytest.mark.parametrize('swapUV', [False, True])
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
@pytest.mark.parametrize('time_periodic', [True, False])
@pytest.mark.parametrize('dt_sign', [-1, 1])
@pytest.mark.parametrize('fail', [False, pytest.param(True, marks=pytest.mark.xfail(strict=True))])
@pytest.mark.parametrize('zdim', [2, 8])
@pytest.mark.parametrize('scale_fac', [0.2, 4, 1])
| 43.796196 | 140 | 0.660235 | from parcels import FieldSet, ParticleSet, ScipyParticle, JITParticle, Variable, AdvectionRK4, AdvectionRK4_3D, RectilinearZGrid, ErrorCode
from parcels.field import Field, VectorField
from parcels.tools.converters import TimeConverter
from datetime import timedelta as delta
import datetime
import numpy as np
import math
import pytest
from os import path
ptype = {'scipy': ScipyParticle, 'jit': JITParticle}
def generate_fieldset(xdim, ydim, zdim=1, tdim=1):
    """Build a synthetic (data, dimensions) pair for FieldSet construction.

    When zdim == tdim == 1 the velocities are 2D meshgrids over a 0..10
    box and only lat/lon dimensions are returned; otherwise U and V are
    4D arrays of ones and the dimensions dict also carries depth and time.
    Returned U/V arrays are always float32.
    """
    lon = np.linspace(0., 10., xdim, dtype=np.float32)
    lat = np.linspace(0., 10., ydim, dtype=np.float32)
    depth = np.zeros(zdim, dtype=np.float32)
    time = np.zeros(tdim, dtype=np.float64)
    is_flat_2d = zdim == 1 and tdim == 1
    if is_flat_2d:
        U, V = np.meshgrid(lon, lat)
        dimensions = {'lat': lat, 'lon': lon}
    else:
        shape = (tdim, zdim, ydim, xdim)
        U = np.ones(shape)
        V = np.ones(shape)
        dimensions = {'lat': lat, 'lon': lon, 'depth': depth, 'time': time}
    data = {
        'U': np.array(U, dtype=np.float32),
        'V': np.array(V, dtype=np.float32),
    }
    return (data, dimensions)
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 200])
def test_fieldset_from_data(xdim, ydim):
    """FieldSet.from_data must reproduce the input velocity arrays."""
    data, dimensions = generate_fieldset(xdim, ydim)
    fieldset = FieldSet.from_data(data, dimensions)
    for name in ('U', 'V'):
        field = getattr(fieldset, name)
        assert len(field.data.shape) == 3
        assert np.allclose(field.data[0, :], data[name], rtol=1e-12)
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 50])
def test_fieldset_from_data_different_dimensions(xdim, ydim, zdim=4, tdim=2):
    """ Test for fieldset initialisation from data using
    dict-of-dict for dimensions. """
    lon = np.linspace(0., 1., xdim, dtype=np.float32)
    lat = np.linspace(0., 1., ydim, dtype=np.float32)
    depth = np.zeros(zdim, dtype=np.float32)
    time = np.zeros(tdim, dtype=np.float64)
    U = np.zeros((xdim, ydim), dtype=np.float32)
    V = np.ones((xdim, ydim), dtype=np.float32)
    # P lives on a coarser grid (every other lon/lat point) with depth/time axes.
    P = 2 * np.ones((int(xdim/2), int(ydim/2), zdim, tdim), dtype=np.float32)
    data = {'U': U, 'V': V, 'P': P}
    # Dict-of-dict form: each field declares its own grid dimensions.
    dimensions = {'U': {'lat': lat, 'lon': lon},
                  'V': {'lat': lat, 'lon': lon},
                  'P': {'lat': lat[0::2], 'lon': lon[0::2], 'depth': depth, 'time': time}}
    fieldset = FieldSet.from_data(data, dimensions, transpose=True)
    assert len(fieldset.U.data.shape) == 3
    assert len(fieldset.V.data.shape) == 3
    assert len(fieldset.P.data.shape) == 4
    # transpose=True reorders P from (x, y, z, t) to (t, z, y, x).
    assert fieldset.P.data.shape == (tdim, zdim, ydim/2, xdim/2)
    assert np.allclose(fieldset.U.data, 0., rtol=1e-12)
    assert np.allclose(fieldset.V.data, 1., rtol=1e-12)
    assert np.allclose(fieldset.P.data, 2., rtol=1e-12)
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 200])
def test_fieldset_from_parcels(xdim, ydim, tmpdir, filename='test_parcels'):
    """Round-trip a FieldSet through the Parcels on-disk file format."""
    filepath = tmpdir.join(filename)
    data, dimensions = generate_fieldset(xdim, ydim)
    FieldSet.from_data(data, dimensions).write(filepath)
    fieldset = FieldSet.from_parcels(filepath)
    for name in ('U', 'V'):
        field = getattr(fieldset, name)
        assert len(field.data.shape) == 3  # will become 4 once depth is used
        assert np.allclose(field.data[0, :], data[name], rtol=1e-12)
@pytest.mark.parametrize('indslon', [range(10, 20), [1]])
@pytest.mark.parametrize('indslat', [range(30, 60), [22]])
def test_fieldset_from_file_subsets(indslon, indslat, tmpdir, filename='test_subsets'):
    """ Test for subsetting fieldset from file using indices dict. """
    data, dimensions = generate_fieldset(100, 100)
    filepath = tmpdir.join(filename)
    fieldsetfull = FieldSet.from_data(data, dimensions)
    fieldsetfull.write(filepath)
    indices = {'lon': indslon, 'lat': indslat}
    # Keep a copy to verify from_parcels does not mutate the caller's dict.
    indices_back = indices.copy()
    fieldsetsub = FieldSet.from_parcels(filepath, indices=indices)
    assert indices == indices_back
    # Subset grid coordinates must match the indexed full-grid coordinates.
    assert np.allclose(fieldsetsub.U.lon, fieldsetfull.U.grid.lon[indices['lon']])
    assert np.allclose(fieldsetsub.U.lat, fieldsetfull.U.grid.lat[indices['lat']])
    assert np.allclose(fieldsetsub.V.lon, fieldsetfull.V.grid.lon[indices['lon']])
    assert np.allclose(fieldsetsub.V.lat, fieldsetfull.V.grid.lat[indices['lat']])
    # And so must the data on the cross-product of the index sets.
    ixgrid = np.ix_([0], indices['lat'], indices['lon'])
    assert np.allclose(fieldsetsub.U.data, fieldsetfull.U.data[ixgrid])
    assert np.allclose(fieldsetsub.V.data, fieldsetfull.V.data[ixgrid])
@pytest.mark.parametrize('xdim', [100, 200])
@pytest.mark.parametrize('ydim', [100, 200])
def test_add_field(xdim, ydim, tmpdir, filename='test_add'):
    """A Field added via add_field is exposed as an attribute and written to disk."""
    filepath = tmpdir.join(filename)
    data, dimensions = generate_fieldset(xdim, ydim)
    fieldset = FieldSet.from_data(data, dimensions)
    field = Field('newfld', fieldset.U.data, lon=fieldset.U.lon, lat=fieldset.U.lat)
    fieldset.add_field(field)
    assert fieldset.newfld.data.shape == fieldset.U.data.shape
    fieldset.write(filepath)
@pytest.mark.parametrize('mesh', ['flat', 'spherical'])
def test_fieldset_celledgesizes(mesh):
    """Cell edge sizes: meridional lengths are uniform on both meshes;
    zonal lengths are uniform only on a flat mesh and shrink poleward on a sphere."""
    data, dimensions = generate_fieldset(10, 7)
    fieldset = FieldSet.from_data(data, dimensions, mesh=mesh)
    fieldset.U.calc_cell_edge_sizes()
    D_meridional = fieldset.U.cell_edge_sizes['y']
    D_zonal = fieldset.U.cell_edge_sizes['x']
    assert np.allclose(D_meridional.flatten(), D_meridional[0, 0])  # all meridional distances should be the same in either mesh
    if mesh == 'flat':
        assert np.allclose(D_zonal.flatten(), D_zonal[0, 0])  # all zonal distances should be the same in flat mesh
    else:
        assert all((np.gradient(D_zonal, axis=0) < 0).flatten())  # zonal distances should decrease in spherical mesh
@pytest.mark.parametrize('dx, dy', [('e1u', 'e2u'), ('e1v', 'e2v')])
def test_fieldset_celledgesizes_curvilinear(dx, dy):
    """On a curvilinear NEMO grid, cell_areas() must equal e1*e2 when the
    edge sizes are set explicitly from the mesh-mask scale factors."""
    fname = path.join(path.dirname(__file__), 'test_data', 'mask_nemo_cross_180lon.nc')
    filenames = {'dx': fname, 'dy': fname, 'mesh_mask': fname}
    variables = {'dx': dx, 'dy': dy}
    dimensions = {'dx': {'lon': 'glamu', 'lat': 'gphiu'},
                  'dy': {'lon': 'glamu', 'lat': 'gphiu'}}
    fieldset = FieldSet.from_nemo(filenames, variables, dimensions)
    # explicitly setting cell_edge_sizes from e1u and e2u etc
    fieldset.dx.grid.cell_edge_sizes['x'] = fieldset.dx.data
    fieldset.dx.grid.cell_edge_sizes['y'] = fieldset.dy.data
    A = fieldset.dx.cell_areas()
    assert np.allclose(A, fieldset.dx.data * fieldset.dy.data)
def test_fieldset_write_curvilinear(tmpdir):
    """Writing a curvilinear field and reading it back must preserve lon/lat/data."""
    fname = path.join(path.dirname(__file__), 'test_data', 'mask_nemo_cross_180lon.nc')
    filenames = {'dx': fname, 'mesh_mask': fname}
    variables = {'dx': 'e1u'}
    dimensions = {'lon': 'glamu', 'lat': 'gphiu'}
    fieldset = FieldSet.from_nemo(filenames, variables, dimensions)
    newfile = tmpdir.join('curv_field')
    fieldset.write(newfile)
    # Written files are named <prefix><fieldname>.nc with nav_lon/nav_lat coords.
    fieldset2 = FieldSet.from_netcdf(filenames=newfile+'dx.nc', variables={'dx': 'dx'}, dimensions={'lon': 'nav_lon', 'lat': 'nav_lat'})
    for var in ['lon', 'lat', 'data']:
        assert np.allclose(getattr(fieldset2.dx, var), getattr(fieldset.dx, var))
@pytest.mark.parametrize('mesh', ['flat', 'spherical'])
def test_fieldset_cellareas(mesh):
    """Cell areas are uniform on a flat mesh and shrink poleward on a sphere."""
    data, dimensions = generate_fieldset(10, 7)
    fieldset = FieldSet.from_data(data, dimensions, mesh=mesh)
    areas = fieldset.V.cell_areas()
    if mesh == 'flat':
        # Every cell has (almost) the same area on a flat mesh.
        assert np.allclose(areas.flatten(), areas[0, 0], rtol=1e-3)
    else:
        # Areas must decrease monotonically with latitude ...
        assert (np.gradient(areas, axis=0) < 0).all()
        # ... while staying constant along each latitude row.
        for row in areas:
            assert np.allclose(row, row[0], rtol=1e-3)
@pytest.mark.parametrize('mesh', ['flat', 'spherical'])
def test_fieldset_gradient(mesh):
    """Field.gradient() must agree with numpy.gradient within 5% tolerance."""
    data, dimensions = generate_fieldset(5, 3)
    fieldset = FieldSet.from_data(data, dimensions, mesh=mesh)
    # Calculate field gradients for testing against numpy gradients.
    dFdx, dFdy = fieldset.V.gradient()
    # Create numpy fields.
    # conv_factor converts degrees to metres on a spherical mesh.
    conv_factor = 6.371e6 * np.pi / 180. if mesh == 'spherical' else 1.
    np_dFdx = np.gradient(fieldset.V.data[0, :, :], (np.diff(fieldset.V.lon) * conv_factor)[0], axis=1)
    np_dFdy = np.gradient(fieldset.V.data[0, :, :], (np.diff(fieldset.V.lat) * conv_factor)[0], axis=0)
    if mesh == 'spherical':
        # NOTE(review): y ranges over axis 0 (latitude rows) but indexes axis 1
        # via np_dFdx[:, y] — this looks transposed (np_dFdx[y, :] expected);
        # the loose rtol below may be masking it. Verify before relying on it.
        for y in range(np_dFdx.shape[0]):
            np_dFdx[:, y] /= math.cos(fieldset.V.grid.lat[y] * math.pi / 180.)
    assert np.allclose(dFdx.data, np_dFdx, rtol=5e-2)  # Field gradient dx.
    assert np.allclose(dFdy.data, np_dFdy, rtol=5e-2)  # Field gradient dy.
# Kernel: shift the particle zonally by the two fieldset constants.
# NOTE(review): left byte-identical — kernel bodies are consumed by the
# parcels Kernel machinery (used with pset.Kernel below), so restyling
# the expression should be verified against the kernel parser first.
def addConst(particle, fieldset, time, dt):
    particle.lon = particle.lon + fieldset.movewest + fieldset.moveeast
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
def test_fieldset_constant(mode):
    """add_constant values must be readable as attributes and usable in kernels."""
    data, dimensions = generate_fieldset(100, 100)
    fieldset = FieldSet.from_data(data, dimensions)
    shifts = {'movewest': -0.2, 'moveeast': 0.3}
    for name, value in shifts.items():
        fieldset.add_constant(name, value)
    assert fieldset.movewest == shifts['movewest']
    pset = ParticleSet.from_line(fieldset, size=1, pclass=ptype[mode],
                                 start=(0.5, 0.5), finish=(0.5, 0.5))
    pset.execute(pset.Kernel(addConst), dt=1, runtime=1)
    expected_lon = 0.5 + shifts['movewest'] + shifts['moveeast']
    assert abs(pset[0].lon - expected_lon) < 1e-4
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
@pytest.mark.parametrize('swapUV', [False, True])
def test_vector_fields(mode, swapUV):
    """Replacing the UV VectorField swaps the advection direction.

    With U=1, V=0 a particle drifts east; after swapping U and V inside
    the UV vector field it must drift north instead.
    """
    lon = np.linspace(0., 10., 12, dtype=np.float32)
    lat = np.linspace(0., 10., 10, dtype=np.float32)
    U = np.ones((10, 12), dtype=np.float32)
    V = np.zeros((10, 12), dtype=np.float32)
    data = {'U': U, 'V': V}
    dimensions = {'U': {'lat': lat, 'lon': lon},
                  'V': {'lat': lat, 'lon': lon}}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat')
    if swapUV:  # we test that we can freely edit whatever UV field
        UV = VectorField('UV', fieldset.V, fieldset.U)
        fieldset.add_vector_field(UV)
    pset = ParticleSet.from_line(fieldset, size=1, pclass=ptype[mode],
                                 start=(0.5, 0.5), finish=(0.5, 0.5))
    pset.execute(AdvectionRK4, dt=1, runtime=1)
    if swapUV:
        # Swapped: motion is meridional (lat advances, lon fixed).
        assert abs(pset[0].lon - .5) < 1e-9
        assert abs(pset[0].lat - 1.5) < 1e-9
    else:
        # Default: motion is zonal (lon advances, lat fixed).
        assert abs(pset[0].lon - 1.5) < 1e-9
        assert abs(pset[0].lat - .5) < 1e-9
@pytest.mark.parametrize('mode', ['scipy', 'jit'])
@pytest.mark.parametrize('time_periodic', [True, False])
@pytest.mark.parametrize('dt_sign', [-1, 1])
def test_periodic(mode, time_periodic, dt_sign):
    """Sampling a time-varying field beyond its time range.

    With time_periodic=True the field wraps around its 24h period; with
    extrapolation the sampled value saturates at the last (forward) or
    first (backward) time slice.

    Fix: a leftover `dt_sign = -1` was overriding the parametrized
    dt_sign, so the dt_sign=1 cases never actually ran forward in time.
    """
    lon = np.array([0, 1], dtype=np.float32)
    lat = np.array([0, 1], dtype=np.float32)
    depth = np.array([0, 1], dtype=np.float32)
    tsize = 24*60+1
    period = 86400
    time = np.linspace(0, period, tsize, dtype=np.float64)
    def temp_func(time):
        # Sinusoidal temperature with a 24h period around 20 degrees.
        return 20 + 2 * np.sin(time*2*np.pi/period)
    temp_vec = temp_func(time)
    U = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    V = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    W = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    temp = np.zeros((2, 2, 2, tsize), dtype=np.float32)
    temp[:, :, :, :] = temp_vec
    data = {'U': U, 'V': V, 'W': W, 'temp': temp}
    dimensions = {'lon': lon, 'lat': lat, 'depth': depth, 'time': time}
    fieldset = FieldSet.from_data(data, dimensions, mesh='flat', time_periodic=time_periodic, transpose=True, allow_time_extrapolation=True)
    def sampleTemp(particle, fieldset, time, dt):
        # Note that fieldset.temp is interpolated at time=time+dt.
        # Indeed, sampleTemp is called at time=time, but the result is written
        # at time=time+dt, after the Kernel update
        particle.temp = fieldset.temp[time+dt, particle.lon, particle.lat, particle.depth]
    class MyParticle(ptype[mode]):
        temp = Variable('temp', dtype=np.float32, initial=20.)
    pset = ParticleSet.from_list(fieldset, pclass=MyParticle,
                                 lon=[0.5], lat=[0.5], depth=[0.5])
    # 51h exceeds the 24h time axis in either direction.
    pset.execute(AdvectionRK4_3D + pset.Kernel(sampleTemp),
                 runtime=delta(hours=51), dt=delta(hours=dt_sign*1))
    if time_periodic:
        t = pset.particles[0].time
        temp_theo = temp_func(t)
    elif dt_sign == 1:
        temp_theo = temp_vec[-1]
    elif dt_sign == -1:
        temp_theo = temp_vec[0]
    assert np.allclose(temp_theo, pset.particles[0].temp, atol=1e-5)
@pytest.mark.parametrize('fail', [False, pytest.param(True, marks=pytest.mark.xfail(strict=True))])
def test_fieldset_defer_loading_with_diff_time_origin(tmpdir, fail, filename='test_parcels_defer_loading'):
    """Deferred loading with fields whose grids have different time origins.

    The W field gets its own time origin; in the fail=True (xfail) case its
    absolute time range does not cover the particle's execution window.
    """
    filepath = tmpdir.join(filename)
    data0, dims0 = generate_fieldset(10, 10, 1, 10)
    dims0['time'] = np.arange(0, 10, 1) * 3600
    fieldset_out = FieldSet.from_data(data0, dims0)
    fieldset_out.U.grid.time_origin = TimeConverter(np.datetime64('2018-04-20'))
    fieldset_out.V.grid.time_origin = TimeConverter(np.datetime64('2018-04-20'))
    data1, dims1 = generate_fieldset(10, 10, 1, 10)
    if fail:
        dims1['time'] = np.arange(0, 10, 1) * 3600
    else:
        # Shifted/denser time axis compensated by an earlier time origin below.
        dims1['time'] = np.arange(0, 10, 1) * 1800 + (24+25)*3600
    if fail:
        Wtime_origin = TimeConverter(np.datetime64('2018-04-22'))
    else:
        Wtime_origin = TimeConverter(np.datetime64('2018-04-18'))
    gridW = RectilinearZGrid(dims1['lon'], dims1['lat'], dims1['depth'], dims1['time'], time_origin=Wtime_origin)
    fieldW = Field('W', np.zeros(data1['U'].shape), grid=gridW)
    fieldset_out.add_field(fieldW)
    fieldset_out.write(filepath)
    fieldset = FieldSet.from_parcels(filepath, extra_fields={'W': 'W'})
    pset = ParticleSet.from_list(fieldset, pclass=JITParticle, lon=[0.5], lat=[0.5], depth=[0.5],
                                 time=[datetime.datetime(2018, 4, 20, 1)])
    pset.execute(AdvectionRK4_3D, runtime=delta(hours=4), dt=delta(hours=1))
@pytest.mark.parametrize('zdim', [2, 8])
@pytest.mark.parametrize('scale_fac', [0.2, 4, 1])
def test_fieldset_defer_loading_function(zdim, scale_fac, tmpdir, filename='test_parcels_defer_loading'):
    """compute_on_defer hooks and scaling factors on deferred-loaded fields.

    The compute hook vertically averages U and V per loaded time chunk;
    with the first layer NaN->0 and the rest 1, the weighted average is
    scale_fac * (zdim-1)/zdim, which must survive a ParticleSet execution.
    """
    filepath = tmpdir.join(filename)
    data0, dims0 = generate_fieldset(3, 3, zdim, 10)
    data0['U'][:, 0, :, :] = np.nan  # setting first layer to nan, which will be changed to zero (and all other layers to 1)
    dims0['time'] = np.arange(0, 10, 1) * 3600
    dims0['depth'] = np.arange(0, zdim, 1)
    fieldset_out = FieldSet.from_data(data0, dims0)
    fieldset_out.write(filepath)
    fieldset = FieldSet.from_parcels(filepath)
    # testing for combination of deferred-loaded and numpy Fields
    fieldset.add_field(Field('numpyfield', np.zeros((10, zdim, 3, 3)), grid=fieldset.U.grid))
    # testing for scaling factors
    fieldset.U.set_scaling_factor(scale_fac)
    dFdx, dFdy = fieldset.V.gradient()
    dz = np.gradient(fieldset.U.depth)
    # DZ: layer-thickness weights broadcast to (zdim, ydim, xdim).
    DZ = np.moveaxis(np.tile(dz, (fieldset.U.grid.ydim, fieldset.U.grid.xdim, 1)), [0, 1, 2], [1, 2, 0])
    def compute(fieldset):
        # Calculating vertical weighted average
        for f in [fieldset.U, fieldset.V]:
            for tind in f.loaded_time_indices:
                f.data[tind, :] = np.sum(f.data[tind, :] * DZ, axis=0) / sum(dz)
    fieldset.compute_on_defer = compute
    fieldset.computeTimeChunk(1, 1)
    assert np.allclose(fieldset.U.data, scale_fac*(zdim-1.)/zdim)
    assert np.allclose(dFdx.data, 0)
    pset = ParticleSet(fieldset, JITParticle, 0, 0)
    def DoNothing(particle, fieldset, time, dt):
        return ErrorCode.Success
    pset.execute(DoNothing, dt=3600)
    # The averaged/scaled data must be unchanged after execution.
    assert np.allclose(fieldset.U.data, scale_fac*(zdim-1.)/zdim)
    assert np.allclose(dFdx.data, 0)
| 10,843 | 0 | 289 |
8281186c24db75a0e1977b021c03faf391a33ea3 | 2,640 | py | Python | openGaussBase/testcase/KEYWORDS/Deref/Opengauss_Function_Keyword_Deref_Case0020.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Deref/Opengauss_Function_Keyword_Deref_Case0020.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Deref/Opengauss_Function_Keyword_Deref_Case0020.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
'''
#-- @testpoint:opengauss关键字deref(非保留),作为目录对象名
'''
import unittest
from testcase.utils.Logger import Logger
from testcase.utils.Constant import Constant
from testcase.utils.CommonSH import CommonSH
logger = Logger()
commonsh = CommonSH('dbuser')
constant = Constant()
# 关键字作为目录对象名不带双引号 - 成功
# 关键字作为目录对象名带双引号—成功
# 关键字作为目录对象名带单引号 - 合理报错
#关键字作为目录对象名带反引号 - 合理报错 | 35.675676 | 121 | 0.660227 | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
'''
#-- @testpoint:opengauss关键字deref(非保留),作为目录对象名
'''
import unittest
from testcase.utils.Logger import Logger
from testcase.utils.Constant import Constant
from testcase.utils.CommonSH import CommonSH
logger = Logger()
commonsh = CommonSH('dbuser')
constant = Constant()
class Hostname(unittest.TestCase):
    """openGauss keyword 'deref' (non-reserved) used as a directory object name."""
    def setUp(self):
        logger.info("------------------------ Opengauss_Function_Keyword_Deref_Case0020 开始执行--------------------------")
    # Keyword as directory object name without quotes - succeeds
    def test_deref_1(self):
        SqlMdg = commonsh.execut_db_sql('''create directory deref as '/tmp/';
                            drop directory deref;''')
        logger.info(SqlMdg)
        self.assertIn(constant.CREATE_DIRECTORY_SUCCESS_MSG, SqlMdg)
        self.assertIn(constant.DROP_DIRECTORY_SUCCESS_MSG, SqlMdg)
    # Keyword as directory object name with double quotes - succeeds
    def test_deref_2(self):
        SqlMdg = commonsh.execut_db_sql('''create directory "deref" as '/tmp/';
                            drop directory "deref";''')
        logger.info(SqlMdg)
        self.assertIn(constant.CREATE_DIRECTORY_SUCCESS_MSG, SqlMdg)
        self.assertIn(constant.DROP_DIRECTORY_SUCCESS_MSG, SqlMdg)
    # Keyword as directory object name with single quotes - expects a syntax error
    def test_deref_3(self):
        SqlMdg = commonsh.execut_db_sql('''drop directory if exists 'deref';''')
        logger.info(SqlMdg)
        self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
        SqlMdg = commonsh.execut_db_sql(''' create directory 'deref' as '/tmp/';''')
        logger.info(SqlMdg)
        self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
    # Keyword as directory object name with backquotes - expects a syntax error
    def test_deref_4(self):
        SqlMdg = commonsh.execut_db_sql('''drop directory if exists \`deref\`;''')
        logger.info(SqlMdg)
        self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
        SqlMdg = commonsh.execut_db_sql('''create directory \`deref\` as '/tmp/';''')
        logger.info(SqlMdg)
        self.assertIn(constant.SYNTAX_ERROR_MSG, SqlMdg)
    def tearDown(self):
        logger.info('------------------------ Opengauss_Function_Keyword_Deref_Case0020 执行结束--------------------------')
5aa25fc509d1383aeccc9e012bd332a63c9da994 | 2,420 | py | Python | tb_paddle/proto/plugin_pr_curve_pb2.py | GT-AcerZhang/tb-paddle | a129520339f4d4e7a9bed05feb733f2565673960 | [
"MIT"
] | null | null | null | tb_paddle/proto/plugin_pr_curve_pb2.py | GT-AcerZhang/tb-paddle | a129520339f4d4e7a9bed05feb733f2565673960 | [
"MIT"
] | null | null | null | tb_paddle/proto/plugin_pr_curve_pb2.py | GT-AcerZhang/tb-paddle | a129520339f4d4e7a9bed05feb733f2565673960 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tb_paddle/proto/plugin_pr_curve.proto
# NOTE(review): machine-generated module; comments added here are lost the
# next time protoc regenerates it. Do not hand-edit the descriptors.
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database; the generated message type is registered into it.
_sym_db = _symbol_database.Default()
# File descriptor; the serialized .proto definition is embedded below.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='tb_paddle/proto/plugin_pr_curve.proto',
  package='tb_paddle',
  syntax='proto3',
  serialized_options=None,
  serialized_pb=b'\n%tb_paddle/proto/plugin_pr_curve.proto\x12\ttb_paddle\"<\n\x11PrCurvePluginData\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x16\n\x0enum_thresholds\x18\x02 \x01(\rb\x06proto3'
)
# Message descriptor for PrCurvePluginData (fields: version, num_thresholds).
_PRCURVEPLUGINDATA = _descriptor.Descriptor(
  name='PrCurvePluginData',
  full_name='tb_paddle.PrCurvePluginData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='version', full_name='tb_paddle.PrCurvePluginData.version', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_thresholds', full_name='tb_paddle.PrCurvePluginData.num_thresholds', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=52,
  serialized_end=112,
)
DESCRIPTOR.message_types_by_name['PrCurvePluginData'] = _PRCURVEPLUGINDATA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message class built from the descriptor via the reflection API.
PrCurvePluginData = _reflection.GeneratedProtocolMessageType('PrCurvePluginData', (_message.Message,), {
  'DESCRIPTOR' : _PRCURVEPLUGINDATA,
  '__module__' : 'tb_paddle.proto.plugin_pr_curve_pb2'
  # @@protoc_insertion_point(class_scope:tb_paddle.PrCurvePluginData)
  })
_sym_db.RegisterMessage(PrCurvePluginData)
# @@protoc_insertion_point(module_scope)
| 31.842105 | 194 | 0.768182 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tb_paddle/proto/plugin_pr_curve.proto
# NOTE(review): machine-generated module; comments added here are lost the
# next time protoc regenerates it. Do not hand-edit the descriptors.
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
# Default symbol database; the generated message type is registered into it.
_sym_db = _symbol_database.Default()
# File descriptor; the serialized .proto definition is embedded below.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='tb_paddle/proto/plugin_pr_curve.proto',
  package='tb_paddle',
  syntax='proto3',
  serialized_options=None,
  serialized_pb=b'\n%tb_paddle/proto/plugin_pr_curve.proto\x12\ttb_paddle\"<\n\x11PrCurvePluginData\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x16\n\x0enum_thresholds\x18\x02 \x01(\rb\x06proto3'
)
# Message descriptor for PrCurvePluginData (fields: version, num_thresholds).
_PRCURVEPLUGINDATA = _descriptor.Descriptor(
  name='PrCurvePluginData',
  full_name='tb_paddle.PrCurvePluginData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='version', full_name='tb_paddle.PrCurvePluginData.version', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='num_thresholds', full_name='tb_paddle.PrCurvePluginData.num_thresholds', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=52,
  serialized_end=112,
)
DESCRIPTOR.message_types_by_name['PrCurvePluginData'] = _PRCURVEPLUGINDATA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Concrete message class built from the descriptor via the reflection API.
PrCurvePluginData = _reflection.GeneratedProtocolMessageType('PrCurvePluginData', (_message.Message,), {
  'DESCRIPTOR' : _PRCURVEPLUGINDATA,
  '__module__' : 'tb_paddle.proto.plugin_pr_curve_pb2'
  # @@protoc_insertion_point(class_scope:tb_paddle.PrCurvePluginData)
  })
_sym_db.RegisterMessage(PrCurvePluginData)
# @@protoc_insertion_point(module_scope)
| 0 | 0 | 0 |
86d707ed7305b29df7db5be158818cd0fd66581d | 821 | py | Python | test/apis/models.py | Leo-BTC/wxserver | 388ab39a106257f0feadc54ed064c5cd5b79905c | [
"BSD-3-Clause"
] | null | null | null | test/apis/models.py | Leo-BTC/wxserver | 388ab39a106257f0feadc54ed064c5cd5b79905c | [
"BSD-3-Clause"
] | 4 | 2021-03-18T20:34:25.000Z | 2022-03-11T23:24:05.000Z | test/apis/models.py | Leo-BTC/wxserver | 388ab39a106257f0feadc54ed064c5cd5b79905c | [
"BSD-3-Clause"
] | null | null | null | from test.database import SurrogatePK, Model, db
from sqlalchemy import Column, String,DateTime, Integer
| 32.84 | 55 | 0.689403 | from test.database import SurrogatePK, Model, db
from sqlalchemy import Column, String,DateTime, Integer
class UserInfo(SurrogatePK, Model):
    """Declarative ORM model for the `user_info` table.

    Column meanings are inferred from names only (user profile plus account
    figures); confirm against the code that writes these rows.
    """
    __tablename__ = 'user_info'
    id = Column(Integer, primary_key=True)
    username = Column(String(255))
    uid = Column(String(255))
    address = Column(String(255))
    create_time = Column(DateTime)
    open_id = Column(String(255))
    avatar = Column(String(255))
    # NOTE(review): the numeric-looking fields below are stored as short
    # strings, so callers must convert before arithmetic; 'paihang' is
    # presumably pinyin for "ranking" -- TODO confirm.
    price = Column(String(10))
    number = Column(String(10))
    paihang = Column(String(255))
    cash = Column(String(10))
class TokenItem(SurrogatePK,Model):
    """Declarative ORM model for the `token_item` table.

    Field semantics are inferred from names only (per-open_id token
    holdings); 'dui'/'zhanbi' are presumably pinyin ("exchange"/
    "proportion") -- TODO confirm against the writing code.
    """
    __tablename__ = 'token_item'
    id = Column(Integer,primary_key=True)
    open_id = Column(String(255))
    tokenname = Column(String(255))
    dui = Column(String(255))
    numbers = Column(String(10))
    # Fix: removed the dataset-extraction residue ("| 0 | 671 | 45") that was
    # fused onto the end of this line and made the module unparseable.
    zhanbi = Column(String(10))
1ed4131d6a28fafd9bc79b9bf0e9b3a0df867a88 | 2,229 | py | Python | lib/surface/kuberun/core/backend_bindings/delete.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 2 | 2019-11-10T09:17:07.000Z | 2019-12-18T13:44:08.000Z | lib/surface/kuberun/core/backend_bindings/delete.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | null | null | null | lib/surface/kuberun/core/backend_bindings/delete.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 1 | 2020-07-25T01:40:19.000Z | 2020-07-25T01:40:19.000Z | # -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deletes the backend binding.
This removes the binding between the Compute
Engine backend service and your KubeRun service.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.kuberun import flags
from googlecloudsdk.command_lib.kuberun import kuberun_command
from googlecloudsdk.core import log
_DETAILED_HELP = {
'EXAMPLES':
"""
To delete a backend binding ``BACKEND_BINDING'' in the default
namespace, run:
$ {command} BACKEND_BINDING
To delete a backend binding ``BACKEND_BINDING'' in a specific namespace
``NAMESPACE'', run:
$ {command} BACKEND_BINDING --namespace=NAMESPACE
""",
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(kuberun_command.KubeRunCommand, base.DeleteCommand):
"""Deletes a backend binding."""
detailed_help = _DETAILED_HELP
flags = [flags.NamespaceFlag(), flags.ClusterConnectionFlags()]
@classmethod
| 32.304348 | 79 | 0.729475 | # -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deletes the backend binding.
This removes the binding between the Compute
Engine backend service and your KubeRun service.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.kuberun import flags
from googlecloudsdk.command_lib.kuberun import kuberun_command
from googlecloudsdk.core import log
_DETAILED_HELP = {
'EXAMPLES':
"""
To delete a backend binding ``BACKEND_BINDING'' in the default
namespace, run:
$ {command} BACKEND_BINDING
To delete a backend binding ``BACKEND_BINDING'' in a specific namespace
``NAMESPACE'', run:
$ {command} BACKEND_BINDING --namespace=NAMESPACE
""",
}
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(kuberun_command.KubeRunCommand, base.DeleteCommand):
  """Deletes a backend binding."""
  detailed_help = _DETAILED_HELP
  # Flags shared by kuberun surfaces: optional --namespace plus the cluster
  # connection flag group.
  flags = [flags.NamespaceFlag(), flags.ClusterConnectionFlags()]
  @classmethod
  def Args(cls, parser):
    """Registers the positional BACKEND_BINDING argument on top of base flags."""
    super(Delete, cls).Args(parser)
    parser.add_argument(
        'backend_binding',
        help="""Name of the backend binding to delete. This name
        is the same as the Compute Engine backend service.""")
  def BuildKubeRunArgs(self, args):
    """Returns kuberun CLI args: the binding name first, then inherited args."""
    return [args.backend_binding] + super(Delete, self).BuildKubeRunArgs(args)
  def Command(self):
    """Returns the kuberun subcommand path this CLI surface maps to."""
    return ['core', 'backend-bindings', 'delete']
  def SuccessResult(self, out, args):
    """Logs the standard 'deleted resource' message on success."""
    log.DeletedResource(args.backend_binding, 'backend binding')
| 434 | 0 | 99 |
92275fbfaf4b2e6880e64b5603bf60d9206465dc | 1,944 | py | Python | tests/test_node.py | vvolkl/yadage | bd34a5a1d7d06f7dd3917af2af8badd5af3f195d | [
"MIT"
] | null | null | null | tests/test_node.py | vvolkl/yadage | bd34a5a1d7d06f7dd3917af2af8badd5af3f195d | [
"MIT"
] | null | null | null | tests/test_node.py | vvolkl/yadage | bd34a5a1d7d06f7dd3917af2af8badd5af3f195d | [
"MIT"
] | null | null | null | import pytest
from yadage.wflownode import YadageNode
from yadage.tasks import packtivity_task
from yadage.controllers import YadageController
| 41.361702 | 116 | 0.783436 | import pytest
from yadage.wflownode import YadageNode
from yadage.tasks import packtivity_task
from yadage.controllers import YadageController
def test_create(basic_packtivity_spec,localfs_state):
    """A YadageNode can be constructed from a freshly built packtivity task."""
    step = packtivity_task('myname',basic_packtivity_spec,localfs_state)
    node = YadageNode('myname',step,'identiifer')
    # Fix: the node was built but never checked, so a constructor regression
    # could not fail this test; assert construction actually succeeded.
    assert isinstance(node, YadageNode)
def test_result_prepub(basic_packtivity_spec,localfs_state):
    """Prepublished results are exposed via has_result/result/readfromresult."""
    step = packtivity_task('myname',basic_packtivity_spec,localfs_state, {'outputfile': 'world', 'par': 'value'})
    node = YadageNode('myname',step,'identiifer')
    c = YadageController(None)
    node.expected_result = c.prepublishing_backend.prepublish(
        node.task.spec, node.task.parameters.json(), node.task.state
    )
    assert node.has_result() == True
    assert node.result == node.expected_result
    # Fix: these two comparisons discarded their result (missing `assert`),
    # so they could never fail; they now check what was clearly intended.
    assert node.readfromresult('') == node.result
    assert node.readfromresult('/outputfile') == node.result['outputfile']
    another_step = packtivity_task('another',basic_packtivity_spec,localfs_state)
    # Passing an inputs list registers the read as a reference on that step.
    node.readfromresult('/outputfile',another_step.inputs)
    assert another_step.inputs[-1].stepid == node.identifier
    assert another_step.inputs[-1].pointer.path == '/outputfile'
def test_serialize_deserialize(basic_packtivity_spec,localfs_state):
    """JSON round-trip of a packtivity task is lossless."""
    step = packtivity_task('myname',basic_packtivity_spec,localfs_state)
    # Fix: the round-trip comparison was missing `assert`, so the test body
    # computed the equality and threw the result away.
    assert packtivity_task.fromJSON(step.json()).json() == step.json()
def test_noresult(dynamic_packtivity_spec,localfs_state):
    """Without a prepublished result, reads fail loudly unless failsilently."""
    step = packtivity_task('myname', dynamic_packtivity_spec, localfs_state, {'localname': 'hello', 'source': 'world'})
    node = YadageNode('myname',step,'identiifer')
    assert node.has_result() == False
    # Fix: missing `assert` made this a no-op comparison; `is None` is the
    # idiomatic (and equivalent) check for the silent-failure path.
    assert node.readfromresult('', failsilently = True) is None
    with pytest.raises(RuntimeError):
        # The bare call is enough: the former `== None` comparison was dead
        # code, since readfromresult raises before it is evaluated.
        node.readfromresult('')
def test_repr(basic_packtivity_spec,localfs_state):
    """Smoke test: repr() of a node must be a non-empty string."""
    task = packtivity_task('myname',basic_packtivity_spec,localfs_state, {'outputfile': 'world', 'par': 'value'})
    wrapped = YadageNode('myname',task,'identiifer')
    assert repr(wrapped)
| 1,685 | 0 | 115 |
4599e2f4107b772d2bca009bf21832d27b79a475 | 4,937 | py | Python | resource_helper.py | uhh-lt/semeval2019-hhmm | b746b0fb8ab3b957d399276cb354e950f0ef30ed | [
"Apache-2.0"
] | null | null | null | resource_helper.py | uhh-lt/semeval2019-hhmm | b746b0fb8ab3b957d399276cb354e950f0ef30ed | [
"Apache-2.0"
] | null | null | null | resource_helper.py | uhh-lt/semeval2019-hhmm | b746b0fb8ab3b957d399276cb354e950f0ef30ed | [
"Apache-2.0"
] | null | null | null | import numpy as np
from elmo import load_elmo_context_embeddings
from word_embeddings import get_w2v_word_embeddings, get_elmo_word_embeddings, get_w2v_multiword_embeddings
from word2vec import get_w2v_context_embeddings_Default
from utils import task_to_df
import pathlib
# ---------------------------------------
# ---------------------------------------
import ud2csv
from utils import df_to_csv, csv_to_df
# ---------------------------------------
# dump elmo default layer for contexts, google word2vec, tfidf matrix
from elmo import dump_elmo_context_embeddings
from word2vec import load_w2v_model, load_tfidf
# ------------------------------------------------------
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-w2v', '--w2v_file', help="path to GoogleNews-vectors-negative300.bin?",
default='./input/models/GoogleNews-vectors-negative300.bin')
args = parser.parse_args()
write_input_CSVs()
write_vectors()
dump_models_resources(args.w2v_file) | 37.976923 | 107 | 0.63095 | import numpy as np
from elmo import load_elmo_context_embeddings
from word_embeddings import get_w2v_word_embeddings, get_elmo_word_embeddings, get_w2v_multiword_embeddings
from word2vec import get_w2v_context_embeddings_Default
from utils import task_to_df
import pathlib
# ---------------------------------------
def write_vectors():
    """Dump every embedding variant for tasks 1/2.2 on dev+test as .npy files.

    Output layout: vectors/<dataset>/task<task>_<tag>.npy
    """
    out_root = 'vectors'
    for task in ['1', '22']:
        for dataset in ['dev', 'test']:
            pathlib.Path('{}/{}'.format(out_root, dataset)).mkdir(parents=True, exist_ok=True)
            print(dataset,'task',task)
            df = task_to_df(task, dataset)

            def _dump(tag, matrix):
                """Save one embedding matrix under the shared path pattern."""
                np.save('{}/{}/task{}_{}.npy'.format(out_root, dataset, task, tag), matrix)

            _dump('w2v_context', get_w2v_context_embeddings_Default(task, dataset))
            _dump('elmo_word', get_elmo_word_embeddings(df['word']))
            _dump('elmo_context', load_elmo_context_embeddings(task, dataset))
            if task == '1':
                _dump('w2v_word', get_w2v_word_embeddings(df['word']))
            else:
                _dump('w2v_word', get_w2v_multiword_embeddings(df['word']))
                # NOTE(review): both 'verb' dumps below use the ELMo word
                # embedder, exactly as in the original code -- the 'w2v_verb'
                # file therefore does not contain word2vec vectors; confirm
                # whether that is intended.
                _dump('w2v_verb', get_elmo_word_embeddings(df['verb']))
                _dump('elmo_verb', get_elmo_word_embeddings(df['verb']))
# ---------------------------------------
import ud2csv
from utils import df_to_csv, csv_to_df
def write_input_CSVs():
    """Convert the SemEval UD/task files into the CSV inputs under ./input.

    The dev split additionally gets a plain-sentence dump that is used later
    to fit the tfidf model. (Task 2.1 inputs are intentionally not processed.)
    """
    data_root = "./semeval_data"
    ud_test = data_root + "/dep-stx/pos-gold-dep-auto.conll.txt"
    task1_test = data_root + "/test/task-1.txt"
    task22_test = data_root + "/test/task-2.2.txt"
    ud_dev = data_root + "/dep-stx/pos-gold-dep-auto.conll.txt"
    task1_dev = data_root + "/dev/task-1.txt"
    task22_dev = data_root + "/dev/task-2.2.txt"
    # --- dev split -------------------------------------------------------
    all_sentences_dev = './input/models/sentences_dev.txt'
    print('writing sentences for dev')
    ud2csv.ud_sentences_to_file(ud_dev, all_sentences_dev)
    print('writing csvs for dev')
    csv_dev = './input/train_task1_dev.csv'
    csv_gd_dev = './input/gd_task1_dev.csv'
    ud2csv.task1_to_csv(task1_dev, ud_dev, csv_dev)
    ud2csv.task1_to_csv_gd(task1_dev, ud_dev, csv_gd_dev)
    csv_task22_dev = './input/train_task22_dev.csv'
    ud2csv.task22_to_csv(task22_dev, ud_dev, csv_task22_dev)
    csv_gr_dev = './input/all_grammaticalLabels_dev.csv'
    df_task22 = ud2csv.task22_to_df_withFrameArgsDependencies(task22_dev, ud_dev)
    df_to_csv(df_task22, csv_gr_dev)
    # --- test split ------------------------------------------------------
    print('writing csvs for test')
    csv_test = './input/train_task1_test.csv'
    csv_gd_test = './input/gd_task1_test.csv'
    ud2csv.task1_to_csv(task1_test, ud_test, csv_test)
    ud2csv.task1_to_csv_gd(task1_test, ud_test, csv_gd_test)
    csv_task22_test = './input/train_task22_test.csv'
    ud2csv.task22_to_csv(task22_test, ud_test, csv_task22_test)
    csv_gr_test = './input/all_grammaticalLabels_test.csv'
    df_task22 = ud2csv.task22_to_df_withFrameArgsDependencies(task22_test, ud_test)
    df_to_csv(df_task22, csv_gr_test)
# ---------------------------------------
# dump elmo default layer for contexts, google word2vec, tfidf matrix
from elmo import dump_elmo_context_embeddings
from word2vec import load_w2v_model, load_tfidf
def dump_models_resources(w2v_file='./input/models/GoogleNews-vectors-negative300.bin'):
    """Pre-compute the heavyweight cached resources (ELMo dumps, w2v, tfidf).

    `w2v_file` is the path to the GoogleNews binary word2vec model on disk.
    """
    for task in ['1', '22']:
        for dataset in ['dev', 'test']:
            dump_elmo_context_embeddings(task, dataset)
    # Loading the (un-normalized) word2vec model also provides the vocabulary
    # that the tfidf fit is restricted to.
    embeddings = load_w2v_model(w2v_file, normalized_w2v=False)
    load_tfidf('./input/models/sentences_dev.txt', embeddings.index2word)
# ------------------------------------------------------
import argparse

# Fix: (1) stripped the dataset-extraction residue that was fused onto the
# final call, and (2) guarded the CLI driver so merely importing this module
# no longer parses sys.argv and regenerates every resource as a side effect.
# Behavior when executed as a script is unchanged.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('-w2v', '--w2v_file', help="path to GoogleNews-vectors-negative300.bin?",
                        default='./input/models/GoogleNews-vectors-negative300.bin')
    args = parser.parse_args()
    write_input_CSVs()
    write_vectors()
    dump_models_resources(args.w2v_file)
38ad908910b5dc2d1d06360a876eba1b26e44fa3 | 2,495 | py | Python | bounca/certificate_engine/ssl/key.py | warthog9/bounca | f83a372fcfa6e9874c81c785fd0ebdb49842eba3 | [
"Apache-2.0"
] | null | null | null | bounca/certificate_engine/ssl/key.py | warthog9/bounca | f83a372fcfa6e9874c81c785fd0ebdb49842eba3 | [
"Apache-2.0"
] | null | null | null | bounca/certificate_engine/ssl/key.py | warthog9/bounca | f83a372fcfa6e9874c81c785fd0ebdb49842eba3 | [
"Apache-2.0"
] | null | null | null |
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
| 32.402597 | 118 | 0.622044 |
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey
class Key(object):
def __init__(self) -> None:
self._key = None # type: RSAPrivateKey
@property
def key(self) -> RSAPrivateKey:
return self._key
def create_key(self, key_size: int) -> 'Key':
"""
Create a public/private key pair.
Arguments: key_size - Number of bits to use in the key
Returns: The private key
"""
self._key = rsa.generate_private_key(
public_exponent=65537,
key_size=key_size,
backend=default_backend()
)
return self
def serialize(self, passphrase: bytes=None, encoding: str=serialization.Encoding.PEM) -> bytes:
"""
Serialize key
Arguments: path - filename with relative path
passphrase - optional passphrase (must be bytes)
encoding - optional different encoding
Returns: bytes
"""
if not self._key:
raise RuntimeError("No key object")
encryption = serialization.BestAvailableEncryption(passphrase) if passphrase else serialization.NoEncryption()
return self._key.private_bytes(
encoding=encoding,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=encryption,
)
def load(self, pem: bytes, passphrase: bytes=None) -> RSAPrivateKey:
"""
Read key from pem
Arguments: pem - bytes with key
passphrase - optional passphrase (must be bytes)
Returns: Self
"""
self._key = serialization.load_pem_private_key(pem, passphrase, backend=default_backend())
return self
def check_passphrase(self, pem: bytes, passphrase: bytes=None) -> bool:
"""
Checks passphrase of a pem key file
Arguments: pem - bytes with key
passphrase - passphrase (must be bytes)
Returns: true if passphrase is ok
"""
try:
serialization.load_pem_private_key(pem, passphrase, backend=default_backend())
return True
except ValueError as e:
if str(e) == 'Bad decrypt. Incorrect password?':
return False
raise e
| 89 | 2,137 | 23 |
5f942e2d2eda86d6d8b4672dea6520b66b8eefc2 | 940 | py | Python | navigation/nav.py | artigianitecnologici/marrtino_apps | b58bf4daa1d06db2f1c8a47be02b29948d41f48d | [
"BSD-4-Clause"
] | null | null | null | navigation/nav.py | artigianitecnologici/marrtino_apps | b58bf4daa1d06db2f1c8a47be02b29948d41f48d | [
"BSD-4-Clause"
] | null | null | null | navigation/nav.py | artigianitecnologici/marrtino_apps | b58bf4daa1d06db2f1c8a47be02b29948d41f48d | [
"BSD-4-Clause"
] | null | null | null | import sys,os,time
import argparse
sys.path.append(os.getenv("MARRTINO_APPS_HOME")+"/program")
import robot_cmd_ros
from robot_cmd_ros import *
robot_cmd_ros.use_audio = False
robot_cmd_ros.tv_good = 0.5
import move
# main
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='navigator')
parser.add_argument('path', type=str, help='File with path to run')
args = parser.parse_args()
begin(nodename='navigator')
enableObstacleAvoidance(True)
r = do_path(args.path)
print("Path completed: %r" %r)
end()
| 18.431373 | 71 | 0.596809 | import sys,os,time
import argparse
sys.path.append(os.getenv("MARRTINO_APPS_HOME")+"/program")
import robot_cmd_ros
from robot_cmd_ros import *
robot_cmd_ros.use_audio = False
robot_cmd_ros.tv_good = 0.5
import move
def moveTo(px,py,pth=1001):
return move.do_move([px,py,pth])
def do_path(filename):
with open(filename) as f:
l = f.readline()
r = True
while r and l!='':
v = l.split("#")
p = v[0].strip()
if len(p)>0:
print(p)
r = eval(p)
l = f.readline()
return r
# main
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='navigator')
parser.add_argument('path', type=str, help='File with path to run')
args = parser.parse_args()
begin(nodename='navigator')
enableObstacleAvoidance(True)
r = do_path(args.path)
print("Path completed: %r" %r)
end()
| 326 | 0 | 46 |
f741832281584f42536b01d706feac7af6edef4a | 1,421 | py | Python | ui/middleware.py | ove/ove-asset-manager | 34b20ba8b436a5fe5c1561e0c5d98f171a37193f | [
"MIT"
] | null | null | null | ui/middleware.py | ove/ove-asset-manager | 34b20ba8b436a5fe5c1561e0c5d98f171a37193f | [
"MIT"
] | 191 | 2019-03-01T14:00:57.000Z | 2021-06-06T23:01:57.000Z | ui/middleware.py | ove/ove-asset-manager | 34b20ba8b436a5fe5c1561e0c5d98f171a37193f | [
"MIT"
] | 1 | 2020-01-13T13:07:49.000Z | 2020-01-13T13:07:49.000Z | import logging
from typing import Set
import falcon
from common.consts import HTTP_WRITE_METHODS
from common.falcon_utils import auth_token
from common.util import is_public
from ui import BackendController
| 37.394737 | 134 | 0.718508 | import logging
from typing import Set
import falcon
from common.consts import HTTP_WRITE_METHODS
from common.falcon_utils import auth_token
from common.util import is_public
from ui import BackendController
class ContentTypeValidator:
    """Falcon middleware that enforces a request Content-Type on write methods."""
    def process_resource(self, req: falcon.Request, _resp: falcon.Response, resource, _params):
        """Rejects write requests whose Content-Type does not contain the
        resource's declared `content_type` (form-urlencoded by default)."""
        if req.method in HTTP_WRITE_METHODS:
            # Resources may opt out by declaring a falsy content_type.
            content_type = getattr(resource, 'content_type', 'application/x-www-form-urlencoded')
            # NOTE(review): req.content_type may be None for body-less write
            # requests, which would make `in` raise TypeError here -- confirm
            # upstream guarantees before relying on this path.
            if content_type and content_type not in req.content_type:
                raise falcon.HTTPUnsupportedMediaType(description="This API only supports requests encoded as '" + content_type + "'")
class LoginValidator:
    """Falcon middleware that redirects unauthenticated requests to login.

    Requests whose path matches `public_paths` pass through; all others must
    carry an auth token, which is exchanged for user info on the response.
    """
    def __init__(self, backend: BackendController, login_path: str, public_paths: Set[str] = None):
        self.login_path = login_path
        # The login page itself must always be reachable without a token.
        # Note: a caller-provided set is mutated here (login_path added).
        self.public_paths = public_paths if public_paths else set()
        self.public_paths.add(login_path)
        self._backend = backend
    def process_resource(self, req: falcon.Request, resp: falcon.Response, _resource, _params):
        """Raises HTTPSeeOther (redirect to login) when no auth token is present."""
        if is_public(req.path, self.public_paths):
            logging.debug("This is a public resource which does not need a valid token")
            return
        token = auth_token(req)
        if not token:
            raise falcon.HTTPSeeOther(self.login_path)
        # NOTE(review): user_info is called on every authenticated request;
        # a backend failure here propagates as-is -- confirm that is desired.
        resp.auth_user = self._backend.user_info(auth_token=token)
| 1,079 | 6 | 125 |
66e969fdbb46cfaab3e7d865800aab8167144933 | 5,063 | py | Python | libbmp085/bmp085.py | Taur-Tech/BMP085 | 7946db91c93c4c392cbc6b96cee37d18723c89a7 | [
"Apache-2.0"
] | null | null | null | libbmp085/bmp085.py | Taur-Tech/BMP085 | 7946db91c93c4c392cbc6b96cee37d18723c89a7 | [
"Apache-2.0"
] | null | null | null | libbmp085/bmp085.py | Taur-Tech/BMP085 | 7946db91c93c4c392cbc6b96cee37d18723c89a7 | [
"Apache-2.0"
] | null | null | null | '''
Copyright 2015 Stefan Andrei Chelariu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import smbus
import math
from time import sleep
| 38.946154 | 202 | 0.677266 | '''
Copyright 2015 Stefan Andrei Chelariu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import smbus
import math
from time import sleep
class BMP085:
    """Driver for the Bosch BMP085 pressure/temperature sensor over I2C (SMBus).

    The conversions below implement the sensor's integer compensation
    algorithm; the magic constants come from that algorithm.
    """
    # Filled in by __init__: SMBus handle and calibration coefficient dict.
    bus = None
    cal_data = None
    DEV_ADDR = 0x77  # I2C address of the BMP085
    T_READ = 0x2E  # control value that starts a temperature conversion
    P_READ = 0x34  # control value that starts a pressure conversion
    factor = None  # intermediate set by get_temperature(), used by get_pressure()
    oss = 0  # oversampling setting; shifted into bits 6-7 of the control value
    '''
    *Register Map
    '''
    # Calibration coefficient registers (11 big-endian 16-bit words).
    AC1_MSB = 0xAA
    AC1_LSB = 0xAB
    AC2_MSB = 0xAC
    AC2_LSB = 0xAD
    AC3_MSB = 0xAE
    AC3_LSB = 0xAF
    AC4_MSB = 0xB0
    AC4_LSB = 0xB1
    AC5_MSB = 0xB2
    AC5_LSB = 0xB3
    AC6_MSB = 0xB4
    AC6_LSB = 0xB5
    B1_MSB = 0xB6
    B1_LSB = 0xB7
    B2_MSB = 0xB8
    B2_LSB = 0xB9
    MB_MSB = 0xBA
    MB_LSB = 0xBB
    MC_MSB = 0xBC
    MC_LSB = 0xBD
    MD_MSB = 0xBE
    MD_LSB = 0xBF
    # Control register and conversion result registers.
    BUF_RD = 0xF4
    BUF_MSB = 0xF6
    BUF_LSB = 0xF7
    BUF_XLSB= 0xF8
    def _format_data(self, num):
        """Reinterpret a raw 16-bit register value as a signed integer."""
        tmp = (2**16) - 1
        if num&(1<<15):
            # Sign bit set: extend it into Python's unbounded int.
            return num | ~tmp
        else:
            return num & tmp
    def _read_cal_data(self):
        """Read all calibration coefficients as raw unsigned 16-bit values."""
        return {
            'ac1' : self.bus.read_byte_data(self.DEV_ADDR, self.AC1_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.AC1_LSB),
            'ac2' : self.bus.read_byte_data(self.DEV_ADDR, self.AC2_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.AC2_LSB),
            'ac3' : self.bus.read_byte_data(self.DEV_ADDR, self.AC3_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.AC3_LSB),
            'ac4' : self.bus.read_byte_data(self.DEV_ADDR, self.AC4_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.AC4_LSB),
            'ac5' : self.bus.read_byte_data(self.DEV_ADDR, self.AC5_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.AC5_LSB),
            'ac6' : self.bus.read_byte_data(self.DEV_ADDR, self.AC6_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.AC6_LSB),
            'b1' : self.bus.read_byte_data(self.DEV_ADDR, self.B1_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.B1_LSB),
            'b2' : self.bus.read_byte_data(self.DEV_ADDR, self.B2_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.B2_LSB),
            'mb' : self.bus.read_byte_data(self.DEV_ADDR, self.MB_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.MB_LSB),
            'mc' : self.bus.read_byte_data(self.DEV_ADDR, self.MC_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.MC_LSB),
            'md' : self.bus.read_byte_data(self.DEV_ADDR, self.MD_MSB) << 8 | self.bus.read_byte_data(self.DEV_ADDR, self.MD_LSB)
        }
    def _get_raw_temp(self):
        """Trigger a temperature conversion and return the raw 16-bit reading."""
        self.bus.write_byte_data(self.DEV_ADDR, self.BUF_RD, self.T_READ)
        sleep(0.005)
        return self.bus.read_byte_data(self.DEV_ADDR, self.BUF_MSB) <<8 | self.bus.read_byte_data(self.DEV_ADDR, self.BUF_LSB)
    def _get_raw_pres(self):
        """Trigger a pressure conversion and return the raw (oss-scaled) reading."""
        self.bus.write_byte_data(self.DEV_ADDR, self.BUF_RD, self.P_READ + (self.oss<<6))
        # NOTE(review): a fixed 5 ms wait looks sized for oss == 0; higher
        # oversampling settings presumably need a longer conversion time --
        # confirm before raising self.oss above 0.
        sleep(0.005)
        return (self.bus.read_byte_data(self.DEV_ADDR, self.BUF_MSB) << 16 | self.bus.read_byte_data(self.DEV_ADDR, self.BUF_LSB) <<8 | self.bus.read_byte_data(self.DEV_ADDR, self.BUF_XLSB)) >> (8 - self.oss)
    def get_temperature(self):
        """Return the compensated temperature in degrees Celsius.

        Also caches the shared intermediate in self.factor, which
        get_pressure() depends on.
        """
        _a = (self._get_raw_temp() - self.cal_data['ac6'])*self.cal_data['ac5']/math.pow(2,15)
        _b = self.cal_data['mc']*math.pow(2,11)/(_a + self.cal_data['md'])
        self.factor = _a + _b
        return ((_a + _b + 8)/math.pow(2,4))/10
    def get_pressure(self):
        """Return the compensated pressure.

        Must be called after get_temperature(): self.factor is None until
        that method has run, and the first line below fails otherwise.
        """
        _k1 = self.factor -4000
        _a = (self.cal_data['b2']*math.pow(_k1,2)/math.pow(2,12))/math.pow(2,11)
        _b = self.cal_data['ac2']*_k1/math.pow(2,11)
        _c = _a + _b
        _k2 = ((self.cal_data['ac1']*4 + _c) + 2)/4
        _a = self.cal_data['ac3']*_k1/math.pow(2,13)
        _b = (self.cal_data['b1']*(math.pow(_k1,2)/math.pow(2,12)))/math.pow(2,16)
        _c = ((_a + _b) + 2)/math.pow(2,2)
        _k3 = self.cal_data['ac4']*(int((_c + 32768))&0xffffffff)/math.pow(2,15)
        _k4 = ((self._get_raw_pres()&0xffffffff) - _k2)*50000
        if _k4 < 0x80000000:
            _p = (_k4*2)/_k3
        else:
            _p = (_k4/_k3)*2
        # Final second-order correction of the pressure value.
        _a = math.pow((_p/math.pow(2,8)),2)
        _a = (_a*3038)/math.pow(2,16)
        _b = (-7357*_p)/math.pow(2,16)
        _p = _p + (_a + _b + 3791)/math.pow(2,4)
        return _p
    def __init__(self, i2c_module):
        """Open SMBus `i2c_module` and load + sign-correct the calibration data.

        ac4/ac5/ac6 deliberately stay unsigned; the rest are converted to
        signed 16-bit values via _format_data.
        """
        self.bus = smbus.SMBus(i2c_module)
        self.cal_data = self._read_cal_data()
        self.cal_data['ac1'] = self._format_data(self.cal_data['ac1'])
        self.cal_data['ac2'] = self._format_data(self.cal_data['ac2'])
        self.cal_data['ac3'] = self._format_data(self.cal_data['ac3'])
        self.cal_data['b1'] = self._format_data(self.cal_data['b1'])
        self.cal_data['b2'] = self._format_data(self.cal_data['b2'])
        self.cal_data['mb'] = self._format_data(self.cal_data['mb'])
        self.cal_data['mc'] = self._format_data(self.cal_data['mc'])
        self.cal_data['md'] = self._format_data(self.cal_data['md'])
        #TODO: check if cal_data is valid
| 3,703 | 722 | 23 |
97defb1cfdf65f73f647002d113b953cb409a5d7 | 261 | py | Python | example/test/core/camera/flat/orthogonal/unit.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | 2 | 2020-09-04T12:27:15.000Z | 2022-01-17T14:49:40.000Z | example/test/core/camera/flat/orthogonal/unit.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | null | null | null | example/test/core/camera/flat/orthogonal/unit.py | dmilos/IceRay | 4e01f141363c0d126d3c700c1f5f892967e3d520 | [
"MIT-0"
] | 1 | 2020-09-04T12:27:52.000Z | 2020-09-04T12:27:52.000Z | import IceRayCpp
| 23.727273 | 62 | 0.681992 | import IceRayCpp
def name():
    """Return the identifier of this example camera configuration."""
    return "camera_flat_orthogonal"
def make(P_width=2, P_height=2):
    """Construct an orthogonal flat camera and wrap it in the example dict.

    Width and height are taken directly by the constructor; the older
    setter-style calls were already commented out in the original example.
    """
    camera_obj = IceRayCpp.CameraFlatOrthogonal(P_width, P_height)
    return {'this': camera_obj}
| 193 | 0 | 50 |
2c5aa3fd4f720680737ff39525b00221448ebd9a | 8,239 | py | Python | chat_bot.py | akhil-2907/healthcare-chatbot | 7f454b234d62fad5bd61aeac7678d15661e55843 | [
"Apache-2.0"
] | null | null | null | chat_bot.py | akhil-2907/healthcare-chatbot | 7f454b234d62fad5bd61aeac7678d15661e55843 | [
"Apache-2.0"
] | null | null | null | chat_bot.py | akhil-2907/healthcare-chatbot | 7f454b234d62fad5bd61aeac7678d15661e55843 | [
"Apache-2.0"
] | null | null | null |
import pandas as pd
import pyttsx3
from sklearn import preprocessing
from sklearn.tree import DecisionTreeClassifier,_tree
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC
import csv
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
training = pd.read_csv('Training.csv')
testing= pd.read_csv('Testing.csv')
cols= training.columns
cols= cols[:-1]
x = training[cols]
y = training['prognosis']
y1= y
reduced_data = training.groupby(training['prognosis']).max()
#mapping strings to numbers
le = preprocessing.LabelEncoder()
le.fit(y)
y = le.transform(y)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=42)
testx = testing[cols]
testy = testing['prognosis']
testy = le.transform(testy)
clf1 = DecisionTreeClassifier()
clf = clf1.fit(x_train,y_train)
# print(clf.score(x_train,y_train))
# print ("cross result========")
scores = cross_val_score(clf, x_test, y_test, cv=3)
# print (scores)
print (scores.mean())
model=SVC()
model.fit(x_train,y_train)
print("for svm: ")
print(model.score(x_test,y_test))
importances = clf.feature_importances_
indices = np.argsort(importances)[::-1]
features = cols
severityDictionary=dict()
description_list = dict()
precautionDictionary=dict()
symptoms_dict = {}
for index, symptom in enumerate(x):
symptoms_dict[symptom] = index
getSeverityDict()
getDescription()
getprecautionDict()
getInfo()
tree_to_code(clf,cols)
| 28.707317 | 93 | 0.595825 |
import pandas as pd
import pyttsx3
from sklearn import preprocessing
from sklearn.tree import DecisionTreeClassifier,_tree
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC
import csv
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
# --- Module-level setup: load the symptom/prognosis datasets, encode the
# --- labels, and train the two models used by the interactive diagnosis.
training = pd.read_csv('Training.csv')
testing= pd.read_csv('Testing.csv')
# All columns except the last ('prognosis') are symptom features.
cols= training.columns
cols= cols[:-1]
x = training[cols]
y = training['prognosis']
y1= y
# One row per disease with the max of each symptom column; used later to
# look up which symptoms are associated with a predicted disease.
reduced_data = training.groupby(training['prognosis']).max()
#mapping strings to numbers
le = preprocessing.LabelEncoder()
le.fit(y)
y = le.transform(y)
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=42)
testx = testing[cols]
testy = testing['prognosis']
testy = le.transform(testy)
# Primary model: decision tree (its structure drives the Q&A session).
clf1 = DecisionTreeClassifier()
clf = clf1.fit(x_train,y_train)
# print(clf.score(x_train,y_train))
# print ("cross result========")
scores = cross_val_score(clf, x_test, y_test, cv=3)
# print (scores)
print (scores.mean())
# Secondary model: SVM, only evaluated here for its accuracy score.
model=SVC()
model.fit(x_train,y_train)
print("for svm: ")
print(model.score(x_test,y_test))
# Feature importances sorted descending (computed but unused below).
importances = clf.feature_importances_
indices = np.argsort(importances)[::-1]
features = cols
def readn(nstr):
    # Speak `nstr` aloud via pyttsx3 text-to-speech (blocking until done).
    # Currently unused: the call sites below are commented out.
    engine = pyttsx3.init()
    engine.setProperty('voice', "english+f5")
    engine.setProperty('rate', 130)
    engine.say(nstr)
    engine.runAndWait()
    engine.stop()
severityDictionary=dict()
description_list = dict()
precautionDictionary=dict()
symptoms_dict = {}
for index, symptom in enumerate(x):
symptoms_dict[symptom] = index
def calc_condition(exp, days):
    """Print a rough severity verdict for the reported symptoms.

    exp  -- list of confirmed symptom names (keys of the module-level
            severityDictionary)
    days -- number of days the symptoms have persisted
    """
    # Use the built-in sum() instead of shadowing it with a local `sum`
    # variable as the original did.
    severity_total = sum(severityDictionary[item] for item in exp)
    # Heuristic threshold from the original author; the +1 avoids a
    # ZeroDivisionError when no symptoms were confirmed.
    if (severity_total * days) / (len(exp) + 1) > 13:
        print("You should take the consultation from doctor. ")
    else:
        print("It might not be that bad but you should take precautions.")
def getDescription():
    """Populate the module-level description_list dict from
    symptom_Description.csv (rows of: disease, description)."""
    global description_list
    with open('symptom_Description.csv') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        for row in csv_reader:
            # Skip blank/malformed rows instead of raising IndexError;
            # assign directly rather than building a throwaway one-entry
            # dict per row as the original did (the unused `line_count`
            # counter is also gone).
            if len(row) >= 2:
                description_list[row[0]] = row[1]
def getSeverityDict():
    """Populate the module-level severityDictionary from
    symptom_severity.csv (rows of: symptom, integer severity)."""
    global severityDictionary
    with open('symptom_severity.csv') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        for row in csv_reader:
            # The original wrapped the whole loop in a bare `except`, so a
            # single malformed row silently discarded every row after it.
            # Catch only the expected parse errors, per row, and keep going.
            try:
                severityDictionary[row[0]] = int(row[1])
            except (IndexError, ValueError):
                continue
def getprecautionDict():
    """Populate the module-level precautionDictionary from
    symptom_precaution.csv (rows of: disease, precaution1..precaution4)."""
    global precautionDictionary
    with open('symptom_precaution.csv') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        for row in csv_reader:
            # Guard against short/blank rows instead of raising IndexError;
            # assign directly (no per-row temp dict, no unused line_count).
            if len(row) >= 5:
                precautionDictionary[row[0]] = [row[1], row[2], row[3], row[4]]
def getInfo():
    """Prompt for the user's name on stdin and print a greeting."""
    print("Your Name \n\t\t\t\t\t\t", end="->")
    user_name = input("")
    print("hello ", user_name)
def check_pattern(dis_list, inp):
    """Find entries of dis_list that match the user's input.

    Returns (1, matches) where matches is the list of entries that `inp`
    matches as a regex search, or (0, []) if nothing matches.

    Fixes over the original:
    * the no-match branch returned the loop variable `item` -- an
      arbitrary entry, and a NameError when dis_list was empty; callers
      ignore the payload on failure, so (0, []) is returned instead;
    * input that is an invalid regex (e.g. "(") no longer raises
      re.error -- it falls back to a plain substring test;
    * the dead `patt` variable is removed.
    """
    import re
    try:
        regexp = re.compile(inp)
        matches = [item for item in dis_list if regexp.search(item)]
    except re.error:
        matches = [item for item in dis_list if inp in item]
    if matches:
        return 1, matches
    return 0, []
def sec_predict(symptoms_exp):
    # Second-opinion prediction: train a fresh decision tree on the full
    # training CSV and predict a disease from the list of confirmed
    # symptom names.  Note: re-reads the CSV and retrains on every call.
    df = pd.read_csv('Training.csv')
    X = df.iloc[:, :-1]
    y = df['prognosis']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=20)
    rf_clf = DecisionTreeClassifier()
    rf_clf.fit(X_train, y_train)
    # Map each symptom column name to its feature index.
    symptoms_dict = {}
    for index, symptom in enumerate(X):
        symptoms_dict[symptom] = index
    # One-hot encode the confirmed symptoms into the model's input order.
    input_vector = np.zeros(len(symptoms_dict))
    for item in symptoms_exp:
        input_vector[[symptoms_dict[item]]] = 1
    # Returns an array of predicted disease label(s).
    return rf_clf.predict([input_vector])
def print_disease(node):
    # Decode a decision-tree leaf value (per-class count vector) back into
    # disease label(s) via the module-level LabelEncoder `le`.
    #print(node)
    node = node[0]
    #print(len(node))
    # Indices of the classes with non-zero counts at this leaf.
    val = node.nonzero()
    # print(val)
    disease = le.inverse_transform(val[0])
    return disease
def tree_to_code(tree, feature_names):
    # Interactive diagnosis: walk the fitted decision tree, asking the
    # user yes/no questions at each split, and print the predicted
    # disease, its description, and precautions at the leaf.  Relies on
    # the module-level globals: reduced_data, description_list,
    # precautionDictionary, le (via print_disease), and the helper
    # functions check_pattern/sec_predict/calc_condition.
    tree_ = tree.tree_
    # print(tree_)
    # Map each internal node's feature index to its symptom name.
    feature_name = [
        feature_names[i] if i != _tree.TREE_UNDEFINED else "undefined!"
        for i in tree_.feature
    ]
    chk_dis=",".join(feature_names).split(",")
    symptoms_present = []
    # conf_inp=int()
    # Keep prompting until the user enters something matching a known
    # symptom; on multiple matches let them pick one by index.
    while True:
        print("Enter the symptom you are experiencing \n\t\t\t\t\t\t",end="->")
        disease_input = input("")
        conf,cnf_dis=check_pattern(chk_dis,disease_input)
        if conf==1:
            print("searches related to input: ")
            for num,it in enumerate(cnf_dis):
                print(num,")",it)
            if num!=0:
                print(f"Select the one you meant (0 - {num}): ", end="")
                conf_inp = int(input(""))
            else:
                conf_inp=0
            disease_input=cnf_dis[conf_inp]
            break
            # print("Did you mean: ",cnf_dis,"?(yes/no) :",end="")
            # conf_inp = input("")
            # if(conf_inp=="yes"):
            #     break
        else:
            print("Enter valid symptom.")
    # Ask how long the symptoms have lasted (retry on non-integer input).
    while True:
        try:
            num_days=int(input("Okay. From how many days ? : "))
            break
        except:
            print("Enter number of days.")
    def recurse(node, depth):
        # Descend the tree: treat the user's named symptom as feature
        # value 1 and everything else as 0, branching left/right
        # accordingly; at a leaf, run the confirmation Q&A and report.
        indent = " " * depth
        if tree_.feature[node] != _tree.TREE_UNDEFINED:
            name = feature_name[node]
            threshold = tree_.threshold[node]
            if name == disease_input:
                val = 1
            else:
                val = 0
            if val <= threshold:
                recurse(tree_.children_left[node], depth + 1)
            else:
                symptoms_present.append(name)
                recurse(tree_.children_right[node], depth + 1)
        else:
            present_disease = print_disease(tree_.value[node])
            # print( "You may have " + present_disease )
            # Symptoms associated with the predicted disease, looked up
            # from the per-disease max table built at module load.
            red_cols = reduced_data.columns
            symptoms_given = red_cols[reduced_data.loc[present_disease].values[0].nonzero()]
            # dis_list=list(symptoms_present)
            # if len(dis_list)!=0:
            #     print("symptoms present " + str(list(symptoms_present)))
            # print("symptoms given " + str(list(symptoms_given)) )
            # Confirm each associated symptom with a strict yes/no answer.
            print("Are you experiencing any ")
            symptoms_exp=[]
            for syms in list(symptoms_given):
                inp=""
                print(syms,"? : ",end='')
                while True:
                    inp=input("")
                    if(inp=="yes" or inp=="no"):
                        break
                    else:
                        print("provide proper answers i.e. (yes/no) : ",end="")
                if(inp=="yes"):
                    symptoms_exp.append(syms)
            # Cross-check with a second model trained on the full data.
            second_prediction=sec_predict(symptoms_exp)
            # print(second_prediction)
            calc_condition(symptoms_exp,num_days)
            if(present_disease[0]==second_prediction[0]):
                print("You may have ", present_disease[0])
                print(description_list[present_disease[0]])
                # readn(f"You may have {present_disease[0]}")
                # readn(f"{description_list[present_disease[0]]}")
            else:
                print("You may have ", present_disease[0], "or ", second_prediction[0])
                print(description_list[present_disease[0]])
                print(description_list[second_prediction[0]])
            # print(description_list[present_disease[0]])
            precution_list=precautionDictionary[present_disease[0]]
            print("Take following measures : ")
            for i,j in enumerate(precution_list):
                print(i+1,")",j)
            # confidence_level = (1.0*len(symptoms_present))/len(symptoms_given)
            # print("confidence level is " + str(confidence_level))
    recurse(0, 1)
# Entry sequence: load the lookup tables from the CSV files, greet the
# user, then run the interactive decision-tree diagnosis.
getSeverityDict()
getDescription()
getprecautionDict()
getInfo()
tree_to_code(clf,cols)
| 6,449 | 0 | 227 |
08458448b47cb2ecf2cf30267d26d24b16450023 | 475 | py | Python | sandbox/EF/download_OCCA.py | geoffstanley/neutralocean | 5e93c9732d3a64bf4c5dcb81a6d2f47839b0c6f7 | [
"MIT"
] | 10 | 2022-03-03T16:00:01.000Z | 2022-03-14T18:51:08.000Z | sandbox/EF/download_OCCA.py | geoffstanley/neutralocean | 5e93c9732d3a64bf4c5dcb81a6d2f47839b0c6f7 | [
"MIT"
] | null | null | null | sandbox/EF/download_OCCA.py | geoffstanley/neutralocean | 5e93c9732d3a64bf4c5dcb81a6d2f47839b0c6f7 | [
"MIT"
] | null | null | null | import os
import urllib.request
# Destination directory for the OCCA annual-climatology files.
PATH_OCCA = os.path.expanduser('~/work/data/OCCA/')
os.makedirs(PATH_OCCA, exist_ok=True)
# Variables to fetch -- presumably temperature, salinity, hydrostatic
# pressure potential, and sea-surface height per MITgcm/OCCA naming;
# TODO confirm against the OCCA documentation.
varnames = "theta salt phihyd etan".split()
# ECCO FTP directory holding the OCCA 1x1-degree 2004-06 annual means.
ftp = "ftp://mit.ecco-group.org/ecco_for_las/OCCA_1x1_v2/2004-6/annual/"
for name in varnames:
    fname = f"DD{name}.0406annclim.nc"
    url = f"{ftp}{fname}"
    dest = f"{PATH_OCCA}{fname}"
    print("retrieving:", url, dest)
    # urlretrieve returns (local_filename, headers).
    ret = urllib.request.urlretrieve(url, dest)
    print("returned:", ret)
| 26.388889 | 72 | 0.688421 | import os
import urllib.request
PATH_OCCA = os.path.expanduser('~/work/data/OCCA/')
os.makedirs(PATH_OCCA, exist_ok=True)
varnames = "theta salt phihyd etan".split()
ftp = "ftp://mit.ecco-group.org/ecco_for_las/OCCA_1x1_v2/2004-6/annual/"
for name in varnames:
fname = f"DD{name}.0406annclim.nc"
url = f"{ftp}{fname}"
dest = f"{PATH_OCCA}{fname}"
print("retrieving:", url, dest)
ret = urllib.request.urlretrieve(url, dest)
print("returned:", ret)
| 0 | 0 | 0 |
08f20087349304a8ce366110393fb096e6a7e124 | 326 | py | Python | mpopt/benchmarks/cec2020/setup.py | cil-lab/fwaopt | 16e2264ee20ccdbd15f2ba067e6466b283b71421 | [
"Xnet",
"X11",
"RSA-MD"
] | 7 | 2020-12-09T09:59:24.000Z | 2022-02-02T01:53:11.000Z | mpopt/benchmark/cec2020/setup.py | Aor-Li/MPOPT | b418a3fa519146cc97fb8f29d454e27cca20ef4b | [
"Xnet",
"RSA-MD",
"X11"
] | null | null | null | mpopt/benchmark/cec2020/setup.py | Aor-Li/MPOPT | b418a3fa519146cc97fb8f29d454e27cca20ef4b | [
"Xnet",
"RSA-MD",
"X11"
] | 5 | 2020-12-10T02:30:28.000Z | 2021-06-17T05:51:07.000Z | from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
# Cython extension module "cec20": compiles cec20.pyx and links against
# the native cec20 library, with both the library and its headers
# expected under ./lib.
cec20_extension = Extension(
    name="cec20",
    sources=["cec20.pyx"],
    libraries=["cec20"],
    library_dirs=["lib"],
    include_dirs=["lib"],
)
setup(name="cec20", ext_modules=cythonize([cec20_extension]))
| 25.076923 | 61 | 0.723926 | from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
cec20_extension = Extension(
name="cec20",
sources=["cec20.pyx"],
libraries=["cec20"],
library_dirs=["lib"],
include_dirs=["lib"],
)
setup(name="cec20", ext_modules=cythonize([cec20_extension]))
| 0 | 0 | 0 |
245ca0d107e76b617d98e4608a4cda9d7e254c51 | 214 | py | Python | src/workstation/__init__.py | militu/workstation-cli | 8f69f87942e445f44e2101d2c1fee482c28ec38a | [
"MIT"
] | null | null | null | src/workstation/__init__.py | militu/workstation-cli | 8f69f87942e445f44e2101d2c1fee482c28ec38a | [
"MIT"
] | null | null | null | src/workstation/__init__.py | militu/workstation-cli | 8f69f87942e445f44e2101d2c1fee482c28ec38a | [
"MIT"
] | null | null | null | import importlib.resources as pkg_resources
import os
# Supported operating-system identifiers.
OS_FEDORA = "fedora"
OS_UBUNTU = "ubuntu"
OS_MAC = "macintosh"

# Fall back to expanduser so importing this package does not raise
# KeyError when HOME is unset (the original read os.environ["HOME"]).
HOME = os.environ.get("HOME", os.path.expanduser("~"))

# importlib.resources.path() returns a *context manager*, not a path;
# the original str(pkg_resources.path(...)) therefore produced the
# object's repr rather than a filesystem path.  Enter the context to
# obtain the real path.  NOTE(review): for zip-installed packages this
# path is a temporary extraction that may not outlive the context --
# consider importlib.resources.files() on Python 3.9+.
with pkg_resources.path("workstation", "resources") as _resources_path:
    RESOURCES_PATH = str(_resources_path)
| 21.4 | 68 | 0.761682 | import importlib.resources as pkg_resources
import os
OS_FEDORA = "fedora"
OS_UBUNTU = "ubuntu"
OS_MAC = "macintosh"
HOME = os.environ["HOME"]
RESOURCES_PATH = str(pkg_resources.path("workstation", "resources"))
| 0 | 0 | 0 |
81ffad09620356aa57759a4346e5a4e8420f33f9 | 8,461 | py | Python | workflows/cloudify_system_workflows/snapshots/agents.py | Metaswitch/cloudify-manager | 760affb83facbe154c35c6ce20acb9432daa8bbd | [
"Apache-2.0"
] | null | null | null | workflows/cloudify_system_workflows/snapshots/agents.py | Metaswitch/cloudify-manager | 760affb83facbe154c35c6ce20acb9432daa8bbd | [
"Apache-2.0"
] | 1 | 2021-03-26T00:32:30.000Z | 2021-03-26T00:32:30.000Z | workflows/cloudify_system_workflows/snapshots/agents.py | vbohinc/cloudify-manager | 760affb83facbe154c35c6ce20acb9432daa8bbd | [
"Apache-2.0"
] | 1 | 2019-11-24T12:07:18.000Z | 2019-11-24T12:07:18.000Z | ########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
import json
from cloudify.workflows import ctx
from cloudify import broker_config
from cloudify.manager import get_rest_client
from cloudify.utils import get_broker_ssl_cert_path
from .utils import is_compute, get_tenants_list
from .constants import BROKER_DEFAULT_VHOST, V_4_1_0
| 43.389744 | 79 | 0.61813 | ########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
import json
from cloudify.workflows import ctx
from cloudify import broker_config
from cloudify.manager import get_rest_client
from cloudify.utils import get_broker_ssl_cert_path
from .utils import is_compute, get_tenants_list
from .constants import BROKER_DEFAULT_VHOST, V_4_1_0
class Agents(object):
    """Dump and restore Cloudify agent runtime data for snapshots.

    ``dump`` collects, per tenant and deployment, the broker connection
    info of every initialized compute node instance into ``agents.json``;
    ``restore`` patches that info back into each node instance's
    ``cloudify_agent`` runtime property.  (Python 2 code: iteritems /
    itervalues.)
    """
    _AGENTS_FILE = 'agents.json'
    def __init__(self):
        # Read the manager's broker SSL CA certificate once; it is
        # embedded into every dumped agent's broker_config.
        with open(get_broker_ssl_cert_path(), 'r') as f:
            self._broker_ssl_cert = f.read()
    def restore(self, tempdir, version):
        # Load agents.json from the snapshot directory.  Snapshots older
        # than 4.1.0 are keyed by deployment only; newer ones add a
        # tenant-name level on top.
        with open(os.path.join(tempdir, self._AGENTS_FILE)) as agents_file:
            agents = json.load(agents_file)
        if version < V_4_1_0:
            self._insert_agents_data(agents)
            return
        for tenant_name, deployments in agents.iteritems():
            self._insert_agents_data(agents[tenant_name], tenant_name)
    def dump(self, tempdir, manager_version):
        # Build {tenant: {deployment_id: {node_id: {instance_id: info}}}}
        # and write it to agents.json under tempdir.
        self._manager_version = manager_version
        result = {}
        for tenant_name in get_tenants_list():
            result[tenant_name] = {}
            tenant_client = get_rest_client(tenant_name)
            tenant_deployments = tenant_client.deployments.list(
                _include=['id'],
                _get_all_results=True
            )
            for deployment in tenant_deployments:
                result[tenant_name][deployment.id] = \
                    self._get_deployment_result(tenant_client, deployment.id)
        self._dump_result_to_file(tempdir, result)
    def _dump_result_to_file(self, tempdir, result):
        # Serialize the collected agent info as JSON into tempdir.
        agents_file_path = os.path.join(tempdir, self._AGENTS_FILE)
        with open(agents_file_path, 'w') as out:
            out.write(json.dumps(result))
    def _get_deployment_result(self, client, deployment_id):
        # Collect agent info for every *compute* node of one deployment.
        deployment_result = {}
        nodes_list = client.nodes.list(deployment_id=deployment_id,
                                       _include=['id', 'type_hierarchy'],
                                       _get_all_results=True)
        for node in nodes_list:
            if is_compute(node):
                deployment_result[node.id] = self._get_node_result(
                    client, deployment_id, node.id)
        return deployment_result
    def _get_node_result(self, client, deployment_id, node_id):
        # Collect agent info for each instance of one node.
        node_result = {}
        for node_instance in client.node_instances.list(
                deployment_id=deployment_id, node_name=node_id):
            # Only patch agent config for nodes that have been initialized;
            # uninitialized nodes don't have an agent config yet in their
            # runtime properties
            if node_instance.state == 'uninitialized':
                continue
            node_result[node_instance.id] = self._get_node_instance_result(
                node_instance)
        return node_result
    def _get_node_instance_result(self, node_instance):
        """
        Fill in the broker config info from the cloudify_agent dict, using
        the info from the bootstrap context as the fallback defaults
        """
        agent = node_instance.runtime_properties.get('cloudify_agent', {})
        tenant = agent.get('rest_tenant', {})
        broker_conf = {
            'broker_ip': agent.get('broker_ip', broker_config.broker_hostname),
            'broker_ssl_cert': self._broker_ssl_cert,
            'broker_ssl_enabled': True,
            'broker_user': tenant.get('rabbitmq_username',
                                      broker_config.broker_username),
            'broker_pass': tenant.get('rabbitmq_password',
                                      broker_config.broker_password),
            'broker_vhost': tenant.get('rabbitmq_vhost',
                                       broker_config.broker_vhost)
        }
        return {
            'version': str(self._manager_version),
            'broker_config': broker_conf
        }
    def _insert_agents_data(self, agents, tenant_name=None):
        # Restore each deployment's agents; a failure for one deployment
        # is logged and does not abort the rest of the restore.
        for deployment_id, nodes in agents.iteritems():
            try:
                self._create_agent(nodes, tenant_name)
            except Exception:
                ctx.logger.warning(
                    'Failed restoring agents for deployment `{0}` in tenant '
                    '`{1}`'.format(deployment_id, tenant_name),
                    exc_info=True)
    def _create_rest_tenant(self, old_agent, broker_config, tenant_name):
        # Normalize rest_tenant: newer snapshots store a dict; older ones
        # store just the tenant name, so rebuild the dict from the broker
        # config in that case.
        old_rest_tenant = old_agent.get('rest_tenant', tenant_name)
        if isinstance(old_rest_tenant, dict):
            return old_rest_tenant
        return {
            'rabbitmq_vhost': broker_config['broker_vhost'],
            'rabbitmq_username': broker_config['broker_user'],
            'rabbitmq_password': broker_config['broker_pass'],
            'name': old_rest_tenant
        }
    @classmethod
    def _get_tenant_name(cls, node_instance_id):
        """
        When restoring a snapshot from versions 4.0.0/4.0.1 the tenant name is
        not defined and the only way to `guess` it is by finding the
        node_instance from the agents.json file in the DB and checking its
        tenant.
        :param node_instance_id: a node instance from the agents.json file
        :return: the tenant of the given node instance

        Note: implicitly returns None if the instance is not found.
        """
        client = get_rest_client()
        node_instances = client.node_instances.list(_all_tenants=True).items
        for node_instance in node_instances:
            if node_instance['id'] == node_instance_id:
                return node_instance['tenant_name']
    def _create_agent(self, nodes, tenant_name):
        # Merge the dumped broker/agent info into each node instance's
        # cloudify_agent runtime property and push the update to the
        # manager.  The REST client is created lazily once the tenant for
        # this batch is known.
        client = None
        for node_instances in nodes.itervalues():
            for node_instance_id, agent in node_instances.iteritems():
                broker_config = self._get_broker_config(agent)
                tenant_name = tenant_name or self._get_tenant_name(
                    node_instance_id)
                client = client or get_rest_client(tenant_name)
                node_instance = client.node_instances.get(node_instance_id)
                runtime_properties = node_instance.runtime_properties
                old_agent = runtime_properties.get('cloudify_agent', {})
                # Fall back to the pre-3.3 manager_ip field when the dump
                # carried no broker IP.
                if not broker_config.get('broker_ip'):
                    broker_config['broker_ip'] = \
                        old_agent.get('manager_ip', '')
                broker_config['broker_vhost'] = \
                    broker_config.get('broker_vhost', BROKER_DEFAULT_VHOST)
                agent['rest_tenant'] = self._create_rest_tenant(
                    old_agent, broker_config, tenant_name)
                agent['broker_config'] = broker_config
                old_agent.update(agent)
                runtime_properties['cloudify_agent'] = old_agent
                # Results of agent validation on old manager.
                # Might be incorrect for new manager.
                runtime_properties.pop('agent_status', None)
                client.node_instances.update(
                    node_instance_id=node_instance_id,
                    runtime_properties=runtime_properties,
                    version=node_instance.version
                )
    @staticmethod
    def _get_broker_config(agent):
        # We need to retrieve broker_config:
        # 3.3.1 and later
        if 'broker_config' in agent:
            broker_config = agent['broker_config']
        # 3.3 and earlier
        else:
            broker_config = {}
            for k in ['broker_user', 'broker_pass', 'broker_ip',
                      'broker_ssl_enabled', 'broker_ssl_cert']:
                broker_config[k] = agent.pop(k)
        # AMQPS vs AMQP port depending on whether SSL is enabled.
        if broker_config['broker_ssl_enabled']:
            broker_config['broker_port'] = '5671'
        else:
            broker_config['broker_port'] = '5672'
        return broker_config
| 5,400 | 2,101 | 23 |
17b70aee45d4cd9a06cd51f5f9694453400d9b44 | 45,684 | py | Python | sandbox/poly.py | bollu/polymage | 517657142cc3ae74e9daff3b41a0257d6a4ce2b6 | [
"Apache-2.0"
] | 10 | 2016-07-22T06:53:11.000Z | 2021-02-19T06:22:00.000Z | sandbox/poly.py | bollu/polymage | 517657142cc3ae74e9daff3b41a0257d6a4ce2b6 | [
"Apache-2.0"
] | null | null | null | sandbox/poly.py | bollu/polymage | 517657142cc3ae74e9daff3b41a0257d6a4ce2b6 | [
"Apache-2.0"
] | 2 | 2017-11-21T20:29:36.000Z | 2021-05-21T01:52:05.000Z | #
# Copyright 2014-2016 Vinay Vasista, Ravi Teja Mullapudi, Uday Bondhugula,
# and others from Multicore Computing Lab, Department of Computer Science
# and Automation, Indian Institute of Science
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# poly.py : Polyhedral representation of pipeline functions.
#
from __future__ import absolute_import, division, print_function
import math
import time
import islpy as isl
from constructs import *
from expression import *
from utils import *
import pipe
import align_scale as aln_scl
# Static method 'alloc' for isl Id does not allow the user to be
# not None, as of now. We need an exclusive dictionary to map the
# users of an Id to that Id object.
isl_id_user_map = {}
class PolyRep(object):
""" The PolyRep class is the polyhedral representation of a
group. It gives piece-wise domain and schedule for each compute
object in the group. Polyhedral transformations modify the
piece-wise domains as well as the schedules.
"""
| 42.26087 | 79 | 0.54205 | #
# Copyright 2014-2016 Vinay Vasista, Ravi Teja Mullapudi, Uday Bondhugula,
# and others from Multicore Computing Lab, Department of Computer Science
# and Automation, Indian Institute of Science
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# poly.py : Polyhedral representation of pipeline functions.
#
from __future__ import absolute_import, division, print_function
import math
import time
import islpy as isl
from constructs import *
from expression import *
from utils import *
import pipe
import align_scale as aln_scl
# Static method 'alloc' for isl Id does not allow the user to be
# not None, as of now. We need an exclusive dictionary to map the
# users of an Id to that Id object.
isl_id_user_map = {}
def isl_set_id_user(id_, user):
    """Record `user` as the owner of the isl Id `id_`.

    Works around isl's Id.alloc not accepting a non-None user pointer by
    keeping the association in the module-level isl_id_user_map.
    """
    isl_id_user_map[id_] = user
def isl_get_id_user(id_):
    # Look up the user object previously registered for the isl Id `id_`
    # via isl_set_id_user (raises KeyError if never registered).
    return isl_id_user_map[id_]
def isl_alloc_id_for(ctx, name, user):
    """Allocate an isl Id named '<name>_<id(user)>' in the given context.

    The object id of `user` is appended to make the name unique per user;
    the Id's own user pointer is left as None (see isl_set_id_user).
    """
    unique_name = "%s_%s" % (name, str(id(user)))
    return isl.Id.alloc(ctx, unique_name, None)
def optimize_schedule(part_scheds, dependencies):
    # Placeholder, intentionally unimplemented.
    # The pluto optimizer can be used to optimize the schedule for
    # comparision.
    pass
def add_constraints_from_list(obj, local_space, constraint_list,
                              constraint_alloc):
    # Add each constraint (a dict mapping ('param'|'in'|'out'|'constant',
    # dim) -> coefficient) to the isl object `obj`.  `constraint_alloc`
    # selects equality vs inequality allocation.  Fractional coefficients
    # are first cleared by scaling the whole constraint to integers.
    for constr in constraint_list:
        c = constraint_alloc(local_space)
        # find the normalization factor
        m = 1
        for coeff in constr:
            if isinstance(constr[coeff], Fraction):
                # lcm accumulation: m <- lcm(m, denominator)
                den = int(gcd(abs(constr[coeff].denominator), m))
                m = (abs(constr[coeff].denominator) * m)//den
        assert m.denominator == 1
        m = m.numerator
        # normalize
        for coeff in constr:
            if isinstance(constr[coeff], Fraction):
                constr[coeff] = m * constr[coeff]
                assert constr[coeff].denominator == 1
                constr[coeff] = int(constr[coeff].numerator)
            else:
                constr[coeff] = int(m * constr[coeff])
        # Install each coefficient on the isl constraint; dimensions may
        # be given by name (resolved here) or by index.
        for coeff in constr:
            dim = coeff[1]
            try:
                if coeff[0] == 'param':
                    if (type(dim) == str):
                        dim = \
                            obj.find_dim_by_name(isl._isl.dim_type.param, dim)
                    c = c.set_coefficient_val(isl._isl.dim_type.param,
                                              dim, constr[coeff])
                elif coeff[0] == 'in':
                    if (type(dim) == str):
                        dim = obj.find_dim_by_name(isl._isl.dim_type.in_, dim)
                    c = c.set_coefficient_val(isl._isl.dim_type.in_,
                                              dim, constr[coeff])
                elif coeff[0] == 'out':
                    if (type(dim) == str):
                        dim = obj.find_dim_by_name(isl._isl.dim_type.out, dim)
                    c = c.set_coefficient_val(isl._isl.dim_type.out,
                                              dim, constr[coeff])
                elif coeff[0] == 'constant':
                    c = c.set_constant_val(constr[coeff])
                else:
                    assert False
            except isl.Error:
                # Ignore this constraint conjunct since the referred dimension
                # is not scheduled in the obj. This happens when we try to add
                # constraint for a dimension that is not at all used by a part.
                # FIXME: isl's find_dim_by_name throws exception on not finding
                # any scheduled dimension. It's better to replace the exception
                # handling with an isl function, if any, to test for the
                # existence of a dimension in that part.
                pass
        obj = obj.add_constraint(c)
    return obj
def add_constraints(obj, ineqs, eqs):
    # Add inequality and equality constraints (coefficient dicts, see
    # add_constraints_from_list) to an isl Map/Set/BasicMap/BasicSet,
    # using each basic element's local space.
    def add_constraints_for_element(obj, local_space, ineqs, eqs):
        # Apply both constraint lists against one local space.
        obj = add_constraints_from_list(obj, local_space, ineqs,
                                        isl.Constraint.inequality_alloc)
        obj = add_constraints_from_list(obj, local_space, eqs,
                                        isl.Constraint.equality_alloc)
        return obj
    # NOTE(review): `space` is computed but never used.
    space = obj.get_space()
    if (isinstance(obj, isl.Map)):
        for bmap in obj.get_basic_maps():
            local_space = bmap.get_local_space()
            obj = add_constraints_for_element(obj, local_space, ineqs, eqs)
    elif (isinstance(obj, isl.Set)):
        for bset in obj.get_basic_sets():
            local_space = bset.get_local_space()
            obj = add_constraints_for_element(obj, local_space, ineqs, eqs)
    elif (isinstance(obj, isl.BasicSet) or
          isinstance(obj, isl.BasicMap)):
        local_space = obj.get_local_space()
        obj = add_constraints_for_element(obj, local_space, ineqs, eqs)
    else:
        assert False
    return obj
def extract_value_dependence(part, ref, ref_poly_dom):
    # Build the value-level dependence relation between a poly part and
    # one reference it makes: a map from the part's schedule domain to
    # the accessed region of the referenced object's domain, constrained
    # by the reference's affine index expressions.
    # Dependencies are calculated between values. There is no storage
    # mapping done yet.
    assert(part.sched)
    deps = []
    # Start from the whole domain of the referenced object...
    access_region = isl.BasicSet.universe(ref_poly_dom.dom_set.get_space())
    part_dom = \
        part.sched.domain().align_params(ref_poly_dom.dom_set.get_space())
    access_region = access_region.align_params(part_dom.get_space())
    rel = isl.BasicMap.from_domain_and_range(part_dom, access_region)
    dim_out = rel.dim(isl._isl.dim_type.out)
    source_dims = [ ('out', i) for i in range(0, dim_out)]
    num_args = len(ref.arguments)
    # ...then constrain each output dimension with the corresponding
    # affine index expression: out_i == affine(arg_i).
    for i in range(0, num_args):
        arg = ref.arguments[i]
        # If the argument is not affine the dependence reflects that
        # the computation may depend on any value of the referenced object
        if (isAffine(arg)):
            coeff = get_affine_var_and_param_coeff(arg)
            coeff = map_coeff_to_dim(coeff)
            coeff[('constant', 0)] = get_constant_from_expr(arg, affine=True)
            coeff[source_dims[i]] = -1
            rel = add_constraints(rel, [], [coeff])
    if not rel.is_empty():
        deps.append(PolyDep(ref.objectRef, part.comp.func, rel))
    return deps
class PolyPart(object):
def __init__(self, _sched, _expr, _pred, _comp,
_align, _scale, _level_no, _liveout = True):
self.sched = _sched
self.expr = _expr
self.pred = _pred
assert isinstance(_comp, pipe.ComputeObject)
self.comp = _comp
self.func = self.comp.func
# Dependencies between values of computation objects
self.deps = []
# References made by self
self._refs = self.collect_part_refs()
# self dependence
self._self_dep = self.check_self_dep()
# Mapping between the input variables to the corresponding
# schedule dimension. A full affine schedule will need a
# transformation matrix. Currently we only shuffle the
# dimension order apart from tiling so a simple dimension
# alignment vector suffices. This has to be changed to
# handle more general cases later.
self._align = _align
# Scaling factors for each schedule dimension
self._scale = _scale
# Default alignment and scaling factors are set while
# constructing the polypart. These are changed by the
# alignment and loop scaling passes. Both these passer
# attempt to improve locality and uniformize dependencies.
self._level_no = _level_no
# tile shape, size, coordinate info
self.dim_tile_info = {}
# maps tiled dimensions to their respective scratchpad sizes
self.dim_scratch_size = {}
# dimensions marked as parallelizable/vectorizable
self.parallel_sched_dims = []
self.vector_sched_dim = []
# liveness in the group containing the part
self._is_liveout = _liveout
@property
def align(self):
return list(self._align)
@property
def scale(self):
return list(self._scale)
@property
def refs(self):
return list(self._refs)
@property
def is_self_dependent(self):
return self._self_dep
@property
def is_liveout(self):
return self._is_liveout
@property
def level(self):
return self._level_no
def set_align(self, align):
self._align = [i for i in align]
return
def set_scale(self, _scale):
self._scale = [i for i in _scale]
return
def is_align_set(self):
return self._align != [] and self._align != None
def is_scale_set(self):
return self._scale != [] and self._scale != None
def check_self_dep(self):
obj_refs = [ ref.objectRef for ref in self.refs \
if ref.objectRef == self.func ]
if len(obj_refs) > 0:
return True
return False
def get_size(self, param_estimates):
# returns the size of the computation that contains this poly part
size = None
domain = self.func.domain
if isinstance(self.func, Reduction):
domain = self.func.reductionDomain
for interval in domain:
subs_size = get_dim_size(interval, param_estimates)
if is_constant_expr(subs_size):
if size is not None:
size = size * get_constant_from_expr(subs_size)
else:
size = get_constant_from_expr(subs_size)
else:
size = '*'
break
assert size is not None
return size
def collect_part_refs(self):
refs = self.expr.collect(Reference)
if (self.pred):
refs += self.pred.collect(Reference)
return refs
def compute_liveness(self):
self._is_liveout = self.comp.is_liveout
return
def set_liveness(self, _is_liveout):
self._is_liveout = _is_liveout
def compute_dependence_vector(self, parent_part,
ref, scale_map = None):
def get_scale(s_map, p, i):
if s_map is not None:
return s_map[p][i]
return p.scale[i]
num_args = len(ref.arguments)
dim_out = parent_part.sched.dim(isl._isl.dim_type.out)
dep_vec = [ NULL for i in range(0, dim_out) ]
if isinstance(parent_part.func, Reduction):
for i in range(1, dim_out):
dep_vec[i] = '*'
dep_vec[0] = self.level - parent_part.level
return (dep_vec, parent_part.level)
# else
for i in range(0, num_args):
arg = ref.arguments[i]
pvar_sched_dim = parent_part.align[i]
if (isAffine(arg)):
dom_dim_coeff = \
get_domain_dim_coeffs(self.sched, arg)
param_coeff = \
get_param_coeffs(self.sched, arg)
# Parameter coefficents can also be considered to
# generate parametric shifts. Yet to be seen.
# Indexed with multiple variables.
if (len(dom_dim_coeff) > 1 or \
(len(dom_dim_coeff) == 1 and len(param_coeff) >=1)):
# Although there are multiple coefficients, if there is
# only one variable coefficient and other parametric
# coefficients, uniformization can be done with parametric
# shifts. Full affine scheduling might be able to find a
# way to uniformize dependencies. This has to be further
# explored.
#assert False
dep_vec[pvar_sched_dim] = '*'
# Indexed with a single variable. This can either be an uniform
# access or can be uniformized with scaling when possible
elif len(dom_dim_coeff) == 1 and len(param_coeff) == 0:
dim = list(dom_dim_coeff.keys())[0]
cvar_sched_dim = self.align[dim]
pscale = get_scale(scale_map, parent_part, i)
cscale = get_scale(scale_map, self, dim)
assert Fraction(pscale).denominator == 1
assert Fraction(cscale).denominator == 1
if ((cvar_sched_dim == pvar_sched_dim) and \
(dom_dim_coeff[dim] * pscale == cscale)):
dep_vec[pvar_sched_dim] = \
-get_constant_from_expr(arg, affine=True)
access_scale = pscale
if dep_vec[pvar_sched_dim] > 0:
dep_vec[pvar_sched_dim] = \
(int(math.ceil(dep_vec[pvar_sched_dim] *
access_scale)))
else:
dep_vec[pvar_sched_dim] = \
(int(math.floor(dep_vec[pvar_sched_dim] *
access_scale)))
else:
dep_vec[pvar_sched_dim] = '*'
elif len(dom_dim_coeff) == 0 and len(param_coeff) > 0:
#assert False
dep_vec[parentVarSchedDim] = '*'
# Only parametric or Constant access. The schedule in this
# dimension can be shifted to this point to uniformize the
# dependence
# In case the dimension in the parent has a constant size
# an upper and lower bound on the dependence vector can
# be computed.
elif len(dom_dim_coeff) + len(param_coeff) == 0:
# offsets should be set here.
access_const = get_constant_from_expr(arg, affine = True)
p_lower_bound = parent_part.sched.domain().dim_min(i)
p_upper_bound = parent_part.sched.domain().dim_max(i)
if ((p_lower_bound.is_cst() and
p_lower_bound.n_piece() == 1) and
(p_upper_bound.is_cst() and
p_upper_bound.n_piece() == 1)):
pscale = get_scale(scale_map, parent_part, i)
low_vec_aff = (p_lower_bound.get_pieces())[0][1]
val = low_vec_aff.get_constant_val()
assert(val.get_den_val() == 1)
low_vec = \
int(math.floor((access_const - val.get_num_si()) *
pscale))
high_vec_aff = (p_upper_bound.get_pieces())[0][1]
val = high_vec_aff.get_constant_val()
assert(val.get_den_val() == 1)
high_vec = \
int(math.ceil((access_const - val.get_num_si()) *
pscale))
if high_vec == low_vec:
dep_vec[pvar_sched_dim] = high_vec
else:
# Unpack dependence vectors when this hits
#assert False
#dep_vec[pvar_sched_dim] = (low_vec, high_vec)
dep_vec[pvar_sched_dim] = '*'
else:
dep_vec[pvar_sched_dim] = '*'
else:
assert False
else: # if not isAffine(arg)
#assert(False)
dep_vec[pvar_sched_dim] = '*'
assert dep_vec[0] == NULL
dep_vec[0] = self.level - parent_part.level
for i in range(0, dim_out):
if (dep_vec[i] == NULL):
dep_vec[i] = 0
#for i in range(0, dim_out):
# if (dep_vec[i] == NULL):
# dep_vec[i] = '*'
# p_lower_bound = parent_part.sched.range().dim_min(i)
# p_upper_bound = parent_part.sched.range().dim_max(i)
# c_lower_bound = self.sched.range().dim_min(i)
# c_upper_bound = self.sched.range().dim_max(i)
# if (c_lower_bound.is_equal(c_upper_bound) and
# p_lower_bound.is_equal(p_upper_bound)):
# dim_diff = c_upper_bound.sub(p_upper_bound)
# if (dim_diff.is_cst() and dim_diff.n_piece() == 1):
# aff = (dim_diff.get_pieces())[0][1]
# val = aff.get_constant_val()
# dep_vec[i] = (val.get_num_si())/(val.get_den_val())
return (dep_vec, parent_part.level)
def __str__(self):
partStr = "Schedule: " + self.sched.__str__() + '\n'\
"Expression: " + self.expr.__str__() + '\n'\
"Predicate: " + self.pred.__str__() + '\n'
depstr = ""
for dep in self.deps:
depstr = depstr + dep.__str__() + '\n'
return partStr + depstr
class PolyDomain(object):
    """Pairs a compute object with its ISL iteration-domain set."""
    def __init__(self, _dom_set, _comp):
        self._dom_set = _dom_set
        assert isinstance(_comp, pipe.ComputeObject)
        self._comp = _comp

    @property
    def dom_set(self):
        """The ISL set describing the iteration domain."""
        return self._dom_set

    @property
    def comp(self):
        """The compute object this domain belongs to."""
        return self._comp

    def set_tuple_id(self, _id):
        """Tag the underlying ISL set with the given tuple id."""
        self._dom_set.set_tuple_id(_id)

    def __str__(self):
        return "Domain: %s" % (self._dom_set,)
class PolyDep(object):
    """A producer-to-consumer dependence together with its ISL relation."""
    def __init__(self, _producer, _consumer, _rel):
        self._producer = _producer
        self._consumer = _consumer
        self._rel = _rel

    @property
    def producer_obj(self):
        """The producing compute object."""
        return self._producer

    @property
    def consumer_obj(self):
        """The consuming compute object."""
        return self._consumer

    @property
    def rel(self):
        """The ISL relation describing the dependence."""
        return self._rel

    def __str__(self):
        return str(self._rel)
class PolyRep(object):
    """ The PolyRep class is the polyhedral representation of a
    group. It gives piece-wise domain and schedule for each compute
    object in the group. Polyhedral transformations modify the
    piece-wise domains as well as the schedules.
    """
    def __init__(self, _ctx, _group, _outputs,
                 _param_constraints):
        # _ctx: the ISL context; _group: the pipeline group being
        # represented; _outputs: group outputs; _param_constraints:
        # user-supplied constraints on pipeline parameters.
        assert isinstance(_group, pipe.Group)
        self.group = _group
        self.outputs = _outputs
        self.param_constraints = _param_constraints
        self.ctx = _ctx
        # comp -> [PolyPart]; comp -> PolyDomain
        self.poly_parts = {}
        self.poly_doms = {}
        # ISL ASTs produced by generate_code()
        self.polyast = []
        # counters used to mint fresh schedule-variable / function names
        self._var_count = 0
        self._func_count = 0
        # TODO: move the following outside __init__()
        # For now, let this be. Compilation optimizations can come later.
        self.extract_polyrep_from_group(_param_constraints)
    def extract_polyrep_from_group(self, param_constraints):
        """Build domains, schedule spaces and poly parts for every compute
        object in the group."""
        # dict: comp_obj -> level_no
        comp_map = self.group.get_ordered_comps
        num_objs = len(comp_map.items())
        # Compute the max dimensionality of the compute objects
        def max_dim(comps):
            dim = 0
            for comp in comps:
                if type(comp.func) == Reduction:
                    dim = max(dim, len(comp.func.reductionVariables))
                    dim = max(dim, len(comp.func.variables))
                elif type(comp.func) == Function or type(comp.func) == Image:
                    dim = max(dim, len(comp.func.variables))
            return dim
        dim = max_dim(comp_map)
        # Get all the parameters used in the group compute objects
        grp_params = []
        for comp in comp_map:
            grp_params = grp_params + comp.func.getObjects(Parameter)
        grp_params = list(set(grp_params))
        param_names = [param.name for param in grp_params]
        # Represent all the constraints specified on the parameters relevant
        # to the group.
        context_conds = \
            self.format_param_constraints(param_constraints, grp_params)
        # The [t] is for the stage dimension
        schedule_names = ['_t'] + \
                         [ self.getVarName() for i in range(0, dim) ]
        for comp in comp_map:
            if (type(comp.func) == Function or type(comp.func) == Image):
                self.extract_polyrep_from_function(comp, dim, schedule_names,
                                                   param_names, context_conds,
                                                   comp_map[comp]+1,
                                                   param_constraints)
            elif (type(comp.func) == Reduction):
                self.extract_polyrep_from_reduction(comp, dim, schedule_names,
                                                    param_names, context_conds,
                                                    comp_map[comp]+1,
                                                    param_constraints)
            else:
                assert False
    def format_param_constraints(self, param_constraints, grp_params):
        """Filter the parameter constraints down to affine, conjunct-free
        conditions that mention only this group's parameters."""
        context_conds = []
        grp_params_set = set(grp_params)
        for param_constr in param_constraints:
            # Only consider parameter constraints of parameters
            # given in params.
            params_in_constr = param_constr.collect(Parameter)
            context_add = set(params_in_constr).issubset(grp_params_set)
            # Only add the constraint if it is affine and has no conjunctions.
            # Handling conjunctions can be done but will require more care.
            if context_add and isAffine(param_constr):
                param_constr_conjunct = param_constr.split_to_conjuncts()
                if len(param_constr_conjunct) == 1:
                    context_conds.append(param_constr)
        return context_conds
    def extract_poly_dom_from_comp(self, comp, param_constraints):
        """Construct the PolyDomain (ISL set) of a compute object from its
        variable domain plus the relevant parameter constraints."""
        var_names = [ var.name for var in comp.func.variables ]
        dom_map_names = [ name +'\'' for name in var_names ]
        params = []
        for interval in comp.func.domain:
            params = params + interval.collect(Parameter)
        params = list(set(params))
        param_names = [ param.name for param in params ]
        space = isl.Space.create_from_names(self.ctx, in_ = var_names,
                                            out = dom_map_names,
                                            params = param_names)
        dom_map = isl.BasicMap.universe(space)
        # Adding the domain constraints
        [ineqs, eqs] = format_domain_constraints(comp.func.domain, var_names)
        dom_map = add_constraints(dom_map, ineqs, eqs)
        param_conds = self.format_param_constraints(param_constraints, params)
        [param_ineqs, param_eqs] = format_conjunct_constraints(param_conds)
        dom_map = add_constraints(dom_map, param_ineqs, param_eqs)
        poly_dom = PolyDomain(dom_map.domain(), comp)
        # Attach an ISL id so the domain can be recovered from ISL objects.
        id_ = isl_alloc_id_for(self.ctx, comp.func.name, poly_dom)
        poly_dom.set_tuple_id(id_)
        isl_set_id_user(id_, poly_dom)
        return poly_dom
    def extract_polyrep_from_function(self, comp, max_dim,
                                      schedule_names, param_names,
                                      context_conds, level_no,
                                      param_constraints):
        """Build the domain and the poly parts of a Function/Image comp."""
        self.poly_doms[comp] = \
            self.extract_poly_dom_from_comp(comp, param_constraints)
        sched_map = self.create_sched_space(comp.func.variables,
                                            comp.func.domain,
                                            schedule_names, param_names,
                                            context_conds)
        self.create_poly_parts_from_definition(comp, max_dim, sched_map,
                                               level_no, schedule_names,
                                               comp.func.domain)
    def extract_polyrep_from_reduction(self, comp, max_dim,
                                       schedule_names, param_names,
                                       context_conds, level_no,
                                       param_constraints):
        """Build the domain and poly parts of a Reduction comp: parts for
        the accumulation over the reduction domain, plus an initialization
        part over the result domain scheduled one level earlier."""
        self.poly_doms[comp] = \
            self.extract_poly_dom_from_comp(comp, param_constraints)
        sched_map = self.create_sched_space(comp.func.reductionVariables,
                                            comp.func.reductionDomain,
                                            schedule_names, param_names,
                                            context_conds)
        self.create_poly_parts_from_definition(comp, max_dim,
                                               sched_map, level_no,
                                               schedule_names,
                                               comp.func.reductionDomain)
        dom_map = self.create_sched_space(comp.func.variables,
                                          comp.func.domain,
                                          schedule_names, param_names,
                                          context_conds)
        # Initializing the reduction earlier than any other function
        self.create_poly_parts_from_default(comp, max_dim, dom_map, level_no,
                                            schedule_names)
    def create_sched_space(self, variables, domains,
                           schedule_names, param_names, context_conds):
        """Create the basic schedule map (domain -> schedule dims) with the
        domain and parameter constraints added."""
        # Variable names for referring to dimensions
        var_names = [ var.name for var in variables ]
        space = isl.Space.create_from_names(self.ctx, in_ = var_names,
                                            out = schedule_names,
                                            params = param_names)
        sched_map = isl.BasicMap.universe(space)
        # Adding the domain constraints
        [ineqs, eqs] = format_domain_constraints(domains, var_names)
        sched_map = add_constraints(sched_map, ineqs, eqs)
        # Adding the parameter constraints
        [param_ineqs, param_eqs] = format_conjunct_constraints(context_conds)
        sched_map = add_constraints(sched_map, param_ineqs, param_eqs)
        return sched_map
    def create_poly_parts_from_definition(self, comp, max_dim,
                                          sched_map, level_no,
                                          schedule_names, domain):
        """Create one or more poly parts per definition case of the comp.

        Affine case conditions are folded into the schedule's domain;
        non-affine conditions are kept as predicates on the part.
        """
        self.poly_parts[comp] = []
        for case in comp.func.defn:
            sched_m = sched_map.copy()
            # The basic schedule is an identity schedule appended with
            # a level dimension. The level dimension gives the ordering
            # of the compute objects within a group.
            align, scale = \
                aln_scl.default_align_and_scale(sched_m, max_dim, shift=True)
            if (isinstance(case, Case)):
                # Dealing with != and ||. != can be replaced with < || >.
                # and || splits the domain into two.
                split_conjuncts = case.condition.split_to_conjuncts()
                for conjunct in split_conjuncts:
                    # If the condition is non-affine it is stored as a
                    # predicate for the expression. An affine condition
                    # is added to the domain.
                    affine = True
                    for cond in conjunct:
                        affine = affine and \
                                 isAffine(cond.lhs) and isAffine(cond.rhs)
                    if(affine):
                        [conjunct_ineqs, conjunct_eqs] = \
                            format_conjunct_constraints(conjunct)
                        sched_m = add_constraints(sched_m,
                                                  conjunct_ineqs,
                                                  conjunct_eqs)
                        parts = self.make_poly_parts(sched_m, case.expression,
                                                     None, comp,
                                                     align, scale, level_no)
                        for part in parts:
                            self.poly_parts[comp].append(part)
                    else:
                        parts = self.make_poly_parts(sched_m, case.expression,
                                                     case.condition, comp,
                                                     align, scale, level_no)
                        for part in parts:
                            self.poly_parts[comp].append(part)
            else:
                assert(isinstance(case, AbstractExpression) or
                       isinstance(case, Reduce))
                parts = self.make_poly_parts(sched_m, case,
                                             None, comp,
                                             align, scale, level_no)
                # FIXME: Is a loop required here? make_poly_part
                # seems to return a list of one part
                for part in parts:
                    self.poly_parts[comp].append(part)
        # TODO adding a boundary padding and default to the function
        # will help DSL usability.
        # An attempt to subtract all the part domains to find the domain
        # where the default expression has to be applied.
        #sched_m = isl.BasicMap.identity(self.polyspace)
        #sched_m = add_constraints(sched, ineqs, eqs)
        # Adding stage identity constraint
        #level_coeff = {}
        #level_coeff[varDims[0]] = -1
        #level_coeff[('constant', 0)] = compObjs[comp]
        #sched_m = add_constraints(sched_m, [], [level_coeff])
        #sched_m = add_constraints(sched_m, param_ineqs, param_eqs)
        #for part in self.poly_parts[comp]:
        #    sched_m = sched_m.subtract_range(part.sched.range())
        #    if (sched_m.is_empty()):
        #        break
        #if(not sched_m.fast_is_empty()):
        #    bmap_list = []
        #    if (isinstance(sched_m, isl.BasicMap)):
        #        bmap_list.append(sched_m)
        #    else:
        #        sched_m.foreach_basic_map(bmap_list.append)
        #    for bmap in bmap_list:
        #        poly_part = PolyPart(bmap, comp.func.default, None, comp)
        #        id_ = isl_alloc_id_for(self.ctx, comp.func.name, poly_part)
        #        poly_part.sched = poly_part.sched.set_tuple_id(
        #                                      isl._isl.dim_type.in_, id_)
        #        isl_set_id_user(id_, poly_part)
        #        self.poly_parts[comp].append(poly_part)
    def create_poly_parts_from_default(self, comp, max_dim, sched_map,
                                       level_no, schedule_names):
        """Create the initialization part of a reduction from its default
        expression, scheduled one level before the accumulation parts."""
        sched_m = sched_map.copy()
        align, scale = \
            aln_scl.default_align_and_scale(sched_m, max_dim, shift=True)
        assert(isinstance(comp.func.default, AbstractExpression))
        poly_part = PolyPart(sched_m, comp.func.default,
                             None, comp,
                             align, scale, level_no-1)
        id_ = isl_alloc_id_for(self.ctx, comp.func.name, poly_part)
        poly_part.sched = \
            poly_part.sched.set_tuple_id(isl._isl.dim_type.in_, id_)
        isl_set_id_user(id_, poly_part)
        self.poly_parts[comp].append(poly_part)
    def make_poly_parts(self, sched_map, expr, pred, comp,
                        align, scale, level_no):
        """Create PolyParts for an expression, splitting selects guarded by
        a modulo-equality condition into separate true/false parts."""
        # Detect selects with modulo constraints and split into
        # multiple parts. This technique can also be applied to the
        # predicate but for now we focus on selects.
        poly_parts = []
        # This is very very temporary solution there should be a
        # better way of doing this. Only targetting conditions
        # of the form (affine)%constant == constant.
        broken_parts = []
        if isinstance(expr, Select):
            conjuncts = expr.condition.split_to_conjuncts()
            if len(conjuncts) == 1 and len(conjuncts[0]) == 1:
                cond = conjuncts[0][0]
                left_expr = cond.lhs
                right_expr = cond.rhs
                is_left_modulo = isAffine(left_expr, include_modulo=True) and \
                                 not isAffine(left_expr)
                is_right_constant = is_constant_expr(right_expr)
                break_select = False
                # check for 'affine % constant == constant'
                if is_left_modulo and is_right_constant and \
                   cond.conditional == '==' and \
                   isinstance(left_expr, AbstractBinaryOpNode)\
                   and left_expr.op == '%' and isAffine(left_expr.left)\
                   and is_constant_expr(left_expr.right):
                    break_select = True
                if break_select:
                    left_const = get_constant_from_expr(left_expr.left,
                                                        affine = True)
                    right_const = get_constant_from_expr(right_expr,
                                                         affine = True)
                    mod_const = get_constant_from_expr(left_expr.right,
                                                       affine = True)
                    left_coeff = get_affine_var_and_param_coeff(left_expr.left)
                    left_coeff = map_coeff_to_dim(left_coeff)
                    mul_name = '_Mul_'
                    rem_name = '_Rem_'
                    # true branch schedule: lhs - rhs == mod_const * k for
                    # an existentially quantified multiplier k.
                    true_sched = sched_map.copy()
                    dim_in = true_sched.dim(isl._isl.dim_type.in_)
                    true_sched = \
                        true_sched.insert_dims(isl._isl.dim_type.in_,
                                               dim_in, 1)
                    true_sched = \
                        true_sched.set_dim_name(isl._isl.dim_type.in_,
                                                dim_in, mul_name)
                    eqs = []
                    left_coeff[('constant', 0)] = left_const - right_const
                    left_coeff[('in', dim_in)] = -mod_const
                    eqs.append(left_coeff)
                    true_sched = add_constraints(true_sched, [], eqs)
                    true_sched = true_sched.project_out(isl._isl.dim_type.in_,
                                                        dim_in, 1)
                    broken_parts.append((true_sched, expr.true_expression))
                    # false branch schedule: a non-zero remainder
                    # 1 <= rem <= mod_const-1 exists.
                    false_sched = sched_map.copy()
                    dim_in = false_sched.dim(isl._isl.dim_type.in_)
                    false_sched = \
                        false_sched.insert_dims(isl._isl.dim_type.in_,
                                                dim_in, 2)
                    false_sched = \
                        false_sched.set_dim_name(isl._isl.dim_type.in_,
                                                 dim_in, mul_name)
                    false_sched = \
                        false_sched.set_dim_name(isl._isl.dim_type.in_,
                                                 dim_in+1, rem_name)
                    eqs = []
                    left_coeff[('constant', 0)] = left_const - right_const
                    left_coeff[('in', dim_in)] = -mod_const
                    left_coeff[('in', dim_in+1)] = -1
                    eqs.append(left_coeff)
                    ineqs = []
                    coeff = {}
                    coeff[('in', dim_in+1)] = 1
                    coeff[('constant', 0)] = -1
                    ineqs.append(coeff)
                    coeff = {}
                    coeff[('in', dim_in+1)] = -1
                    coeff[('constant', 0)] = mod_const-1
                    ineqs.append(coeff)
                    false_sched = add_constraints(false_sched, ineqs, eqs)
                    false_sched = \
                        false_sched.project_out(isl._isl.dim_type.in_,
                                                dim_in, 2)
                    broken_parts.append((false_sched, expr.false_expression))
        # Note the align and scale lists are cloned otherwise all the parts
        # will be sharing the same alignment and scaling
        if not broken_parts:
            poly_part = PolyPart(sched_map, expr, pred, comp,
                                 list(align), list(scale), level_no)
            # Create a user pointer, tuple name and add it to the map
            id_ = isl_alloc_id_for(self.ctx, comp.func.name, poly_part)
            poly_part.sched = poly_part.sched.set_tuple_id(
                                              isl._isl.dim_type.in_, id_)
            isl_set_id_user(id_, poly_part)
            poly_parts.append(poly_part)
        else:
            for bsched_map, bexpr in broken_parts:
                poly_part = PolyPart(bsched_map, bexpr, pred, comp,
                                     list(align), list(scale), level_no)
                # Create a user pointer, tuple name and add it to the map
                id_ = isl_alloc_id_for(self.ctx, comp.func.name, poly_part)
                poly_part.sched = poly_part.sched.set_tuple_id( \
                                              isl._isl.dim_type.in_, id_)
                isl_set_id_user(id_, poly_part)
                poly_parts.append(poly_part)
        return poly_parts
    def generate_code(self):
        """(Re)build the ISL ASTs for this group's parts."""
        self.polyast = []
        if self.poly_parts:
            self.build_ast()
    def build_ast(self):
        """Build one ISL AST from the union of all part schedules."""
        #astbld = isl.AstBuild.from_context( \
        #                isl.BasicSet("[C, R]->{: R>=1 and C>=1}", self.ctx))
        parts = []
        for plist in self.poly_parts.values():
            parts.extend(plist)
        # TODO figure out a way to create the correct parameter context
        # since the parameters for all the parts may not be the same
        astbld = isl.AstBuild.from_context(parts[0].sched.params())
        #astbld =  astbld.set_options(isl.UnionMap("{ }"))
        sched_map = None
        opt_map = None
        for part in parts:
            if sched_map is None:
                # initial map
                sched_map = isl.UnionMap.from_map(part.sched)
            else:
                part_map = isl.UnionMap.from_map(part.sched)
                sched_map = sched_map.union(part_map)
            # Build the (currently empty) per-statement AST options map.
            srange = part.sched.range()
            unroll_union_set = \
                isl.UnionSet.from_set(isl.Set("{:}", self.ctx))
            dom_union_set = \
                isl.UnionSet.universe(isl.UnionSet.from_set(srange))
            if opt_map is None:
                opt_map = isl.UnionMap.from_domain_and_range(dom_union_set, \
                                                             unroll_union_set)
            else:
                opt_map = opt_map.union( \
                            isl.UnionMap.from_domain_and_range( \
                                    dom_union_set, unroll_union_set) )
        astbld = astbld.set_options(opt_map)
        # All parts in the group will have the same schedule dimension
        # using the first part as the canonical one
        num_ids = parts[0].sched.dim(isl._isl.dim_type.out)
        ids = isl.IdList.alloc(self.ctx, num_ids)
        for i in range(0, num_ids):
            sched_name = parts[0].sched.get_dim_name(isl._isl.dim_type.out, i)
            id_ = isl.Id.alloc(self.ctx, sched_name, None)
            ids = ids.add(id_)
        astbld = astbld.set_iterators(ids)
        self.polyast.append(astbld.ast_from_schedule(sched_map))
    def getVarName(self):
        """Mint a fresh schedule-dimension name of the form '_i<N>'."""
        name = "_i" + str(self._var_count)
        self._var_count+=1
        return name
    def __str__(self):
        """All parts followed by the C-printed ASTs (when built)."""
        polystr = ""
        for comp in self.poly_parts:
            for part in self.poly_parts[comp]:
                polystr = polystr + part.__str__() + '\n'
        if (self.polyast != []):
            for ast in self.polyast:
                printer = isl.Printer.to_str(self.ctx)
                printer = printer.set_output_format(isl.format.C)
                printOpts = isl.AstPrintOptions.alloc(self.ctx)
                printer = ast.print_(printer, printOpts)
                aststr = printer.get_str()
                polystr = polystr + '\n' + aststr
        return polystr
def get_dim_size(interval, param_estimates):
    """Extent of *interval* with parameters substituted by their estimates."""
    estimates = {}
    for est in param_estimates:
        param = est[0]
        assert isinstance(param, Parameter)
        estimates[param] = Value.numericToValue(est[1])
    extent = interval.upperBound - interval.lowerBound + 1
    return substitute_vars(extent, estimates)
def get_domain_dim_coeffs(sched, arg):
    """Map schedule input-dim index -> coefficient for each Variable in arg.

    Returns an empty dict when *arg* is not affine.
    """
    dim_coeffs = {}
    if isAffine(arg):
        for term, value in get_affine_var_and_param_coeff(arg).items():
            if type(term) == Variable:
                dim = sched.find_dim_by_name(isl._isl.dim_type.in_,
                                             term.name)
                dim_coeffs[dim] = value
    return dim_coeffs
def get_param_coeffs(sched, arg):
    """Map schedule parameter-dim index -> coefficient for each Parameter
    appearing in *arg*.

    Returns an empty dict when *arg* is not affine.
    """
    param_coeff = {}
    if (isAffine(arg)):
        coeff = get_affine_var_and_param_coeff(arg)
        for item in coeff:
            if type(item) == Parameter:
                dim = sched.find_dim_by_name(isl._isl.dim_type.param,
                                             item.name)
                # BUG FIX: this was '==' (a no-op comparison that raised
                # KeyError on the empty dict); it must be an assignment.
                param_coeff[dim] = coeff[item]
    return param_coeff
def map_coeff_to_dim(coeff):
    """Rewrite the keys of *coeff* in place:

    Parameter p -> ('param', p.name); Variable v -> ('in', v.name).
    Keys of any other type are dropped. Returns the same (mutated) dict.
    """
    for key in list(coeff.keys()):
        value = coeff.pop(key)
        if isinstance(key, Parameter):
            coeff[('param', key.name)] = value
        elif isinstance(key, Variable):
            coeff[('in', key.name)] = value
    return coeff
def format_domain_constraints(domain, var_names):
    """Turn per-dimension intervals into '>= 0' coefficient maps.

    For each interval two inequalities are produced:
    var - lower_bound >= 0 and upper_bound - var >= 0.
    Returns [inequalities, equalities] (equalities are always empty here).
    """
    ineq_coeff = []
    eq_coeff = []
    for dim, interval in enumerate(domain):
        # lower bound: var - lb >= 0 (negate the bound's coefficients)
        lb_coeff = map_coeff_to_dim(
            get_affine_var_and_param_coeff(interval.lowerBound))
        lb_const = get_constant_from_expr(interval.lowerBound, affine = True)
        coeff = dict((name, -lb_coeff.get(name)) for name in lb_coeff)
        coeff[('constant', 0)] = -lb_const
        coeff[('in', var_names[dim])] = 1
        ineq_coeff.append(coeff)
        # upper bound: ub - var >= 0 (bound coefficients kept as-is)
        ub_coeff = map_coeff_to_dim(
            get_affine_var_and_param_coeff(interval.upperBound))
        ub_const = get_constant_from_expr(interval.upperBound, affine = True)
        coeff = ub_coeff
        coeff[('constant', 0)] = ub_const
        coeff[('in', var_names[dim])] = -1
        ineq_coeff.append(coeff)
    return [ineq_coeff, eq_coeff]
def format_conjunct_constraints(conjunct):
    """Turn a conjunction of conditions into coefficient maps.

    Each condition lhs OP rhs is normalized into '>= 0' form (for <, <=,
    >, >=) or an equality (for ==). Fractional coefficients are handled by
    biasing the constant with (d-1)/d, where d is the lcm of coefficient
    denominators, so that integer points are not lost on strict bounds.
    Returns [inequality_coeffs, equality_coeffs].
    """
    # TODO check if the condition is a conjunction
    # print([ cond.__str__() for cond in conjunct ])
    ineq_coeff = []
    eq_coeff = []
    for cond in conjunct:
        coeff = {}
        left_coeff = get_affine_var_and_param_coeff(cond.lhs)
        right_coeff = get_affine_var_and_param_coeff(cond.rhs)
        left_const = get_constant_from_expr(cond.lhs, affine = True)
        right_const = get_constant_from_expr(cond.rhs, affine = True)
        # Mapping from variable names to the corresponding dimension
        left_coeff = map_coeff_to_dim(left_coeff)
        right_coeff = map_coeff_to_dim(right_coeff)
        def constant_div_factor(const):
            # lcm of the denominators of all fractional coefficients
            m = 1
            for coeff in const:
                if isinstance(const[coeff], Fraction):
                    m = (abs(const[coeff].denominator) * m) // \
                        gcd(abs(const[coeff].denominator), m)
            # m is an int; ints expose .denominator == 1 / .numerator == m
            assert m.denominator == 1
            m = m.numerator
            return m
        # Normalizing >= format
        if (cond.conditional in ['<=','<']):
            coeff = dict( (n, -left_coeff.get(n, 0) + right_coeff.get(n, 0)) \
                          for n in set(left_coeff) | set(right_coeff) )
            d = constant_div_factor(coeff)
            coeff[('constant', 0)] = -left_const + right_const - \
                                     int(cond.conditional == '<') - \
                                     Fraction(d-1, d)
            ineq_coeff.append(coeff)
        elif(cond.conditional in ['>=','>']):
            coeff = dict( (n, left_coeff.get(n, 0) - right_coeff.get(n, 0)) \
                          for n in set(left_coeff) | set(right_coeff) )
            d = constant_div_factor(coeff)
            coeff[('constant', 0)] = left_const - right_const - \
                                     int(cond.conditional == '>') + \
                                     Fraction(d-1, d)
            ineq_coeff.append(coeff)
        else:
            # Weird
            assert(cond.conditional == '==')
            coeff = dict( (n, left_coeff.get(n, 0) - right_coeff.get(n, 0)) \
                          for n in set(left_coeff) | set(right_coeff) )
            coeff[('constant', 0)] = left_const - right_const
            eq_coeff.append(coeff)
    return [ineq_coeff, eq_coeff]
| 42,523 | 906 | 746 |
c27d370b1166e8e1a85a9272b1b37da857c1f4a4 | 1,627 | py | Python | 3rdparty/openmm/wrappers/python/tests/TestMetadynamics.py | merkys/MMB | 0531385b8367405e1188e31c3eef7aa4cc50170b | [
"MIT"
] | 5 | 2020-07-31T17:33:03.000Z | 2022-01-01T19:24:37.000Z | 3rdparty/openmm/wrappers/python/tests/TestMetadynamics.py | merkys/MMB | 0531385b8367405e1188e31c3eef7aa4cc50170b | [
"MIT"
] | 11 | 2020-06-16T05:05:42.000Z | 2022-03-30T09:59:14.000Z | 3rdparty/openmm/wrappers/python/tests/TestMetadynamics.py | merkys/MMB | 0531385b8367405e1188e31c3eef7aa4cc50170b | [
"MIT"
] | 9 | 2020-01-24T12:02:37.000Z | 2020-10-16T06:23:56.000Z | import unittest
from simtk.openmm import *
from simtk.openmm.app import *
from simtk.unit import *
class TestMetadynamics(unittest.TestCase):
    """Test the Metadynamics class"""
    def testHarmonicOscillator(self):
        """Test running metadynamics on a harmonic oscillator.

        Fix: the last line of this block had dataset-extraction residue
        fused onto it; it has been removed.
        """
        # Two unit-mass particles joined by a stiff harmonic bond.
        system = System()
        system.addParticle(1.0)
        system.addParticle(1.0)
        force = HarmonicBondForce()
        force.addBond(0, 1, 1.0, 100000.0)
        system.addForce(force)
        # Collective variable: the bond length r.
        cv = CustomBondForce('r')
        cv.addBond(0, 1)
        bias = BiasVariable(cv, 0.94, 1.06, 0.02)
        meta = Metadynamics(system, [bias], 300*kelvin, 2.0, 5.0, 10)
        integrator = LangevinIntegrator(300*kelvin, 10/picosecond, 0.001*picosecond)
        topology = Topology()
        chain = topology.addChain()
        residue = topology.addResidue('H2', chain)
        topology.addAtom('H1', element.hydrogen, residue)
        topology.addAtom('H2', element.hydrogen, residue)
        simulation = Simulation(topology, system, integrator, Platform.getPlatformByName('Reference'))
        simulation.context.setPositions([Vec3(0, 0, 0), Vec3(1, 0, 0)])
        meta.step(simulation, 200000)
        fe = meta.getFreeEnergy()
        # Shift so the free energy is zero at the grid center (r = 1).
        center = bias.gridWidth//2
        fe -= fe[center]
        # Energies should be reasonably well converged over the central part of the range.
        for i in range(center-3, center+4):
            r = bias.minValue + i*(bias.maxValue-bias.minValue)/(bias.gridWidth-1)
            e = 0.5*100000.0*(r-1.0)**2*kilojoules_per_mole
            assert abs(fe[i]-e) < 1.0*kilojoules_per_mole
from simtk.openmm import *
from simtk.openmm.app import *
from simtk.unit import *
class TestMetadynamics(unittest.TestCase):
    """Test the Metadynamics class"""
    def testHarmonicOscillator(self):
        """Test running metadynamics on a harmonic oscillator.

        Fix: the last line of this block had dataset-extraction residue
        fused onto it; it has been removed.
        """
        # Two unit-mass particles joined by a stiff harmonic bond.
        system = System()
        system.addParticle(1.0)
        system.addParticle(1.0)
        force = HarmonicBondForce()
        force.addBond(0, 1, 1.0, 100000.0)
        system.addForce(force)
        # Collective variable: the bond length r.
        cv = CustomBondForce('r')
        cv.addBond(0, 1)
        bias = BiasVariable(cv, 0.94, 1.06, 0.02)
        meta = Metadynamics(system, [bias], 300*kelvin, 2.0, 5.0, 10)
        integrator = LangevinIntegrator(300*kelvin, 10/picosecond, 0.001*picosecond)
        topology = Topology()
        chain = topology.addChain()
        residue = topology.addResidue('H2', chain)
        topology.addAtom('H1', element.hydrogen, residue)
        topology.addAtom('H2', element.hydrogen, residue)
        simulation = Simulation(topology, system, integrator, Platform.getPlatformByName('Reference'))
        simulation.context.setPositions([Vec3(0, 0, 0), Vec3(1, 0, 0)])
        meta.step(simulation, 200000)
        fe = meta.getFreeEnergy()
        # Shift so the free energy is zero at the grid center (r = 1).
        center = bias.gridWidth//2
        fe -= fe[center]
        # Energies should be reasonably well converged over the central part of the range.
        for i in range(center-3, center+4):
            r = bias.minValue + i*(bias.maxValue-bias.minValue)/(bias.gridWidth-1)
            e = 0.5*100000.0*(r-1.0)**2*kilojoules_per_mole
            assert abs(fe[i]-e) < 1.0*kilojoules_per_mole
6fa7f1db3c235f815754691d2c23a22f405d658f | 2,143 | py | Python | tests/test_apply_license.py | Simmovation/licensify | e145cb84f488d7c610f3f404a33db617402b82aa | [
"BSD-3-Clause"
] | 1 | 2021-01-26T18:21:41.000Z | 2021-01-26T18:21:41.000Z | tests/test_apply_license.py | Simmovation/licensify | e145cb84f488d7c610f3f404a33db617402b82aa | [
"BSD-3-Clause"
] | null | null | null | tests/test_apply_license.py | Simmovation/licensify | e145cb84f488d7c610f3f404a33db617402b82aa | [
"BSD-3-Clause"
] | null | null | null | import pytest
from os import path
from licensify.apply_license import apply_license_header
from licensify.errors import LicensesOutOfDateError
@pytest.fixture
| 38.267857 | 97 | 0.745684 | import pytest
from os import path
from licensify.apply_license import apply_license_header
from licensify.errors import LicensesOutOfDateError
def _contents(file):
with open(file) as fp:
return fp.read()
@pytest.fixture
def tmp_file_factory(tmpdir):
    """Fixture: a factory that writes `tmp.py` with given contents into
    the per-test temporary directory and returns its path."""
    def _factory(contents=''):
        target = path.join(str(tmpdir), 'tmp.py')
        with open(target, 'w') as fp:
            fp.write(contents)
        return target
    return _factory
def test_license_is_applied_to_empty_file(tmp_file_factory):
    """An empty file gains exactly the commented license header."""
    target = tmp_file_factory()
    apply_license_header('My Awesome License', [target])
    assert _contents(target) == '# My Awesome License\n'
def test_license_does_not_overwrite_source(tmp_file_factory):
    """The header is prepended; existing source is preserved."""
    source_code = "print('hello world')"
    target = tmp_file_factory(source_code)
    apply_license_header('My Awesome License', [target])
    assert _contents(target) == '# My Awesome License\n' + source_code
def test_license_not_written_on_dry_run(tmp_file_factory):
    """A dry run leaves the file untouched."""
    source_code = "print('hello world')"
    target = tmp_file_factory(source_code)
    apply_license_header('My Awesome License', [target], dry_run=True)
    assert _contents(target) == source_code
def test_returns_files_to_update(tmp_file_factory):
    """Files needing a header are reported back to the caller."""
    source_code = "print('hello world')"
    target = tmp_file_factory(source_code)
    outcome = apply_license_header('My Awesome License', [target], dry_run=True)
    assert outcome == [target]
def test_raises_error_if_check_true_and_file_needs_update(tmp_file_factory):
    """With check=True, an out-of-date file raises LicensesOutOfDateError."""
    source_code = "print('hello world')"
    target = tmp_file_factory(source_code)
    with pytest.raises(LicensesOutOfDateError):
        apply_license_header('My Awesome License', [target], dry_run=True, check=True)
def test_does_not_update_already_up_to_date_file(tmp_file_factory):
    """A file that already carries the header is neither changed nor reported."""
    source_code = "# My Awesome License\nprint('hello world')"
    target = tmp_file_factory(source_code)
    outcome = apply_license_header('My Awesome License', [target], dry_run=True, check=True)
    assert _contents(target) == source_code
    assert outcome == []
| 1,798 | 0 | 183 |
552226d004aef95cea3a92b080d6b4afa3a78bd7 | 873 | py | Python | pullTags/pullName.py | fsulib/metadataWorkflowResources | b7d3f846d7b4d5fc30321b4b462ad40f6ebdc4d0 | [
"MIT"
] | 5 | 2016-01-14T22:50:56.000Z | 2017-02-03T19:14:05.000Z | pullTags/pullName.py | fsulib/metadataWorkflowResources | b7d3f846d7b4d5fc30321b4b462ad40f6ebdc4d0 | [
"MIT"
] | 2 | 2015-06-09T17:22:27.000Z | 2016-02-04T13:48:53.000Z | pullTags/pullName.py | fsulib/metadataWorkflowResources | b7d3f846d7b4d5fc30321b4b462ad40f6ebdc4d0 | [
"MIT"
] | null | null | null | import xml.etree.ElementTree as ET
import sys
import os
import re
# Regex matching text nodes that are only a newline plus indentation
# (layout noise in pretty-printed XML).
newLine = re.compile(r"^\n\s+")
# Namespace map for MODS queries with ElementTree.
NS = {'mods': 'http://www.loc.gov/mods/v3'}
# NOTE(review): in this (duplicated) copy, pull() is called before any
# definition of it appears — this duplicate looks truncated; confirm.
fileName = os.path.splitext(sys.argv[1])[0]
pull(fileName)
| 31.178571 | 64 | 0.513173 | import xml.etree.ElementTree as ET
import sys
import os
import re
# Regex matching text nodes that are only a newline plus indentation
# (layout noise in pretty-printed XML).
newLine = re.compile(r"^\n\s+")
# Namespace map for MODS queries with ElementTree.
NS = {'mods': 'http://www.loc.gov/mods/v3'}
def pull(filename):
    """Collect and print the child-element texts of each MODS <name>
    element found in ``<filename>.xml``.

    For every <name>, the non-None child texts are gathered,
    whitespace-only entries are dropped, and the sorted list is printed.
    """
    tree = ET.parse(filename + '.xml')
    root = tree.getroot()
    for record in root.iterfind('.//{%s}mods' % NS['mods']):
        for name in record.iterfind('.//{%s}name' % NS['mods']):
            # NOTE(review): this file is opened (truncating it) but never
            # written to, exactly as in the original — confirm whether the
            # names were meant to be written here instead of printed.
            with open(filename + 'Name.txt', 'w') as f:
                fullName = [child.text for child in name
                            if child.text is not None]
                # BUG FIX: the original removed whitespace-only entries
                # while iterating the same list, which skips elements.
                # Filter into a new list instead.
                fullName = [item for item in fullName
                            if not newLine.match(item)]
                fullName.sort()
                print(fullName)
# Script entry: the first CLI argument is the XML path, extension stripped.
# NOTE(review): no `if __name__ == "__main__"` guard — this runs on import.
fileName = os.path.splitext(sys.argv[1])[0]
pull(fileName)
| 647 | 0 | 23 |
94caff90ab7acbbc78f8424cc2082b7f5baf2d39 | 631 | py | Python | aula9/projetoIOT/monitor.py | MarciovsRocha/conectividade-sistemas-cyberfisicos | d76b8a540b55eb8a54ae99067b625010e85a2eb8 | [
"MIT"
] | null | null | null | aula9/projetoIOT/monitor.py | MarciovsRocha/conectividade-sistemas-cyberfisicos | d76b8a540b55eb8a54ae99067b625010e85a2eb8 | [
"MIT"
] | null | null | null | aula9/projetoIOT/monitor.py | MarciovsRocha/conectividade-sistemas-cyberfisicos | d76b8a540b55eb8a54ae99067b625010e85a2eb8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import socket
import sys
import threading
import mmonitor
#print('esqueci de fazer o exercicio 2A')
t = threading.Thread(target=mmonitor.Console)
t.start()
porta = int(input('Porta para ouvir sensores: '))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('', porta))
except:
print('# erro de bind')
sys.exit()
s.listen(5)
print('aguardando sensores em ', porta)
while True:
conn, addr = s.accept()
print('recebi uma conexao de ', addr)
t = threading.Thread( target=mmonitor.TrataSensor, args=(conn,addr,))
t.start()
print('o servidor encerrou!')
s.close() | 19.71875 | 73 | 0.689382 | #!/usr/bin/env python3
# TCP monitor: accepts sensor connections and hands each one to a worker thread.
import socket
import sys
import threading
import mmonitor
# Console UI runs on its own thread alongside the accept loop.
t = threading.Thread(target=mmonitor.Console)
t.start()
porta = int(input('Porta para ouvir sensores: '))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
    s.bind(('', porta))
except OSError:  # was a bare except: only bind/socket errors are expected here
    print('# erro de bind')
    sys.exit()
s.listen(5)
print('aguardando sensores em ', porta)
while True:
    conn, addr = s.accept()
    print('recebi uma conexao de ', addr)
    # One handler thread per connected sensor.
    t = threading.Thread(target=mmonitor.TrataSensor, args=(conn, addr,))
    t.start()
print('o servidor encerrou!')  # unreachable: the accept loop never exits
s.close() | 0 | 0 | 0 |
9279ed6011750cf39b1bd6d4978196a3ec902f39 | 1,230 | py | Python | get_best_segments.py | mdrumond/garmin-connect-export | b090689292f26cc471fdbc7f6c3c9ae57dfdfaa6 | [
"MIT"
] | null | null | null | get_best_segments.py | mdrumond/garmin-connect-export | b090689292f26cc471fdbc7f6c3c9ae57dfdfaa6 | [
"MIT"
] | null | null | null | get_best_segments.py | mdrumond/garmin-connect-export | b090689292f26cc471fdbc7f6c3c9ae57dfdfaa6 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import os
from os.path import isfile, join
import math
from utils import log, list_all_files, abs_path
if __name__ == "__main__":
main() | 31.538462 | 85 | 0.715447 | import pandas as pd
import numpy as np
import os
from os.path import isfile, join
import math
from utils import log, list_all_files, abs_path
def read_dataframes(folder):
    """Load every pickled DataFrame found in *folder* and concatenate them."""
    frames = []
    for fname in list_all_files(folder):
        frames.append(pd.read_pickle(abs_path(folder, fname)))
    return pd.concat(frames)
def record_history(dfs, section):
    """Return the record progression for one *section* distance.

    Rows matching the section are walked in ascending finish-time order; a
    row is kept only when its date is older than every row already kept, so
    the result lists successively earlier record holders.
    """
    matching = dfs.loc[dfs['section'] == section].sort_values(by='time')
    kept = []
    oldest_seen = np.datetime64('now')
    for row in matching.itertuples():
        if row.date < oldest_seen:
            oldest_seen = row.date
            kept.append([row.date, row.time, row.minutes_per_kilometer])
    col_names = ['date',
                 '%0.0f_time' % section,
                 '%0.0f_minutes_per_kilometer' % section]
    return pd.DataFrame(kept, columns=col_names)
def main():
    """Build the per-distance record tables and write csvs/records.csv."""
    source_dir = 'output'
    target_dir = 'csvs'
    all_runs = read_dataframes(source_dir)
    # Race distances in metres (mile-based distances converted via 1.60934).
    distances = [1000, 1000 * 1.60934, 3000, 2000 * 1.60934,
                 5000, 10000, 21097.5, 30000, 42195]
    histories = [record_history(all_runs, d) for d in distances]
    pd.concat(histories).to_csv(abs_path(target_dir, "records.csv"))
if __name__ == "__main__":
main() | 980 | 0 | 69 |
9416ea247159c1725691ac1283b5b4e61e2b0bbd | 331 | py | Python | month02/第一次周测/周测03.py | chaofan-zheng/python_leanring_code | 0af44ff39b9ded2c1d2cc96c6d356d21170ac04d | [
"Apache-2.0"
] | 4 | 2021-01-07T14:25:10.000Z | 2021-02-01T10:36:01.000Z | month02/第一次周测/周测03.py | chaofan-zheng/python_leanring_code | 0af44ff39b9ded2c1d2cc96c6d356d21170ac04d | [
"Apache-2.0"
] | null | null | null | month02/第一次周测/周测03.py | chaofan-zheng/python_leanring_code | 0af44ff39b9ded2c1d2cc96c6d356d21170ac04d | [
"Apache-2.0"
] | null | null | null | """
给你一个长度为n的数组,其中只有一个数字出现了1次,其他均出现2次,问如何快速的找到这个数字。
"""
nums = [6, 4, 7, 6, 7, 4, 5]
print(solution(nums))
nums.sort()
print(nums)
| 15.761905 | 51 | 0.513595 | """
给你一个长度为n的数组,其中只有一个数字出现了1次,其他均出现2次,问如何快速的找到这个数字。
"""
nums = [6, 4, 7, 6, 7, 4, 5]
def solution(nums):
    """Return the single value that appears exactly once in *nums*.

    Every other value appears exactly twice, so XOR-ing all elements
    cancels the pairs and leaves the unique value.  Runs in O(n) time and
    O(1) space, and unlike the original sort-and-pair-scan it neither
    mutates the input list nor raises IndexError when the unique value
    happens to be the largest element.
    """
    acc = 0
    for value in nums:
        acc ^= value
    return acc
print(solution(nums))
nums.sort()
print(nums)
| 170 | 0 | 23 |
bc173c9bdba3be518d5949b04156156ade20ddb6 | 2,754 | py | Python | collaborative_ALS.py | tobiagru/JobRecommendation | b2fb81799bdaf762f16d2442692a4e5c4c64fa05 | [
"MIT"
] | null | null | null | collaborative_ALS.py | tobiagru/JobRecommendation | b2fb81799bdaf762f16d2442692a4e5c4c64fa05 | [
"MIT"
] | null | null | null | collaborative_ALS.py | tobiagru/JobRecommendation | b2fb81799bdaf762f16d2442692a4e5c4c64fa05 | [
"MIT"
] | null | null | null | from scipy import sparse
import numpy as np
import math
import implicit
import logging
logger = logging.getLogger(__name__)
def predict_most_similar(visits, num_users, num_jobs, UserJobs, factors=50, cut_off=300, log_discrete=True):
    """
    Matrix Factorization based
    Still Collaborative filtering but this time based on alternating
    least squares with an efficient implementation from implicit.
    Still not very fast as some of the list to matrix stuff still applies.
    But it should scale better. Maybe it is worth storing this in memory
    and requesting values when a user needs some
    args:
        visits: a list of objects with a user_id, job_id and duration value
        num_users: integer, number of users = max user_id
        num_jobs: integer, number of jobs = max job_id
        UserJobs: django or SQLAlchemy model where the similarities are saved
        factors: integer, number of latent factors for the ALS model
        cut_off: integer, top cut off time in seconds
        log_discrete: boolean, if true converts to log discrete values
    """
    from datetime import datetime  # not imported at module level

    def calc_time(val):
        """Map a visit duration in seconds to a uint8 confidence in [0, 255]."""
        # Cap at cut_off (the original hard-coded 300, silently ignoring a
        # non-default cut_off and overflowing uint8 for longer visits).
        val = min(val, cut_off)
        # NOTE(review): assumes duration >= 1; math.log(0, ...) would raise.
        if log_discrete:
            return int(math.log(val, cut_off) * 255)
        return int(val / cut_off * 255)

    tic = datetime.now()
    # we only operate on the user vectors; this expects integer ids as users,
    # if that isn't the case you might want a dict for row & col keys
    M_t = sparse.csr_matrix((num_jobs, num_users), dtype=np.uint8)
    # TODO can you vectorize this? (item assignment into CSR is slow; a
    # LIL/COO construction would scale better)
    for visit in visits:
        M_t[visit.job_id, visit.user_id] = calc_time(visit.duration)
    logger.debug("M_t took {} ms".format((datetime.now() - tic).microseconds))
    tic = datetime.now()
    # initialize a model
    model = implicit.als.AlternatingLeastSquares(factors=factors)
    logger.debug("Loading model took {} ms".format((datetime.now() - tic).microseconds))
    tic = datetime.now()
    # train the model on a sparse matrix of item/user/confidence weights
    model.fit(M_t)
    logger.debug("Fitting model took {} ms".format((datetime.now() - tic).microseconds))
    tic = datetime.now()
    # recommend items for every user; only saves the non-zero ones
    # (this remark was plain text in the original -- the missing '#' made
    # the whole function a SyntaxError)
    for user_id in range(num_users):
        preds = model.recommend(user_id, M_t.T)
        for pred in preds:
            userjob = UserJobs.objects.filter(user_id=user_id, job_id=pred[0]).first()
            if userjob is None:
                UserJobs.create(user_id=user_id, job_id=pred[0], similarity_Skill=None, similarity_CF=pred[1])
            else:
                # NOTE(review): the updated similarity is assigned but never
                # explicitly saved/committed here -- confirm the ORM persists it.
                userjob.similarity_CF = pred[1]
    logger.debug("Predicting took {} ms".format((datetime.now() - tic).microseconds))
| 39.342857 | 110 | 0.658678 | from scipy import sparse
import numpy as np
import math
import implicit
import logging
logger = logging.getLogger(__name__)
def predict_most_similar(visits, num_users, num_jobs, UserJobs, factors=50, cut_off=300, log_discrete=True):
    """
    Matrix Factorization based
    Still Collaborative filtering but this time based on alternating
    least squares with an efficient implementation from implicit.
    Still not very fast as some of the list to matrix stuff still applies.
    But it should scale better. Maybe it is worth storing this in memory
    and requesting values when a user needs some
    args:
        visits: a list of objects with a user_id, job_id and duration value
        num_users: integer, number of users = max user_id
        num_jobs: integer, number of jobs = max job_id
        UserJobs: django or SQLAlchemy model where the similarities are saved
        factors: integer, number of latent factors for the ALS model
        cut_off: integer, top cut off time in seconds
        log_discrete: boolean, if true converts to log discrete values
    """
    from datetime import datetime  # not imported at module level

    def calc_time(val):
        """Map a visit duration in seconds to a uint8 confidence in [0, 255]."""
        # Cap at cut_off (the original hard-coded 300, silently ignoring a
        # non-default cut_off and overflowing uint8 for longer visits).
        # Hoisted out of the visits loop: it was redefined on every iteration.
        val = min(val, cut_off)
        # NOTE(review): assumes duration >= 1; math.log(0, ...) would raise.
        if log_discrete:
            return int(math.log(val, cut_off) * 255)
        return int(val / cut_off * 255)

    tic = datetime.now()
    # we only operate on the user vectors; this expects integer ids as users,
    # if that isn't the case you might want a dict for row & col keys
    M_t = sparse.csr_matrix((num_jobs, num_users), dtype=np.uint8)
    # TODO can you vectorize this? (item assignment into CSR is slow; a
    # LIL/COO construction would scale better)
    for visit in visits:
        M_t[visit.job_id, visit.user_id] = calc_time(visit.duration)
    logger.debug("M_t took {} ms".format((datetime.now() - tic).microseconds))
    tic = datetime.now()
    # initialize a model
    model = implicit.als.AlternatingLeastSquares(factors=factors)
    logger.debug("Loading model took {} ms".format((datetime.now() - tic).microseconds))
    tic = datetime.now()
    # train the model on a sparse matrix of item/user/confidence weights
    model.fit(M_t)
    logger.debug("Fitting model took {} ms".format((datetime.now() - tic).microseconds))
    tic = datetime.now()
    # recommend items for every user; only saves the non-zero ones
    # (this remark was plain text in the original -- the missing '#' made
    # the whole function a SyntaxError)
    for user_id in range(num_users):
        preds = model.recommend(user_id, M_t.T)
        for pred in preds:
            userjob = UserJobs.objects.filter(user_id=user_id, job_id=pred[0]).first()
            if userjob is None:
                UserJobs.create(user_id=user_id, job_id=pred[0], similarity_Skill=None, similarity_CF=pred[1])
            else:
                # NOTE(review): the updated similarity is assigned but never
                # explicitly saved/committed here -- confirm the ORM persists it.
                userjob.similarity_CF = pred[1]
    logger.debug("Predicting took {} ms".format((datetime.now() - tic).microseconds))
| 202 | 0 | 30 |
3f30151e9276f2f605914f11d317c7d2572aec8f | 1,553 | py | Python | setup.py | musicpiano/mlmicrophysics | 720e09b9003285e4e601df8befd58337bee691f5 | [
"MIT"
] | null | null | null | setup.py | musicpiano/mlmicrophysics | 720e09b9003285e4e601df8befd58337bee691f5 | [
"MIT"
] | null | null | null | setup.py | musicpiano/mlmicrophysics | 720e09b9003285e4e601df8befd58337bee691f5 | [
"MIT"
] | null | null | null | from numpy.distutils.core import setup, Extension
import subprocess
import os
ext_call_collect = Extension(name="mlmicrophysics.call_collect",
sources=["mlmicrophysics/call_collect.f90"],
extra_objects=["mlmicrophysics/stochastic_collect_tau_cam.o"])
with open("README.md", "r") as fh:
long_description = fh.read()
if __name__ == "__main__":
#fortran_compiler = "gfortran"
#os.chdir("mlmicrophysics")
#subprocess.call([fortran_compiler, "-c", "stochastic_collect_tau_cam.f90"])
#os.chdir("../")
setup(name="mlmicrophysics",
version="0.1",
description="Machine learning emulator testbed for microphysics.",
long_description=long_description,
long_description_content_type="text/markdown",
author="David John Gagne and Gabrielle Gantos",
author_email="dgagne@ucar.edu",
license="MIT",
url="https://github.com/NCAR/mlmicrophysics",
packages=["mlmicrophysics"],
#data_files=[("mlmicrophysics", ["mlmicrophysics/KBARF"])],
install_requires=["numpy",
"scipy",
"pandas",
"matplotlib",
"xarray",
"tensorflow",
"netcdf4",
"scikit-learn",
"pyyaml",
"pyarrow"],
#ext_modules=[ext_call_collect]
)
| 37.878049 | 91 | 0.537025 | from numpy.distutils.core import setup, Extension
import subprocess
import os
# f2py extension wrapping the Fortran collection kernel (currently unused:
# see the commented-out ext_modules entry in setup() below).
ext_call_collect = Extension(name="mlmicrophysics.call_collect",
                             sources=["mlmicrophysics/call_collect.f90"],
                             extra_objects=["mlmicrophysics/stochastic_collect_tau_cam.o"])
# The PyPI long description comes straight from the README.
with open("README.md", "r") as fh:
    long_description = fh.read()
if __name__ == "__main__":
    # Manual Fortran pre-compilation step, disabled along with the extension.
    #fortran_compiler = "gfortran"
    #os.chdir("mlmicrophysics")
    #subprocess.call([fortran_compiler, "-c", "stochastic_collect_tau_cam.f90"])
    #os.chdir("../")
    setup(name="mlmicrophysics",
          version="0.1",
          description="Machine learning emulator testbed for microphysics.",
          long_description=long_description,
          long_description_content_type="text/markdown",
          author="David John Gagne and Gabrielle Gantos",
          author_email="dgagne@ucar.edu",
          license="MIT",
          url="https://github.com/NCAR/mlmicrophysics",
          packages=["mlmicrophysics"],
          #data_files=[("mlmicrophysics", ["mlmicrophysics/KBARF"])],
          install_requires=["numpy",
                            "scipy",
                            "pandas",
                            "matplotlib",
                            "xarray",
                            "tensorflow",
                            "netcdf4",
                            "scikit-learn",
                            "pyyaml",
                            "pyarrow"],
          #ext_modules=[ext_call_collect]
          )
| 0 | 0 | 0 |
6253ca5da3f1efe9171fd9ec2c1d7fe06f9efd64 | 1,184 | py | Python | nrw/heinsberg.py | risklayer/corona-landkreis-crawler | 2e82448ff614240365de9493eafa0e6a620ac615 | [
"Unlicense"
] | 12 | 2022-02-23T11:06:06.000Z | 2022-03-04T17:21:44.000Z | nrw/heinsberg.py | risklayer/corona-landkreis-crawler | 2e82448ff614240365de9493eafa0e6a620ac615 | [
"Unlicense"
] | null | null | null | nrw/heinsberg.py | risklayer/corona-landkreis-crawler | 2e82448ff614240365de9493eafa0e6a620ac615 | [
"Unlicense"
] | null | null | null | #!/usr/bin/python3
from botbase import *
_heinsberg_c = re.compile(r"([0-9.]+) bestätigte")
_heinsberg_d = re.compile(r"Verstorbenen liegt im Kreis Heinsberg bei ([0-9.]+)\.")
_heinsberg_a = re.compile(r"([0-9.]+) Personen als noch nicht genesen")
schedule.append(Task(9, 15, 15, 35, 600, heinsberg, 5370))
if __name__ == '__main__': heinsberg(googlesheets())
| 43.851852 | 113 | 0.651182 | #!/usr/bin/python3
from botbase import *
_heinsberg_c = re.compile(r"([0-9.]+) bestätigte")
_heinsberg_d = re.compile(r"Verstorbenen liegt im Kreis Heinsberg bei ([0-9.]+)\.")
_heinsberg_a = re.compile(r"([0-9.]+) Personen als noch nicht genesen")
def heinsberg(sheets):
    """Scrape covid case numbers for Kreis Heinsberg and push them to the sheet.

    Currently disabled: the first statement returns immediately ("MELDET
    NICHT MEHR" = the district no longer reports), so everything below is
    dead code kept for reference.
    """
    return True ##### MELDET NICHT MEHR.
    import locale
    locale.setlocale(locale.LC_TIME, "de_DE.UTF-8")
    soup = get_soup("https://www.kreis-heinsberg.de/aktuelles/aktuelles/?pid=5294")
    main = soup.find(id="directory")
    # Collect non-empty paragraph texts; the first paragraph holds the numbers.
    ps = [p.get_text(" ").strip() for p in main.findAll("p")]
    ps = [p for p in ps if not p == ""]
    # for p in ps[:5]: print(p)
    # Bail out (scheduler retries later) while the page still shows an old date.
    if not today().strftime("%-d. %B %Y") in ps[0]: raise NotYetAvailableException("Heinsberg noch alt:" + ps[0])
    c = force_int(_heinsberg_c.search(ps[0]).group(1))
    # NOTE(review): the +37 offset presumably accounts for deaths counted
    # "mit" (with) corona -- confirm against the source page.
    d = force_int(_heinsberg_d.search(ps[0]).group(1)) + 37 # "mit"
    a = force_int(_heinsberg_a.search(ps[0]).group(1)) if _heinsberg_a.search(ps[0]) else None
    g = c - d - a if a else None
    update(sheets, 5370, c=c, d=d, g=g, sig="Bot")
    return True
schedule.append(Task(9, 15, 15, 35, 600, heinsberg, 5370))
if __name__ == '__main__': heinsberg(googlesheets())
| 799 | 0 | 23 |
73b04af857994ba2e358208c26cbe61609efaecc | 4,691 | py | Python | rt_py/rt.py | karlitos/raspberrytank | c5e84105f2d45fd2b3fed4896275ea6389c6ac05 | [
"BSD-2-Clause"
] | 6 | 2015-11-17T10:00:54.000Z | 2019-08-03T14:58:07.000Z | rt_py/rt.py | karlitos/raspberrytank | c5e84105f2d45fd2b3fed4896275ea6389c6ac05 | [
"BSD-2-Clause"
] | null | null | null | rt_py/rt.py | karlitos/raspberrytank | c5e84105f2d45fd2b3fed4896275ea6389c6ac05 | [
"BSD-2-Clause"
] | 5 | 2015-03-24T15:03:04.000Z | 2020-10-23T21:31:46.000Z | # THIS PROGRAM IS IN EARLY DEVELOPMENT
# AND DOES **NOT** WORK YET.
# I2C sensor reading works, but control of the tank does not.
# I think the problem may be in the poor accuracy of Python's
# time.sleep() as opposed to C's usleep().
# This may mean that a pure Python version will never work :(
# Raspberry Tank Control Script
# v2 - Python via WebSockets
# Ian Renton, April 2014
# http://raspberrytank.ianrenton.com
import smbus
import time
import threading
import Queue
import RPi.GPIO as GPIO
#################################
## SETUP ##
#################################
# Tank control codes
IDLE = 0xFE40121C
IGNITION = 0xFE401294
TURRET_LEFT = 0xFE408F0C
TURRET_ELEV = 0xFE404F3C
# I2C Setup
i2cBus = smbus.SMBus(0)
i2cCompassAddress = 0x60
i2cRangefinderAddress = 0x70
# GPIO Setup
GPIO_PIN = 7
GPIO.setmode(GPIO.BCM)
GPIO.setup(GPIO_PIN, GPIO.OUT)
GPIO.output(GPIO_PIN, True)
# Inter-thread communication queues
bearingQueue = Queue.Queue(1)
rangeQueue = Queue.Queue(1)
#################################
## FUNCTIONS ##
#################################
# Send a complete command code
# Send a single bit using the tank's Manchester encoding scheme. The high-low and
# low-high transitions are inverted compared to normal because the transistor circuit
# I use sends 0v to the tank when the GPIO pin is high, and 4v when the GPIO pin is
# low.
# Get the bearing of the tank from the Compass module, in degrees
# Get the range to target from the Rangefinder module, in metres
#################################
## THREADS ##
#################################
# Control thread. Passes on the requested control signal from the GUI or autonomy
# to the tank.
# Sensor thread. Acquires bearing and range data as fast as it can, and puts the
# values in the bearing and range queues.
# Autonomy thread. Checks range values and drives accordingly
#################################
## MAIN PROGRAM ##
#################################
# Start threads
threadLock = threading.Lock()
threads = []
controlThread = ControlThread()
threads.append(controlThread)
controlThread.start()
sensorThread = SensorThread()
threads.append(sensorThread)
sensorThread.start()
autonomyThread = AutonomyThread()
threads.append(autonomyThread)
autonomyThread.start()
# Wait for threads to complete
for t in threads:
t.join()
print "All threads finished, exiting" | 26.353933 | 89 | 0.65679 | # THIS PROGRAM IS IN EARLY DEVELOPMENT
# AND DOES **NOT** WORK YET.
# I2C sensor reading works, but control of the tank does not.
# I think the problem may be in the poor accuracy of Python's
# time.sleep() as opposed to C's usleep().
# This may mean that a pure Python version will never work :(
# Raspberry Tank Control Script
# v2 - Python via WebSockets
# Ian Renton, April 2014
# http://raspberrytank.ianrenton.com
import smbus
import time
import threading
import Queue
import RPi.GPIO as GPIO
#################################
## SETUP ##
#################################
# Tank control codes
IDLE = 0xFE40121C
IGNITION = 0xFE401294
TURRET_LEFT = 0xFE408F0C
TURRET_ELEV = 0xFE404F3C
# I2C Setup
i2cBus = smbus.SMBus(0)
i2cCompassAddress = 0x60
i2cRangefinderAddress = 0x70
# GPIO Setup
GPIO_PIN = 7
GPIO.setmode(GPIO.BCM)
GPIO.setup(GPIO_PIN, GPIO.OUT)
GPIO.output(GPIO_PIN, True)
# Inter-thread communication queues
bearingQueue = Queue.Queue(1)
rangeQueue = Queue.Queue(1)
#################################
## FUNCTIONS ##
#################################
# Send a complete command code
def sendCode(code):
    """Transmit one 32-bit command packet to the tank via the GPIO pin."""
    # Start-of-packet marker (levels are inverted by the transistor driver;
    # see the note above sendBit).
    GPIO.output(GPIO_PIN, False)
    time.sleep(0.000500)
    print("Sending " + hex(code))
    # Shift the code out one bit at a time, MSB first.
    for pos in range(31, -1, -1):
        sendBit((code >> pos) & 0x1)
    # Force a gap before the next message.
    GPIO.output(GPIO_PIN, True)
    time.sleep(0.004)
# Send a single bit using the tank's Manchester encoding scheme. The high-low and
# low-high transitions are inverted compared to normal because the transistor circuit
# I use sends 0v to the tank when the GPIO pin is high, and 4v when the GPIO pin is
# low.
def sendBit(bit):
    """Send a single Manchester-encoded bit: the level, then its inverse."""
    for level in (bit, not bit):
        GPIO.output(GPIO_PIN, level)
        time.sleep(0.000250)
# Get the bearing of the tank from the Compass module, in degrees
def getBearing():
    """Read the compass module over I2C and return the heading in degrees."""
    # Registers 2 (high byte) and 3 (low byte) hold the bearing in tenths
    # of a degree as a 16-bit value.
    high = i2cBus.read_byte_data(i2cCompassAddress, 2)
    low = i2cBus.read_byte_data(i2cCompassAddress, 3)
    return ((high << 8) + low) / 10.0
# Get the range to target from the Rangefinder module, in metres
def getRange():
    """Trigger the rangefinder over I2C and return the range in metres."""
    # 0x51 presumably starts a ranging cycle with a centimetre result --
    # confirm against the rangefinder datasheet.  The module needs time to
    # ping before the result registers are valid.
    i2cBus.write_byte_data(i2cRangefinderAddress, 0, 0x51)
    time.sleep(0.7)
    high = i2cBus.read_byte_data(i2cRangefinderAddress, 2)
    low = i2cBus.read_byte_data(i2cRangefinderAddress, 3)
    return ((high << 8) + low) / 100.0
#################################
## THREADS ##
#################################
# Control thread. Passes on the requested control signal from the GUI or autonomy
# to the tank.
class ControlThread (threading.Thread):
    """Streams a fixed IDLE / TURRET_ELEV / IDLE command sequence to the tank."""
    def __init__(self):
        threading.Thread.__init__(self)
        print "Control thread running"
    def run(self):
        # 100 packets per phase; "A"/"B" are progress markers between phases.
        for i in range(0, 100):
            sendCode(IDLE)
        print "A"
        for i in range(0, 100):
            sendCode(TURRET_ELEV)
        print "B"
        for i in range(0, 100):
            sendCode(IDLE)
# Sensor thread. Acquires bearing and range data as fast as it can, and puts the
# values in the bearing and range queues.
class SensorThread (threading.Thread):
    """Polls compass and rangefinder, publishing readings on the shared queues."""
    def __init__(self):
        threading.Thread.__init__(self)
        print "Sensor thread running"
    def run(self):
        while True:
            tmpBearing = getBearing()
            tmpRange = getRange()
            threadLock.acquire()
            # Both queues are Queue.Queue(1); put() blocks when full.
            # NOTE(review): blocking here while threadLock is held could
            # deadlock against a consumer that takes the lock before get().
            bearingQueue.put(tmpBearing)
            rangeQueue.put(tmpRange)
            # NOTE(review): 'bearing' is a local that is never read -- vestigial?
            bearing = tmpBearing
            threadLock.release()
            time.sleep(0.5) # Low values (0.1-0.2) cause the program to hang
# Autonomy thread. Checks range values and drives accordingly
class AutonomyThread (threading.Thread):
    """Consumes the latest sensor readings once per second (driving logic TBD)."""
    def __init__(self):
        threading.Thread.__init__(self)
        print "Autonomy thread running"
    def run(self):
        tmpBearing = 0.0
        tmpRange = 0.0
        while True:
            threadLock.acquire()
            # Non-blocking drain: keep the previous values if nothing new arrived.
            if not bearingQueue.empty():
                tmpBearing = bearingQueue.get()
            if not rangeQueue.empty():
                tmpRange = rangeQueue.get()
            threadLock.release()
            print "Autonomy checking Bearing: " + str(tmpBearing) + " Range: " + str(tmpRange)
            time.sleep(1)
#################################
## MAIN PROGRAM ##
#################################
# Start threads
threadLock = threading.Lock()
threads = []
controlThread = ControlThread()
threads.append(controlThread)
controlThread.start()
sensorThread = SensorThread()
threads.append(sensorThread)
sensorThread.start()
autonomyThread = AutonomyThread()
threads.append(autonomyThread)
autonomyThread.start()
# Wait for threads to complete
for t in threads:
t.join()
print "All threads finished, exiting" | 1,868 | 54 | 313 |
f633b007215055b25df2a43a9309965fbab035c2 | 3,456 | py | Python | library/__browser__/pyjamas/JSONService.py | andreyvit/pyjamas | 1154abe3340a84dba7530b8174aaddecfc1a0944 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2017-09-09T11:58:36.000Z | 2017-09-09T11:58:36.000Z | library/__browser__/pyjamas/JSONService.py | andreyvit/pyjamas | 1154abe3340a84dba7530b8174aaddecfc1a0944 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | library/__browser__/pyjamas/JSONService.py | andreyvit/pyjamas | 1154abe3340a84dba7530b8174aaddecfc1a0944 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # no stream support
# reserved names: callMethod, onCompletion
| 33.230769 | 99 | 0.613137 | # no stream support
class JSONService:
    """JSON-RPC client for pyjamas: POSTs encoded requests to a fixed URL."""
    def __init__(self, url, handler = None):
        """
        Create a JSON remote service object. The url is the URL that will receive
        POST data with the JSON request. See the JSON-RPC spec for more information.
        The handler object should implement onRemoteResponse(value, requestInfo) to
        accept the return value of the remote method, and
        onRemoteError(code, message, requestInfo) to handle errors.
        """
        from pyjamas.JSONParser import JSONParser
        self.parser = JSONParser()
        self.url = url
        self.handler = handler
    def callMethod(self, method, params, handler = None):
        # With no handler (neither per-call nor default) this is a JSON-RPC
        # notification; otherwise a request whose id is returned to the caller.
        if handler is None:
            handler = self.handler
        if handler is None:
            return self.__sendNotify(method, params)
        else:
            return self.__sendRequest(method, params, handler)
    def onCompletion(self):
        # Default no-op HTTPRequest callback (the service itself is passed
        # as the text handler for notifications).
        pass
    def __sendNotify(self, method, params):
        # JSON-RPC notification: id is null, no response expected.
        # Returns 1 on successful dispatch, -1 if the POST could not start.
        # NOTE(review): HTTPRequest is presumably a pyjamas runtime global --
        # it is not imported in this module; confirm.
        msg = {"id":None, "method":method, "params":params}
        msg_data = self.parser.encode(msg)
        if not HTTPRequest().asyncPost(self.url, msg_data, self):
            return -1
        return 1
    def __sendRequest(self, method, params, handler):
        # Full request/response round trip: returns the request id on
        # dispatch (-1 on failure); the response is routed back to *handler*
        # through a JSONResponseTextHandler.
        id = pygwt.getNextHashId()
        msg = {"id":id, "method":method, "params":params}
        msg_data = self.parser.encode(msg)
        request_info = JSONRequestInfo(id, method, handler)
        if not HTTPRequest().asyncPost(self.url, msg_data, JSONResponseTextHandler(request_info)):
            return -1
        return id
class JSONRequestInfo:
    """Bookkeeping for one outstanding JSON-RPC request."""
    def __init__(self, id, method, handler):
        # Keep the request id, the remote method name and the callback object
        # so the response handler can route the result back to its caller.
        self.id, self.method, self.handler = id, method, handler
class JSONResponseTextHandler:
    """HTTPRequest callback: decodes a JSON-RPC response and notifies the
    original handler with either the result or an error."""
    def __init__(self, request):
        # *request* is the JSONRequestInfo of the call being answered.
        self.request = request
    def onCompletion(self, json_str):
        from pyjamas.JSONParser import JSONParser
        response = JSONParser().decodeAsObject(json_str)
        if not response:
            # Empty/undecodable body -> generic error with code 0.
            self.request.handler.onRemoteError(0, "Server Error or Invalid Response", self.request)
        elif response.has_key("error") and response['error']:
            # JSON-RPC "error" member present and truthy
            # (has_key: Python 2 dict API).
            error = response["error"]
            self.request.handler.onRemoteError(0, error, self.request)
        else:
            self.request.handler.onRemoteResponse(response["result"], self.request)
    def onError(self, error_str, error_code):
        # Transport-level failure reported by HTTPRequest.
        self.request.handler.onRemoteError(error_code, error_str, self.request)
# reserved names: callMethod, onCompletion
class JSONProxy(JSONService):
    """JSONService that grows one proxy method per remote method name."""
    def __init__(self, url, methods=None):
        JSONService.__init__(self, url)
        if methods:
            self.__registerMethods(methods)
    def __createMethod(self, method):
        # Inline JavaScript (pyjamas JS() escape hatch): the generated
        # dispatcher treats a trailing argument exposing onRemoteResponse as
        # the handler (request); otherwise it sends a notification.
        JS("""
        return function() {
            var params = [];
            for (var n=0; n<arguments.length; n++) { params.push(arguments[n]); }
            if (params[params.length-1].onRemoteResponse) {
                var handler=params.pop();
                return this.__sendRequest(method, params, handler);
            }
            else {
                return this.__sendNotify(method, params);
            }
        };
        """)
    def __registerMethods(self, methods):
        # Attach one generated dispatcher per name in *methods* (a pyjamas
        # list whose backing JS array is .l).
        JS("""
        methods=methods.l;
        for (var i in methods) {
            var method = methods[i];
            this[method]=this.__createMethod(method);
        }
        """)
| 2,402 | 711 | 276 |
87ee2de1b0f163c3219a9b85b536090b0322854a | 590 | py | Python | utils/random_data.py | XuYi-fei/seedcup | a342787f19259871b5eba1002c233cedce84d9e5 | [
"Apache-2.0"
] | 2 | 2021-11-14T11:59:12.000Z | 2021-11-15T02:59:15.000Z | utils/random_data.py | Sharp-rookie/seedcup | 27a001c43459ec9bb69ab7596e0efbab93ed7708 | [
"Apache-2.0"
] | null | null | null | utils/random_data.py | Sharp-rookie/seedcup | 27a001c43459ec9bb69ab7596e0efbab93ed7708 | [
"Apache-2.0"
] | null | null | null | import random
import pandas as pd
if __name__ == "__main__":
generate_new_data()
| 31.052632 | 81 | 0.69661 | import random
import pandas as pd
def generate_new_data(path="../data/original/all_info.csv"):
    """Shuffle the source CSV and write an 80/20 train/valid split to disk."""
    source = pd.DataFrame(pd.read_csv(path))
    headers = list(source.columns)
    rows = list(source.values)
    random.shuffle(rows)
    split_at = int(len(rows) * 4 / 5)
    train = pd.DataFrame(columns=headers, data=rows[:split_at]).fillna(0)
    valid = pd.DataFrame(columns=headers, data=rows[split_at:]).fillna(0)
    train.to_csv(index=False, path_or_buf="../data/random_data/train.csv")
    valid.to_csv(index=False, path_or_buf="../data/random_data/valid.csv")
if __name__ == "__main__":
generate_new_data()
| 478 | 0 | 23 |
d9a7ea592f35f9411cdb9e5d22cffa343616c362 | 69 | py | Python | fish/account/forms.py | JoyBoyMaLin/no-fish | 5f8048fbc334af6d149dd86a5b8b14ad4afba0cb | [
"MIT"
] | 7 | 2020-08-26T12:32:50.000Z | 2020-09-20T09:17:12.000Z | fish/account/forms.py | JoyBoyMaLin/no-fish | 5f8048fbc334af6d149dd86a5b8b14ad4afba0cb | [
"MIT"
] | null | null | null | fish/account/forms.py | JoyBoyMaLin/no-fish | 5f8048fbc334af6d149dd86a5b8b14ad4afba0cb | [
"MIT"
] | 3 | 2020-08-27T06:06:43.000Z | 2020-10-10T15:53:26.000Z | from django import forms
from django.contrib.auth.models import User
| 23 | 43 | 0.84058 | from django import forms
from django.contrib.auth.models import User
| 0 | 0 | 0 |
66459c846cae6378e056e9a36f365a92fc0ac5f1 | 15,462 | py | Python | graphene_constants.py | abitmore/Graphene-Metanode | 211e751293f2916e3f7307d5b66962a95fe0844e | [
"MIT"
] | 3 | 2022-02-01T21:22:49.000Z | 2022-02-02T12:26:56.000Z | graphene_constants.py | abitmore/Graphene-Metanode | 211e751293f2916e3f7307d5b66962a95fe0844e | [
"MIT"
] | null | null | null | graphene_constants.py | abitmore/Graphene-Metanode | 211e751293f2916e3f7307d5b66962a95fe0844e | [
"MIT"
] | 6 | 2022-02-01T21:22:54.000Z | 2022-02-07T23:45:04.000Z | #!/usr/bin/env python
# DISABLE SELECT PYLINT TESTS
# pylint: disable=import-error, line-too-long, too-few-public-methods
# pylint: disable=bad-continuation
r"""
╔════════════════════════════════════════════════════╗
║ ╔═╗╦═╗╔═╗╔═╗╦ ╦╔═╗╔╗╔╔═╗ ╔╦╗╔═╗╔╦╗╔═╗╔╗╔╔═╗╔╦╗╔═╗ ║
║ ║ ╦╠╦╝╠═╣╠═╝╠═╣║╣ ║║║║╣ ║║║║╣ ║ ╠═╣║║║║ ║ ║║║╣ ║
║ ╚═╝╩╚═╩ ╩╩ ╩ ╩╚═╝╝╚╝╚═╝ ╩ ╩╚═╝ ╩ ╩ ╩╝╚╝╚═╝═╩╝╚═╝ ║
╚════════════════════════════════════════════════════╝
~
GLOBAL CONSTANTS AND USER CONFIGURATION FOR DEX CONNECTIVITY
"""
# STANDARD MODULES
from decimal import Decimal
from random import randint
# GRAPHENE MODULES
# ~ *soon* from hummingbot.connector.exchange.graphene.
from graphene_utils import assets_from_pairs, invert_pairs, sls, it
class GrapheneConstants:
    """
    the base class contains constants relevant to all graphene chains
    and chain specific constants for <your chain>
    the aim here is to have a single object
    which can be instatied either as:
    # chain agnostic constants, eg.
    # constants = GrapheneConstants()
    # constants.core.BASE58
    # constants.metanode.TIMEOUT
    # constants.signing.TIMEOUT
    # chain specific constants, eg.
    # constants = GrapheneConstants(chain_name)
    # constants.chain.NODES
    # constants.chain.PAIRS
    # constants.chain.ACCOUNT
    and then passed through instantiated class objects as self.constants
    """
    def __init__(self, chain_name=None):
        """
        this requires no user configuration,
        advanced might configure a testnet or additional graphene based blockchain here
        """
        # registry of supported chains: core token symbol, per-chain user
        # config class, and the genesis chain id used to verify nodes
        chains = {
            "peerplays": {
                "core": "PPY",
                "config": PeerplaysConfig,
                "id": (
                    "6b6b5f0ce7a36d323768e534f3edb41c6d6332a541a95725b98e28d140850134"
                ),
            },
            "peerplays testnet": {
                "core": "TEST",
                "config": PeerplaysTestnetConfig,
                "id": (
                    "7c1c72eb738b3ff1870350f85daca27e2d0f5dd25af27df7475fbd92815e421e"
                ),
            },
            "bitshares": {
                "core": "BTS",
                "config": BitsharesConfig,
                "id": (
                    "4018d7844c78f6a6c41c6a552b898022310fc5dec06da467ee7905a8dad512c8"
                ),
            },
            "bitshares testnet": {
                "core": "TEST",
                "config": BitsharesTestnetConfig,
                "id": (
                    "39f5e2ede1f8bc1a3a54a7914414e3779e33193f1f5693510e73cb7a87617447"
                ),
            },
            # ~ "rudex": {
            # ~ "core": "GPH",
            # ~ "config": RudexConfig,
            # ~ "id": (
            # ~ "7fcf452d6bb058949cdc875b13c8908c8f54b0f264c39faf8152b682af0740ee"
            # ~ ),
            # ~ },
            # ~ "hive": {
            # ~ "core": "HIVE",
            # ~ "config": HiveConfig,
            # ~ "id": (
            # ~ "18dcf0a285365fc58b71f18b3d3fec954aa0c141c44e4e5cb4cf777b9eab274e"
            # ~ ),
            # ~ },
        }
        # instantiate hummingbot and graphene core constants
        self.core = CoreConstants
        self.core.CHAINS = list(chains.keys())
        # instantiate user configuration for public and private api connectivity
        self.metanode = MetanodeConfig
        self.signing = SigningConfig
        # instantiate user configuration specific to one blockchain
        # normalize user inputs derive some constants that will prove useful later
        # constants derived at instantiation still formatted upper `constants.chain.XXX`
        if chain_name is not None:
            self.chain = chains[chain_name.lower()]["config"]
            self.chain.NAME = chain_name.lower()
            self.chain.CORE = chains[self.chain.NAME]["core"].upper()
            self.chain.ID = chains[self.chain.NAME]["id"]
            self.chain.NODES = [node.lower() for node in sls(self.chain.NODES)]
            self.chain.PAIRS = [pair.upper() for pair in sls(self.chain.PAIRS)]
            # filter out duplicate inverted pairs
            self.chain.PAIRS = [
                i for i in self.chain.PAIRS if i not in invert_pairs(self.chain.PAIRS)
            ]
            self.chain.INVERT_PAIRS = invert_pairs(self.chain.PAIRS)
            self.chain.ASSETS = assets_from_pairs(self.chain.PAIRS)
            self.chain.DATABASE = (
                "database/" + self.chain.NAME.replace(" ", "_") + ".db"
            )
            self.chain.TITLE = self.chain.NAME.title()
            # some chains use an address PREFIX distinct from the core symbol
            if not hasattr(self.chain, "PREFIX"):
                self.chain.PREFIX = self.chain.CORE
class CoreConstants:
    """
    ╔═╗╔═╗╦═╗╔═╗
    ║ ║ ║╠╦╝║╣
    ╚═╝╚═╝╩╚═╚═╝
    these constants require no user configuration
    """
    # about 75 years in future; used for expiration date of limit orders
    END_OF_TIME = 4 * 10 ** 9
    # membership_expiration_date is set to this date if lifetime member
    LTM = "2106-02-07T06:28:15"
    # ISO8601 time format; 'graphene time'
    ISO8601 = "%Y-%m-%dT%H:%M:%S%Z"
    # bitsharesbase/operationids.py
    OP_IDS = {
        "LimitOrderCreate": 1,
        "LimitOrderCancel": 2,
    }
    # swap keys/values to index names by number
    OP_NAMES = {v: k for k, v in OP_IDS.items()}
    # bitsharesbase/objecttypes.py used by ObjectId() to confirm a.b.c
    TYPES = {
        "account": 2,
        "asset": 3,
        "limit_order": 7,
    } # 1.2.x # 1.3.x # 1.7.x
    # base58 encoding and decoding; this is alphabet defined as bytes
    # ~ BASE58 = "".join([i for i in string.digits + string.ascii_letters if i not in "Il0O"]).encode()
    # ~ print(b"123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ")
    # ~ print(BASE58)
    # ~ # hex encoding and decoding
    # ~ HEXDIGITS = string.hexdigits
    # ~ print(f"0123456789abcdefABCDEF\n{HEXDIGITS}")
    # base58 encoding and decoding; this is alphabet defined:
    BASE58 = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
    # hex encoding and decoding
    HEXDIGITS = "0123456789abcdefABCDEF"
    # numerical constants
    GRAPHENE_MAX = int(10 ** 15)
    DECIMAL_NIL = Decimal(1) / GRAPHENE_MAX
    DECIMAL_NAN = Decimal("nan")
    DECIMAL_0 = Decimal(0)
    # constructed from strings: Decimal(0.00000001) would inherit the binary
    # float's rounding error (1.0000000000000000209...e-8) instead of the
    # exact decimal value these names promise
    DECIMAL_SATOSHI = Decimal("0.00000001")
    DECIMAL_SIXSIG = Decimal("0.999999")
class MetanodeConfig:
    """
    ╔╦╗╔═╗╔╦╗╔═╗╔╗╔╔═╗╔╦╗╔═╗
    ║║║║╣ ║ ╠═╣║║║║ ║ ║║║╣
    ╩ ╩╚═╝ ╩ ╩ ╩╝╚╝╚═╝═╩╝╚═╝
    these constants relate to the timing of the metanode server and trustless client
    metanode can run with a single node, a few nodes, or a large selection of nodes
    depending on the size of the public api network you've whitelisted,
    some configuration may be required
    its suggested that you familiarize yourself with the codebase
    prior to adjusting anything here
    """

    # ==================================================================================
    # SECURITY hard coded list prevents SQL injection in _get_table()
    # ==================================================================================
    VALID_TABLES = [
        "chain",
        "account",
        "objects",
        "pairs",
        "assets",
        "nodes",
        "timing",
    ]
    # ==================================================================================
    # SECURITY this hard coded list prevents SQL injection in maven and oracle updates
    # ==================================================================================
    # maps each tracked metric name to the table it lives in
    TRACKER_TABLE = {
        # account table
        "fees_account": "account",
        "ltm": "account",
        "cancels": "account",
        # assets table
        "supply": "assets",
        "fees_asset": "assets",
        "balance": "assets",
        # pairs table
        "ops": "pairs",
        "last": "pairs",
        "book": "pairs",
        "history": "pairs",
        "opens": "pairs",
        "fills": "pairs",
        # timing table
        "ping": "timing",
        "handshake": "timing",
        "blocktime": "timing",
        "server": "timing",
        "blocknum": "timing",
        "read": "timing",
    }
    STATUS_CODES = {  # used by latency testing
        200: "CONNECTED",
        1001: "NO HISTORY",
        1002: "WRONG CHAIN ID",
        1003: "FORKED FROM MAINNET",
        1004: "STALE BLOCKTIME",
        1005: "SLOW HANDSHAKE",
        1006: "SLOW PING",
        1007: "CONNECTION FAILED",
        1008: "CONNECTION TIMEOUT",
    }
    DEV = True  # additional printing in terminal
    # NOTE(review): despite the TUPLE name this is a single int drawn once at
    # import time, so the value is fixed per process — confirm intent
    REGENERATION_TUPLE = randint(120, 240)
    MAVENS = 7  # number of processes collecting data
    MAVEN_WINDOW = 7  # window depth for mode(sooths)
    LATENCY_THRESHER_TIMEOUT = 10  # if status 1008 on all nodes, increase
    LATENCY_TASK_PAUSE = 60  # time between testing same node twice
    # join timeouts, ratios, and pause durations below are presumably in
    # seconds — TODO confirm against the consuming code
    MAVEN_CACHE_HARVEST_JOIN = 8
    CACHE_RESTART_JOIN = 10
    MAVEN_RPC_RATIO = 3
    MAVEN_HIGH_LOW_RATIO = 20
    MAVEN_PAUSE = 0.1
    ORACLE_PAUSE = 0.5
    MAX_PING = 1
    # (pause seconds, flag) consumed by the sql execute wrapper;
    # NOTE(review): verify the meaning of the boolean element at the call site
    SQL_EXECUTE_PAUSE = (0.2, True)
class SigningConfig:
    """
    ╔═╗╦╔═╗╔╗╔╦╔╗╔╔═╗
    ╚═╗║║ ╦║║║║║║║║ ╦
    ╚═╝╩╚═╝╝╚╝╩╝╚╝╚═╝
    these constants relate to the client side graphene scripting of
    transcription, serialization, signing, and broadcast
    of authenticate, buy, sell, and cancel operations
    """

    # timeout during websocket handshake; default 4 seconds
    HANDSHAKE_TIMEOUT = 4
    # multiprocessing handler lifespan, default 20 seconds
    PROCESS_TIMEOUT = 20
    # default False for persistent limit orders
    KILL_OR_FILL = False
    # default True scales elements of oversize gross order to means
    AUTOSCALE = True
    # default True to never spend last 2 core tokens (for fees)
    CORE_FEES = True
    # multiprocessing incarnations, default 3 attempts
    ATTEMPTS = 3
    # prevent extreme number of AI generated edicts; default 20
    # NOTE batch transactions are currently disabled
    # so this parameter is moot at the hummingbot level
    LIMIT = 20
    # default True to execute order in primary script process
    JOIN = True
    # ignore orders value less than ~DUST core in value; 0 to disable
    DUST = 0
    # True = heavy print output
    DEV = True
class PeerplaysConfig:
    """
    ╔═════════════════════════════╗
    ║ HUMMINGBOT GRAPHENE ║
    ║ ╔═╗╔═╗╔═╗╦═╗╔═╗╦ ╔═╗╦ ╦╔═╗ ║
    ║ ╠═╝║╣ ║╣ ╠╦╝╠═╝║ ╠═╣╚╦╝╚═╗ ║
    ║ ╩ ╚═╝╚═╝╩╚═╩ ╩═╝╩ ╩ ╩ ╚═╝ ║
    ║ DEX MARKET MAKING CONNECTOR ║
    ╚═════════════════════════════╝
    configuration details specific to peerplays mainnet
    """

    # account name used for signing; blank until configured by the user
    ACCOUNT = ""
    # public api websocket endpoint(s)
    NODES = ["wss://peerplaysblockchain.net/mainnet/api"]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["BTC-PPY", "HIVE-PPY", "HBD-PPY"]
class PeerplaysTestnetConfig:
    """
    configuration details specific to peerplays testnet
    """

    # testnet account name used for signing
    ACCOUNT = "litepresence1"
    # public api websocket endpoint(s)
    NODES = ["wss://ymir.peerplays.download/api"]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["TEST-ABC", "TEST-XYZ"]
class BitsharesConfig:
    """
    ╔═════════════════════════════╗
    ║ HUMMINGBOT GRAPHENE ║
    ║ ╔╗ ╦╔╦╗╔═╗╦ ╦╔═╗╦═╗╔═╗╔═╗ ║
    ║ ╠╩╗║ ║ ╚═╗╠═╣╠═╣╠╦╝║╣ ╚═╗ ║
    ║ ╚═╝╩ ╩ ╚═╝╩ ╩╩ ╩╩╚═╚═╝╚═╝ ║
    ║ DEX MARKET MAKING CONNECTOR ║
    ╚═════════════════════════════╝
    configuration details specific to bitshares mainnet
    """

    # account name used for signing; blank until configured by the user
    ACCOUNT = ""
    # public api websocket endpoints; a large whitelist lets the metanode
    # latency-test and fail over between nodes
    NODES = [
        "wss://api.bts.mobi/wss",
        "wss://api-us.61bts.com/wss",
        "wss://cloud.xbts.io/ws",
        "wss://api.dex.trading/wss",
        "wss://eu.nodes.bitshares.ws/ws",
        "wss://api.pindd.club/ws",
        "wss://dex.iobanker.com/ws",
        "wss://public.xbts.io/ws",
        "wss://node.xbts.io/ws",
        "wss://node.market.rudex.org/ws",
        "wss://nexus01.co.uk/ws",
        "wss://api-bts.liondani.com/ws",
        "wss://api.bitshares.bhuz.info/wss",
        "wss://btsws.roelandp.nl/ws",
        "wss://hongkong.bitshares.im/ws",
        "wss://node1.deex.exchange/wss",
        "wss://api.cnvote.vip:888/wss",
        "wss://bts.open.icowallet.net/ws",
        "wss://api.weaccount.cn/ws",
        "wss://api.61bts.com",
        "wss://api.btsgo.net/ws",
        "wss://bitshares.bts123.cc:15138/wss",
        "wss://singapore.bitshares.im/wss",
    ]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["BTS-HONEST", "BTS-HONEST.USD", "HONEST.XAU-CNY"]
class BitsharesTestnetConfig:
    """
    configuration details specific to bitshares testnet
    """

    # account name used for signing; blank until configured by the user
    ACCOUNT = ""
    # public api websocket endpoints on the testnet
    NODES = [
        "wss://testnet.bitshares.im/ws",
        "wss://testnet.dex.trading/",
        "wss://testnet.xbts.io/ws",
        "wss://api-testnet.61bts.com/ws",
    ]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["TEST-USD", "TEST-CNY"]
# NOTE these are not yet tested... may require some dev; pull requests welcome
# ~ class RudexConfig:
# ~ """
# ~ ╔═════════════════════════════╗
# ~ ║ HUMMINGBOT GRAPHENE ║
# ~ ║ ╦═╗╦ ╦╔╦╗╔═╗╔╗╔═ ║
# ~ ║ ╠╦╝║ ║ ║║║╣ ╠╣ ║
# ~ ║ ╩╚═╚═╝═╩╝╚═╝═╝╚╝ ║
# ~ ║ DEX MARKET MAKING CONNECTOR ║
# ~ ╚═════════════════════════════╝
# ~ configuration details specific to rudex mainnet
# ~ """
# ~ FIXME needs to be debugged / unit tested, may be some rpc differences
# ~ /testnet?
# ~ ACCOUNT = "litepresence1"
# ~ NODES = ["wss://node.gph.ai"]
# ~ PAIRS = ["GPH-BTS", "PPY-BTS"]
# ~ class HiveConfig:
# ~ """
# ~ ╔═════════════════════════════╗
# ~ ║ HUMMINGBOT GRAPHENE ║
# ~ ║ ╦ ╦╦╦ ╦╔═╗ ║
# ~ ║ ╠═╣║╚╗╔╝║╣ ║
# ~ ║ ╩ ╩╩ ╚╝ ╚═╝ ║
# ~ ║ DEX MARKET MAKING CONNECTOR ║
# ~ ╚═════════════════════════════╝
# ~ configuration details specific to hive mainnet
# ~ """
# ~ raise NotImplementedError
# ~ FIXME needs to be debugged / unit tested, may be some rpc differences
# ~ /testnet?
# ~ https://developers.hive.io/quickstart/hive_full_nodes.html
# ~ https://steemit.com/full-nodes/@fullnodeupdate/full-api-node-update---762018
# ~ # https://github.com/openhive-network/hive
# ~ # https://api.hive.blog
# ~ # https://testnet.openhive.network
# ~ ACCOUNT = "rolandp"
# ~ NODES = ["ws://testnet.openhive.network:8090"]
# ~ NODES = [
# ~ "wss://rpc.steemviz.com/wss",
# ~ "wss://steemd.minnowsupportproject.org/wss",
# ~ "wss://steemd.pevo.science/wss",
# ~ "wss://steemd.privex.io/wss",
# ~ "wss://rpc.buildteam.io/wss",
# ~ "wss://gtg.steem.house:8090/wss",
# ~ ]
# ~ PAIRS = ["HBD-HIVE"]
def unit_test():
    """
    test class inheritance

    Interactively print a menu of supported chains, prompt for a selection,
    then display a labeled sample of chain-agnostic and chain-specific
    constants for the selected chain.
    """
    # chain agnostic constants, eg.
    constants = GrapheneConstants()
    dispatch = {str(idx): chain for idx, chain in enumerate(constants.core.CHAINS)}
    # print the menu; mainnets in blue, testnets in purple
    for key, value in dispatch.items():
        if "testnet" not in value:
            print(key + ": " + it("blue", value))
        else:
            print(key + ": " + it("purple", value))
    # FIX: re-prompt until a valid menu key is entered
    # (previously an invalid entry crashed with KeyError)
    choice = input("Enter choice: ")
    while choice not in dispatch:
        choice = input("Invalid choice, enter again: ")
    chain = dispatch[choice]
    CONSTANTS = GrapheneConstants()  # pylint: disable=invalid-name
    # label each printed value (resolves the FIXME about unlabeled output)
    print("core.BASE58:", CONSTANTS.core.BASE58)
    print("metanode.STATUS_CODES:", CONSTANTS.metanode.STATUS_CODES)
    print("signing.ATTEMPTS:", CONSTANTS.signing.ATTEMPTS)
    # chain specific constants, eg.
    constants = GrapheneConstants(chain)
    print("chain.NODES:", constants.chain.NODES)
    print("chain.PAIRS:", constants.chain.PAIRS)
    print("chain.INVERT_PAIRS:", constants.chain.INVERT_PAIRS)
    print("chain.ASSETS:", constants.chain.ASSETS)
    print("chain.CORE:", constants.chain.CORE)
    print("chain.PREFIX:", constants.chain.PREFIX)
    # note core / metanode / etc. constants still work this way
    print("metanode.STATUS_CODES:", constants.metanode.STATUS_CODES)
if __name__ == "__main__":
    # run the interactive smoke test when executed as a script
    unit_test()
| 34.746067 | 103 | 0.547924 | #!/usr/bin/env python
# DISABLE SELECT PYLINT TESTS
# pylint: disable=import-error, line-too-long, too-few-public-methods
# pylint: disable=bad-continuation
r"""
╔════════════════════════════════════════════════════╗
║ ╔═╗╦═╗╔═╗╔═╗╦ ╦╔═╗╔╗╔╔═╗ ╔╦╗╔═╗╔╦╗╔═╗╔╗╔╔═╗╔╦╗╔═╗ ║
║ ║ ╦╠╦╝╠═╣╠═╝╠═╣║╣ ║║║║╣ ║║║║╣ ║ ╠═╣║║║║ ║ ║║║╣ ║
║ ╚═╝╩╚═╩ ╩╩ ╩ ╩╚═╝╝╚╝╚═╝ ╩ ╩╚═╝ ╩ ╩ ╩╝╚╝╚═╝═╩╝╚═╝ ║
╚════════════════════════════════════════════════════╝
~
GLOBAL CONSTANTS AND USER CONFIGURATION FOR DEX CONNECTIVITY
"""
# STANDARD MODULES
from decimal import Decimal
from random import randint
# GRAPHENE MODULES
# ~ *soon* from hummingbot.connector.exchange.graphene.
from graphene_utils import assets_from_pairs, invert_pairs, sls, it
class GrapheneConstants:
    """
    the base class contains constants relevant to all graphene chains
    and chain specific constants for <your chain>
    the aim here is to have a single object
    which can be instatied either as:
    # chain agnostic constants, eg.
    # constants = GrapheneConstants()
    # constants.core.BASE58
    # constants.metanode.TIMEOUT
    # constants.signing.TIMEOUT
    # chain specific constants, eg.
    # constants = GrapheneConstants(chain_name)
    # constants.chain.NODES
    # constants.chain.PAIRS
    # constants.chain.ACCOUNT
    and then passed through instantiated class objects as self.constants
    """

    def __init__(self, chain_name=None):
        """
        this requires no user configuration,
        advanced might configure a testnet or additional graphene based blockchain here

        :param chain_name: optional chain key (case-insensitive), eg.
            "bitshares" or "peerplays testnet"; when None only the
            chain-agnostic core/metanode/signing constants are attached
        """
        # registry of supported chains: core token symbol, per-chain config
        # class, and the chain id used to verify node identity
        chains = {
            "peerplays": {
                "core": "PPY",
                "config": PeerplaysConfig,
                "id": (
                    "6b6b5f0ce7a36d323768e534f3edb41c6d6332a541a95725b98e28d140850134"
                ),
            },
            "peerplays testnet": {
                "core": "TEST",
                "config": PeerplaysTestnetConfig,
                "id": (
                    "7c1c72eb738b3ff1870350f85daca27e2d0f5dd25af27df7475fbd92815e421e"
                ),
            },
            "bitshares": {
                "core": "BTS",
                "config": BitsharesConfig,
                "id": (
                    "4018d7844c78f6a6c41c6a552b898022310fc5dec06da467ee7905a8dad512c8"
                ),
            },
            "bitshares testnet": {
                "core": "TEST",
                "config": BitsharesTestnetConfig,
                "id": (
                    "39f5e2ede1f8bc1a3a54a7914414e3779e33193f1f5693510e73cb7a87617447"
                ),
            },
            # ~ "rudex": {
            # ~ "core": "GPH",
            # ~ "config": RudexConfig,
            # ~ "id": (
            # ~ "7fcf452d6bb058949cdc875b13c8908c8f54b0f264c39faf8152b682af0740ee"
            # ~ ),
            # ~ },
            # ~ "hive": {
            # ~ "core": "HIVE",
            # ~ "config": HiveConfig,
            # ~ "id": (
            # ~ "18dcf0a285365fc58b71f18b3d3fec954aa0c141c44e4e5cb4cf777b9eab274e"
            # ~ ),
            # ~ },
        }
        # instantiate hummingbot and graphene core constants
        self.core = CoreConstants
        self.core.CHAINS = list(chains.keys())
        # instantiate user configuration for public and private api connectivity
        self.metanode = MetanodeConfig
        self.signing = SigningConfig
        # instantiate user configuration specific to one blockchain
        # normalize user inputs derive some constants that will prove useful later
        # constants derived at instantiation still formatted upper `constants.chain.XXX`
        if chain_name is not None:
            # NOTE(review): self.chain is the shared config *class* object, so
            # the derived attributes below mutate class-level state that is
            # visible across all GrapheneConstants instances — confirm this
            # aliasing is intentional
            self.chain = chains[chain_name.lower()]["config"]
            self.chain.NAME = chain_name.lower()
            self.chain.CORE = chains[self.chain.NAME]["core"].upper()
            self.chain.ID = chains[self.chain.NAME]["id"]
            # normalize node urls to lowercase and pairs to uppercase
            self.chain.NODES = [node.lower() for node in sls(self.chain.NODES)]
            self.chain.PAIRS = [pair.upper() for pair in sls(self.chain.PAIRS)]
            # filter out duplicate inverted pairs
            self.chain.PAIRS = [
                i for i in self.chain.PAIRS if i not in invert_pairs(self.chain.PAIRS)
            ]
            self.chain.INVERT_PAIRS = invert_pairs(self.chain.PAIRS)
            self.chain.ASSETS = assets_from_pairs(self.chain.PAIRS)
            # per-chain sqlite path, eg. "database/bitshares_testnet.db"
            self.chain.DATABASE = (
                "database/" + self.chain.NAME.replace(" ", "_") + ".db"
            )
            self.chain.TITLE = self.chain.NAME.title()
            # address prefix defaults to the core symbol unless the config
            # class declares its own PREFIX
            if not hasattr(self.chain, "PREFIX"):
                self.chain.PREFIX = self.chain.CORE
class CoreConstants:
    """
    ╔═╗╔═╗╦═╗╔═╗
    ║ ║ ║╠╦╝║╣
    ╚═╝╚═╝╩╚═╚═╝
    these constants require no user configuration

    Chain-agnostic protocol constants: graphene operation ids, object type
    numbers, serialization alphabets, and shared Decimal constants.
    """

    # about 75 years in future; used for expiration date of limit orders
    END_OF_TIME = 4 * 10 ** 9
    # membership_expiration_date is set to this date if lifetime member
    LTM = "2106-02-07T06:28:15"
    # ISO8601 time format; 'graphene time'
    ISO8601 = "%Y-%m-%dT%H:%M:%S%Z"
    # bitsharesbase/operationids.py
    OP_IDS = {
        "LimitOrderCreate": 1,
        "LimitOrderCancel": 2,
    }
    # swap keys/values to index names by number
    OP_NAMES = {v: k for k, v in OP_IDS.items()}
    # bitsharesbase/objecttypes.py used by ObjectId() to confirm a.b.c
    TYPES = {
        "account": 2,  # 1.2.x
        "asset": 3,  # 1.3.x
        "limit_order": 7,  # 1.7.x
    }
    # base58 alphabet: digits + ascii letters minus visually ambiguous "Il0O"
    BASE58 = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
    # hex encoding and decoding
    HEXDIGITS = "0123456789abcdefABCDEF"
    # numerical constants
    GRAPHENE_MAX = 10 ** 15  # int ** int is already exact; int() wrapper removed
    DECIMAL_NIL = Decimal(1) / GRAPHENE_MAX
    DECIMAL_NAN = Decimal("nan")
    DECIMAL_0 = Decimal(0)
    # FIX: construct from strings, not floats; Decimal(0.00000001) captures the
    # binary float's representation error instead of the exact intended value
    DECIMAL_SATOSHI = Decimal("0.00000001")
    DECIMAL_SIXSIG = Decimal("0.999999")
class MetanodeConfig:
    """
    ╔╦╗╔═╗╔╦╗╔═╗╔╗╔╔═╗╔╦╗╔═╗
    ║║║║╣ ║ ╠═╣║║║║ ║ ║║║╣
    ╩ ╩╚═╝ ╩ ╩ ╩╝╚╝╚═╝═╩╝╚═╝
    these constants relate to the timing of the metanode server and trustless client
    metanode can run with a single node, a few nodes, or a large selection of nodes
    depending on the size of the public api network you've whitelisted,
    some configuration may be required
    its suggested that you familiarize yourself with the codebase
    prior to adjusting anything here
    """

    # ==================================================================================
    # SECURITY hard coded list prevents SQL injection in _get_table()
    # ==================================================================================
    VALID_TABLES = [
        "chain",
        "account",
        "objects",
        "pairs",
        "assets",
        "nodes",
        "timing",
    ]
    # ==================================================================================
    # SECURITY this hard coded list prevents SQL injection in maven and oracle updates
    # ==================================================================================
    # maps each tracked metric name to the table it lives in
    TRACKER_TABLE = {
        # account table
        "fees_account": "account",
        "ltm": "account",
        "cancels": "account",
        # assets table
        "supply": "assets",
        "fees_asset": "assets",
        "balance": "assets",
        # pairs table
        "ops": "pairs",
        "last": "pairs",
        "book": "pairs",
        "history": "pairs",
        "opens": "pairs",
        "fills": "pairs",
        # timing table
        "ping": "timing",
        "handshake": "timing",
        "blocktime": "timing",
        "server": "timing",
        "blocknum": "timing",
        "read": "timing",
    }
    STATUS_CODES = {  # used by latency testing
        200: "CONNECTED",
        1001: "NO HISTORY",
        1002: "WRONG CHAIN ID",
        1003: "FORKED FROM MAINNET",
        1004: "STALE BLOCKTIME",
        1005: "SLOW HANDSHAKE",
        1006: "SLOW PING",
        1007: "CONNECTION FAILED",
        1008: "CONNECTION TIMEOUT",
    }
    DEV = True  # additional printing in terminal
    # NOTE(review): despite the TUPLE name this is a single int drawn once at
    # import time, so the value is fixed per process — confirm intent
    REGENERATION_TUPLE = randint(120, 240)
    MAVENS = 7  # number of processes collecting data
    MAVEN_WINDOW = 7  # window depth for mode(sooths)
    LATENCY_THRESHER_TIMEOUT = 10  # if status 1008 on all nodes, increase
    LATENCY_TASK_PAUSE = 60  # time between testing same node twice
    # join timeouts, ratios, and pause durations below are presumably in
    # seconds — TODO confirm against the consuming code
    MAVEN_CACHE_HARVEST_JOIN = 8
    CACHE_RESTART_JOIN = 10
    MAVEN_RPC_RATIO = 3
    MAVEN_HIGH_LOW_RATIO = 20
    MAVEN_PAUSE = 0.1
    ORACLE_PAUSE = 0.5
    MAX_PING = 1
    # (pause seconds, flag) consumed by the sql execute wrapper;
    # NOTE(review): verify the meaning of the boolean element at the call site
    SQL_EXECUTE_PAUSE = (0.2, True)
class SigningConfig:
    """
    ╔═╗╦╔═╗╔╗╔╦╔╗╔╔═╗
    ╚═╗║║ ╦║║║║║║║║ ╦
    ╚═╝╩╚═╝╝╚╝╩╝╚╝╚═╝
    these constants relate to the client side graphene scripting of
    transcription, serialization, signing, and broadcast
    of authenticate, buy, sell, and cancel operations
    """

    # timeout during websocket handshake; default 4 seconds
    HANDSHAKE_TIMEOUT = 4
    # multiprocessing handler lifespan, default 20 seconds
    PROCESS_TIMEOUT = 20
    # default False for persistent limit orders
    KILL_OR_FILL = False
    # default True scales elements of oversize gross order to means
    AUTOSCALE = True
    # default True to never spend last 2 core tokens (for fees)
    CORE_FEES = True
    # multiprocessing incarnations, default 3 attempts
    ATTEMPTS = 3
    # prevent extreme number of AI generated edicts; default 20
    # NOTE batch transactions are currently disabled
    # so this parameter is moot at the hummingbot level
    LIMIT = 20
    # default True to execute order in primary script process
    JOIN = True
    # ignore orders value less than ~DUST core in value; 0 to disable
    DUST = 0
    # True = heavy print output
    DEV = True
class PeerplaysConfig:
    """
    ╔═════════════════════════════╗
    ║ HUMMINGBOT GRAPHENE ║
    ║ ╔═╗╔═╗╔═╗╦═╗╔═╗╦ ╔═╗╦ ╦╔═╗ ║
    ║ ╠═╝║╣ ║╣ ╠╦╝╠═╝║ ╠═╣╚╦╝╚═╗ ║
    ║ ╩ ╚═╝╚═╝╩╚═╩ ╩═╝╩ ╩ ╩ ╚═╝ ║
    ║ DEX MARKET MAKING CONNECTOR ║
    ╚═════════════════════════════╝
    configuration details specific to peerplays mainnet
    """

    # account name used for signing; blank until configured by the user
    ACCOUNT = ""
    # public api websocket endpoint(s)
    NODES = ["wss://peerplaysblockchain.net/mainnet/api"]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["BTC-PPY", "HIVE-PPY", "HBD-PPY"]
class PeerplaysTestnetConfig:
    """
    configuration details specific to peerplays testnet
    """

    # testnet account name used for signing
    ACCOUNT = "litepresence1"
    # public api websocket endpoint(s)
    NODES = ["wss://ymir.peerplays.download/api"]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["TEST-ABC", "TEST-XYZ"]
class BitsharesConfig:
    """
    ╔═════════════════════════════╗
    ║ HUMMINGBOT GRAPHENE ║
    ║ ╔╗ ╦╔╦╗╔═╗╦ ╦╔═╗╦═╗╔═╗╔═╗ ║
    ║ ╠╩╗║ ║ ╚═╗╠═╣╠═╣╠╦╝║╣ ╚═╗ ║
    ║ ╚═╝╩ ╩ ╚═╝╩ ╩╩ ╩╩╚═╚═╝╚═╝ ║
    ║ DEX MARKET MAKING CONNECTOR ║
    ╚═════════════════════════════╝
    configuration details specific to bitshares mainnet
    """

    # account name used for signing; blank until configured by the user
    ACCOUNT = ""
    # public api websocket endpoints; a large whitelist lets the metanode
    # latency-test and fail over between nodes
    NODES = [
        "wss://api.bts.mobi/wss",
        "wss://api-us.61bts.com/wss",
        "wss://cloud.xbts.io/ws",
        "wss://api.dex.trading/wss",
        "wss://eu.nodes.bitshares.ws/ws",
        "wss://api.pindd.club/ws",
        "wss://dex.iobanker.com/ws",
        "wss://public.xbts.io/ws",
        "wss://node.xbts.io/ws",
        "wss://node.market.rudex.org/ws",
        "wss://nexus01.co.uk/ws",
        "wss://api-bts.liondani.com/ws",
        "wss://api.bitshares.bhuz.info/wss",
        "wss://btsws.roelandp.nl/ws",
        "wss://hongkong.bitshares.im/ws",
        "wss://node1.deex.exchange/wss",
        "wss://api.cnvote.vip:888/wss",
        "wss://bts.open.icowallet.net/ws",
        "wss://api.weaccount.cn/ws",
        "wss://api.61bts.com",
        "wss://api.btsgo.net/ws",
        "wss://bitshares.bts123.cc:15138/wss",
        "wss://singapore.bitshares.im/wss",
    ]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["BTS-HONEST", "BTS-HONEST.USD", "HONEST.XAU-CNY"]
class BitsharesTestnetConfig:
    """
    configuration details specific to bitshares testnet
    """

    # account name used for signing; blank until configured by the user
    ACCOUNT = ""
    # public api websocket endpoints on the testnet
    NODES = [
        "wss://testnet.bitshares.im/ws",
        "wss://testnet.dex.trading/",
        "wss://testnet.xbts.io/ws",
        "wss://api-testnet.61bts.com/ws",
    ]
    # markets to track as hyphenated symbol pairs
    PAIRS = ["TEST-USD", "TEST-CNY"]
# NOTE these are not yet tested... may require some dev; pull requests welcome
# ~ class RudexConfig:
# ~ """
# ~ ╔═════════════════════════════╗
# ~ ║ HUMMINGBOT GRAPHENE ║
# ~ ║ ╦═╗╦ ╦╔╦╗╔═╗╔╗╔═ ║
# ~ ║ ╠╦╝║ ║ ║║║╣ ╠╣ ║
# ~ ║ ╩╚═╚═╝═╩╝╚═╝═╝╚╝ ║
# ~ ║ DEX MARKET MAKING CONNECTOR ║
# ~ ╚═════════════════════════════╝
# ~ configuration details specific to rudex mainnet
# ~ """
# ~ FIXME needs to be debugged / unit tested, may be some rpc differences
# ~ /testnet?
# ~ ACCOUNT = "litepresence1"
# ~ NODES = ["wss://node.gph.ai"]
# ~ PAIRS = ["GPH-BTS", "PPY-BTS"]
# ~ class HiveConfig:
# ~ """
# ~ ╔═════════════════════════════╗
# ~ ║ HUMMINGBOT GRAPHENE ║
# ~ ║ ╦ ╦╦╦ ╦╔═╗ ║
# ~ ║ ╠═╣║╚╗╔╝║╣ ║
# ~ ║ ╩ ╩╩ ╚╝ ╚═╝ ║
# ~ ║ DEX MARKET MAKING CONNECTOR ║
# ~ ╚═════════════════════════════╝
# ~ configuration details specific to hive mainnet
# ~ """
# ~ raise NotImplementedError
# ~ FIXME needs to be debugged / unit tested, may be some rpc differences
# ~ /testnet?
# ~ https://developers.hive.io/quickstart/hive_full_nodes.html
# ~ https://steemit.com/full-nodes/@fullnodeupdate/full-api-node-update---762018
# ~ # https://github.com/openhive-network/hive
# ~ # https://api.hive.blog
# ~ # https://testnet.openhive.network
# ~ ACCOUNT = "rolandp"
# ~ NODES = ["ws://testnet.openhive.network:8090"]
# ~ NODES = [
# ~ "wss://rpc.steemviz.com/wss",
# ~ "wss://steemd.minnowsupportproject.org/wss",
# ~ "wss://steemd.pevo.science/wss",
# ~ "wss://steemd.privex.io/wss",
# ~ "wss://rpc.buildteam.io/wss",
# ~ "wss://gtg.steem.house:8090/wss",
# ~ ]
# ~ PAIRS = ["HBD-HIVE"]
def unit_test():
    """
    test class inheritance

    Interactively print a menu of supported chains, prompt for a selection,
    then display a labeled sample of chain-agnostic and chain-specific
    constants for the selected chain.
    """
    # chain agnostic constants, eg.
    constants = GrapheneConstants()
    dispatch = {str(idx): chain for idx, chain in enumerate(constants.core.CHAINS)}
    # print the menu; mainnets in blue, testnets in purple
    for key, value in dispatch.items():
        if "testnet" not in value:
            print(key + ": " + it("blue", value))
        else:
            print(key + ": " + it("purple", value))
    # FIX: re-prompt until a valid menu key is entered
    # (previously an invalid entry crashed with KeyError)
    choice = input("Enter choice: ")
    while choice not in dispatch:
        choice = input("Invalid choice, enter again: ")
    chain = dispatch[choice]
    CONSTANTS = GrapheneConstants()  # pylint: disable=invalid-name
    # label each printed value (resolves the FIXME about unlabeled output)
    print("core.BASE58:", CONSTANTS.core.BASE58)
    print("metanode.STATUS_CODES:", CONSTANTS.metanode.STATUS_CODES)
    print("signing.ATTEMPTS:", CONSTANTS.signing.ATTEMPTS)
    # chain specific constants, eg.
    constants = GrapheneConstants(chain)
    print("chain.NODES:", constants.chain.NODES)
    print("chain.PAIRS:", constants.chain.PAIRS)
    print("chain.INVERT_PAIRS:", constants.chain.INVERT_PAIRS)
    print("chain.ASSETS:", constants.chain.ASSETS)
    print("chain.CORE:", constants.chain.CORE)
    print("chain.PREFIX:", constants.chain.PREFIX)
    # note core / metanode / etc. constants still work this way
    print("metanode.STATUS_CODES:", constants.metanode.STATUS_CODES)
if __name__ == "__main__":
    # run the interactive smoke test when executed as a script
    unit_test()
| 0 | 0 | 0 |
605f79c318f2146cb18139cddc3cf8c97cad0caa | 2,492 | py | Python | spacy_udpipe/util.py | OlegDurandin/spacy-udpipe | 7e0972e1aef06595348cb08be7bb11030e4d1b7b | [
"MIT"
] | null | null | null | spacy_udpipe/util.py | OlegDurandin/spacy-udpipe | 7e0972e1aef06595348cb08be7bb11030e4d1b7b | [
"MIT"
] | null | null | null | spacy_udpipe/util.py | OlegDurandin/spacy-udpipe | 7e0972e1aef06595348cb08be7bb11030e4d1b7b | [
"MIT"
] | null | null | null | # coding: utf8
import json
import os
import urllib.request
from pathlib import Path
from spacy.language import Language
from spacy.util import get_lang_class
# LINDAT repository URL from which the pretrained UDPipe models are fetched
BASE_URL = "https://lindat.mff.cuni.cz/repository/xmlui/bitstream/handle/11234/1-2998/"
# models are stored next to this module under "models/"
MODELS_DIR = os.path.join(Path(__file__).parent, "models")
langs_path = os.path.join(Path(__file__).parent, "languages.json")
# LANGUAGES maps a language code / shorthand model name to the model file
# name, used both as the download target and the on-disk file name
with open(langs_path, "r") as f:
    LANGUAGES = json.load(f)
def download(lang):
    """Download the UDPipe pretrained model.

    lang (unicode): ISO 639-1 language code or shorthand UDPipe model name.
    RETURNS (None): prints progress; the model file is saved under MODELS_DIR.
    """
    _check_language(lang)
    try:
        _check_models_dir(lang)
    except Exception:
        # FIX: narrowed from a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit; models dir is missing, so create it
        os.makedirs(MODELS_DIR)
    if LANGUAGES[lang] in os.listdir(MODELS_DIR):
        msg = "Already downloaded a model for the" \
              " '{}' language".format(lang)
        print(msg)
        return
    url = BASE_URL + LANGUAGES[lang]
    fname = os.path.join(MODELS_DIR, LANGUAGES[lang])
    urllib.request.urlretrieve(url=url, filename=fname)
    msg = "Successfully downloaded the pretrained UDPipe" \
          " model for the '{}' language".format(lang)
    print(msg)
def get_path(lang):
    """Get the path to the UDPipe pretrained model if it was downloaded.

    lang (unicode): ISO 639-1 language code or shorthand UDPipe model name.
    RETURNS (unicode): The path to the UDPipe pretrained model.
    """
    _check_language(lang)
    _check_models_dir(lang)
    model_name = LANGUAGES[lang]
    if model_name not in os.listdir(MODELS_DIR):
        # model directory exists but this particular model is missing
        raise Exception(
            "Use spacy_udpipe.download to download the pretrained"
            " UDPipe model for the '{}' language".format(lang)
        )
    return os.path.join(MODELS_DIR, model_name)
def get_defaults(lang):
    """Get the language-specific defaults, if available in spaCy. This allows
    using lexical attribute getters that depend on static language data, e.g.
    Token.like_num, Token.is_stop, Doc.noun_chunks etc.

    lang (unicode): ISO 639-1 language code.
    RETURNS (Language.Defaults): The language defaults.
    """
    try:
        return get_lang_class(lang).Defaults
    except ImportError:
        # no language-specific data shipped with spaCy; use generic defaults
        return Language.Defaults
| 31.544304 | 87 | 0.688202 | # coding: utf8
import json
import os
import urllib.request
from pathlib import Path
from spacy.language import Language
from spacy.util import get_lang_class
# LINDAT repository URL from which the pretrained UDPipe models are fetched
BASE_URL = "https://lindat.mff.cuni.cz/repository/xmlui/bitstream/handle/11234/1-2998/"
# models are stored next to this module under "models/"
MODELS_DIR = os.path.join(Path(__file__).parent, "models")
langs_path = os.path.join(Path(__file__).parent, "languages.json")
# LANGUAGES maps a language code / shorthand model name to the model file
# name, used both as the download target and the on-disk file name
with open(langs_path, "r") as f:
    LANGUAGES = json.load(f)
def _check_language(lang):
    """Raise if *lang* has no entry in the LANGUAGES model registry."""
    if lang in LANGUAGES:
        return
    raise Exception("'{}' language not available".format(lang))
def _check_models_dir(lang):
    """Raise if the models directory has not been created yet (it is created
    on first successful download)."""
    if os.path.exists(MODELS_DIR):
        return
    raise Exception("Download the pretrained model(s) first")
def download(lang):
    """Download the UDPipe pretrained model.

    lang (unicode): ISO 639-1 language code or shorthand UDPipe model name.
    RETURNS (None): prints progress; the model file is saved under MODELS_DIR.
    """
    _check_language(lang)
    try:
        _check_models_dir(lang)
    except Exception:
        # FIX: narrowed from a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit; models dir is missing, so create it
        os.makedirs(MODELS_DIR)
    if LANGUAGES[lang] in os.listdir(MODELS_DIR):
        msg = "Already downloaded a model for the" \
              " '{}' language".format(lang)
        print(msg)
        return
    url = BASE_URL + LANGUAGES[lang]
    fname = os.path.join(MODELS_DIR, LANGUAGES[lang])
    urllib.request.urlretrieve(url=url, filename=fname)
    msg = "Successfully downloaded the pretrained UDPipe" \
          " model for the '{}' language".format(lang)
    print(msg)
def get_path(lang):
    """Get the path to the UDPipe pretrained model if it was downloaded.

    lang (unicode): ISO 639-1 language code or shorthand UDPipe model name.
    RETURNS (unicode): The path to the UDPipe pretrained model.
    """
    _check_language(lang)
    _check_models_dir(lang)
    model_name = LANGUAGES[lang]
    if model_name not in os.listdir(MODELS_DIR):
        # model directory exists but this particular model is missing
        raise Exception(
            "Use spacy_udpipe.download to download the pretrained"
            " UDPipe model for the '{}' language".format(lang)
        )
    return os.path.join(MODELS_DIR, model_name)
def get_defaults(lang):
    """Get the language-specific defaults, if available in spaCy. This allows
    using lexical attribute getters that depend on static language data, e.g.
    Token.like_num, Token.is_stop, Doc.noun_chunks etc.

    lang (unicode): ISO 639-1 language code.
    RETURNS (Language.Defaults): The language defaults.
    """
    try:
        return get_lang_class(lang).Defaults
    except ImportError:
        # no language-specific data shipped with spaCy; use generic defaults
        return Language.Defaults
| 215 | 0 | 46 |
7a089a96b2dd6ebf06bf7dffa13029b50e7366e4 | 1,638 | py | Python | jp.atcoder/tenka1-2012-qualC/tenka1_2012_10/30787133.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-09T03:06:25.000Z | 2022-02-09T03:06:25.000Z | jp.atcoder/tenka1-2012-qualC/tenka1_2012_10/30787133.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | 1 | 2022-02-05T22:53:18.000Z | 2022-02-09T01:29:30.000Z | jp.atcoder/tenka1-2012-qualC/tenka1_2012_10/30787133.py | kagemeka/atcoder-submissions | 91d8ad37411ea2ec582b10ba41b1e3cae01d4d6e | [
"MIT"
] | null | null | null | import typing
if __name__ == "__main__":
    # script entry point; main() (defined elsewhere in this file) reads
    # from stdin and prints the answer
    main()
| 28.736842 | 81 | 0.521368 | import typing
def main() -> None:
    """Read a concatenated card string from stdin and print the cards
    discarded before one suit completes its five target cards
    (A, 10, J, Q, K), or 0 if nothing was discarded.
    """
    s = input()
    m = len(s)
    # card faces in rank order; index 0 is the ace, indices 9..12 are 10..K
    numbers = ["A", "2", "3", "4", "5", "6", "7", "8", "9", "10", "J", "Q", "K"]
    numbers_to_index = dict(zip(numbers, range(len(numbers))))
    # the four suits
    marks = ["S", "H", "D", "C"]
    mark_to_index = dict(zip(marks, range(len(marks))))

    def parse_as_sequence(s: str) -> typing.List[typing.Tuple[int, int]]:
        # tokenize into (suit_index, face_index) pairs; a face token is one
        # character unless it is "10", which is two characters
        a = []
        i = 0
        while i < m:
            mark = s[i]
            i += 1
            mark_index = mark_to_index[mark]
            if s[i : i + 1] in numbers_to_index:
                number_index = numbers_to_index[s[i : i + 1]]
                i += 1
            else:
                number_index = numbers_to_index[s[i : i + 2]]
                i += 2
            a.append((mark_index, number_index))
        return a

    a = parse_as_sequence(s)
    count = [0] * 4  # per-suit tally of target cards seen so far

    def in_target_range(number_index: int) -> bool:
        # target cards are A (index 0) and 10/J/Q/K (indices 9..12)
        return number_index == 0 or number_index >= 9

    # scan until some suit accumulates all five target cards
    # NOTE(review): assumes the input guarantees this happens; otherwise
    # target_index below would be unbound (NameError) — confirm constraints
    i = -1
    for mark_index, number_index in a:
        i += 1
        if in_target_range(number_index):
            count[mark_index] += 1
            if count[mark_index] == 5:
                target_index = mark_index
                break
    # every card drawn before the completing card is discarded, except the
    # kept target cards of the winning suit
    discarded_card_strings = []
    for mark_index, number_index in a[:i]:
        if mark_index == target_index and in_target_range(number_index):
            continue
        discarded_card_strings.append(marks[mark_index] + numbers[number_index])
    print(0 if not discarded_card_strings else ''.join(discarded_card_strings))
if __name__ == "__main__":
    # script entry point; main() reads from stdin and prints the answer
    main()
| 1,552 | 0 | 25 |
fbc51f45d5a4e1032b36f47d3d59b26833afe4ae | 30 | py | Python | PDA/extra_assignments/2.A. Numbers_ Day of week/solution/main.py | EMbeDS-education/StatsAndComputing20212022 | 971e418882b206a1b5606d15d222cef1a5a04834 | [
"MIT"
] | 2 | 2022-02-24T09:35:15.000Z | 2022-03-14T20:34:33.000Z | PDA/extra_assignments/2.A. Numbers_ Day of week/solution/main.py | GeorgiosArg/StatsAndComputing20212022 | 798d39af6aa5ef5eef49d5d6f43191351e8a49f3 | [
"MIT"
] | null | null | null | PDA/extra_assignments/2.A. Numbers_ Day of week/solution/main.py | GeorgiosArg/StatsAndComputing20212022 | 798d39af6aa5ef5eef49d5d6f43191351e8a49f3 | [
"MIT"
] | 2 | 2022-03-15T21:40:35.000Z | 2022-03-26T14:51:31.000Z | print((int(input()) + 3) % 7)
| 15 | 29 | 0.5 | print((int(input()) + 3) % 7)
| 0 | 0 | 0 |
af44a538c377f047579c71f637cbd0bbcd7231a1 | 5,053 | py | Python | src/dataflow/leaderboard/create_text_data.py | luweishuang/task_oriented_dialogue_as_dataflow_synthesis | 5638adfb2274d76ca1c430e6b727cca41f43c195 | [
"MIT"
] | 257 | 2020-09-18T23:12:13.000Z | 2022-03-24T03:24:24.000Z | src/dataflow/leaderboard/create_text_data.py | luweishuang/task_oriented_dialogue_as_dataflow_synthesis | 5638adfb2274d76ca1c430e6b727cca41f43c195 | [
"MIT"
] | 24 | 2020-09-26T15:08:06.000Z | 2022-03-11T07:46:30.000Z | src/dataflow/leaderboard/create_text_data.py | luweishuang/task_oriented_dialogue_as_dataflow_synthesis | 5638adfb2274d76ca1c430e6b727cca41f43c195 | [
"MIT"
] | 59 | 2020-09-22T05:47:13.000Z | 2022-03-30T19:03:08.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""
Semantic Machines\N{TRADE MARK SIGN} software.
Creates text data (source-target pairs) to be used for training OpenNMT models.
"""
import argparse
import dataclasses
from typing import Dict, Iterator
import jsons
from tqdm import tqdm
from dataflow.core.dialogue import AgentUtterance, Turn
from dataflow.core.turn_prediction import UtteranceWithContext
from dataflow.onmt_helpers.create_onmt_text_data import (
OnmtTextDatum,
create_context_turns,
create_onmt_text_datum_for_turn,
)
# We assume all dialogues start from turn 0.
# This is true for MultiWoZ and CalFlow datasets.
_MIN_TURN_INDEX = 0
def create_onmt_text_data_for_contextualized_turn(
    contextualized_turn: UtteranceWithContext,
    num_context_turns: int,
    min_turn_index: int,
    include_program: bool,
    include_agent_utterance: bool,
    include_described_entities: bool,
) -> Iterator[OnmtTextDatum]:
    """Yields the OnmtTextDatum for a single contextualized user turn.

    At prediction time the current turn has no gold program or agent reply,
    so empty placeholders are used for those fields.
    """
    # index the dialogue history by turn position for context extraction
    turns_by_index: Dict[int, Turn] = {}
    for history_turn in contextualized_turn.context.turns:
        turns_by_index[history_turn.turn_index] = history_turn
    curr_turn_index = contextualized_turn.datum_id.turn_index
    context_turns = create_context_turns(
        turn_lookup=turns_by_index,
        curr_turn_index=curr_turn_index,
        num_context_turns=num_context_turns,
        min_turn_index=min_turn_index,
    )
    # placeholder agent utterance and empty lispress "()" for the turn whose
    # program is to be predicted
    placeholder_agent_utterance = AgentUtterance(
        original_text="", tokens=[], described_entities=[]
    )
    current_turn = Turn(
        turn_index=curr_turn_index,
        user_utterance=contextualized_turn.user_utterance,
        agent_utterance=placeholder_agent_utterance,
        lispress="()",
        skip=False,
    )
    yield create_onmt_text_datum_for_turn(
        dialogue_id=contextualized_turn.datum_id.dialogue_id,
        curr_turn=current_turn,
        context_turns=context_turns,
        include_program=include_program,
        include_agent_utterance=include_agent_utterance,
        include_described_entities=include_described_entities,
    )
if __name__ == "__main__":
    cmdline_parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter
    )
    # add_arguments / main are defined elsewhere in this module
    add_arguments(cmdline_parser)
    args = cmdline_parser.parse_args()
    print("Semantic Machines\N{TRADE MARK SIGN} software.")
    main(
        dataflow_dialogues_jsonl=args.dialogues_jsonl,
        num_context_turns=args.num_context_turns,
        min_turn_index=_MIN_TURN_INDEX,  # dialogues are assumed to start at turn 0
        include_program=args.include_program,
        include_agent_utterance=args.include_agent_utterance,
        include_described_entities=args.include_described_entities,
    )
| 34.609589 | 88 | 0.70948 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""
Semantic Machines\N{TRADE MARK SIGN} software.
Creates text data (source-target pairs) to be used for training OpenNMT models.
"""
import argparse
import dataclasses
from typing import Dict, Iterator
import jsons
from tqdm import tqdm
from dataflow.core.dialogue import AgentUtterance, Turn
from dataflow.core.turn_prediction import UtteranceWithContext
from dataflow.onmt_helpers.create_onmt_text_data import (
OnmtTextDatum,
create_context_turns,
create_onmt_text_datum_for_turn,
)
# We assume all dialogues start from turn 0.
# This is true for MultiWoZ and CalFlow datasets.
_MIN_TURN_INDEX = 0
def create_onmt_text_data_for_contextualized_turn(
    contextualized_turn: UtteranceWithContext,
    num_context_turns: int,
    min_turn_index: int,
    include_program: bool,
    include_agent_utterance: bool,
    include_described_entities: bool,
) -> Iterator[OnmtTextDatum]:
    """Yields the single OnmtTextDatum built for a contextualized user turn."""
    # Index the context turns by turn index so the context window can be built.
    turns_by_index: Dict[int, Turn] = {}
    for context_turn in contextualized_turn.context.turns:
        turns_by_index[context_turn.turn_index] = context_turn
    datum_id = contextualized_turn.datum_id
    context_window = create_context_turns(
        turn_lookup=turns_by_index,
        curr_turn_index=datum_id.turn_index,
        num_context_turns=num_context_turns,
        min_turn_index=min_turn_index,
    )
    # The current turn has no gold agent utterance or program at prediction
    # time, so those fields are filled with empty placeholders.
    placeholder_turn = Turn(
        turn_index=datum_id.turn_index,
        user_utterance=contextualized_turn.user_utterance,
        agent_utterance=AgentUtterance(
            original_text="", tokens=[], described_entities=[]
        ),
        lispress="()",
        skip=False,
    )
    yield create_onmt_text_datum_for_turn(
        dialogue_id=datum_id.dialogue_id,
        curr_turn=placeholder_turn,
        context_turns=context_window,
        include_program=include_program,
        include_agent_utterance=include_agent_utterance,
        include_described_entities=include_described_entities,
    )
def main(
    dataflow_dialogues_jsonl: str,
    num_context_turns: int,
    min_turn_index: int,
    include_program: bool,
    include_agent_utterance: bool,
    include_described_entities: bool,
    onmt_text_data_outbase: str,
) -> None:
    """Extracts OpenNMT source/target text data from a dialogues jsonl file."""
    output_files = OnmtTextDatum.create_output_files(onmt_text_data_outbase)
    for raw_line in tqdm(open(dataflow_dialogues_jsonl), unit=" contextualized turns"):
        contextualized_turn = jsons.loads(raw_line.strip(), UtteranceWithContext)
        data = create_onmt_text_data_for_contextualized_turn(
            contextualized_turn=contextualized_turn,
            num_context_turns=num_context_turns,
            min_turn_index=min_turn_index,
            include_program=include_program,
            include_agent_utterance=include_agent_utterance,
            include_described_entities=include_described_entities,
        )
        for datum in data:
            # Each dataclass field goes to its own output file, one line per datum.
            for field_name, text in dataclasses.asdict(datum).items():
                stream = output_files[field_name]
                stream.write(text)
                stream.write("\n")
    for stream in output_files.values():
        stream.close()
def add_arguments(argument_parser: argparse.ArgumentParser) -> None:
    """Registers the command-line arguments used by this script."""
    argument_parser.add_argument(
        "--dialogues_jsonl",
        help="the jsonl file containing the dialogue data with dataflow programs",
    )
    argument_parser.add_argument(
        "--num_context_turns",
        type=int,
        help="number of previous turns to be included in the source sequence",
    )
    # Boolean flags all default to False and are switched on by their presence.
    boolean_flags = [
        ("--include_program",
         "if True, include the gold program for the context turn parts"),
        ("--include_agent_utterance",
         "if True, include the gold agent utterance for the context turn parts"),
        ("--include_described_entities",
         "if True, include the described entities field for the context turn parts"),
    ]
    for flag, help_text in boolean_flags:
        argument_parser.add_argument(
            flag, default=False, action="store_true", help=help_text
        )
    argument_parser.add_argument(
        "--onmt_text_data_outbase",
        help="the output file basename for the extracted text data for OpenNMT",
    )
if __name__ == "__main__":
    # Script entry point: parse command-line flags and run the extraction.
    cmdline_parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter
    )
    add_arguments(cmdline_parser)
    args = cmdline_parser.parse_args()
    print("Semantic Machines\N{TRADE MARK SIGN} software.")
    # All dialogues are assumed to start from turn _MIN_TURN_INDEX (0).
    main(
        dataflow_dialogues_jsonl=args.dialogues_jsonl,
        num_context_turns=args.num_context_turns,
        min_turn_index=_MIN_TURN_INDEX,
        include_program=args.include_program,
        include_agent_utterance=args.include_agent_utterance,
        include_described_entities=args.include_described_entities,
        onmt_text_data_outbase=args.onmt_text_data_outbase,
    )
| 2,259 | 0 | 46 |
b9e05312823543446c119b6fecb2ac0c40ee948d | 821 | py | Python | backendCode/findCircleIDbyName.py | CoronaCircles/backend | f8bfaa67dfcee660150f5d534147fdac5adbd0dc | [
"MIT"
] | 1 | 2020-03-29T13:29:39.000Z | 2020-03-29T13:29:39.000Z | backendCode/findCircleIDbyName.py | CoronaCircles/backend | f8bfaa67dfcee660150f5d534147fdac5adbd0dc | [
"MIT"
] | 4 | 2020-04-03T13:48:41.000Z | 2020-04-03T22:11:17.000Z | backendCode/findCircleIDbyName.py | CoronaCircles/backend | f8bfaa67dfcee660150f5d534147fdac5adbd0dc | [
"MIT"
] | null | null | null | #!/usr/bin/python3
#
# Corona Circles, codevscovid19 hackathon Zurich
# by Christopher Rehm 29-30 mar 2020, christopherrehm@web.de
import sys
import MySQLdb
if __name__ == "__main__":
    # Expect exactly one argument: the circle name to look up.
    if len(sys.argv)-1 == 1:
        findCircleIDbyName(sys.argv[1])
    else:
        print("wrong number of arguments")
| 25.65625 | 67 | 0.618758 | #!/usr/bin/python3
#
# Corona Circles, codevscovid19 hackathon Zurich
# by Christopher Rehm 29-30 mar 2020, christopherrehm@web.de
import sys
import MySQLdb
def findCircleIDbyName(circleName):
    """Look up the ID of a circle by its name.

    Returns the first matching row as a tuple (e.g. ``(42,)``), or ``None``
    if no circle with that name exists.
    """
    # NOTE(review): credentials are hard-coded in source; they should be moved
    # to configuration or environment variables.
    conn = MySQLdb.connect(host="rdbms.strato.de",
                           user="U4098787",
                           passwd="1light1light!!", db="DB4098787")
    try:
        cursor = conn.cursor()
        # Fix: use a parameterized query. The previous
        # "... NAME = '%s'" % circleName form was vulnerable to SQL injection
        # (and broke on names containing quotes). The redundant self-join on
        # CIRCLE was dropped; it only re-selected the same ID.
        cursor.execute("SELECT ID FROM CIRCLE WHERE NAME = %s", (circleName,))
        circleID = cursor.fetchone()
        # Fix: fetchone() returns None when there is no match; the old code
        # crashed on circleID[0] in that case. No commit() needed for a SELECT.
        if circleID is not None:
            print(circleID[0])
        else:
            print("no circle found with that name")
        return circleID
    finally:
        # Always release the connection, even when the query raises.
        conn.close()
if __name__ == "__main__":
    # Expect exactly one argument: the circle name to look up.
    if len(sys.argv)-1 == 1:
        findCircleIDbyName(sys.argv[1])
    else:
        print("wrong number of arguments")
| 488 | 0 | 23 |
d471a79b99fbe3f250dd5505e8c48a14ab9968dd | 1,175 | py | Python | bipatch.py | ngovanmao/basictools | 278a6a3c95d9122b83474fb66781a9381406a8d9 | [
"Apache-2.0"
] | null | null | null | bipatch.py | ngovanmao/basictools | 278a6a3c95d9122b83474fb66781a9381406a8d9 | [
"Apache-2.0"
] | null | null | null | bipatch.py | ngovanmao/basictools | 278a6a3c95d9122b83474fb66781a9381406a8d9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import time
import os
import argparse
from subprocess import call
from joblib import Parallel, delayed
import multiprocessing
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--orig',
        type=str,
        help="Original directory")
    parser.add_argument(
        '--new',
        type=str,
        help="Latest directory")
    parser.add_argument(
        '--patch',
        type=str,
        help="Patched directory")
    parser.add_argument('--verbose', action='store_true')
    # NOTE(review): `global` at module level is a no-op; `args` is already a
    # module-level name readable by worker functions.
    global args
    args = parser.parse_args()
    # Patch every file found in the original directory in parallel,
    # one joblib worker per CPU core.
    num_cores = multiprocessing.cpu_count()
    Parallel(n_jobs=num_cores)(delayed(bispatch)(i) for i in os.listdir(args.orig))
    # The triple-quoted string below is dead code: an abandoned
    # multiprocessing.Pool alternative kept as a bare string literal.
    """
    pool = multiprocessing.Pool(num_cores)
    results = []
    for filename in os.listdir(args.orig):
        results.append(pool.apply_async(bisdiff, filename))
    """
| 27.325581 | 86 | 0.636596 | #!/usr/bin/python
import time
import os
import argparse
from subprocess import call
from joblib import Parallel, delayed
import multiprocessing
def bispatch(filename):
    """Apply the bsdiff patch for one file: old file + patch -> new file.

    Reads the directory prefixes and verbosity from the module-level `args`.
    """
    old_path = args.orig + filename
    new_path = args.new + filename
    patch_path = args.patch + filename
    if args.verbose:
        print("bspatch {} {} {}".format(old_path, new_path, patch_path))
    call(['bspatch', old_path, new_path, patch_path])
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--orig',
        type=str,
        help="Original directory")
    parser.add_argument(
        '--new',
        type=str,
        help="Latest directory")
    parser.add_argument(
        '--patch',
        type=str,
        help="Patched directory")
    parser.add_argument('--verbose', action='store_true')
    # NOTE(review): `global` at module level is a no-op; `args` is already a
    # module-level name readable by bispatch().
    global args
    args = parser.parse_args()
    # Patch every file found in the original directory in parallel,
    # one joblib worker per CPU core.
    num_cores = multiprocessing.cpu_count()
    Parallel(n_jobs=num_cores)(delayed(bispatch)(i) for i in os.listdir(args.orig))
    # The triple-quoted string below is dead code: an abandoned
    # multiprocessing.Pool alternative kept as a bare string literal.
    """
    pool = multiprocessing.Pool(num_cores)
    results = []
    for filename in os.listdir(args.orig):
        results.append(pool.apply_async(bisdiff, filename))
    """
| 234 | 0 | 23 |
88cfb37bd136ec1d06f9f4daa5a349fcb50c43eb | 866 | py | Python | h2o-py/tests/testdir_misc/pyunit_export_gzip.py | ahmedengu/h2o-3 | ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11 | [
"Apache-2.0"
] | 6,098 | 2015-05-22T02:46:12.000Z | 2022-03-31T16:54:51.000Z | h2o-py/tests/testdir_misc/pyunit_export_gzip.py | ahmedengu/h2o-3 | ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11 | [
"Apache-2.0"
] | 2,517 | 2015-05-23T02:10:54.000Z | 2022-03-30T17:03:39.000Z | h2o-py/tests/testdir_misc/pyunit_export_gzip.py | ahmedengu/h2o-3 | ac2c0a6fbe7f8e18078278bf8a7d3483d41aca11 | [
"Apache-2.0"
] | 2,199 | 2015-05-22T04:09:55.000Z | 2022-03-28T22:20:45.000Z | from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from os import path
import binascii
'''
Export file with h2o.export_file compressed with 'gzip'
'''
if __name__ == "__main__":
    # Run standalone: the test harness handles H2O cluster setup/teardown.
    pyunit_utils.standalone_test(export_gzip)
else:
    # Imported as a module: execute the test directly.
    export_gzip()
| 22.205128 | 86 | 0.722864 | from __future__ import print_function
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from os import path
import binascii
'''
Export file with h2o.export_file compressed with 'gzip'
'''
def is_gzip_file(path):
    """Return True when the file at ``path`` begins with the gzip magic number."""
    # Every gzip stream starts with the two magic bytes 0x1f 0x8b.
    with open(path, 'rb') as stream:
        return stream.read(2) == b'\x1f\x8b'
def export_gzip():
    """Export a frame with h2o.export_file using gzip compression, then verify it."""
    source_frame = h2o.import_file(pyunit_utils.locate("smalldata/prostate/prostate.csv"))
    target = path.join(pyunit_utils.locate("results"), "prostate_export.csv.gzip")
    h2o.export_file(source_frame, target, compression="gzip")
    # The exported file must really be gzip-compressed ...
    assert is_gzip_file(target)
    # ... and must round-trip back into an equivalent frame.
    reimported = h2o.import_file(target)
    assert pyunit_utils.compare_frames(source_frame, reimported, numElements=2)
if __name__ == "__main__":
    # Run standalone: the test harness handles H2O cluster setup/teardown.
    pyunit_utils.standalone_test(export_gzip)
else:
    # Imported as a module: execute the test directly.
    export_gzip()
| 493 | 0 | 46 |
e887ea3c42b45eaae68408b146da0da0c6893990 | 2,494 | py | Python | caso/manager.py | indigo-dc/casoincd | 9307d0f98989b4a9ec9ecd772752b5b7255879e9 | [
"Apache-2.0"
] | null | null | null | caso/manager.py | indigo-dc/casoincd | 9307d0f98989b4a9ec9ecd772752b5b7255879e9 | [
"Apache-2.0"
] | null | null | null | caso/manager.py | indigo-dc/casoincd | 9307d0f98989b4a9ec9ecd772752b5b7255879e9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import os.path
import dateutil.parser
from dateutil import tz
from oslo_config import cfg
import caso.extract.manager
import caso.messenger
from caso import utils
# Configuration options controlling record dispatch and the spool directory.
opts = [
    cfg.ListOpt('messengers',
                default=['caso.messenger.noop.NoopMessenger'],
                help='List of messenger that will dispatch records. '
                     'valid values are %s' %
                     ["%s.%s" % (i.__module__, i.__name__)
                      for i in caso.messenger.all_managers()]),
    cfg.StrOpt('spooldir',
               default='/var/spool/caso',
               help='Spool directory.'),
]
# Options additionally exposed on the command line.
cli_opts = [
    cfg.BoolOpt('dry_run',
                default=False,
                help='Extract records but do not push records to SSM. This '
                     'will not update the last run date.'),
]
CONF = cfg.CONF
CONF.register_opts(opts)
CONF.register_cli_opts(cli_opts)
| 30.414634 | 76 | 0.636728 | # -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import os.path
import dateutil.parser
from dateutil import tz
from oslo_config import cfg
import caso.extract.manager
import caso.messenger
from caso import utils
# Configuration options controlling record dispatch and the spool directory.
opts = [
    cfg.ListOpt('messengers',
                default=['caso.messenger.noop.NoopMessenger'],
                help='List of messenger that will dispatch records. '
                     'valid values are %s' %
                     ["%s.%s" % (i.__module__, i.__name__)
                      for i in caso.messenger.all_managers()]),
    cfg.StrOpt('spooldir',
               default='/var/spool/caso',
               help='Spool directory.'),
]
# Options additionally exposed on the command line.
cli_opts = [
    cfg.BoolOpt('dry_run',
                default=False,
                help='Extract records but do not push records to SSM. This '
                     'will not update the last run date.'),
]
CONF = cfg.CONF
CONF.register_opts(opts)
CONF.register_cli_opts(cli_opts)
class Manager(object):
    """Top-level driver: extracts accounting records and dispatches them."""

    def __init__(self):
        utils.makedirs(CONF.spooldir)
        # File storing the timestamp of the last successful (non-dry) run.
        self.last_run_file = os.path.join(CONF.spooldir, "lastrun")
        self.extractor_manager = caso.extract.manager.Manager()
        self.messenger = caso.messenger.Manager()

    @property
    def lastrun(self):
        """Timestamp of the previous run, or the epoch date when none exists."""
        if not os.path.exists(self.last_run_file):
            raw = "1970-01-01"
        else:
            with open(self.last_run_file, "r") as fd:
                raw = fd.read()
        # FIXME(aloga): raise a proper exception on unparseable file content.
        return dateutil.parser.parse(raw)

    def run(self):
        """Extract records since the last run and push them to all messengers."""
        records = self.extractor_manager.get_records(lastrun=self.lastrun)
        if not CONF.dry_run:
            self.messenger.push_to_all(records)
            # Only persist the new "last run" timestamp after a real push.
            with open(self.last_run_file, "w") as fd:
                fd.write(str(datetime.datetime.now(tz.tzutc())))
| 843 | 95 | 23 |
414295bfe3eebef2abd817ed868191f5d41f895b | 15,553 | py | Python | src/integral_timber_joints/process/compute_process_gripper.py | gramaziokohler/integral_timber_joints | 70e75a66e13b5ada580fcffc58879f5fcb8fce32 | [
"MIT"
] | 3 | 2021-09-16T13:08:32.000Z | 2022-02-21T17:20:21.000Z | src/integral_timber_joints/process/compute_process_gripper.py | gramaziokohler/integral_timber_joints | 70e75a66e13b5ada580fcffc58879f5fcb8fce32 | [
"MIT"
] | 80 | 2021-09-06T09:55:38.000Z | 2022-03-22T18:44:24.000Z | src/integral_timber_joints/process/compute_process_gripper.py | gramaziokohler/integral_timber_joints | 70e75a66e13b5ada580fcffc58879f5fcb8fce32 | [
"MIT"
] | null | null | null | from compas.geometry import Translation, Vector, Transformation, Frame
from integral_timber_joints.assembly import BeamAssemblyMethod
from integral_timber_joints.process.dependency import ComputationalResult
try:
from typing import Dict, List, Optional, Tuple
from integral_timber_joints.process import RobotClampAssemblyProcess
from integral_timber_joints.tools import Gripper, Tool, Screwdriver
except:
pass
# ---------------------------------------------------------------
# This file contains functions to be imported into Process class.
# They are separated here to keep individual files smaller.
# ---------------------------------------------------------------
# Computing Gripper Related Attributes
# ------------------------------------
# Automatically Invoked Functions
# -------------------------------------
def assign_gripper_to_beam(process, beam_id, verbose=False):
    # type: (RobotClampAssemblyProcess, str, bool) -> ComputationalResult
    """Assign a gripper type using available grippers based on the beam's length.

    Beam must fit within gripper `beam_length_limits`; if multiple options allow,
    the gripper with the closest `target_beam_length` will be chosen.
    If the attribute `gripper_type` is already assigned, this function will not change it.
    For a beam that is SCREWED_WITHOUT_GRIPPER, it will find the grasping joint
    and copy the `tool_type` and `tool_id` to 'gripper_type' and 'gripper_id'.

    State Change
    ------------
    This function sets the following beam_attribute
    - 'gripper_type'
    - 'gripper_id'

    Return
    ------
    `ComputationalResult.ValidCannotContinue` if no suitable gripper can be found
    `ComputationalResult.ValidCanContinue` if a suitable gripper can be found
    """
    beam_length = process.assembly.beam(beam_id).length
    chosen_gripper_type = None
    chosen_gripper_ideal = None  # distance from the chosen gripper's target length
    assembly_method = process.assembly.get_assembly_method(beam_id)
    # * Skip MANUAL_ASSEMBLY
    if assembly_method == BeamAssemblyMethod.MANUAL_ASSEMBLY:
        if verbose:
            print("Skipping assign_gripper_to_beam for MANUAL_ASSEMBLY")
        return ComputationalResult.ValidCanContinue
    # * Handle the copy and paste for SCREWED_WITHOUT_GRIPPER
    if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
        grasping_joint_id = process.assembly.get_grasping_joint_id(beam_id)
        tool_type = process.assembly.get_joint_attribute(grasping_joint_id, "tool_type")
        tool_id = process.assembly.get_joint_attribute(grasping_joint_id, "tool_id")
        process.assembly.set_beam_attribute(beam_id, "gripper_type", tool_type)
        process.assembly.set_beam_attribute(beam_id, "gripper_id", tool_id)
        return ComputationalResult.ValidCanContinue
    # Do not change anything if gripper_type is already set
    already_set = False
    gripper_type = process.assembly.get_beam_attribute(beam_id, "gripper_type")
    if gripper_type is not None:
        if verbose:
            print("assign_gripper_to_beam: gripper_type set")
        gripper_id = process.assembly.get_beam_attribute(beam_id, "gripper_id")
        if gripper_id is not None:
            if verbose:
                print("assign_gripper_to_beam: gripper_id set")
            # Check that the gripper_id is sensible
            # NOTE(review): this SCREWED_WITHOUT_GRIPPER branch appears
            # unreachable - that assembly method already returned above.
            if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
                if verbose:
                    print("assign_gripper_to_beam: assembly method = %s " % assembly_method)
                if gripper_id in [tool.name for tool in process.screwdrivers]:
                    if verbose:
                        print("assign_gripper_to_beam: gripper_id %s is valid and will not be changed." % process.assembly.get_beam_attribute(beam_id, "gripper_id"))
                    already_set = True
            else:
                # The id must name an existing gripper of the recorded type.
                if gripper_id in [tool.name for tool in process.grippers]:
                    if process.tool(gripper_id).type_name == gripper_type:
                        if verbose:
                            print("assign_gripper_to_beam: gripper_id %s is valid and will not be changed." % process.assembly.get_beam_attribute(beam_id, "gripper_id"))
                        already_set = True
    if already_set:
        if verbose:
            print("Beam (%s) gripper_type (%s) has already been set. No change made by assign_gripper_to_beam()." %
                  (beam_id, gripper_type))
        return ComputationalResult.ValidNoChange
    if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
        joint_ids = process.assembly.get_joint_ids_with_tools_for_beam(beam_id)
        first_screwdriver = process.get_tool_of_joint(joint_ids[0])
        chosen_gripper_type = first_screwdriver.type_name
        gripper_id = first_screwdriver.name
        if verbose:
            print("chosen_gripper_type = %s" % chosen_gripper_type)
            print("gripper_id = %s" % gripper_id)
    else:
        # Compute Gripper Type
        for gripper_type in process.available_gripper_types:
            gripper = process.get_one_gripper_by_type(gripper_type)
            # Check if beam length is within limits
            if beam_length >= gripper.beam_length_limits[0] and beam_length <= gripper.beam_length_limits[1]:
                # Compute beam length vs ideal length and make decision
                length_to_ideal = abs(beam_length - gripper.target_beam_length)
                if chosen_gripper_type is None or length_to_ideal < chosen_gripper_ideal:
                    chosen_gripper_type = gripper_type
                    chosen_gripper_ideal = length_to_ideal
        # In cases no suitable gripper is available
        if chosen_gripper_type is None:
            if verbose:
                print("No suitable gripper can be assigned to %s" % (beam_id))
            print("WARNING: No suitable gripper can be assigned to %s" % (beam_id))
            return ComputationalResult.ValidCannotContinue
        gripper_id = process.get_one_tool_by_type(chosen_gripper_type).name
    # Set gripper_type and gripper_id and return
    process.assembly.set_beam_attribute(beam_id, "gripper_type", chosen_gripper_type)
    process.assembly.set_beam_attribute(beam_id, "gripper_id", gripper_id)
    if verbose:
        print("Gripper Type: %s assigned to %s" % (chosen_gripper_type, beam_id))
    return ComputationalResult.ValidCanContinue
def compute_gripper_grasp_pose(process, beam_id, verbose=False):
    # type: (RobotClampAssemblyProcess, str, bool) -> ComputationalResult
    """ Compute grasp pose for the beam and gripper.
    Gripper should be assigned before.

    For Beams with Gripper
    ----------------------
    Default values will be applied if 'gripper_grasp_dist_from_start' and 'gripper_grasp_face'
    are not set. Otherwise previous values will be preserved to calculate 'gripper_tcp_in_ocf'.

    For Beams with Screwdriver as gripper
    -------------------------------------
    - `tool_id`s and `gripper_id` should be assigned before.
    - `grasping_joint_id` should be assigned before.
    - `gripper_tcp_in_ocf` will be based on the
        - beam attribute `grasping_joint_id` (Set Manually)
        - joint_attribute `tool_orientation_frame_index` (Set Manually)

    State Change
    ------------
    This function sets the following beam_attribute
    - 'gripper_grasp_dist_from_start' (if default)
    - 'gripper_grasp_face' (if default)
    - 'gripper_tcp_in_ocf'

    Return
    ------
    `ComputationalResult.ValidCannotContinue` if prerequisite not satisfied
    `ComputationalResult.ValidCanContinue` otherwise (this function should not fail)
    """
    assembly_method = process.assembly.get_assembly_method(beam_id)
    # * Skip MANUAL_ASSEMBLY
    if assembly_method == BeamAssemblyMethod.MANUAL_ASSEMBLY:
        if verbose:
            print("Skipping compute_gripper_grasp_pose for MANUAL_ASSEMBLY")
        return ComputationalResult.ValidCanContinue
    # Check to ensure prerequisite: a gripper must have been assigned.
    if process.assembly.get_beam_attribute(beam_id, 'gripper_type') is None:
        return ComputationalResult.ValidCannotContinue
    beam = process.assembly.beam(beam_id)

    def grasp_face(beam_id):
        """Current value of the beam's 'gripper_grasp_face' attribute.

        Fix: this helper was missing, so every `grasp_face(beam_id)` call
        below raised NameError at runtime. The set_grasp_face_* setters
        store their result in this attribute, so re-reading it after each
        attempt checks whether a valid face (1-4) has been found.
        """
        return process.assembly.get_beam_attribute(beam_id, "gripper_grasp_face")

    if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
        # Retrieve which joint holds the gripping screwdriver and the chosen
        # tool orientation frame index.
        joint_id = process.assembly.get_grasping_joint_id(beam_id)  # grasping_joint_id
        tool_orientation_frame_index = process.assembly.get_joint_attribute(joint_id, 'tool_orientation_frame_index')
        # Transform the tool orientation frame to beam ocf
        joint = process.assembly.joint((joint_id[1], joint_id[0]))
        screwdriver_tcp_frame_in_wcf = joint.get_clamp_frames(beam)[tool_orientation_frame_index]
        t_world_from_screwdriver_tcp = Transformation.from_frame(screwdriver_tcp_frame_in_wcf)
        t_world_from_beam = Transformation.from_frame(beam.frame)
        t_beam_from_screwdriver_tcp = t_world_from_beam.inverse() * t_world_from_screwdriver_tcp
        process.assembly.set_beam_attribute(beam_id, "gripper_tcp_in_ocf", Frame.from_transformation(t_beam_from_screwdriver_tcp))
        return ComputationalResult.ValidCanContinue
    else:
        # * Computing `gripper_grasp_face` if it is None or invalid.
        # Each setter writes into the beam attribute read by grasp_face().
        if grasp_face(beam_id) not in [1, 2, 3, 4]:  # Default method
            process.set_grasp_face_following_assembly_direction(beam_id)
        if grasp_face(beam_id) not in [1, 2, 3, 4]:  # Backup plan
            process.set_grasp_face_following_guide_vector(beam_id)
        if grasp_face(beam_id) not in [1, 2, 3, 4]:  # Picking face 1 and deal with it
            process.assembly.set_beam_attribute(beam_id, "gripper_grasp_face", 1)
            print("Someting wrong, gripper_grasp_face is not in [1,2,3,4] after search. Grasp face defaulted to ", 1)
        # * Computing `gripper_grasp_dist_from_start` if it is None
        gripper_grasp_dist_from_start = process.assembly.get_beam_attribute(beam_id, "gripper_grasp_dist_from_start")
        if gripper_grasp_dist_from_start is None:
            # Default: grasp the beam at its midpoint.
            gripper_grasp_dist_from_start = beam.length / 2.0
            process.assembly.set_beam_attribute(beam_id, "gripper_grasp_dist_from_start", gripper_grasp_dist_from_start)
        # * Compute Gripper Grasp Pose, aka. gripper_tcp_in_ocf
        gripper_tcp_in_ocf = beam.grasp_frame_ocf(grasp_face(beam_id), gripper_grasp_dist_from_start)
        process.assembly.set_beam_attribute(beam_id, "gripper_tcp_in_ocf", gripper_tcp_in_ocf)
        return ComputationalResult.ValidCanContinue
def set_grasp_face_following_assembly_direction(process, beam_id):
    # type: (RobotClampAssemblyProcess, str) -> int
    """Pick the grasp face (1-4) opposite to the beam's assembly direction
    and store it in the beam attribute 'gripper_grasp_face'.

    Only the first joint connected to already-built beams is considered.
    Invalidates 'compute_gripper_grasp_pose' and everything downstream.
    """
    joint_ids = process.assembly.get_joints_of_beam_connected_to_already_built(beam_id)
    for joint_id in joint_ids:
        # Grasp from the face opposite the joint face (wrap within 1-4).
        face = (process.assembly.joint(joint_id).face_id + 1) % 4 + 1
        process.assembly.set_beam_attribute(beam_id, 'gripper_grasp_face', face)
        # Downstream computations depend on the grasp face.
        process.dependency.invalidate(beam_id, process.compute_gripper_grasp_pose)
        # Only the first joint is considered.
        return face
def set_grasp_face_following_guide_vector(process, beam_id):
    # type: (RobotClampAssemblyProcess, str) -> int
    """Pick the grasp face (1-4) whose TCP Z-axis (with the beam at its final
    position) best aligns with the beam's 'design_guide_vector_grasp'.

    Side Effect
    -----------
    beam_attribute 'gripper_grasp_face' will be set.
    """
    guide_vector = process.assembly.get_beam_attribute(beam_id, 'design_guide_vector_grasp').unitized()
    assert guide_vector is not None
    beam = process.assembly.beam(beam_id)
    world_from_beam = Transformation.from_frame(beam.frame)

    def alignment(face):
        # Dot product of the face's TCP Z-axis (in world coords) with the guide.
        tcp_in_wcf = beam.grasp_frame_ocf(face, 0).transformed(world_from_beam)
        return tcp_in_wcf.zaxis.dot(guide_vector)

    # max() keeps the first of equally-scored faces, matching a strict-> scan.
    best_face = max(range(1, 5), key=alignment)
    process.assembly.set_beam_attribute(beam_id, 'gripper_grasp_face', best_face)
    return best_face
# ------------------------------------
# Manually invoked Functions
# -------------------------------------
def adjust_gripper_pos(process, beam_id, amount):
    # type: (RobotClampAssemblyProcess, str, float) -> bool
    """Shift the gripper grasp position along the beam by `amount`.

    Updates beam attributes 'gripper_grasp_dist_from_start' and
    'gripper_tcp_in_ocf'. Returns False when no gripper has been assigned
    to the beam yet; True otherwise.

    Dependency Trigger
    ------------------
    Invalidates 'compute_gripper_grasp_pose' and downstream.
    """
    assembly = process.assembly
    # Prerequisite: a gripper must already be assigned.
    if assembly.get_beam_attribute(beam_id, 'gripper_type') is None:
        return False
    beam = assembly.beam(beam_id)
    face = assembly.get_beam_attribute(beam_id, "gripper_grasp_face")
    new_dist = assembly.get_beam_attribute(beam_id, "gripper_grasp_dist_from_start") + amount
    assembly.set_beam_attribute(beam_id, "gripper_grasp_dist_from_start", new_dist)
    # Recompute the TCP frame for the shifted grasp position.
    assembly.set_beam_attribute(
        beam_id, "gripper_tcp_in_ocf", beam.grasp_frame_ocf(face, new_dist))
    # Downstream computations depend on the grasp pose.
    process.dependency.invalidate(beam_id, process.compute_gripper_grasp_pose)
    return True
def override_grasp_face(process, beam_id, grasp_face):
    # type: (RobotClampAssemblyProcess, str, float) -> bool
    """Manually set 'gripper_grasp_face' for the specified beam.

    `grasp_face` is wrapped into the range 1-4 (overrange values wrap around).
    Invalidates 'compute_gripper_grasp_pose' and downstream. Returns True.
    """
    wrapped_face = (grasp_face - 1) % 4 + 1
    process.assembly.set_beam_attribute(beam_id, 'gripper_grasp_face', wrapped_face)
    # Downstream computations depend on the grasp face.
    process.dependency.invalidate(beam_id, process.compute_gripper_grasp_pose)
    return True
| 44.950867 | 169 | 0.705587 | from compas.geometry import Translation, Vector, Transformation, Frame
from integral_timber_joints.assembly import BeamAssemblyMethod
from integral_timber_joints.process.dependency import ComputationalResult
try:
from typing import Dict, List, Optional, Tuple
from integral_timber_joints.process import RobotClampAssemblyProcess
from integral_timber_joints.tools import Gripper, Tool, Screwdriver
except:
pass
# ---------------------------------------------------------------
# This file contains functions to be imported into Process class.
# They are separated here to keep individual files smaller.
# ---------------------------------------------------------------
# Computing Gripper Related Attributes
# ------------------------------------
# Automatically Invoked Functions
# -------------------------------------
def assign_gripper_to_beam(process, beam_id, verbose=False):
    # type: (RobotClampAssemblyProcess, str, bool) -> ComputationalResult
    """Assign a gripper type using available grippers based on the beam's length.

    Beam must fit within gripper `beam_length_limits`; if multiple options allow,
    the gripper with the closest `target_beam_length` will be chosen.
    If the attribute `gripper_type` is already assigned, this function will not change it.
    For a beam that is SCREWED_WITHOUT_GRIPPER, it will find the grasping joint
    and copy the `tool_type` and `tool_id` to 'gripper_type' and 'gripper_id'.

    State Change
    ------------
    This function sets the following beam_attribute
    - 'gripper_type'
    - 'gripper_id'

    Return
    ------
    `ComputationalResult.ValidCannotContinue` if no suitable gripper can be found
    `ComputationalResult.ValidCanContinue` if a suitable gripper can be found
    """
    beam_length = process.assembly.beam(beam_id).length
    chosen_gripper_type = None
    chosen_gripper_ideal = None  # distance from the chosen gripper's target length
    assembly_method = process.assembly.get_assembly_method(beam_id)
    # * Skip MANUAL_ASSEMBLY
    if assembly_method == BeamAssemblyMethod.MANUAL_ASSEMBLY:
        if verbose:
            print("Skipping assign_gripper_to_beam for MANUAL_ASSEMBLY")
        return ComputationalResult.ValidCanContinue
    # * Handle the copy and paste for SCREWED_WITHOUT_GRIPPER
    if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
        grasping_joint_id = process.assembly.get_grasping_joint_id(beam_id)
        tool_type = process.assembly.get_joint_attribute(grasping_joint_id, "tool_type")
        tool_id = process.assembly.get_joint_attribute(grasping_joint_id, "tool_id")
        process.assembly.set_beam_attribute(beam_id, "gripper_type", tool_type)
        process.assembly.set_beam_attribute(beam_id, "gripper_id", tool_id)
        return ComputationalResult.ValidCanContinue
    # Do not change anything if gripper_type is already set
    already_set = False
    gripper_type = process.assembly.get_beam_attribute(beam_id, "gripper_type")
    if gripper_type is not None:
        if verbose:
            print("assign_gripper_to_beam: gripper_type set")
        gripper_id = process.assembly.get_beam_attribute(beam_id, "gripper_id")
        if gripper_id is not None:
            if verbose:
                print("assign_gripper_to_beam: gripper_id set")
            # Check that the gripper_id is sensible
            # NOTE(review): this SCREWED_WITHOUT_GRIPPER branch appears
            # unreachable - that assembly method already returned above.
            if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
                if verbose:
                    print("assign_gripper_to_beam: assembly method = %s " % assembly_method)
                if gripper_id in [tool.name for tool in process.screwdrivers]:
                    if verbose:
                        print("assign_gripper_to_beam: gripper_id %s is valid and will not be changed." % process.assembly.get_beam_attribute(beam_id, "gripper_id"))
                    already_set = True
            else:
                # The id must name an existing gripper of the recorded type.
                if gripper_id in [tool.name for tool in process.grippers]:
                    if process.tool(gripper_id).type_name == gripper_type:
                        if verbose:
                            print("assign_gripper_to_beam: gripper_id %s is valid and will not be changed." % process.assembly.get_beam_attribute(beam_id, "gripper_id"))
                        already_set = True
    if already_set:
        if verbose:
            print("Beam (%s) gripper_type (%s) has already been set. No change made by assign_gripper_to_beam()." %
                  (beam_id, gripper_type))
        return ComputationalResult.ValidNoChange
    if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
        joint_ids = process.assembly.get_joint_ids_with_tools_for_beam(beam_id)
        first_screwdriver = process.get_tool_of_joint(joint_ids[0])
        chosen_gripper_type = first_screwdriver.type_name
        gripper_id = first_screwdriver.name
        if verbose:
            print("chosen_gripper_type = %s" % chosen_gripper_type)
            print("gripper_id = %s" % gripper_id)
    else:
        # Compute Gripper Type
        for gripper_type in process.available_gripper_types:
            gripper = process.get_one_gripper_by_type(gripper_type)
            # Check if beam length is within limits
            if beam_length >= gripper.beam_length_limits[0] and beam_length <= gripper.beam_length_limits[1]:
                # Compute beam length vs ideal length and make decision
                length_to_ideal = abs(beam_length - gripper.target_beam_length)
                if chosen_gripper_type is None or length_to_ideal < chosen_gripper_ideal:
                    chosen_gripper_type = gripper_type
                    chosen_gripper_ideal = length_to_ideal
        # In cases no suitable gripper is available
        if chosen_gripper_type is None:
            if verbose:
                print("No suitable gripper can be assigned to %s" % (beam_id))
            print("WARNING: No suitable gripper can be assigned to %s" % (beam_id))
            return ComputationalResult.ValidCannotContinue
        gripper_id = process.get_one_tool_by_type(chosen_gripper_type).name
    # Set gripper_type and gripper_id and return
    process.assembly.set_beam_attribute(beam_id, "gripper_type", chosen_gripper_type)
    process.assembly.set_beam_attribute(beam_id, "gripper_id", gripper_id)
    if verbose:
        print("Gripper Type: %s assigned to %s" % (chosen_gripper_type, beam_id))
    return ComputationalResult.ValidCanContinue
def compute_gripper_grasp_pose(process, beam_id, verbose=False):
    # type: (RobotClampAssemblyProcess, str, bool) -> ComputationalResult
    """ Compute grasp pose ('gripper_tcp_in_ocf') for the beam and its gripper.
    Gripper should be assigned before.

    For Beams with Gripper
    ----------------------
    Default values will be applied if 'gripper_grasp_dist_from_start' and 'gripper_grasp_face'
    are not set. Otherwise previous values will be preserved to calculate 'gripper_tcp_in_ocf'.

    For Beams with Screwdriver as gripper (SCREWED_WITHOUT_GRIPPER)
    ---------------------------------------------------------------
    - `tool_id`s and `gripper_id` should be assigned before.
    - `grasping_joint_id` should be assigned before.
    - `gripper_tcp_in_ocf` will be based on the
        - beam attribute `grasping_joint_id` (Set Manually)
        - joint_attribute `tool_orientation_frame_index` (Set Manually)

    State Change
    ------------
    This functions sets the following beam_attribute
    - 'gripper_grasp_dist_from_start' (if default)
    - 'gripper_grasp_face' (if default)
    - 'gripper_tcp_in_ocf'

    Return
    ------
    `ComputationalResult.ValidCannotContinue` if prerequisite not satisfied
    `ComputationalResult.ValidCanContinue` otherwise (this function should not fail)
    """
    assembly_method = process.assembly.get_assembly_method(beam_id)

    # * Skip MANUAL_ASSEMBLY
    if assembly_method == BeamAssemblyMethod.MANUAL_ASSEMBLY:
        if verbose:
            print("Skipping compute_gripper_grasp_pose for MANUAL_ASSEMBLY")
        return ComputationalResult.ValidCanContinue

    # Check to ensure prerequisite: a gripper must have been assigned.
    if process.assembly.get_beam_attribute(beam_id, 'gripper_type') is None:
        return ComputationalResult.ValidCannotContinue

    beam = process.assembly.beam(beam_id)
    if assembly_method == BeamAssemblyMethod.SCREWED_WITHOUT_GRIPPER:
        # Retrieve which joint carries the grasping screwdriver and its tool orientation frame index
        joint_id = process.assembly.get_grasping_joint_id(beam_id)  # grasping_joint_id
        tool_orientation_frame_index = process.assembly.get_joint_attribute(joint_id, 'tool_orientation_frame_index')

        # Transform the tool orientation frame (WCF) into the beam's OCF
        joint = process.assembly.joint((joint_id[1], joint_id[0]))
        screwdriver_tcp_frame_in_wcf = joint.get_clamp_frames(beam)[tool_orientation_frame_index]
        t_world_from_screwdriver_tcp = Transformation.from_frame(screwdriver_tcp_frame_in_wcf)
        t_world_from_beam = Transformation.from_frame(beam.frame)
        t_beam_from_screwdriver_tcp = t_world_from_beam.inverse() * t_world_from_screwdriver_tcp
        process.assembly.set_beam_attribute(beam_id, "gripper_tcp_in_ocf", Frame.from_transformation(t_beam_from_screwdriver_tcp))
        return ComputationalResult.ValidCanContinue
    else:
        def grasp_face(beam_id):
            # Current value of the 'gripper_grasp_face' beam attribute.
            return process.assembly.get_beam_attribute(beam_id, "gripper_grasp_face")

        # * Computing `gripper_grasp_face` if it is not yet a valid face (1-4).
        # Each setter below writes the attribute itself; their return values are not needed.
        if grasp_face(beam_id) not in [1, 2, 3, 4]:  # Default method
            process.set_grasp_face_following_assembly_direction(beam_id)
        if grasp_face(beam_id) not in [1, 2, 3, 4]:  # Backup plan
            process.set_grasp_face_following_guide_vector(beam_id)
        if grasp_face(beam_id) not in [1, 2, 3, 4]:  # Picking face 1 and deal with it
            process.assembly.set_beam_attribute(beam_id, "gripper_grasp_face", 1)
            print("Something wrong, gripper_grasp_face is not in [1,2,3,4] after search. Grasp face defaulted to ", 1)

        # * Computing `gripper_grasp_dist_from_start` if it is None (default: mid-point of the beam)
        gripper_grasp_dist_from_start = process.assembly.get_beam_attribute(beam_id, "gripper_grasp_dist_from_start")
        if gripper_grasp_dist_from_start is None:
            gripper_grasp_dist_from_start = beam.length / 2.0
            process.assembly.set_beam_attribute(beam_id, "gripper_grasp_dist_from_start", gripper_grasp_dist_from_start)

        # * Compute Gripper Grasp Pose, aka. gripper_tcp_in_ocf
        gripper_tcp_in_ocf = beam.grasp_frame_ocf(grasp_face(beam_id), gripper_grasp_dist_from_start)
        process.assembly.set_beam_attribute(beam_id, "gripper_tcp_in_ocf", gripper_tcp_in_ocf)
        return ComputationalResult.ValidCanContinue
def set_grasp_face_following_assembly_direction(process, beam_id):
    # type: (RobotClampAssemblyProcess, str) -> int
    """Pick and store the grasp face (1-4) for creating `gripper_tcp_in_ocf`,
    choosing the face opposite the beam's assembly direction.

    Only the first joint connected to an already-built neighbour is considered.

    State Change
    ------------
    This functions sets the following beam_attribute
    - 'gripper_grasp_face'

    Dependency Trigger
    ------------------
    Invalidate: 'compute_gripper_grasp_pose' and downstream
    """
    for neighbour_joint_id in process.assembly.get_joints_of_beam_connected_to_already_built(beam_id):
        neighbour_joint = process.assembly.joint(neighbour_joint_id)
        # The face two quarter-turns away from the joint face, wrapped into 1-4.
        chosen_face = (neighbour_joint.face_id + 1) % 4 + 1
        process.assembly.set_beam_attribute(beam_id, 'gripper_grasp_face', chosen_face)
        # Dependency Trigger: the grasp pose must be recomputed for the new face.
        process.dependency.invalidate(beam_id, process.compute_gripper_grasp_pose)
        # Only the first joint is considered.
        return chosen_face
def set_grasp_face_following_guide_vector(process, beam_id):
    # type: (RobotClampAssemblyProcess, str) -> int
    """Pick and store the grasp face (1-4) for creating `gripper_tcp_in_ocf`
    whose TCP Z-axis (in WCF, with the beam at 'assembly_wcf_final') aligns
    best with the guide vector `design_guide_vector_grasp`.

    Side Effect
    -----------
    beam_attribute 'gripper_grasp_face' will be set.
    """
    # Guide vector comes from the beam attributes.
    guide_vector = process.assembly.get_beam_attribute(beam_id, 'design_guide_vector_grasp').unitized()
    assert guide_vector is not None

    beam = process.assembly.beam(beam_id)
    world_from_beam = Transformation.from_frame(beam.frame)

    # Score each of the four candidate faces by dot product with the guide vector;
    # the highest-scoring (best aligned) face wins.
    winning_face, winning_score = 0, -1
    for candidate_face in range(1, 5):
        tcp_in_wcf = beam.grasp_frame_ocf(candidate_face, 0).transformed(world_from_beam)
        score = tcp_in_wcf.zaxis.dot(guide_vector)
        if score > winning_score:
            winning_face, winning_score = candidate_face, score

    process.assembly.set_beam_attribute(beam_id, 'gripper_grasp_face', winning_face)
    return winning_face
# ------------------------------------
# Manually invoked Functions
# -------------------------------------
def adjust_gripper_pos(process, beam_id, amount):
    # type: (RobotClampAssemblyProcess, str, float) -> bool
    """Shift the grasp position along the beam by `amount` and refresh the grasp pose.
    Gripper should be assigned before.

    State Change
    ------------
    This functions updates the following beam_attribute
    - 'gripper_grasp_dist_from_start'
    - 'gripper_tcp_in_ocf'

    Return
    ------
    False if prerequisite not satisfied
    True, if setting is successful otherwise (this function should not fail)

    Dependency Trigger
    ------------------
    Invalidate: 'compute_gripper_grasp_pose' and downstream
    """
    assembly = process.assembly
    beam = assembly.beam(beam_id)
    # Prerequisite: a gripper must already be assigned to this beam.
    if assembly.get_beam_attribute(beam_id, 'gripper_type') is None:
        return False

    face = assembly.get_beam_attribute(beam_id, "gripper_grasp_face")
    shifted_dist = assembly.get_beam_attribute(beam_id, "gripper_grasp_dist_from_start") + amount
    assembly.set_beam_attribute(beam_id, "gripper_grasp_dist_from_start", shifted_dist)

    # Recompute the beam grasp frame from the shifted position.
    assembly.set_beam_attribute(beam_id, "gripper_tcp_in_ocf", beam.grasp_frame_ocf(face, shifted_dist))

    # Dependency Trigger: downstream computations depend on the grasp pose.
    process.dependency.invalidate(beam_id, process.compute_gripper_grasp_pose)
    return True
def override_grasp_face(process, beam_id, grasp_face):
    # type: (RobotClampAssemblyProcess, str, float) -> bool
    """Manually force `gripper_grasp_face` for a specified beam.

    `grasp_face` is wrapped into the 1 - 4 range (e.g. 5 becomes 1, 0 becomes 4).

    State Change
    ------------
    This functions sets the following beam_attribute
    - 'gripper_grasp_face'

    Dependency Trigger
    ------------------
    Invalidate: 'compute_gripper_grasp_pose' and downstream
    """
    wrapped_face = (grasp_face - 1) % 4 + 1
    process.assembly.set_beam_attribute(beam_id, 'gripper_grasp_face', wrapped_face)
    # Dependency Trigger: the grasp pose must be recomputed for the new face.
    process.dependency.invalidate(beam_id, process.compute_gripper_grasp_pose)
    return True
| 89 | 0 | 30 |
98ac5c6908c40818ded6875944d3f91d712aa11d | 71,895 | py | Python | analyzer_executor/src/graph_description_pb2.py | lightoyou/grapl | 77488059891091e5656254ee15efef038a1b46a7 | [
"Apache-2.0"
] | null | null | null | analyzer_executor/src/graph_description_pb2.py | lightoyou/grapl | 77488059891091e5656254ee15efef038a1b46a7 | [
"Apache-2.0"
] | null | null | null | analyzer_executor/src/graph_description_pb2.py | lightoyou/grapl | 77488059891091e5656254ee15efef038a1b46a7 | [
"Apache-2.0"
] | null | null | null | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: graph_description.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='graph_description.proto',
package='graph_description',
syntax='proto3',
serialized_pb=_b('\n\x17graph_description.proto\x12\x11graph_description\x1a\x1egoogle/protobuf/wrappers.proto\"G\n\x04Host\x12\x12\n\x08hostname\x18\x01 \x01(\tH\x00\x12\x0c\n\x02ip\x18\x02 \x01(\tH\x00\x12\x12\n\x08\x61sset_id\x18\x03 \x01(\tH\x00\x42\t\n\x07host_id\"\xfb\x02\n\x10\x41ssetDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12\x11\n\ttimestamp\x18\x02 \x01(\x04\x12.\n\x08\x61sset_id\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\thost_name\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x31\n\x0bhost_domain\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\thost_fqdn\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x34\n\x0ehost_local_mac\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x18\n\x10operating_system\x18\t \x01(\t\"\xe2\x03\n\x0fNodeDescription\x12\x39\n\nasset_node\x18\x01 \x01(\x0b\x32#.graph_description.AssetDescriptionH\x00\x12=\n\x0cprocess_node\x18\x02 \x01(\x0b\x32%.graph_description.ProcessDescriptionH\x00\x12\x37\n\tfile_node\x18\x03 \x01(\x0b\x32\".graph_description.FileDescriptionH\x00\x12\x42\n\x0fip_address_node\x18\x04 \x01(\x0b\x32\'.graph_description.IpAddressDescriptionH\x00\x12I\n\x18outbound_connection_node\x18\x05 \x01(\x0b\x32%.graph_description.OutboundConnectionH\x00\x12G\n\x17inbound_connection_node\x18\x06 \x01(\x0b\x32$.graph_description.InboundConnectionH\x00\x12\x36\n\x0c\x64ynamic_node\x18\x07 \x01(\x0b\x32\x1e.graph_description.DynamicNodeH\x00\x42\x0c\n\nwhich_node\"\xa8\x02\n\x12OutboundConnection\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x19\n\x11\x63reated_timestamp\x18\x06 \x01(\x04\x12\x1c\n\x14terminated_timestamp\x18\x07 
\x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\x08 \x01(\x04\x12\x0c\n\x04port\x18\t \x01(\r\"\xa7\x02\n\x11InboundConnection\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x19\n\x11\x63reated_timestamp\x18\x06 \x01(\x04\x12\x1c\n\x14terminated_timestamp\x18\x07 \x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\x08 \x01(\x04\x12\x0c\n\x04port\x18\t \x01(\r\"\xb3\x03\n\x12ProcessDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x12\n\nprocess_id\x18\x06 \x01(\x04\x12\x14\n\x0cprocess_guid\x18\x07 \x01(\t\x12\x19\n\x11\x63reated_timestamp\x18\x08 \x01(\x04\x12\x1c\n\x14terminated_timestamp\x18\t \x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\n \x01(\x04\x12\x14\n\x0cprocess_name\x18\x0b \x01(\t\x12\x1c\n\x14process_command_line\x18\x0c \x01(\t\x12\x1f\n\x17process_integrity_level\x18\r \x01(\t\x12\x18\n\x10operating_system\x18\x0e \x01(\t\"\xd8\x04\n\x0f\x46ileDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x19\n\x11\x63reated_timestamp\x18\x06 \x01(\x04\x12\x19\n\x11\x64\x65leted_timestamp\x18\x07 \x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\x08 \x01(\x04\x12\x11\n\tfile_name\x18\t \x01(\t\x12\x11\n\tfile_path\x18\n \x01(\t\x12\x16\n\x0e\x66ile_extension\x18\x0b \x01(\t\x12\x16\n\x0e\x66ile_mime_type\x18\x0c 
\x01(\t\x12\x11\n\tfile_size\x18\r \x01(\x04\x12\x14\n\x0c\x66ile_version\x18\x0e \x01(\t\x12\x18\n\x10\x66ile_description\x18\x0f \x01(\t\x12\x14\n\x0c\x66ile_product\x18\x10 \x01(\t\x12\x14\n\x0c\x66ile_company\x18\x11 \x01(\t\x12\x16\n\x0e\x66ile_directory\x18\x12 \x01(\t\x12\x12\n\nfile_inode\x18\x13 \x01(\x04\x12\x17\n\x0f\x66ile_hard_links\x18\x14 \x01(\x04\x12\x10\n\x08md5_hash\x18\x15 \x01(\t\x12\x11\n\tsha1_hash\x18\x16 \x01(\t\x12\x13\n\x0bsha256_hash\x18\x17 \x01(\t\"a\n\x14IpAddressDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12\x11\n\ttimestamp\x18\x02 \x01(\x04\x12\x12\n\nip_address\x18\x03 \x01(\t\x12\x10\n\x08ip_proto\x18\x04 \x01(\t\"\x97\x01\n\x07Session\x12\x1e\n\x16primary_key_properties\x18\x01 \x03(\t\x12%\n\x1dprimary_key_requires_asset_id\x18\x02 \x01(\x08\x12\x14\n\x0c\x63reated_time\x18\x03 \x01(\x04\x12\x16\n\x0elast_seen_time\x18\x04 \x01(\x04\x12\x17\n\x0fterminated_time\x18\x05 \x01(\x04\"O\n\x06Static\x12\x1e\n\x16primary_key_properties\x18\x01 \x03(\t\x12%\n\x1dprimary_key_requires_asset_id\x18\x02 \x01(\x08\"t\n\nIdStrategy\x12-\n\x07session\x18\x01 \x01(\x0b\x32\x1a.graph_description.SessionH\x00\x12+\n\x06static\x18\x02 \x01(\x0b\x32\x19.graph_description.StaticH\x00\x42\n\n\x08strategy\"T\n\x0cNodeProperty\x12\x11\n\x07intprop\x18\x01 \x01(\x03H\x00\x12\x12\n\x08uintprop\x18\x02 \x01(\x04H\x00\x12\x11\n\x07strprop\x18\x03 \x01(\tH\x00\x42\n\n\x08property\"\x9e\x03\n\x0b\x44ynamicNode\x12\x42\n\nproperties\x18\x01 \x03(\x0b\x32..graph_description.DynamicNode.PropertiesEntry\x12\x10\n\x08node_key\x18\x02 \x01(\t\x12\x11\n\tnode_type\x18\x03 \x01(\t\x12\x0f\n\x07seen_at\x18\x04 \x01(\x04\x12.\n\x08\x61sset_id\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0bid_strategy\x18\x08 
\x03(\x0b\x32\x1d.graph_description.IdStrategy\x1aR\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.graph_description.NodeProperty:\x02\x38\x01\"=\n\x0f\x45\x64geDescription\x12\x0c\n\x04\x66rom\x18\x01 \x01(\t\x12\n\n\x02to\x18\x02 \x01(\t\x12\x10\n\x08\x65\x64geName\x18\x03 \x01(\t\"=\n\x08\x45\x64geList\x12\x31\n\x05\x65\x64ges\x18\x01 \x03(\x0b\x32\".graph_description.EdgeDescription\"\xc0\x02\n\x10GraphDescription\x12=\n\x05nodes\x18\x01 \x03(\x0b\x32..graph_description.GraphDescription.NodesEntry\x12=\n\x05\x65\x64ges\x18\x02 \x03(\x0b\x32..graph_description.GraphDescription.EdgesEntry\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x1aP\n\nNodesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".graph_description.NodeDescription:\x02\x38\x01\x1aI\n\nEdgesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.graph_description.EdgeList:\x02\x38\x01\"L\n\x12GeneratedSubgraphs\x12\x36\n\tsubgraphs\x18\x01 \x03(\x0b\x32#.graph_description.GraphDescriptionb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Machine-generated Descriptor for the `graph_description.Host` message:
# fields hostname (1), ip (2), asset_id (3) under the `host_id` oneof.
# Generated by protoc -- regenerate from graph_description.proto instead of editing.
_HOST = _descriptor.Descriptor(
  name='Host',
  full_name='graph_description.Host',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.Host.hostname', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ip', full_name='graph_description.Host.ip', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.Host.asset_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='host_id', full_name='graph_description.Host.host_id',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=78,
  serialized_end=149,
)
# Machine-generated Descriptor for `graph_description.AssetDescription`:
# node_key (1), timestamp (2), wrapped-string host identity fields (3-8),
# operating_system (9).
# Generated by protoc -- regenerate from graph_description.proto instead of editing.
_ASSETDESCRIPTION = _descriptor.Descriptor(
  name='AssetDescription',
  full_name='graph_description.AssetDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.AssetDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='graph_description.AssetDescription.timestamp', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.AssetDescription.asset_id', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_name', full_name='graph_description.AssetDescription.host_name', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_domain', full_name='graph_description.AssetDescription.host_domain', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_fqdn', full_name='graph_description.AssetDescription.host_fqdn', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_local_mac', full_name='graph_description.AssetDescription.host_local_mac', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.AssetDescription.host_ip', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='operating_system', full_name='graph_description.AssetDescription.operating_system', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=152,
  serialized_end=531,
)
# Machine-generated Descriptor for `graph_description.NodeDescription`:
# seven message-typed fields (asset/process/file/ip_address/outbound/inbound/dynamic)
# under the `which_node` oneof.
# Generated by protoc -- regenerate from graph_description.proto instead of editing.
_NODEDESCRIPTION = _descriptor.Descriptor(
  name='NodeDescription',
  full_name='graph_description.NodeDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='asset_node', full_name='graph_description.NodeDescription.asset_node', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_node', full_name='graph_description.NodeDescription.process_node', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_node', full_name='graph_description.NodeDescription.file_node', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ip_address_node', full_name='graph_description.NodeDescription.ip_address_node', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='outbound_connection_node', full_name='graph_description.NodeDescription.outbound_connection_node', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='inbound_connection_node', full_name='graph_description.NodeDescription.inbound_connection_node', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='dynamic_node', full_name='graph_description.NodeDescription.dynamic_node', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='which_node', full_name='graph_description.NodeDescription.which_node',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=534,
  serialized_end=1016,
)
# Machine-generated Descriptor for `graph_description.OutboundConnection`:
# node_key (1), wrapped host identity fields (2-4), state (5),
# lifecycle timestamps (6-8), port (9).
# Generated by protoc -- regenerate from graph_description.proto instead of editing.
_OUTBOUNDCONNECTION = _descriptor.Descriptor(
  name='OutboundConnection',
  full_name='graph_description.OutboundConnection',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.OutboundConnection.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.OutboundConnection.asset_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.OutboundConnection.hostname', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.OutboundConnection.host_ip', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='graph_description.OutboundConnection.state', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_timestamp', full_name='graph_description.OutboundConnection.created_timestamp', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='terminated_timestamp', full_name='graph_description.OutboundConnection.terminated_timestamp', index=6,
      number=7, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_timestamp', full_name='graph_description.OutboundConnection.last_seen_timestamp', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='port', full_name='graph_description.OutboundConnection.port', index=8,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1019,
  serialized_end=1315,
)
# Machine-generated Descriptor for `graph_description.InboundConnection`:
# same field layout as OutboundConnection (node_key, host identity, state,
# lifecycle timestamps, port).
# Generated by protoc -- regenerate from graph_description.proto instead of editing.
_INBOUNDCONNECTION = _descriptor.Descriptor(
  name='InboundConnection',
  full_name='graph_description.InboundConnection',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.InboundConnection.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.InboundConnection.asset_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.InboundConnection.hostname', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.InboundConnection.host_ip', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='graph_description.InboundConnection.state', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_timestamp', full_name='graph_description.InboundConnection.created_timestamp', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='terminated_timestamp', full_name='graph_description.InboundConnection.terminated_timestamp', index=6,
      number=7, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_timestamp', full_name='graph_description.InboundConnection.last_seen_timestamp', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='port', full_name='graph_description.InboundConnection.port', index=8,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1318,
  serialized_end=1613,
)
# Generated Descriptor for the graph_description.ProcessDescription message.
# NOTE(review): protoc-generated table — field numbers, indexes, and the
# serialized_start/end offsets must stay in sync with the serialized
# FileDescriptorProto; do not hand-edit.
_PROCESSDESCRIPTION = _descriptor.Descriptor(
  name='ProcessDescription',
  full_name='graph_description.ProcessDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.ProcessDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.ProcessDescription.asset_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.ProcessDescription.hostname', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.ProcessDescription.host_ip', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='graph_description.ProcessDescription.state', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_id', full_name='graph_description.ProcessDescription.process_id', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_guid', full_name='graph_description.ProcessDescription.process_guid', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_timestamp', full_name='graph_description.ProcessDescription.created_timestamp', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='terminated_timestamp', full_name='graph_description.ProcessDescription.terminated_timestamp', index=8,
      number=9, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_timestamp', full_name='graph_description.ProcessDescription.last_seen_timestamp', index=9,
      number=10, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_name', full_name='graph_description.ProcessDescription.process_name', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_command_line', full_name='graph_description.ProcessDescription.process_command_line', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_integrity_level', full_name='graph_description.ProcessDescription.process_integrity_level', index=12,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='operating_system', full_name='graph_description.ProcessDescription.operating_system', index=13,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1616,
  serialized_end=2051,
)
# Generated Descriptor for the graph_description.FileDescription message
# (file metadata fields plus md5/sha1/sha256 hash strings).
# NOTE(review): protoc-generated table — do not hand-edit.
_FILEDESCRIPTION = _descriptor.Descriptor(
  name='FileDescription',
  full_name='graph_description.FileDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.FileDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.FileDescription.asset_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.FileDescription.hostname', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.FileDescription.host_ip', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='graph_description.FileDescription.state', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_timestamp', full_name='graph_description.FileDescription.created_timestamp', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deleted_timestamp', full_name='graph_description.FileDescription.deleted_timestamp', index=6,
      number=7, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_timestamp', full_name='graph_description.FileDescription.last_seen_timestamp', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_name', full_name='graph_description.FileDescription.file_name', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_path', full_name='graph_description.FileDescription.file_path', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_extension', full_name='graph_description.FileDescription.file_extension', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_mime_type', full_name='graph_description.FileDescription.file_mime_type', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_size', full_name='graph_description.FileDescription.file_size', index=12,
      number=13, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_version', full_name='graph_description.FileDescription.file_version', index=13,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_description', full_name='graph_description.FileDescription.file_description', index=14,
      number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_product', full_name='graph_description.FileDescription.file_product', index=15,
      number=16, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_company', full_name='graph_description.FileDescription.file_company', index=16,
      number=17, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_directory', full_name='graph_description.FileDescription.file_directory', index=17,
      number=18, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_inode', full_name='graph_description.FileDescription.file_inode', index=18,
      number=19, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_hard_links', full_name='graph_description.FileDescription.file_hard_links', index=19,
      number=20, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='md5_hash', full_name='graph_description.FileDescription.md5_hash', index=20,
      number=21, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sha1_hash', full_name='graph_description.FileDescription.sha1_hash', index=21,
      number=22, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sha256_hash', full_name='graph_description.FileDescription.sha256_hash', index=22,
      number=23, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2054,
  serialized_end=2654,
)
# Generated Descriptor for the graph_description.IpAddressDescription message
# (node_key, timestamp, ip_address, ip_proto).
# NOTE(review): protoc-generated table — do not hand-edit.
_IPADDRESSDESCRIPTION = _descriptor.Descriptor(
  name='IpAddressDescription',
  full_name='graph_description.IpAddressDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.IpAddressDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='graph_description.IpAddressDescription.timestamp', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ip_address', full_name='graph_description.IpAddressDescription.ip_address', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ip_proto', full_name='graph_description.IpAddressDescription.ip_proto', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2656,
  serialized_end=2753,
)
# Generated Descriptor for the graph_description.Session message
# (session-based identity strategy: key properties plus created/last_seen/terminated times).
# NOTE(review): protoc-generated table — do not hand-edit.
_SESSION = _descriptor.Descriptor(
  name='Session',
  full_name='graph_description.Session',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='primary_key_properties', full_name='graph_description.Session.primary_key_properties', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='primary_key_requires_asset_id', full_name='graph_description.Session.primary_key_requires_asset_id', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_time', full_name='graph_description.Session.created_time', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_time', full_name='graph_description.Session.last_seen_time', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='terminated_time', full_name='graph_description.Session.terminated_time', index=4,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2756,
  serialized_end=2907,
)
# Generated Descriptor for the graph_description.Static message
# (static identity strategy: key properties and asset-id requirement flag).
# NOTE(review): protoc-generated table — do not hand-edit.
_STATIC = _descriptor.Descriptor(
  name='Static',
  full_name='graph_description.Static',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='primary_key_properties', full_name='graph_description.Static.primary_key_properties', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='primary_key_requires_asset_id', full_name='graph_description.Static.primary_key_requires_asset_id', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2909,
  serialized_end=2988,
)
# Generated Descriptor for the graph_description.IdStrategy message:
# a oneof ('strategy') selecting between the Session and Static messages.
# NOTE(review): protoc-generated table — do not hand-edit.
_IDSTRATEGY = _descriptor.Descriptor(
  name='IdStrategy',
  full_name='graph_description.IdStrategy',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='session', full_name='graph_description.IdStrategy.session', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='static', full_name='graph_description.IdStrategy.static', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='strategy', full_name='graph_description.IdStrategy.strategy',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=2990,
  serialized_end=3106,
)
# Generated Descriptor for the graph_description.NodeProperty message:
# a oneof ('property') holding an int, uint, or string value.
# NOTE(review): protoc-generated table — do not hand-edit.
_NODEPROPERTY = _descriptor.Descriptor(
  name='NodeProperty',
  full_name='graph_description.NodeProperty',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='intprop', full_name='graph_description.NodeProperty.intprop', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uintprop', full_name='graph_description.NodeProperty.uintprop', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='strprop', full_name='graph_description.NodeProperty.strprop', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='property', full_name='graph_description.NodeProperty.property',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=3108,
  serialized_end=3192,
)
# Generated Descriptor for the synthetic map-entry message backing
# DynamicNode.properties (string key -> message value; the '8\001'
# MessageOptions blob marks it as a map entry).
# NOTE(review): protoc-generated table — do not hand-edit.
_DYNAMICNODE_PROPERTIESENTRY = _descriptor.Descriptor(
  name='PropertiesEntry',
  full_name='graph_description.DynamicNode.PropertiesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='graph_description.DynamicNode.PropertiesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='graph_description.DynamicNode.PropertiesEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3527,
  serialized_end=3609,
)
# Generated Descriptor for the graph_description.DynamicNode message:
# a properties map, node identity/type, and repeated IdStrategy entries;
# nests the PropertiesEntry map-entry descriptor.
# NOTE(review): protoc-generated table — do not hand-edit.
_DYNAMICNODE = _descriptor.Descriptor(
  name='DynamicNode',
  full_name='graph_description.DynamicNode',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='properties', full_name='graph_description.DynamicNode.properties', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.DynamicNode.node_key', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='node_type', full_name='graph_description.DynamicNode.node_type', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='seen_at', full_name='graph_description.DynamicNode.seen_at', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.DynamicNode.asset_id', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.DynamicNode.hostname', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.DynamicNode.host_ip', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='id_strategy', full_name='graph_description.DynamicNode.id_strategy', index=7,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_DYNAMICNODE_PROPERTIESENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3195,
  serialized_end=3609,
)
# Generated Descriptor for the graph_description.EdgeDescription message
# (directed edge: from/to node keys and an edge name; 'from' is a Python
# keyword, which the protobuf runtime handles via fields_by_name lookup).
# NOTE(review): protoc-generated table — do not hand-edit.
_EDGEDESCRIPTION = _descriptor.Descriptor(
  name='EdgeDescription',
  full_name='graph_description.EdgeDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='from', full_name='graph_description.EdgeDescription.from', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='to', full_name='graph_description.EdgeDescription.to', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='edgeName', full_name='graph_description.EdgeDescription.edgeName', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3611,
  serialized_end=3672,
)
# Generated Descriptor for the graph_description.EdgeList message
# (a repeated message field of edges).
# NOTE(review): protoc-generated table — do not hand-edit.
_EDGELIST = _descriptor.Descriptor(
  name='EdgeList',
  full_name='graph_description.EdgeList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='edges', full_name='graph_description.EdgeList.edges', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3674,
  serialized_end=3735,
)
# Generated Descriptor for the synthetic map-entry message backing
# GraphDescription.nodes (string key -> message value; '8\001' marks map entry).
# NOTE(review): protoc-generated table — do not hand-edit.
_GRAPHDESCRIPTION_NODESENTRY = _descriptor.Descriptor(
  name='NodesEntry',
  full_name='graph_description.GraphDescription.NodesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='graph_description.GraphDescription.NodesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='graph_description.GraphDescription.NodesEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3903,
  serialized_end=3983,
)
# Generated Descriptor for the synthetic map-entry message backing
# GraphDescription.edges (string key -> message value; '8\001' marks map entry).
# NOTE(review): protoc-generated table — do not hand-edit.
_GRAPHDESCRIPTION_EDGESENTRY = _descriptor.Descriptor(
  name='EdgesEntry',
  full_name='graph_description.GraphDescription.EdgesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='graph_description.GraphDescription.EdgesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='graph_description.GraphDescription.EdgesEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3985,
  serialized_end=4058,
)
# Generated Descriptor for the graph_description.GraphDescription message:
# nodes and edges maps plus a timestamp; nests the two map-entry descriptors.
# NOTE(review): protoc-generated table — do not hand-edit.
_GRAPHDESCRIPTION = _descriptor.Descriptor(
  name='GraphDescription',
  full_name='graph_description.GraphDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='nodes', full_name='graph_description.GraphDescription.nodes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='edges', full_name='graph_description.GraphDescription.edges', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='graph_description.GraphDescription.timestamp', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_GRAPHDESCRIPTION_NODESENTRY, _GRAPHDESCRIPTION_EDGESENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3738,
  serialized_end=4058,
)
# Generated Descriptor for the graph_description.GeneratedSubgraphs message
# (a repeated message field of subgraphs).
# NOTE(review): protoc-generated table — do not hand-edit.
_GENERATEDSUBGRAPHS = _descriptor.Descriptor(
  name='GeneratedSubgraphs',
  full_name='graph_description.GeneratedSubgraphs',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='subgraphs', full_name='graph_description.GeneratedSubgraphs.subgraphs', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4060,
  serialized_end=4136,
)
# Generated post-construction wiring. Two kinds of statements below:
#  * `.message_type = ...` resolves each message-typed field to its
#    Descriptor (local messages, or google.protobuf.StringValue from the
#    wrappers proto for optional string fields);
#  * `.oneofs_by_name[...].fields.append(...)` plus `.containing_oneof = ...`
#    registers each field's membership in its oneof.
# NOTE(review): generated code — do not hand-edit.
_HOST.oneofs_by_name['host_id'].fields.append(
  _HOST.fields_by_name['hostname'])
_HOST.fields_by_name['hostname'].containing_oneof = _HOST.oneofs_by_name['host_id']
_HOST.oneofs_by_name['host_id'].fields.append(
  _HOST.fields_by_name['ip'])
_HOST.fields_by_name['ip'].containing_oneof = _HOST.oneofs_by_name['host_id']
_HOST.oneofs_by_name['host_id'].fields.append(
  _HOST.fields_by_name['asset_id'])
_HOST.fields_by_name['asset_id'].containing_oneof = _HOST.oneofs_by_name['host_id']
_ASSETDESCRIPTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_domain'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_fqdn'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_local_mac'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_NODEDESCRIPTION.fields_by_name['asset_node'].message_type = _ASSETDESCRIPTION
_NODEDESCRIPTION.fields_by_name['process_node'].message_type = _PROCESSDESCRIPTION
_NODEDESCRIPTION.fields_by_name['file_node'].message_type = _FILEDESCRIPTION
_NODEDESCRIPTION.fields_by_name['ip_address_node'].message_type = _IPADDRESSDESCRIPTION
_NODEDESCRIPTION.fields_by_name['outbound_connection_node'].message_type = _OUTBOUNDCONNECTION
_NODEDESCRIPTION.fields_by_name['inbound_connection_node'].message_type = _INBOUNDCONNECTION
_NODEDESCRIPTION.fields_by_name['dynamic_node'].message_type = _DYNAMICNODE
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['asset_node'])
_NODEDESCRIPTION.fields_by_name['asset_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['process_node'])
_NODEDESCRIPTION.fields_by_name['process_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['file_node'])
_NODEDESCRIPTION.fields_by_name['file_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['ip_address_node'])
_NODEDESCRIPTION.fields_by_name['ip_address_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['outbound_connection_node'])
_NODEDESCRIPTION.fields_by_name['outbound_connection_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['inbound_connection_node'])
_NODEDESCRIPTION.fields_by_name['inbound_connection_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
  _NODEDESCRIPTION.fields_by_name['dynamic_node'])
_NODEDESCRIPTION.fields_by_name['dynamic_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_OUTBOUNDCONNECTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_OUTBOUNDCONNECTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_OUTBOUNDCONNECTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_INBOUNDCONNECTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_INBOUNDCONNECTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_INBOUNDCONNECTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROCESSDESCRIPTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROCESSDESCRIPTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROCESSDESCRIPTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_FILEDESCRIPTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_FILEDESCRIPTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_FILEDESCRIPTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_IDSTRATEGY.fields_by_name['session'].message_type = _SESSION
_IDSTRATEGY.fields_by_name['static'].message_type = _STATIC
_IDSTRATEGY.oneofs_by_name['strategy'].fields.append(
  _IDSTRATEGY.fields_by_name['session'])
_IDSTRATEGY.fields_by_name['session'].containing_oneof = _IDSTRATEGY.oneofs_by_name['strategy']
_IDSTRATEGY.oneofs_by_name['strategy'].fields.append(
  _IDSTRATEGY.fields_by_name['static'])
_IDSTRATEGY.fields_by_name['static'].containing_oneof = _IDSTRATEGY.oneofs_by_name['strategy']
_NODEPROPERTY.oneofs_by_name['property'].fields.append(
  _NODEPROPERTY.fields_by_name['intprop'])
_NODEPROPERTY.fields_by_name['intprop'].containing_oneof = _NODEPROPERTY.oneofs_by_name['property']
_NODEPROPERTY.oneofs_by_name['property'].fields.append(
  _NODEPROPERTY.fields_by_name['uintprop'])
_NODEPROPERTY.fields_by_name['uintprop'].containing_oneof = _NODEPROPERTY.oneofs_by_name['property']
_NODEPROPERTY.oneofs_by_name['property'].fields.append(
_NODEPROPERTY.fields_by_name['strprop'])
_NODEPROPERTY.fields_by_name['strprop'].containing_oneof = _NODEPROPERTY.oneofs_by_name['property']
_DYNAMICNODE_PROPERTIESENTRY.fields_by_name['value'].message_type = _NODEPROPERTY
_DYNAMICNODE_PROPERTIESENTRY.containing_type = _DYNAMICNODE
_DYNAMICNODE.fields_by_name['properties'].message_type = _DYNAMICNODE_PROPERTIESENTRY
_DYNAMICNODE.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DYNAMICNODE.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DYNAMICNODE.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DYNAMICNODE.fields_by_name['id_strategy'].message_type = _IDSTRATEGY
_EDGELIST.fields_by_name['edges'].message_type = _EDGEDESCRIPTION
_GRAPHDESCRIPTION_NODESENTRY.fields_by_name['value'].message_type = _NODEDESCRIPTION
_GRAPHDESCRIPTION_NODESENTRY.containing_type = _GRAPHDESCRIPTION
_GRAPHDESCRIPTION_EDGESENTRY.fields_by_name['value'].message_type = _EDGELIST
_GRAPHDESCRIPTION_EDGESENTRY.containing_type = _GRAPHDESCRIPTION
_GRAPHDESCRIPTION.fields_by_name['nodes'].message_type = _GRAPHDESCRIPTION_NODESENTRY
_GRAPHDESCRIPTION.fields_by_name['edges'].message_type = _GRAPHDESCRIPTION_EDGESENTRY
_GENERATEDSUBGRAPHS.fields_by_name['subgraphs'].message_type = _GRAPHDESCRIPTION
DESCRIPTOR.message_types_by_name['Host'] = _HOST
DESCRIPTOR.message_types_by_name['AssetDescription'] = _ASSETDESCRIPTION
DESCRIPTOR.message_types_by_name['NodeDescription'] = _NODEDESCRIPTION
DESCRIPTOR.message_types_by_name['OutboundConnection'] = _OUTBOUNDCONNECTION
DESCRIPTOR.message_types_by_name['InboundConnection'] = _INBOUNDCONNECTION
DESCRIPTOR.message_types_by_name['ProcessDescription'] = _PROCESSDESCRIPTION
DESCRIPTOR.message_types_by_name['FileDescription'] = _FILEDESCRIPTION
DESCRIPTOR.message_types_by_name['IpAddressDescription'] = _IPADDRESSDESCRIPTION
DESCRIPTOR.message_types_by_name['Session'] = _SESSION
DESCRIPTOR.message_types_by_name['Static'] = _STATIC
DESCRIPTOR.message_types_by_name['IdStrategy'] = _IDSTRATEGY
DESCRIPTOR.message_types_by_name['NodeProperty'] = _NODEPROPERTY
DESCRIPTOR.message_types_by_name['DynamicNode'] = _DYNAMICNODE
DESCRIPTOR.message_types_by_name['EdgeDescription'] = _EDGEDESCRIPTION
DESCRIPTOR.message_types_by_name['EdgeList'] = _EDGELIST
DESCRIPTOR.message_types_by_name['GraphDescription'] = _GRAPHDESCRIPTION
DESCRIPTOR.message_types_by_name['GeneratedSubgraphs'] = _GENERATEDSUBGRAPHS
Host = _reflection.GeneratedProtocolMessageType('Host', (_message.Message,), dict(
DESCRIPTOR = _HOST,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.Host)
))
_sym_db.RegisterMessage(Host)
AssetDescription = _reflection.GeneratedProtocolMessageType('AssetDescription', (_message.Message,), dict(
DESCRIPTOR = _ASSETDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.AssetDescription)
))
_sym_db.RegisterMessage(AssetDescription)
NodeDescription = _reflection.GeneratedProtocolMessageType('NodeDescription', (_message.Message,), dict(
DESCRIPTOR = _NODEDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.NodeDescription)
))
_sym_db.RegisterMessage(NodeDescription)
OutboundConnection = _reflection.GeneratedProtocolMessageType('OutboundConnection', (_message.Message,), dict(
DESCRIPTOR = _OUTBOUNDCONNECTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.OutboundConnection)
))
_sym_db.RegisterMessage(OutboundConnection)
InboundConnection = _reflection.GeneratedProtocolMessageType('InboundConnection', (_message.Message,), dict(
DESCRIPTOR = _INBOUNDCONNECTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.InboundConnection)
))
_sym_db.RegisterMessage(InboundConnection)
ProcessDescription = _reflection.GeneratedProtocolMessageType('ProcessDescription', (_message.Message,), dict(
DESCRIPTOR = _PROCESSDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.ProcessDescription)
))
_sym_db.RegisterMessage(ProcessDescription)
FileDescription = _reflection.GeneratedProtocolMessageType('FileDescription', (_message.Message,), dict(
DESCRIPTOR = _FILEDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.FileDescription)
))
_sym_db.RegisterMessage(FileDescription)
IpAddressDescription = _reflection.GeneratedProtocolMessageType('IpAddressDescription', (_message.Message,), dict(
DESCRIPTOR = _IPADDRESSDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.IpAddressDescription)
))
_sym_db.RegisterMessage(IpAddressDescription)
Session = _reflection.GeneratedProtocolMessageType('Session', (_message.Message,), dict(
DESCRIPTOR = _SESSION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.Session)
))
_sym_db.RegisterMessage(Session)
Static = _reflection.GeneratedProtocolMessageType('Static', (_message.Message,), dict(
DESCRIPTOR = _STATIC,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.Static)
))
_sym_db.RegisterMessage(Static)
IdStrategy = _reflection.GeneratedProtocolMessageType('IdStrategy', (_message.Message,), dict(
DESCRIPTOR = _IDSTRATEGY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.IdStrategy)
))
_sym_db.RegisterMessage(IdStrategy)
NodeProperty = _reflection.GeneratedProtocolMessageType('NodeProperty', (_message.Message,), dict(
DESCRIPTOR = _NODEPROPERTY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.NodeProperty)
))
_sym_db.RegisterMessage(NodeProperty)
DynamicNode = _reflection.GeneratedProtocolMessageType('DynamicNode', (_message.Message,), dict(
PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict(
DESCRIPTOR = _DYNAMICNODE_PROPERTIESENTRY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.DynamicNode.PropertiesEntry)
))
,
DESCRIPTOR = _DYNAMICNODE,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.DynamicNode)
))
_sym_db.RegisterMessage(DynamicNode)
_sym_db.RegisterMessage(DynamicNode.PropertiesEntry)
EdgeDescription = _reflection.GeneratedProtocolMessageType('EdgeDescription', (_message.Message,), dict(
DESCRIPTOR = _EDGEDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.EdgeDescription)
))
_sym_db.RegisterMessage(EdgeDescription)
EdgeList = _reflection.GeneratedProtocolMessageType('EdgeList', (_message.Message,), dict(
DESCRIPTOR = _EDGELIST,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.EdgeList)
))
_sym_db.RegisterMessage(EdgeList)
GraphDescription = _reflection.GeneratedProtocolMessageType('GraphDescription', (_message.Message,), dict(
NodesEntry = _reflection.GeneratedProtocolMessageType('NodesEntry', (_message.Message,), dict(
DESCRIPTOR = _GRAPHDESCRIPTION_NODESENTRY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GraphDescription.NodesEntry)
))
,
EdgesEntry = _reflection.GeneratedProtocolMessageType('EdgesEntry', (_message.Message,), dict(
DESCRIPTOR = _GRAPHDESCRIPTION_EDGESENTRY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GraphDescription.EdgesEntry)
))
,
DESCRIPTOR = _GRAPHDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GraphDescription)
))
_sym_db.RegisterMessage(GraphDescription)
_sym_db.RegisterMessage(GraphDescription.NodesEntry)
_sym_db.RegisterMessage(GraphDescription.EdgesEntry)
GeneratedSubgraphs = _reflection.GeneratedProtocolMessageType('GeneratedSubgraphs', (_message.Message,), dict(
DESCRIPTOR = _GENERATEDSUBGRAPHS,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GeneratedSubgraphs)
))
_sym_db.RegisterMessage(GeneratedSubgraphs)
_DYNAMICNODE_PROPERTIESENTRY.has_options = True
_DYNAMICNODE_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_GRAPHDESCRIPTION_NODESENTRY.has_options = True
_GRAPHDESCRIPTION_NODESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_GRAPHDESCRIPTION_EDGESENTRY.has_options = True
_GRAPHDESCRIPTION_EDGESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
| 46.1161 | 6,976 | 0.755338 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: graph_description.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='graph_description.proto',
package='graph_description',
syntax='proto3',
serialized_pb=_b('\n\x17graph_description.proto\x12\x11graph_description\x1a\x1egoogle/protobuf/wrappers.proto\"G\n\x04Host\x12\x12\n\x08hostname\x18\x01 \x01(\tH\x00\x12\x0c\n\x02ip\x18\x02 \x01(\tH\x00\x12\x12\n\x08\x61sset_id\x18\x03 \x01(\tH\x00\x42\t\n\x07host_id\"\xfb\x02\n\x10\x41ssetDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12\x11\n\ttimestamp\x18\x02 \x01(\x04\x12.\n\x08\x61sset_id\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\thost_name\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x31\n\x0bhost_domain\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12/\n\thost_fqdn\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x34\n\x0ehost_local_mac\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x18\n\x10operating_system\x18\t \x01(\t\"\xe2\x03\n\x0fNodeDescription\x12\x39\n\nasset_node\x18\x01 \x01(\x0b\x32#.graph_description.AssetDescriptionH\x00\x12=\n\x0cprocess_node\x18\x02 \x01(\x0b\x32%.graph_description.ProcessDescriptionH\x00\x12\x37\n\tfile_node\x18\x03 \x01(\x0b\x32\".graph_description.FileDescriptionH\x00\x12\x42\n\x0fip_address_node\x18\x04 \x01(\x0b\x32\'.graph_description.IpAddressDescriptionH\x00\x12I\n\x18outbound_connection_node\x18\x05 \x01(\x0b\x32%.graph_description.OutboundConnectionH\x00\x12G\n\x17inbound_connection_node\x18\x06 \x01(\x0b\x32$.graph_description.InboundConnectionH\x00\x12\x36\n\x0c\x64ynamic_node\x18\x07 \x01(\x0b\x32\x1e.graph_description.DynamicNodeH\x00\x42\x0c\n\nwhich_node\"\xa8\x02\n\x12OutboundConnection\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x19\n\x11\x63reated_timestamp\x18\x06 \x01(\x04\x12\x1c\n\x14terminated_timestamp\x18\x07 
\x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\x08 \x01(\x04\x12\x0c\n\x04port\x18\t \x01(\r\"\xa7\x02\n\x11InboundConnection\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x19\n\x11\x63reated_timestamp\x18\x06 \x01(\x04\x12\x1c\n\x14terminated_timestamp\x18\x07 \x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\x08 \x01(\x04\x12\x0c\n\x04port\x18\t \x01(\r\"\xb3\x03\n\x12ProcessDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x12\n\nprocess_id\x18\x06 \x01(\x04\x12\x14\n\x0cprocess_guid\x18\x07 \x01(\t\x12\x19\n\x11\x63reated_timestamp\x18\x08 \x01(\x04\x12\x1c\n\x14terminated_timestamp\x18\t \x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\n \x01(\x04\x12\x14\n\x0cprocess_name\x18\x0b \x01(\t\x12\x1c\n\x14process_command_line\x18\x0c \x01(\t\x12\x1f\n\x17process_integrity_level\x18\r \x01(\t\x12\x18\n\x10operating_system\x18\x0e \x01(\t\"\xd8\x04\n\x0f\x46ileDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12.\n\x08\x61sset_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\r\n\x05state\x18\x05 \x01(\r\x12\x19\n\x11\x63reated_timestamp\x18\x06 \x01(\x04\x12\x19\n\x11\x64\x65leted_timestamp\x18\x07 \x01(\x04\x12\x1b\n\x13last_seen_timestamp\x18\x08 \x01(\x04\x12\x11\n\tfile_name\x18\t \x01(\t\x12\x11\n\tfile_path\x18\n \x01(\t\x12\x16\n\x0e\x66ile_extension\x18\x0b \x01(\t\x12\x16\n\x0e\x66ile_mime_type\x18\x0c 
\x01(\t\x12\x11\n\tfile_size\x18\r \x01(\x04\x12\x14\n\x0c\x66ile_version\x18\x0e \x01(\t\x12\x18\n\x10\x66ile_description\x18\x0f \x01(\t\x12\x14\n\x0c\x66ile_product\x18\x10 \x01(\t\x12\x14\n\x0c\x66ile_company\x18\x11 \x01(\t\x12\x16\n\x0e\x66ile_directory\x18\x12 \x01(\t\x12\x12\n\nfile_inode\x18\x13 \x01(\x04\x12\x17\n\x0f\x66ile_hard_links\x18\x14 \x01(\x04\x12\x10\n\x08md5_hash\x18\x15 \x01(\t\x12\x11\n\tsha1_hash\x18\x16 \x01(\t\x12\x13\n\x0bsha256_hash\x18\x17 \x01(\t\"a\n\x14IpAddressDescription\x12\x10\n\x08node_key\x18\x01 \x01(\t\x12\x11\n\ttimestamp\x18\x02 \x01(\x04\x12\x12\n\nip_address\x18\x03 \x01(\t\x12\x10\n\x08ip_proto\x18\x04 \x01(\t\"\x97\x01\n\x07Session\x12\x1e\n\x16primary_key_properties\x18\x01 \x03(\t\x12%\n\x1dprimary_key_requires_asset_id\x18\x02 \x01(\x08\x12\x14\n\x0c\x63reated_time\x18\x03 \x01(\x04\x12\x16\n\x0elast_seen_time\x18\x04 \x01(\x04\x12\x17\n\x0fterminated_time\x18\x05 \x01(\x04\"O\n\x06Static\x12\x1e\n\x16primary_key_properties\x18\x01 \x03(\t\x12%\n\x1dprimary_key_requires_asset_id\x18\x02 \x01(\x08\"t\n\nIdStrategy\x12-\n\x07session\x18\x01 \x01(\x0b\x32\x1a.graph_description.SessionH\x00\x12+\n\x06static\x18\x02 \x01(\x0b\x32\x19.graph_description.StaticH\x00\x42\n\n\x08strategy\"T\n\x0cNodeProperty\x12\x11\n\x07intprop\x18\x01 \x01(\x03H\x00\x12\x12\n\x08uintprop\x18\x02 \x01(\x04H\x00\x12\x11\n\x07strprop\x18\x03 \x01(\tH\x00\x42\n\n\x08property\"\x9e\x03\n\x0b\x44ynamicNode\x12\x42\n\nproperties\x18\x01 \x03(\x0b\x32..graph_description.DynamicNode.PropertiesEntry\x12\x10\n\x08node_key\x18\x02 \x01(\t\x12\x11\n\tnode_type\x18\x03 \x01(\t\x12\x0f\n\x07seen_at\x18\x04 \x01(\x04\x12.\n\x08\x61sset_id\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08hostname\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12-\n\x07host_ip\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x32\n\x0bid_strategy\x18\x08 
\x03(\x0b\x32\x1d.graph_description.IdStrategy\x1aR\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.graph_description.NodeProperty:\x02\x38\x01\"=\n\x0f\x45\x64geDescription\x12\x0c\n\x04\x66rom\x18\x01 \x01(\t\x12\n\n\x02to\x18\x02 \x01(\t\x12\x10\n\x08\x65\x64geName\x18\x03 \x01(\t\"=\n\x08\x45\x64geList\x12\x31\n\x05\x65\x64ges\x18\x01 \x03(\x0b\x32\".graph_description.EdgeDescription\"\xc0\x02\n\x10GraphDescription\x12=\n\x05nodes\x18\x01 \x03(\x0b\x32..graph_description.GraphDescription.NodesEntry\x12=\n\x05\x65\x64ges\x18\x02 \x03(\x0b\x32..graph_description.GraphDescription.EdgesEntry\x12\x11\n\ttimestamp\x18\x03 \x01(\x04\x1aP\n\nNodesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".graph_description.NodeDescription:\x02\x38\x01\x1aI\n\nEdgesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.graph_description.EdgeList:\x02\x38\x01\"L\n\x12GeneratedSubgraphs\x12\x36\n\tsubgraphs\x18\x01 \x03(\x0b\x32#.graph_description.GraphDescriptionb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_HOST = _descriptor.Descriptor(
name='Host',
full_name='graph_description.Host',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hostname', full_name='graph_description.Host.hostname', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ip', full_name='graph_description.Host.ip', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='asset_id', full_name='graph_description.Host.asset_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='host_id', full_name='graph_description.Host.host_id',
index=0, containing_type=None, fields=[]),
],
serialized_start=78,
serialized_end=149,
)
_ASSETDESCRIPTION = _descriptor.Descriptor(
name='AssetDescription',
full_name='graph_description.AssetDescription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node_key', full_name='graph_description.AssetDescription.node_key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='timestamp', full_name='graph_description.AssetDescription.timestamp', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='asset_id', full_name='graph_description.AssetDescription.asset_id', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_name', full_name='graph_description.AssetDescription.host_name', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_domain', full_name='graph_description.AssetDescription.host_domain', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_fqdn', full_name='graph_description.AssetDescription.host_fqdn', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_local_mac', full_name='graph_description.AssetDescription.host_local_mac', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_ip', full_name='graph_description.AssetDescription.host_ip', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='operating_system', full_name='graph_description.AssetDescription.operating_system', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=152,
serialized_end=531,
)
_NODEDESCRIPTION = _descriptor.Descriptor(
name='NodeDescription',
full_name='graph_description.NodeDescription',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='asset_node', full_name='graph_description.NodeDescription.asset_node', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='process_node', full_name='graph_description.NodeDescription.process_node', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='file_node', full_name='graph_description.NodeDescription.file_node', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ip_address_node', full_name='graph_description.NodeDescription.ip_address_node', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='outbound_connection_node', full_name='graph_description.NodeDescription.outbound_connection_node', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inbound_connection_node', full_name='graph_description.NodeDescription.inbound_connection_node', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dynamic_node', full_name='graph_description.NodeDescription.dynamic_node', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='which_node', full_name='graph_description.NodeDescription.which_node',
index=0, containing_type=None, fields=[]),
],
serialized_start=534,
serialized_end=1016,
)
_OUTBOUNDCONNECTION = _descriptor.Descriptor(
name='OutboundConnection',
full_name='graph_description.OutboundConnection',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node_key', full_name='graph_description.OutboundConnection.node_key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='asset_id', full_name='graph_description.OutboundConnection.asset_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hostname', full_name='graph_description.OutboundConnection.hostname', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_ip', full_name='graph_description.OutboundConnection.host_ip', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='state', full_name='graph_description.OutboundConnection.state', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='created_timestamp', full_name='graph_description.OutboundConnection.created_timestamp', index=5,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='terminated_timestamp', full_name='graph_description.OutboundConnection.terminated_timestamp', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_seen_timestamp', full_name='graph_description.OutboundConnection.last_seen_timestamp', index=7,
number=8, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port', full_name='graph_description.OutboundConnection.port', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1019,
serialized_end=1315,
)
_INBOUNDCONNECTION = _descriptor.Descriptor(
name='InboundConnection',
full_name='graph_description.InboundConnection',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='node_key', full_name='graph_description.InboundConnection.node_key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='asset_id', full_name='graph_description.InboundConnection.asset_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hostname', full_name='graph_description.InboundConnection.hostname', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='host_ip', full_name='graph_description.InboundConnection.host_ip', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='state', full_name='graph_description.InboundConnection.state', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='created_timestamp', full_name='graph_description.InboundConnection.created_timestamp', index=5,
number=6, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='terminated_timestamp', full_name='graph_description.InboundConnection.terminated_timestamp', index=6,
number=7, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='last_seen_timestamp', full_name='graph_description.InboundConnection.last_seen_timestamp', index=7,
number=8, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port', full_name='graph_description.InboundConnection.port', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1318,
serialized_end=1613,
)
# Descriptor for the `graph_description.ProcessDescription` message: a process
# node keyed by `node_key`, carrying host identity (asset_id/hostname/host_ip),
# lifecycle timestamps, and process metadata (pid, guid, name, command line,
# integrity level, OS).
# NOTE(review): this file looks protoc-generated — do not edit by hand; change
# the .proto and regenerate instead.
_PROCESSDESCRIPTION = _descriptor.Descriptor(
  name='ProcessDescription',
  full_name='graph_description.ProcessDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.ProcessDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.ProcessDescription.asset_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.ProcessDescription.hostname', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.ProcessDescription.host_ip', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='graph_description.ProcessDescription.state', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_id', full_name='graph_description.ProcessDescription.process_id', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_guid', full_name='graph_description.ProcessDescription.process_guid', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_timestamp', full_name='graph_description.ProcessDescription.created_timestamp', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='terminated_timestamp', full_name='graph_description.ProcessDescription.terminated_timestamp', index=8,
      number=9, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_timestamp', full_name='graph_description.ProcessDescription.last_seen_timestamp', index=9,
      number=10, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_name', full_name='graph_description.ProcessDescription.process_name', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_command_line', full_name='graph_description.ProcessDescription.process_command_line', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='process_integrity_level', full_name='graph_description.ProcessDescription.process_integrity_level', index=12,
      number=13, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='operating_system', full_name='graph_description.ProcessDescription.operating_system', index=13,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition inside the serialized
  # FileDescriptorProto held by DESCRIPTOR — must not be altered by hand.
  serialized_start=1616,
  serialized_end=2051,
)
# Descriptor for the `graph_description.FileDescription` message: a file node
# keyed by `node_key`, with host identity, create/delete/last-seen timestamps,
# path/name metadata, size, version info, inode/link counts, and
# md5/sha1/sha256 hashes.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_FILEDESCRIPTION = _descriptor.Descriptor(
  name='FileDescription',
  full_name='graph_description.FileDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.FileDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.FileDescription.asset_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.FileDescription.hostname', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.FileDescription.host_ip', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='graph_description.FileDescription.state', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_timestamp', full_name='graph_description.FileDescription.created_timestamp', index=5,
      number=6, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deleted_timestamp', full_name='graph_description.FileDescription.deleted_timestamp', index=6,
      number=7, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_timestamp', full_name='graph_description.FileDescription.last_seen_timestamp', index=7,
      number=8, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_name', full_name='graph_description.FileDescription.file_name', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_path', full_name='graph_description.FileDescription.file_path', index=9,
      number=10, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_extension', full_name='graph_description.FileDescription.file_extension', index=10,
      number=11, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_mime_type', full_name='graph_description.FileDescription.file_mime_type', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_size', full_name='graph_description.FileDescription.file_size', index=12,
      number=13, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_version', full_name='graph_description.FileDescription.file_version', index=13,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_description', full_name='graph_description.FileDescription.file_description', index=14,
      number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_product', full_name='graph_description.FileDescription.file_product', index=15,
      number=16, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_company', full_name='graph_description.FileDescription.file_company', index=16,
      number=17, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_directory', full_name='graph_description.FileDescription.file_directory', index=17,
      number=18, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_inode', full_name='graph_description.FileDescription.file_inode', index=18,
      number=19, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='file_hard_links', full_name='graph_description.FileDescription.file_hard_links', index=19,
      number=20, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='md5_hash', full_name='graph_description.FileDescription.md5_hash', index=20,
      number=21, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sha1_hash', full_name='graph_description.FileDescription.sha1_hash', index=21,
      number=22, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='sha256_hash', full_name='graph_description.FileDescription.sha256_hash', index=22,
      number=23, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Offsets into the serialized FileDescriptorProto; generated values.
  serialized_start=2054,
  serialized_end=2654,
)
# Descriptor for the `graph_description.IpAddressDescription` message: an IP
# address node (node_key, observation timestamp, ip_address, ip_proto).
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_IPADDRESSDESCRIPTION = _descriptor.Descriptor(
  name='IpAddressDescription',
  full_name='graph_description.IpAddressDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.IpAddressDescription.node_key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='graph_description.IpAddressDescription.timestamp', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ip_address', full_name='graph_description.IpAddressDescription.ip_address', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ip_proto', full_name='graph_description.IpAddressDescription.ip_proto', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2656,
  serialized_end=2753,
)
# Descriptor for the `graph_description.Session` message: a session-based node
# identity strategy (primary key properties/asset-id requirement plus
# created/last-seen/terminated times). Referenced by IdStrategy's oneof.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_SESSION = _descriptor.Descriptor(
  name='Session',
  full_name='graph_description.Session',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='primary_key_properties', full_name='graph_description.Session.primary_key_properties', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='primary_key_requires_asset_id', full_name='graph_description.Session.primary_key_requires_asset_id', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='created_time', full_name='graph_description.Session.created_time', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='last_seen_time', full_name='graph_description.Session.last_seen_time', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='terminated_time', full_name='graph_description.Session.terminated_time', index=4,
      number=5, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2756,
  serialized_end=2907,
)
# Descriptor for the `graph_description.Static` message: a static node identity
# strategy (primary key properties and whether an asset id is required).
# Referenced by IdStrategy's oneof alongside Session.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_STATIC = _descriptor.Descriptor(
  name='Static',
  full_name='graph_description.Static',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='primary_key_properties', full_name='graph_description.Static.primary_key_properties', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='primary_key_requires_asset_id', full_name='graph_description.Static.primary_key_requires_asset_id', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2909,
  serialized_end=2988,
)
# Descriptor for the `graph_description.IdStrategy` message: a oneof
# (`strategy`) choosing between a Session or Static identity strategy.
# The oneof membership of the two fields is wired up after construction.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_IDSTRATEGY = _descriptor.Descriptor(
  name='IdStrategy',
  full_name='graph_description.IdStrategy',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='session', full_name='graph_description.IdStrategy.session', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='static', full_name='graph_description.IdStrategy.static', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='strategy', full_name='graph_description.IdStrategy.strategy',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=2990,
  serialized_end=3106,
)
# Descriptor for the `graph_description.NodeProperty` message: a tagged value
# (oneof `property`) holding exactly one of an int, uint, or string property.
# Used as the value type of DynamicNode's `properties` map.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_NODEPROPERTY = _descriptor.Descriptor(
  name='NodeProperty',
  full_name='graph_description.NodeProperty',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='intprop', full_name='graph_description.NodeProperty.intprop', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='uintprop', full_name='graph_description.NodeProperty.uintprop', index=1,
      number=2, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='strprop', full_name='graph_description.NodeProperty.strprop', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='property', full_name='graph_description.NodeProperty.property',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=3108,
  serialized_end=3192,
)
# Descriptor for the synthesized map-entry type backing
# `DynamicNode.properties` (map<string, NodeProperty>). The serialized
# MessageOptions ('8\001') set map_entry=true.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_DYNAMICNODE_PROPERTIESENTRY = _descriptor.Descriptor(
  name='PropertiesEntry',
  full_name='graph_description.DynamicNode.PropertiesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='graph_description.DynamicNode.PropertiesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='graph_description.DynamicNode.PropertiesEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3527,
  serialized_end=3609,
)
# Descriptor for the `graph_description.DynamicNode` message: a schemaless node
# with a properties map, node_key/node_type, seen_at timestamp, host identity,
# and repeated IdStrategy entries. Nests the PropertiesEntry map type above.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_DYNAMICNODE = _descriptor.Descriptor(
  name='DynamicNode',
  full_name='graph_description.DynamicNode',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='properties', full_name='graph_description.DynamicNode.properties', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='node_key', full_name='graph_description.DynamicNode.node_key', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='node_type', full_name='graph_description.DynamicNode.node_type', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='seen_at', full_name='graph_description.DynamicNode.seen_at', index=3,
      number=4, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='asset_id', full_name='graph_description.DynamicNode.asset_id', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hostname', full_name='graph_description.DynamicNode.hostname', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='host_ip', full_name='graph_description.DynamicNode.host_ip', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='id_strategy', full_name='graph_description.DynamicNode.id_strategy', index=7,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_DYNAMICNODE_PROPERTIESENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3195,
  serialized_end=3609,
)
# Descriptor for the `graph_description.EdgeDescription` message: a directed,
# named edge between two node keys (`from` -> `to`, labelled `edgeName`).
# Note `from` is a Python keyword; generated message classes expose it via
# getattr/descriptor access rather than plain attribute syntax.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_EDGEDESCRIPTION = _descriptor.Descriptor(
  name='EdgeDescription',
  full_name='graph_description.EdgeDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='from', full_name='graph_description.EdgeDescription.from', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='to', full_name='graph_description.EdgeDescription.to', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='edgeName', full_name='graph_description.EdgeDescription.edgeName', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3611,
  serialized_end=3672,
)
# Descriptor for the `graph_description.EdgeList` message: a repeated
# collection of EdgeDescription values. Used as the value type of
# GraphDescription's `edges` map.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_EDGELIST = _descriptor.Descriptor(
  name='EdgeList',
  full_name='graph_description.EdgeList',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='edges', full_name='graph_description.EdgeList.edges', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3674,
  serialized_end=3735,
)
# Descriptor for the synthesized map-entry type backing
# `GraphDescription.nodes` (string key -> message value). The serialized
# MessageOptions ('8\001') set map_entry=true.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_GRAPHDESCRIPTION_NODESENTRY = _descriptor.Descriptor(
  name='NodesEntry',
  full_name='graph_description.GraphDescription.NodesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='graph_description.GraphDescription.NodesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='graph_description.GraphDescription.NodesEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3903,
  serialized_end=3983,
)
# Descriptor for the synthesized map-entry type backing
# `GraphDescription.edges` (string key -> message value). The serialized
# MessageOptions ('8\001') set map_entry=true.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_GRAPHDESCRIPTION_EDGESENTRY = _descriptor.Descriptor(
  name='EdgesEntry',
  full_name='graph_description.GraphDescription.EdgesEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='graph_description.GraphDescription.EdgesEntry.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='graph_description.GraphDescription.EdgesEntry.value', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3985,
  serialized_end=4058,
)
# Descriptor for the `graph_description.GraphDescription` message: a graph as
# two maps (nodes, edges) plus a timestamp. Nests the two map-entry types
# declared above.
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_GRAPHDESCRIPTION = _descriptor.Descriptor(
  name='GraphDescription',
  full_name='graph_description.GraphDescription',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='nodes', full_name='graph_description.GraphDescription.nodes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='edges', full_name='graph_description.GraphDescription.edges', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='timestamp', full_name='graph_description.GraphDescription.timestamp', index=2,
      number=3, type=4, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_GRAPHDESCRIPTION_NODESENTRY, _GRAPHDESCRIPTION_EDGESENTRY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=3738,
  serialized_end=4058,
)
# Descriptor for the `graph_description.GeneratedSubgraphs` message: a repeated
# collection of subgraph messages (batch container).
# NOTE(review): protoc-generated; regenerate from the .proto rather than edit.
_GENERATEDSUBGRAPHS = _descriptor.Descriptor(
  name='GeneratedSubgraphs',
  full_name='graph_description.GeneratedSubgraphs',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='subgraphs', full_name='graph_description.GeneratedSubgraphs.subgraphs', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4060,
  serialized_end=4136,
)
# ---------------------------------------------------------------------------
# protoc-generated wiring for graph_description.proto.  Everything below is
# machine-generated (oneof/field linking, message-class creation and
# registration); do not hand-edit -- regenerate from the .proto instead.
# ---------------------------------------------------------------------------
# Link oneof groups and cross-message field types into the raw descriptors.
_HOST.oneofs_by_name['host_id'].fields.append(
_HOST.fields_by_name['hostname'])
_HOST.fields_by_name['hostname'].containing_oneof = _HOST.oneofs_by_name['host_id']
_HOST.oneofs_by_name['host_id'].fields.append(
_HOST.fields_by_name['ip'])
_HOST.fields_by_name['ip'].containing_oneof = _HOST.oneofs_by_name['host_id']
_HOST.oneofs_by_name['host_id'].fields.append(
_HOST.fields_by_name['asset_id'])
_HOST.fields_by_name['asset_id'].containing_oneof = _HOST.oneofs_by_name['host_id']
_ASSETDESCRIPTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_domain'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_fqdn'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_local_mac'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_ASSETDESCRIPTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_NODEDESCRIPTION.fields_by_name['asset_node'].message_type = _ASSETDESCRIPTION
_NODEDESCRIPTION.fields_by_name['process_node'].message_type = _PROCESSDESCRIPTION
_NODEDESCRIPTION.fields_by_name['file_node'].message_type = _FILEDESCRIPTION
_NODEDESCRIPTION.fields_by_name['ip_address_node'].message_type = _IPADDRESSDESCRIPTION
_NODEDESCRIPTION.fields_by_name['outbound_connection_node'].message_type = _OUTBOUNDCONNECTION
_NODEDESCRIPTION.fields_by_name['inbound_connection_node'].message_type = _INBOUNDCONNECTION
_NODEDESCRIPTION.fields_by_name['dynamic_node'].message_type = _DYNAMICNODE
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['asset_node'])
_NODEDESCRIPTION.fields_by_name['asset_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['process_node'])
_NODEDESCRIPTION.fields_by_name['process_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['file_node'])
_NODEDESCRIPTION.fields_by_name['file_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['ip_address_node'])
_NODEDESCRIPTION.fields_by_name['ip_address_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['outbound_connection_node'])
_NODEDESCRIPTION.fields_by_name['outbound_connection_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['inbound_connection_node'])
_NODEDESCRIPTION.fields_by_name['inbound_connection_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_NODEDESCRIPTION.oneofs_by_name['which_node'].fields.append(
_NODEDESCRIPTION.fields_by_name['dynamic_node'])
_NODEDESCRIPTION.fields_by_name['dynamic_node'].containing_oneof = _NODEDESCRIPTION.oneofs_by_name['which_node']
_OUTBOUNDCONNECTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_OUTBOUNDCONNECTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_OUTBOUNDCONNECTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_INBOUNDCONNECTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_INBOUNDCONNECTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_INBOUNDCONNECTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROCESSDESCRIPTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROCESSDESCRIPTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_PROCESSDESCRIPTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_FILEDESCRIPTION.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_FILEDESCRIPTION.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_FILEDESCRIPTION.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_IDSTRATEGY.fields_by_name['session'].message_type = _SESSION
_IDSTRATEGY.fields_by_name['static'].message_type = _STATIC
_IDSTRATEGY.oneofs_by_name['strategy'].fields.append(
_IDSTRATEGY.fields_by_name['session'])
_IDSTRATEGY.fields_by_name['session'].containing_oneof = _IDSTRATEGY.oneofs_by_name['strategy']
_IDSTRATEGY.oneofs_by_name['strategy'].fields.append(
_IDSTRATEGY.fields_by_name['static'])
_IDSTRATEGY.fields_by_name['static'].containing_oneof = _IDSTRATEGY.oneofs_by_name['strategy']
_NODEPROPERTY.oneofs_by_name['property'].fields.append(
_NODEPROPERTY.fields_by_name['intprop'])
_NODEPROPERTY.fields_by_name['intprop'].containing_oneof = _NODEPROPERTY.oneofs_by_name['property']
_NODEPROPERTY.oneofs_by_name['property'].fields.append(
_NODEPROPERTY.fields_by_name['uintprop'])
_NODEPROPERTY.fields_by_name['uintprop'].containing_oneof = _NODEPROPERTY.oneofs_by_name['property']
_NODEPROPERTY.oneofs_by_name['property'].fields.append(
_NODEPROPERTY.fields_by_name['strprop'])
_NODEPROPERTY.fields_by_name['strprop'].containing_oneof = _NODEPROPERTY.oneofs_by_name['property']
_DYNAMICNODE_PROPERTIESENTRY.fields_by_name['value'].message_type = _NODEPROPERTY
_DYNAMICNODE_PROPERTIESENTRY.containing_type = _DYNAMICNODE
_DYNAMICNODE.fields_by_name['properties'].message_type = _DYNAMICNODE_PROPERTIESENTRY
_DYNAMICNODE.fields_by_name['asset_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DYNAMICNODE.fields_by_name['hostname'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DYNAMICNODE.fields_by_name['host_ip'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_DYNAMICNODE.fields_by_name['id_strategy'].message_type = _IDSTRATEGY
_EDGELIST.fields_by_name['edges'].message_type = _EDGEDESCRIPTION
_GRAPHDESCRIPTION_NODESENTRY.fields_by_name['value'].message_type = _NODEDESCRIPTION
_GRAPHDESCRIPTION_NODESENTRY.containing_type = _GRAPHDESCRIPTION
_GRAPHDESCRIPTION_EDGESENTRY.fields_by_name['value'].message_type = _EDGELIST
_GRAPHDESCRIPTION_EDGESENTRY.containing_type = _GRAPHDESCRIPTION
_GRAPHDESCRIPTION.fields_by_name['nodes'].message_type = _GRAPHDESCRIPTION_NODESENTRY
_GRAPHDESCRIPTION.fields_by_name['edges'].message_type = _GRAPHDESCRIPTION_EDGESENTRY
_GENERATEDSUBGRAPHS.fields_by_name['subgraphs'].message_type = _GRAPHDESCRIPTION
# Expose every message type on the file descriptor.
DESCRIPTOR.message_types_by_name['Host'] = _HOST
DESCRIPTOR.message_types_by_name['AssetDescription'] = _ASSETDESCRIPTION
DESCRIPTOR.message_types_by_name['NodeDescription'] = _NODEDESCRIPTION
DESCRIPTOR.message_types_by_name['OutboundConnection'] = _OUTBOUNDCONNECTION
DESCRIPTOR.message_types_by_name['InboundConnection'] = _INBOUNDCONNECTION
DESCRIPTOR.message_types_by_name['ProcessDescription'] = _PROCESSDESCRIPTION
DESCRIPTOR.message_types_by_name['FileDescription'] = _FILEDESCRIPTION
DESCRIPTOR.message_types_by_name['IpAddressDescription'] = _IPADDRESSDESCRIPTION
DESCRIPTOR.message_types_by_name['Session'] = _SESSION
DESCRIPTOR.message_types_by_name['Static'] = _STATIC
DESCRIPTOR.message_types_by_name['IdStrategy'] = _IDSTRATEGY
DESCRIPTOR.message_types_by_name['NodeProperty'] = _NODEPROPERTY
DESCRIPTOR.message_types_by_name['DynamicNode'] = _DYNAMICNODE
DESCRIPTOR.message_types_by_name['EdgeDescription'] = _EDGEDESCRIPTION
DESCRIPTOR.message_types_by_name['EdgeList'] = _EDGELIST
DESCRIPTOR.message_types_by_name['GraphDescription'] = _GRAPHDESCRIPTION
DESCRIPTOR.message_types_by_name['GeneratedSubgraphs'] = _GENERATEDSUBGRAPHS
# Materialize the concrete Python message classes and register them with the
# symbol database (nested map-entry classes are created inline).
Host = _reflection.GeneratedProtocolMessageType('Host', (_message.Message,), dict(
DESCRIPTOR = _HOST,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.Host)
))
_sym_db.RegisterMessage(Host)
AssetDescription = _reflection.GeneratedProtocolMessageType('AssetDescription', (_message.Message,), dict(
DESCRIPTOR = _ASSETDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.AssetDescription)
))
_sym_db.RegisterMessage(AssetDescription)
NodeDescription = _reflection.GeneratedProtocolMessageType('NodeDescription', (_message.Message,), dict(
DESCRIPTOR = _NODEDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.NodeDescription)
))
_sym_db.RegisterMessage(NodeDescription)
OutboundConnection = _reflection.GeneratedProtocolMessageType('OutboundConnection', (_message.Message,), dict(
DESCRIPTOR = _OUTBOUNDCONNECTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.OutboundConnection)
))
_sym_db.RegisterMessage(OutboundConnection)
InboundConnection = _reflection.GeneratedProtocolMessageType('InboundConnection', (_message.Message,), dict(
DESCRIPTOR = _INBOUNDCONNECTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.InboundConnection)
))
_sym_db.RegisterMessage(InboundConnection)
ProcessDescription = _reflection.GeneratedProtocolMessageType('ProcessDescription', (_message.Message,), dict(
DESCRIPTOR = _PROCESSDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.ProcessDescription)
))
_sym_db.RegisterMessage(ProcessDescription)
FileDescription = _reflection.GeneratedProtocolMessageType('FileDescription', (_message.Message,), dict(
DESCRIPTOR = _FILEDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.FileDescription)
))
_sym_db.RegisterMessage(FileDescription)
IpAddressDescription = _reflection.GeneratedProtocolMessageType('IpAddressDescription', (_message.Message,), dict(
DESCRIPTOR = _IPADDRESSDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.IpAddressDescription)
))
_sym_db.RegisterMessage(IpAddressDescription)
Session = _reflection.GeneratedProtocolMessageType('Session', (_message.Message,), dict(
DESCRIPTOR = _SESSION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.Session)
))
_sym_db.RegisterMessage(Session)
Static = _reflection.GeneratedProtocolMessageType('Static', (_message.Message,), dict(
DESCRIPTOR = _STATIC,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.Static)
))
_sym_db.RegisterMessage(Static)
IdStrategy = _reflection.GeneratedProtocolMessageType('IdStrategy', (_message.Message,), dict(
DESCRIPTOR = _IDSTRATEGY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.IdStrategy)
))
_sym_db.RegisterMessage(IdStrategy)
NodeProperty = _reflection.GeneratedProtocolMessageType('NodeProperty', (_message.Message,), dict(
DESCRIPTOR = _NODEPROPERTY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.NodeProperty)
))
_sym_db.RegisterMessage(NodeProperty)
DynamicNode = _reflection.GeneratedProtocolMessageType('DynamicNode', (_message.Message,), dict(
PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict(
DESCRIPTOR = _DYNAMICNODE_PROPERTIESENTRY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.DynamicNode.PropertiesEntry)
))
,
DESCRIPTOR = _DYNAMICNODE,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.DynamicNode)
))
_sym_db.RegisterMessage(DynamicNode)
_sym_db.RegisterMessage(DynamicNode.PropertiesEntry)
EdgeDescription = _reflection.GeneratedProtocolMessageType('EdgeDescription', (_message.Message,), dict(
DESCRIPTOR = _EDGEDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.EdgeDescription)
))
_sym_db.RegisterMessage(EdgeDescription)
EdgeList = _reflection.GeneratedProtocolMessageType('EdgeList', (_message.Message,), dict(
DESCRIPTOR = _EDGELIST,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.EdgeList)
))
_sym_db.RegisterMessage(EdgeList)
GraphDescription = _reflection.GeneratedProtocolMessageType('GraphDescription', (_message.Message,), dict(
NodesEntry = _reflection.GeneratedProtocolMessageType('NodesEntry', (_message.Message,), dict(
DESCRIPTOR = _GRAPHDESCRIPTION_NODESENTRY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GraphDescription.NodesEntry)
))
,
EdgesEntry = _reflection.GeneratedProtocolMessageType('EdgesEntry', (_message.Message,), dict(
DESCRIPTOR = _GRAPHDESCRIPTION_EDGESENTRY,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GraphDescription.EdgesEntry)
))
,
DESCRIPTOR = _GRAPHDESCRIPTION,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GraphDescription)
))
_sym_db.RegisterMessage(GraphDescription)
_sym_db.RegisterMessage(GraphDescription.NodesEntry)
_sym_db.RegisterMessage(GraphDescription.EdgesEntry)
GeneratedSubgraphs = _reflection.GeneratedProtocolMessageType('GeneratedSubgraphs', (_message.Message,), dict(
DESCRIPTOR = _GENERATEDSUBGRAPHS,
__module__ = 'graph_description_pb2'
# @@protoc_insertion_point(class_scope:graph_description.GeneratedSubgraphs)
))
_sym_db.RegisterMessage(GeneratedSubgraphs)
# Mark the map-entry helper messages with the map_entry option ('8\001').
_DYNAMICNODE_PROPERTIESENTRY.has_options = True
_DYNAMICNODE_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_GRAPHDESCRIPTION_NODESENTRY.has_options = True
_GRAPHDESCRIPTION_NODESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
_GRAPHDESCRIPTION_EDGESENTRY.has_options = True
_GRAPHDESCRIPTION_EDGESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
| 0 | 0 | 0 |
4a803b6a4b7a187c5312dd0b738e8e9913675a77 | 6,306 | py | Python | Codes/Version 1.7.6/force_raised_gaussian.py | zaman13/Brownian-dynamics-in-a-time-varying-force-field | 1dce268fcc4f27e066be0ec0b511178cbc1437c5 | [
"MIT"
] | 3 | 2022-01-05T10:25:01.000Z | 2022-03-11T17:19:39.000Z | Codes/Version 1.7.6/force_raised_gaussian.py | zaman13/Brownian-dynamics-in-a-time-varying-force-field | 1dce268fcc4f27e066be0ec0b511178cbc1437c5 | [
"MIT"
] | null | null | null | Codes/Version 1.7.6/force_raised_gaussian.py | zaman13/Brownian-dynamics-in-a-time-varying-force-field | 1dce268fcc4f27e066be0ec0b511178cbc1437c5 | [
"MIT"
] | 2 | 2021-10-16T16:04:29.000Z | 2022-01-17T09:14:12.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on March 6, 2021
@author: Mohammad Asif Zaman
- April 10, 2021
- Added optical spot ON-OFF text
"""
import numpy as np
import pylab as py
import matplotlib as plt
from parameters import *
# Module global Parameters:
# =============================================================================
# Force parameters
r_active = 0         # not referenced in the visible code of this module -- TODO confirm use
n_order = 1          # Order of the Gaussian potential = 2n (super-Gaussian exponent)
w_well = 10e-6       # 1/e *max width of the potential well (positions elsewhere are scaled by 1e6 to microns)
A_well = 4000*k_B*T  # well depth (k_B and T come from `parameters`)
# Particle parameters (number and raidus array)
# =============================================================================
# def draw_yz(tm):
# substrate_yz = py.Rectangle((-xrange_limit*1e6, zlow_limit*1e6),2*xrange_limit*1e6, abs(zlow_limit)*1e6,fc='#d4d4d4', ec='k')
# py.gca().add_patch(substrate_yz)
# def draw_xz(tm):
# substrate_xz = py.Rectangle((-xrange_limit*1e6, zlow_limit*1e6),2*xrange_limit*1e6, abs(zlow_limit)*1e6,fc='#d4d4d4', ec='k')
# py.gca().add_patch(substrate_xz)
# This is function that is called from the main program
# Simplified spring force model
# force_plot()
# draw_source(9)
| 29.194444 | 131 | 0.584047 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on March 6, 2021
@author: Mohammad Asif Zaman
- April 10, 2021
- Added optical spot ON-OFF text
"""
import numpy as np
import pylab as py
import matplotlib as plt
from parameters import *
# Module global Parameters:
# =============================================================================
# Force parameters
r_active = 0         # not referenced in the visible code of this module -- TODO confirm use
n_order = 1          # Order of the Gaussian potential = 2n (super-Gaussian exponent)
w_well = 10e-6       # 1/e *max width of the potential well (positions elsewhere are scaled by 1e6 to microns)
A_well = 4000*k_B*T  # well depth (k_B and T come from `parameters`)
# Particle parameters (number and raidus array)
def time_pos_ax_limits():
    """Simulation setup: particle set, run time and plot/initialization limits.

    Returns:
        tuple: (Np, ro, tfinal, xrange_limit, zlow_limit, zhigh_limit,
        xi_lim, yi_lim, zi_lim) -- particle count, per-particle radii,
        total run time, animation axis limits, and the [min, max] bounds
        used for the initial particle positions.
    """
    # Four particles, 2e-6 radius by default; the first two are overridden.
    Np = 4
    ro = 2e-6 + np.zeros(Np)
    ro[0], ro[1] = 1.5e-6, 2.5e-6

    # Total simulated time.
    tfinal = 12

    # Animation axis limits.
    xrange_limit = 30e-6
    zlow_limit = -5e-6
    zhigh_limit = 25e-6

    # Bounds for the initial particle positions; z starts at least 1.5x the
    # largest radius above zero.
    xi_lim = [-10e-6, 10e-6]
    yi_lim = [-10e-6, 10e-6]
    zi_lim = [1.5 * max(ro), 30e-6]

    return (Np, ro, tfinal, xrange_limit, zlow_limit, zhigh_limit,
            xi_lim, yi_lim, zi_lim)
# =============================================================================
def draw_static_geo(ax_xy, ax_yz, ax_xz):
    """Draw the static scene (substrate patches) on the three projection axes.

    Returns 0, matching the original convention.
    """
    # Only the axis limits are used here; the rest of the tuple is unpacked
    # for completeness.
    (Np, ro, tfinal, xrange_limit, zlow_limit,
     zhigh_limit, xi_lim, yi_lim, zi_lim) = time_pos_ax_limits()

    x_um = xrange_limit * 1e6   # half-width of the plot range, in plot units
    z_um = zlow_limit * 1e6     # top of the substrate (negative)

    # Substrate slab in the y-z view.
    py.gcf().sca(ax_yz)
    py.gca().add_patch(py.Rectangle((-x_um, z_um), 2 * x_um, abs(z_um),
                                    fc='#d4d4d4', ec='k'))

    # Substrate slab in the x-z view.
    py.gcf().sca(ax_xz)
    py.gca().add_patch(py.Rectangle((-x_um, z_um), 2 * x_um, abs(z_um),
                                    fc='#d4d4d4', ec='k'))

    # Background square in the x-y (top) view; left as the current axes.
    py.gcf().sca(ax_xy)
    py.gca().add_patch(py.Rectangle((-x_um, -x_um), 2 * x_um, 2 * x_um,
                                    fc='#f9f9f9'))
    return 0
def draw_dynamic_geo(ax_xy, ax_yz, ax_xz):
    """Draw the time-dependent geometry: the optical spot in the x-y view.

    The spot radius is half the 1/e well width (scaled by 1e6 to plot units).
    The yz/xz views are untouched.  Returns 0, matching the original
    convention.
    """
    spot_xy = py.Circle((0, 0), 0.5 * w_well * 1e6, fc='#ff8c00', alpha=0.8)
    py.gcf().sca(ax_xy)
    py.gca().add_patch(spot_xy)
    return 0
def draw_geo(tm, ax_xy, ax_yz, ax_xz):
    """Update the scene for animation time tm; returns the (str1, str2) labels.

    Module-level flags ensure the static substrate is drawn once and the
    optical spot is drawn/erased only at the ON/OFF transitions, so the
    animation is not slowed down by redrawing every frame.
    """
    # March 7, 2021
    # The flag_source_state variable is used to draw/erase the source geometry only once
    # This is necessary to speed up the animation.
    global flag_source_state_1 # Make this variable global so that the assigned value remains saved globally as t changes
    global flag_source_state_2
    global str1
    global str2

    if 'flag_source_state_1' not in globals():
        global flag_source_state # Make this variable global so that the assigned value remains saved globally as t changes
        # NOTE(review): 'flag_source_state' (no suffix) is declared global but
        # never assigned -- looks like a leftover from a single-flag version;
        # confirm before removing.
        flag_source_state_1 = 0 # initialize with OFF state
        print('Defining global flag for source geometry \n')

    if 'flag_source_state_2' not in globals():
        global flag_source_state # Make this variable global so that the assigned value remains saved globally as t changes
        flag_source_state_2 = 0 # initialize with OFF state
        print('Defining global flag for source geometry \n')

    # Draw static geometry (only once)
    if flag_source_state_2 < 1:
        draw_static_geo(ax_xy, ax_yz, ax_xz)
        flag_source_state_2 = 1

    # Draw source once when entering the beam-ON window 1 < tm < 8.
    # (bitwise '&' on the boolean comparisons -- fine for a scalar tm)
    if (tm > 1) & (tm < 8) & (flag_source_state_1 < 1):
        draw_dynamic_geo(ax_xy, ax_yz, ax_xz)
        str1 = 'Optical beam ON'
        str2 = ''
        # text_string2.set_text(str2)
        # ax_xy.text(0.05, 0.8, 'Optical spot ON',color = '#FF0000',transform=ax_xy.transAxes)
        flag_source_state_1 = 1
        print('Drawing source\n')

    # Erase source (draw a white circle); radius 0.51*w_well is slightly
    # larger than the ON spot so it is fully covered.
    if (tm > 8) & (flag_source_state_1 == 1):
        patch_spot = py.Circle((0, 0), 0.51*w_well*1e6, fc='#f9f9f9',alpha = 1)
        py.gca().add_patch(patch_spot)
        str1 = 'Optical beam OFF'
        str2 = ''
        # ax_xy.text(0.05, 0.8, 'Optical spot ON',color = '#f9f9f9', transform=ax_xy.transAxes)
        # ax_xy.text(0.05, 0.8, 'Optical spot OFF',color = '#FF0000',transform=ax_xy.transAxes)
        print('Erasing source\n')
        flag_source_state_1 = 0

    # Before the first ON transition str1/str2 may not exist yet.
    if 'str1' not in globals():
        str1 = ''
        str2 = ''

    return str1, str2
# def draw_yz(tm):
# substrate_yz = py.Rectangle((-xrange_limit*1e6, zlow_limit*1e6),2*xrange_limit*1e6, abs(zlow_limit)*1e6,fc='#d4d4d4', ec='k')
# py.gca().add_patch(substrate_yz)
# def draw_xz(tm):
# substrate_xz = py.Rectangle((-xrange_limit*1e6, zlow_limit*1e6),2*xrange_limit*1e6, abs(zlow_limit)*1e6,fc='#d4d4d4', ec='k')
# py.gca().add_patch(substrate_xz)
# This is function that is called from the main program
# Simplified spring force model
def force_profile(r_in, t):
    """Force of the (super-)Gaussian optical trap on each particle.

    Args:
        r_in: 3 x Np array of particle positions (rows x, y, z).
        t:    current time; the beam is only active for 1 < t < 8.

    Returns:
        3 x Np array of force components; all zeros while the beam is off.
    """
    n_particles = r_in[0, :].size
    fm = np.zeros((3, n_particles))

    # Distance of each particle from the trap centre; the tiny offset keeps
    # the division below finite at r = 0.
    r_norm = np.linalg.norm(r_in, axis=0) + 1e-30
    # Well profile g(r) = A_well * exp(-(r/w_well)^(2*n_order)).
    g = A_well * np.exp(-(r_norm / w_well) ** (2 * n_order))

    if (t > 1) & (t < 8):
        shape_term = (r_norm / w_well) ** (2 * n_order)
        # Restoring force component by component (gradient of the well).
        for k in range(3):
            fm[k, :] = -2 * n_order * r_in[k, :] / (r_norm ** 2) * shape_term * g

    return fm
def force_plot():
    """Plot the x-component of the trap force along a line through the origin.

    Samples 200 points over [-xrange_limit, xrange_limit] (xrange_limit is
    expected from `parameters` -- TODO confirm) at t = 2, when the beam is ON.
    """
    Np = 1
    rin = np.zeros((3, Np))   # NOTE(review): unused in the original; kept for parity
    # Same coordinate sampled on all three axes.
    r_in = np.tile(np.linspace(-xrange_limit, xrange_limit, 200), (3, 1))
    F = force_profile(r_in, 2)

    py.figure()
    py.plot(r_in[0, :] * 1e6, F[0, :] * 1e12, label='$F_x$')
    py.xlabel('$x$ ($\mu$m)')
    py.ylabel('Force (pN)')
    py.legend()
# force_plot()
# draw_source(9)
| 4,913 | 0 | 140 |
5b645b173367da9fc04c68cc9f6b37011b4da840 | 402 | py | Python | scripts/dates.py | b-nroths/chi-data | 5f9f826d550726abbdf76fa85e374f4dd0b88a83 | [
"Xnet",
"X11"
] | null | null | null | scripts/dates.py | b-nroths/chi-data | 5f9f826d550726abbdf76fa85e374f4dd0b88a83 | [
"Xnet",
"X11"
] | null | null | null | scripts/dates.py | b-nroths/chi-data | 5f9f826d550726abbdf76fa85e374f4dd0b88a83 | [
"Xnet",
"X11"
] | null | null | null | import datetime
import random
import json

# Scratch script (Python 2 era).  The commented-out block below once generated
# ~100 years of daily dates with random integer values and dumped the mapping
# to test.json.
# base = datetime.datetime.today()
# date_list = [base - datetime.timedelta(days=x) for x in range(0, 100*365)]
# lt = {}
# for d in date_list:
# 	print str(d)[:10]
# 	lt[str(d)[:10]] = int(10000*random.random())
# with open('test.json', 'w') as f:
# 	f.write(json.dumps(lt))

# Read untitled.json so its entry count can be printed afterwards.
with open('untitled.json') as f:
    text = f.read()
print len(json.loads(text)) | 19.142857 | 76 | 0.646766 | import datetime
import random
import json

# Scratch script (Python 2 era).  The commented-out block below once generated
# ~100 years of daily dates with random integer values and dumped the mapping
# to test.json.
# base = datetime.datetime.today()
# date_list = [base - datetime.timedelta(days=x) for x in range(0, 100*365)]
# lt = {}
# for d in date_list:
# 	print str(d)[:10]
# 	lt[str(d)[:10]] = int(10000*random.random())
# with open('test.json', 'w') as f:
# 	f.write(json.dumps(lt))

# Read untitled.json so its entry count can be printed afterwards.
with open('untitled.json') as f:
    text = f.read()
print len(json.loads(text)) | 0 | 0 | 0 |
12d560898c0e8d5ac027e35499e8617a57312f55 | 2,833 | py | Python | corehq/apps/integration/forms.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2020-05-05T13:10:01.000Z | 2020-05-05T13:10:01.000Z | corehq/apps/integration/forms.py | kkrampa/commcare-hq | d64d7cad98b240325ad669ccc7effb07721b4d44 | [
"BSD-3-Clause"
] | 1 | 2019-12-09T14:00:14.000Z | 2019-12-09T14:00:14.000Z | corehq/apps/integration/forms.py | MaciejChoromanski/commcare-hq | fd7f65362d56d73b75a2c20d2afeabbc70876867 | [
"BSD-3-Clause"
] | 5 | 2015-11-30T13:12:45.000Z | 2019-07-01T19:27:07.000Z | from __future__ import absolute_import
from __future__ import unicode_literals
from memoized import memoized
from django import forms
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
from crispy_forms.layout import Submit
from crispy_forms import layout as crispy
from corehq.apps.hqwebapp import crispy as hqcrispy
from corehq.apps.integration.models import SimprintsIntegration
| 34.54878 | 81 | 0.641723 | from __future__ import absolute_import
from __future__ import unicode_literals
from memoized import memoized
from django import forms
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
from crispy_forms.layout import Submit
from crispy_forms import layout as crispy
from corehq.apps.hqwebapp import crispy as hqcrispy
from corehq.apps.integration.models import SimprintsIntegration
class SimprintsIntegrationForm(forms.Form):
    """Domain-settings form that toggles and configures Simprints integration.

    Values are persisted on a per-domain ``SimprintsIntegration`` record; the
    project/user/module inputs are shown (via knockout bindings) only while
    the integration checkbox is ticked.
    """
    is_enabled = forms.BooleanField(
        label=ugettext_noop("Enable Simprints Integration"),
        required=False
    )
    project_id = forms.CharField(
        label=ugettext_noop("Project ID"),
        required=False,
    )
    user_id = forms.CharField(
        label=ugettext_noop("User ID"),
        required=False,
    )
    module_id = forms.CharField(
        label=ugettext_noop("Module ID"),
        required=False,
    )

    def __init__(self, data, *args, **kwargs):
        # 'domain' is supplied by the caller and must not reach forms.Form.
        self._domain = kwargs.pop('domain')
        super(SimprintsIntegrationForm, self).__init__(data, *args, **kwargs)

        toggle_row = hqcrispy.B3MultiField(
            _("Simprints Integration"),
            hqcrispy.InlineField(
                'is_enabled', data_bind="checked: isEnabled"
            ),
        )
        # Hidden unless the checkbox is ticked (knockout 'visible' binding).
        detail_rows = crispy.Div(
            crispy.Field('project_id', data_bind="value: projectId"),
            crispy.Field('user_id', data_bind="value: userId"),
            crispy.Field('module_id', data_bind="value: moduleId"),
            data_bind="visible: isEnabled"
        )
        actions = hqcrispy.FormActions(
            crispy.ButtonHolder(
                Submit('submit', ugettext_lazy("Update"))
            )
        )

        self.helper = hqcrispy.HQFormHelper()
        self.helper.form_method = 'POST'
        self.helper.layout = crispy.Layout(toggle_row, detail_rows, actions)

    @property
    @memoized
    def _existing_integration(self):
        """Fetch (creating on first access) the domain's integration record."""
        existing, _created = SimprintsIntegration.objects.get_or_create(
            domain=self._domain,
        )
        return existing

    @property
    def initial_data(self):
        """Initial form values taken from the stored integration record."""
        record = self._existing_integration
        return {
            'is_enabled': record.is_enabled,
            'project_id': record.project_id,
            'user_id': record.user_id or "global_user",
            'module_id': record.module_id or "global_module",
        }

    def save(self):
        """Copy the cleaned form values onto the stored record and persist it."""
        record = self._existing_integration
        record.is_enabled = self.cleaned_data['is_enabled']
        record.project_id = self.cleaned_data['project_id']
        record.user_id = self.cleaned_data['user_id']
        record.module_id = self.cleaned_data['module_id']
        record.save()
| 1,782 | 612 | 23 |
0f47ec33488f216aeafa47b0d35b9f11dcbe92c3 | 1,111 | py | Python | hypha/apply/categories/migrations/0002_metacategory.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | [
"BSD-3-Clause"
] | 20 | 2021-04-08T16:38:49.000Z | 2022-02-09T20:05:57.000Z | hypha/apply/categories/migrations/0002_metacategory.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | [
"BSD-3-Clause"
] | 1,098 | 2017-12-15T11:23:03.000Z | 2020-01-24T07:58:07.000Z | hypha/apply/categories/migrations/0002_metacategory.py | maxpearl/hypha | e181ebadfb744aab34617bb766e746368d6f2de0 | [
"BSD-3-Clause"
] | 17 | 2020-02-07T14:55:54.000Z | 2021-04-04T19:32:38.000Z | # Generated by Django 2.0.9 on 2019-02-22 15:06
from django.db import migrations, models
import wagtail.search.index
| 35.83871 | 123 | 0.586859 | # Generated by Django 2.0.9 on 2019-02-22 15:06
from django.db import migrations, models
import wagtail.search.index
class Migration(migrations.Migration):
dependencies = [
('categories', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='MetaCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('path', models.CharField(max_length=255, unique=True)),
('depth', models.PositiveIntegerField()),
('numchild', models.PositiveIntegerField(default=0)),
('name', models.CharField(help_text='Keep the name short, ideally one word.', max_length=50, unique=True)),
('node_order_index', models.IntegerField(blank=True, default=0, editable=False)),
],
options={
'verbose_name': 'Meta Category',
'verbose_name_plural': 'Meta Categories',
},
bases=(wagtail.search.index.Indexed, models.Model),
),
]
| 0 | 969 | 23 |
f81186549a0f6e62de9f8cb0490a4ad6e290c2fd | 4,565 | py | Python | Clases/Soledad Zambrano/class.py | lucrohatsch/grupo-5 | be6a1dd4322b7c2ade2d80ab1b45b1df7fe6cb47 | [
"MIT"
] | null | null | null | Clases/Soledad Zambrano/class.py | lucrohatsch/grupo-5 | be6a1dd4322b7c2ade2d80ab1b45b1df7fe6cb47 | [
"MIT"
] | 1 | 2021-11-16T00:53:19.000Z | 2021-11-16T00:53:19.000Z | Clases/Soledad Zambrano/class.py | lucrohatsch/grupo-5 | be6a1dd4322b7c2ade2d80ab1b45b1df7fe6cb47 | [
"MIT"
] | 3 | 2021-11-15T22:18:31.000Z | 2021-11-15T23:23:52.000Z | from random import randint
#con init clases de numeros complejos
#clase con init
# complejo=Complejo(3,4)
# print(complejo.getimaginary(9,3))
# print(complejo.suma(3,6))
# print(complejo.resta(3,6))
# print(complejo.multiplicacion(3,6))
# print(complejo.division(3,6))
#clase sin init de numero complejo
# class Complejo:
# def getimaginary(self,r,i):
# return str(r)+ "+" + str(i)+"i"
# def suma(self,x,y):
# real=int(x[0:1])+ int(y[0:1])
# imaginary=int(x[2:])+int(y[2:])
# complejo=str(real)+"+"+str(imaginary)+self.i
# return (complejo)
# complejo=Complejo()
# print(complejo.getimaginary(9,3))
# numero1=input("ingrese un numero complejo para sumar")
# numero2=input("ingrese el segundo valor")
# print(complejo.suma(numero1,numero2))
#idem para el resto de operaciones, pedir valores
#ejercicio 2 clase vector de 3 dimensiones
# vector=Vector()
# vec1=input("ingrese un vector ejemplo a,b,c")
# vec2=input("ingree otro vector")
# num=input("ingrese un numero para multiplicar y/o dividir")
# print(vector.suma(vec1,vec2))
# print(vector.resta(vec1,vec2))
# print(vector.multiplicacion(vec1,num))
# print(vector.dividir(vec1,num))
#ejercicio3 clases matrices
# def multiplicacion(self,vec):

# Interactive demo: build two random square matrices, show them, then print
# their element-wise sum row by row (to match the representation above).
dimension = int(input('Ingrese dimensión de la matriz cuadrada: '))
dimensionSegunda = int(input('Ingrese dimensión de la matriz cuadrada: '))
matriz=Matriz(dimension)
matrizSegunda=Matriz(dimensionSegunda)
# print(matriz)
matriz.representacion()
print('\n *********** \n')
matrizSegunda.representacion()
print('\n *********** \n')
# print the sum in the same row-per-line format as the matrices above
# NOTE(review): if the dimensions differ, sumaMatriz returns an error *string*
# and this loop would print it character by character.
suma = matriz.sumaMatriz(matrizSegunda)
for fila in suma:
    print(fila)
| 28.179012 | 75 | 0.548521 | from random import randint
#con init clases de numeros complejos
class Complejo:
    """Toy complex-number class storing a real and an imaginary component.

    Each operation takes the components (x, y) of a second number, combines
    them with this instance's components, and returns an "a+bi" string.
    NOTE(review): multiplicacion/division operate component-wise, which is
    not true complex multiplication/division -- kept as-is to preserve the
    original exercise's behaviour.
    """

    def __init__(self, real, imaginary):
        self.real = real
        self.imaginary = imaginary
        self.i = "i"

    def _formato(self, r, im):
        # Render components in the same "a+bi" shape used by getimaginary().
        return str(r) + "+" + str(im) + self.i

    def getimaginary(self, r, i):
        """Return the string "r+ii" for arbitrary components."""
        return str(r) + "+" + str(i) + "i"

    def suma(self, x, y):
        """Component-wise sum.  BUGFIX: the original omitted the '+'
        separator, producing e.g. '59i' instead of '5+9i'."""
        return self._formato(x + self.real, y + self.imaginary)

    def resta(self, x, y):
        """Component-wise difference (x - real, y - imaginary)."""
        return self._formato(x - self.real, y - self.imaginary)

    def multiplicacion(self, x, y):
        """Component-wise product (see class NOTE)."""
        return self._formato(x * self.real, y * self.imaginary)

    def division(self, x, y):
        """Component-wise true division; components render as floats."""
        return self._formato(x / self.real, y / self.imaginary)
#clase con init
# complejo=Complejo(3,4)
# print(complejo.getimaginary(9,3))
# print(complejo.suma(3,6))
# print(complejo.resta(3,6))
# print(complejo.multiplicacion(3,6))
# print(complejo.division(3,6))
#clase sin init de numero complejo
# class Complejo:
# def getimaginary(self,r,i):
# return str(r)+ "+" + str(i)+"i"
# def suma(self,x,y):
# real=int(x[0:1])+ int(y[0:1])
# imaginary=int(x[2:])+int(y[2:])
# complejo=str(real)+"+"+str(imaginary)+self.i
# return (complejo)
# complejo=Complejo()
# print(complejo.getimaginary(9,3))
# numero1=input("ingrese un numero complejo para sumar")
# numero2=input("ingrese el segundo valor")
# print(complejo.suma(numero1,numero2))
#idem para el resto de operaciones, pedir valores
#ejercicio 2 clase vector de 3 dimensiones
class Vector:
    """Arithmetic on 3-D vectors encoded as comma-separated strings "a,b,c".

    All methods return the result in the same "x,y,z" string format.
    """

    @staticmethod
    def _parse(v):
        # BUGFIX/generalization: the original sliced fixed character positions
        # (v[0:1], v[2:3], v[4:]), which breaks for multi-digit or negative
        # components.  Splitting on ',' is backward compatible for the
        # single-digit inputs the original handled.
        partes = v.split(",")
        return int(partes[0]), int(partes[1]), int(partes[2])

    @staticmethod
    def _render(x, y, z):
        return str(x) + "," + str(y) + "," + str(z)

    def suma(self, v1, v2):
        """Component-wise sum of two vector strings."""
        ax, ay, az = self._parse(v1)
        bx, by, bz = self._parse(v2)
        return self._render(ax + bx, ay + by, az + bz)

    def resta(self, v1, v2):
        """Component-wise difference v1 - v2."""
        ax, ay, az = self._parse(v1)
        bx, by, bz = self._parse(v2)
        return self._render(ax - bx, ay - by, az - bz)

    def multiplicacion(self, v1, num):
        """Scale v1 by the integer in `num` (string or number)."""
        x, y, z = self._parse(v1)
        n = int(num)
        return self._render(x * n, y * n, z * n)

    def dividir(self, v1, num):
        """Divide v1 by the integer in `num`; components render as floats."""
        x, y, z = self._parse(v1)
        n = int(num)
        return self._render(x / n, y / n, z / n)
# vector=Vector()
# vec1=input("ingrese un vector ejemplo a,b,c")
# vec2=input("ingree otro vector")
# num=input("ingrese un numero para multiplicar y/o dividir")
# print(vector.suma(vec1,vec2))
# print(vector.resta(vec1,vec2))
# print(vector.multiplicacion(vec1,num))
# print(vector.dividir(vec1,num))
#ejercicio3 clases matrices
class Matriz:
    """Square matrix of random integers in [1, 10] (as a list of row lists).

    sumaMatriz/restaMatriz combine two matrices element-wise, returning a new
    list of rows, or an error string when the dimensions differ.
    """

    def __init__(self, dim):
        # Build a dim x dim matrix of random integers in [1, 10].
        matriz = []
        for fila in range(dim):
            nuevaC = []
            for columna in range(dim):
                nuevaC.append(randint(1, 10))
            matriz.append(nuevaC)
        self.m = matriz

    def representacion(self):
        """Print the matrix one row per line."""
        for fila in self.m:
            print(fila)

    def _dimensiones_distintas(self, m1):
        # True when the two matrices cannot be combined element-wise.
        return len(self.m) != len(m1.m) or len(self.m[0]) != len(m1.m[0])

    def sumaMatriz(self, m1):
        """Element-wise sum; returns an error string on dimension mismatch.

        BUGFIX: the original only reported a mismatch when *both* the row
        count and the column count differed (nested ifs), so a row-count-only
        mismatch fell through and raised IndexError.
        """
        if self._dimensiones_distintas(m1):
            return('Las dimensiones de sus matrices no son iguales')
        matriz = []
        for i in range(len(self.m)):
            matriz.append([self.m[i][j] + m1.m[i][j]
                           for j in range(len(self.m[0]))])
        return matriz

    def restaMatriz(self, m1):
        """Element-wise difference; same contract (and fix) as sumaMatriz."""
        if self._dimensiones_distintas(m1):
            return('Las dimensiones de sus matrices no son iguales')
        matriz = []
        for i in range(len(self.m)):
            matriz.append([self.m[i][j] - m1.m[i][j]
                           for j in range(len(self.m[0]))])
        return matriz
# def multiplicacion(self,vec):

# Interactive demo: build two random square matrices, show them, then print
# their element-wise sum row by row (to match the representation above).
dimension = int(input('Ingrese dimensión de la matriz cuadrada: '))
dimensionSegunda = int(input('Ingrese dimensión de la matriz cuadrada: '))
matriz=Matriz(dimension)
matrizSegunda=Matriz(dimensionSegunda)
# print(matriz)
matriz.representacion()
print('\n *********** \n')
matrizSegunda.representacion()
print('\n *********** \n')
# print the sum in the same row-per-line format as the matrices above
# NOTE(review): if the dimensions differ, sumaMatriz returns an error *string*
# and this loop would print it character by character.
suma = matriz.sumaMatriz(matrizSegunda)
for fila in suma:
    print(fila)
| 2,259 | -22 | 478 |
d216db91805a0649ebde91802222cf781d19168b | 1,090 | py | Python | pandas/main.py | monishshah18/python-cp-cheatsheet | a5514b08816959de1198156f7764c54a7a585f20 | [
"Apache-2.0"
] | 140 | 2020-10-21T13:23:52.000Z | 2022-03-31T15:09:45.000Z | pandas/main.py | stacykutyepov/python-cp-cheatsheet | a00a57e1b36433648d1cace331e15ff276cef189 | [
"Apache-2.0"
] | 1 | 2021-07-22T14:01:25.000Z | 2021-07-22T14:01:25.000Z | pandas/main.py | stacykutyepov/python-cp-cheatsheet | a00a57e1b36433648d1cace331e15ff276cef189 | [
"Apache-2.0"
] | 33 | 2020-10-21T14:17:02.000Z | 2022-03-25T11:25:03.000Z | """
Summarize a column total cases column and total deaths column
Country by country data in columns, sum up and match global totals
"""
import csv
import pandas
pandas.set_option("display.max_rows", None, "display.max_columns", None)
col_list = ["Total Cases", "Country/ Other", "Total Deaths", "# 9/27/2020"]
df = pandas.read_csv("covidmilliondead.csv", usecols=col_list, thousands=',')
totalCases, totalDeaths = 0,0
for idx, cases,deaths in zip(df["# 9/27/2020"], df["Total Cases"], df["Total Deaths"]):
if idx > 0:
totalCases += cases
if deaths > 0:
totalDeaths += deaths
for idx, country, cases, deaths in zip(df["# 9/27/2020"], df["Country/ Other"], df["Total Cases"], df["Total Deaths"]):
if idx > 0:
print("\n",country)
print("Cases : ", cases, "/", totalCases, " %", "{:.5%}".format(cases/totalCases))
if deaths > 0:
print("Deaths : ", int(deaths), "/", totalDeaths, " %", "{:.5%}".format(deaths/totalDeaths))
print("")
print("Total Cases")
print(totalCases)
print("Total Deaths")
print(totalDeaths) | 34.0625 | 119 | 0.633945 | """
Summarize a column total cases column and total deaths column
Country by country data in columns, sum up and match global totals
"""
import csv
import pandas
pandas.set_option("display.max_rows", None, "display.max_columns", None)
col_list = ["Total Cases", "Country/ Other", "Total Deaths", "# 9/27/2020"]
df = pandas.read_csv("covidmilliondead.csv", usecols=col_list, thousands=',')
totalCases, totalDeaths = 0,0
for idx, cases,deaths in zip(df["# 9/27/2020"], df["Total Cases"], df["Total Deaths"]):
if idx > 0:
totalCases += cases
if deaths > 0:
totalDeaths += deaths
for idx, country, cases, deaths in zip(df["# 9/27/2020"], df["Country/ Other"], df["Total Cases"], df["Total Deaths"]):
if idx > 0:
print("\n",country)
print("Cases : ", cases, "/", totalCases, " %", "{:.5%}".format(cases/totalCases))
if deaths > 0:
print("Deaths : ", int(deaths), "/", totalDeaths, " %", "{:.5%}".format(deaths/totalDeaths))
print("")
print("Total Cases")
print(totalCases)
print("Total Deaths")
print(totalDeaths) | 0 | 0 | 0 |
3f63e942c646f11fbe6852aa56443bd974dd692b | 1,191 | py | Python | src/api/v1/service_api.py | glimsil/orch | 3212c95855cbde73e9d5fd3e7a8464609eb49c71 | [
"BSD-2-Clause"
] | null | null | null | src/api/v1/service_api.py | glimsil/orch | 3212c95855cbde73e9d5fd3e7a8464609eb49c71 | [
"BSD-2-Clause"
] | null | null | null | src/api/v1/service_api.py | glimsil/orch | 3212c95855cbde73e9d5fd3e7a8464609eb49c71 | [
"BSD-2-Clause"
] | null | null | null | from flask import request, jsonify
from api import api, core
@api.route('/v1/service/deploy', methods=['POST'])
@api.route('/v1/service/<name>', methods=['DELETE'])
@api.route('/v1/service/<name>', methods=['GET'])
@api.route('/v1/service/<name>/scale/<replicas>', methods=['POST'])
@api.route('/v1/service/<name>/scale-up', methods=['POST'])
@api.route('/v1/service/<name>/scale-down', methods=['POST'])
| 29.775 | 79 | 0.70361 | from flask import request, jsonify
from api import api, core
@api.route('/v1/service/deploy', methods=['POST'])
def service_deploy():
    # Deploy the service described by the JSON request body, then echo the
    # stored service info back to the caller as JSON.
    service_info = request.json
    print(service_info)  # NOTE(review): debug print; consider the app logger
    core.deploy_service_with_info(service_info)
    return jsonify(core.service_storage.get_service_info(service_info['name']))
@api.route('/v1/service/<name>', methods=['DELETE'])
def delete_service(name):
    """Remove the named service, reporting success in the JSON response.

    Failures during removal are reported as ``removed: False`` instead of
    propagating to the client.
    """
    response = {
        'service_name' : name,
        'removed' : True
    }
    try:
        core.remove_service(name)
    except Exception:
        # Bug fix: a bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; Exception keeps the best-effort behavior for
        # real errors only.
        response['removed'] = False
    return jsonify(response)
@api.route('/v1/service/<name>', methods=['GET'])
def service_get_deployment(name):
    # Return the stored deployment info for the named service as JSON.
    return jsonify(core.service_storage.get_service_info(name))
@api.route('/v1/service/<name>/scale/<replicas>', methods=['POST'])
def service_scale(name, replicas):
    # Scale the named service to an absolute replica count; the URL
    # segment arrives as a string, hence the int() conversion.
    return jsonify(core.scale_service(name, int(replicas)))
@api.route('/v1/service/<name>/scale-up', methods=['POST'])
def service_scale_up(name):
    # Increase the named service's replica count by one step.
    return jsonify(core.scale_service_up(name))
@api.route('/v1/service/<name>/scale-down', methods=['POST'])
def service_scale_down(name):
    """Decrease the named service's replica count by one step.

    Bug fix: this handler previously called ``core.scale_service_up`` (a
    copy-paste of the scale-up route), so POSTing to /scale-down scaled
    the service *up*.
    """
    # NOTE(review): assumes core exposes scale_service_down mirroring
    # scale_service_up -- confirm against the core module.
    return jsonify(core.scale_service_down(name))
| 646 | 0 | 132 |
ea47b08c30aa8205f8c763b4a1955fdcb13cee13 | 94,046 | py | Python | packages/pywst/pywst/inversion/problem.py | USEPA/Water-Security-Toolkit | 6b6b68e0e1b3dcc8023b453ab48a64f7fd740feb | [
"BSD-3-Clause"
] | 3 | 2019-06-10T18:04:14.000Z | 2020-12-05T18:11:40.000Z | packages/pywst/pywst/inversion/problem.py | USEPA/Water-Security-Toolkit | 6b6b68e0e1b3dcc8023b453ab48a64f7fd740feb | [
"BSD-3-Clause"
] | null | null | null | packages/pywst/pywst/inversion/problem.py | USEPA/Water-Security-Toolkit | 6b6b68e0e1b3dcc8023b453ab48a64f7fd740feb | [
"BSD-3-Clause"
] | 2 | 2020-09-24T19:04:14.000Z | 2020-12-05T18:11:43.000Z | # _________________________________________________________________________
#
# Water Security Toolkit (WST)
# Copyright (c) 2012 Sandia Corporation.
# This software is distributed under the Revised BSD License.
# Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
# license for use of this work by or on behalf of the U.S. government.
# For more information, see the License Notice in the WST User Manual.
# _________________________________________________________________________
#
import os, sys, datetime
import pyutilib.subprocess
import yaml, json
import time
import logging
import itertools
import pprint
import imp
import pywst.common.problem
import pywst.common.wst_util as wst_util
import pywst.common.wst_config as wst_config
from pyutilib.misc.config import ConfigBlock
import pywst.visualization.inp2svg as inp2svg
from pyomo.environ import *
logger = logging.getLogger('wst.inversion')
try:
import pyepanet
except ImportError:
pyepanet = {}
#raise RuntimeError("EPANET DLL is missing or corrupt. Please reinstall PyEPANET.")
| 47.378338 | 157 | 0.551049 | # _________________________________________________________________________
#
# Water Security Toolkit (WST)
# Copyright (c) 2012 Sandia Corporation.
# This software is distributed under the Revised BSD License.
# Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
# license for use of this work by or on behalf of the U.S. government.
# For more information, see the License Notice in the WST User Manual.
# _________________________________________________________________________
#
import os, sys, datetime
import pyutilib.subprocess
import yaml, json
import time
import logging
import itertools
import pprint
import imp
import pywst.common.problem
import pywst.common.wst_util as wst_util
import pywst.common.wst_config as wst_config
from pyutilib.misc.config import ConfigBlock
import pywst.visualization.inp2svg as inp2svg
from pyomo.environ import *
logger = logging.getLogger('wst.inversion')
# pyepanet is optional: fall back to an empty dict so attribute access
# fails lazily at the point of use instead of aborting the import.
try:
    import pyepanet
except ImportError:
    pyepanet = {}
    #raise RuntimeError("EPANET DLL is missing or corrupt. Please reinstall PyEPANET.")
class Problem(pywst.common.problem.Problem):
results = { 'dateOfLastRun': '',
'nodesToSource': [],
'finalMetric': -999 }
# Trying handle all possible ways we may encounter None coming from the yaml parser
none_list = ['none','','None','NONE', None]
defLocs = {}
epanetOkay = False
    def __init__(self):
        """Initialize the 'inversion' subproblem: register its option
        blocks, load the user's .wstrc preferences and run the (disabled)
        EPANET check."""
        pywst.common.problem.Problem.__init__(self, 'inversion', ("network", "measurements", "inversion", "solver", "configure"))
        self.filename = 'inversion.yml'
        self.loadPreferencesFile()
        self.validateEPANET()
        return
    def validateEPANET(self):
        """Disabled EPANET DLL sanity check (currently a no-op).

        The original check, kept here for reference, attempted to load the
        pyepanet DLL:

        try:
            enData = pyepanet.ENepanet()
        except:
            raise RuntimeError("EPANET DLL is missing or corrupt. Please reinstall PyEPANET.")
        self.epanetOkay = True
        """
        # NOTE(review): self.epanetOkay is never set here, so it keeps the
        # class-level default (False) even though this method "passes".
        return
def trunc(self,f, n):
'''Truncates/pads a float f to n decimal places without rounding'''
slen = len('%.*f' % (n, f))
return str(f)[:slen]
    def loadPreferencesFile(self):
        """Load default solver locations from the user's .wstrc file.

        Looks for %APPDATA%/.wstrc on Windows and $HOME/.wstrc elsewhere.
        Recognized keys ('ampl', 'pyomo') override the corresponding
        'configure' options; the full mapping is kept in self.defLocs.
        """
        if os.name in ['nt','win','win32','win64','dos']:
            rcPath = os.path.join(os.path.abspath(os.environ['APPDATA']),
                                  '.wstrc')
        else:
            rcPath = os.path.join(os.path.abspath(os.environ['HOME']),
                                  '.wstrc')
        if os.path.exists(rcPath) and os.path.isfile(rcPath):
            fid = open(rcPath,'r')
            # NOTE(review): yaml.load without an explicit Loader can execute
            # arbitrary tags; yaml.safe_load would be safer for this file.
            defLocs = yaml.load(fid)
            fid.close()
            self.defLocs = defLocs
            for key in defLocs.keys():
                if key == 'ampl':
                    self.opts['configure']['ampl executable'] = defLocs[key]
                if key == 'pyomo':
                    self.opts['configure']['pyomo executable'] = defLocs[key]
        return
    def runInversionsim(self):
        """Build and execute the ``inversionsim`` command line.

        Assembles options from the 'network', 'inversion' and 'measurements'
        configuration blocks, runs the executable through pyutilib, and
        returns a dict mapping Merlion integer node labels back to node
        names (parsed from the generated MERLION_LABEL_MAP.txt file).

        Raises RuntimeError when the EPANET INP file cannot be opened via
        pyepanet or the executable exits with a nonzero return code.
        """
        logger = logging.getLogger('wst.inversion.inversionsim')
        # Prepend all output file names with this
        prefix = self.getConfigureOption('output prefix')
        if prefix is None:
            prefix = ''
        #self.createInversionSimDat()
        cmd = ['inversionsim']
        is_inp_file = (self.getNetworkOption('epanet file') not in self.none_list)
        is_wqm_file = (self.getInversionOption('wqm file') not in self.none_list)
        # Optional arguments are simulation duration and water quality timestep, which will
        # override what is in the EPANET input file.
        if is_inp_file and self.getNetworkOption('simulation duration') not in ['INP','Inp','inp']:
            cmd.append('--simulation-duration-minutes='+str(self.getNetworkOption('simulation duration')))
        if is_inp_file and self.getNetworkOption('water quality timestep') not in ['INP','Inp','inp']:
            cmd.append('--quality-timestep-minutes='+str(self.getNetworkOption('water quality timestep')))
        # Substitude integers for node names in output files
        cmd.append('--output-merlion-labels')
        # the above command will produce the following file
        label_map_file = self._get_prefixed_filename('MERLION_LABEL_MAP.txt',tempfile=True)
        # Prepend all output file names with this
        cmd.append('--output-prefix='+prefix)
        #if self.getInversionOption('model format') not in ['AMPL','PYOMO']:
        #    raise IOError("Invalid model format: "+self.getInversionOption('model format'))
        #check for the horizon
        if self.getInversionOption('horizon') not in self.none_list:
            cmd.append('--horizon-minutes=' + str(self.getInversionOption('horizon')))
        # Allowed node filename
        if self.getInversionOption('feasible nodes') not in self.none_list:
            try:
                enData = pyepanet.ENepanet()
                enData.ENopen(self.opts['network']['epanet file'],'tmp.rpt')
            except:
                # NOTE(review): bare except hides the original pyepanet
                # error; consider logging the underlying exception.
                msg = 'EPANET inp file not loaded using pyepanet'
                logger.error(msg)
                raise RuntimeError(msg)
            feasible_node_names, feasible_node_indices = wst_util.feasible_nodes(\
                self.getInversionOption('feasible nodes'),\
                [], \
                True, enData)
            enData.ENclose()
            tmpNodesFile = self._get_tempfile('nodes.txt')
            # write nodes file
            fid = open(tmpNodesFile,'w')
            for n in feasible_node_names:
                fid.write(n + '\n')
            fid.close()
            cmd.append('--allowed-impacts=' + tmpNodesFile)
            self.opts['inversion']['feasible nodes'] = tmpNodesFile
        #check for water quality model tolerance
        #if self.getInversionOption('wqm_zero_tol') not in self.none_list:
            #cmd.append('--wqm-zero=' + str(self.getInversionOption('wqm_zero_tol')))
        #check for algorithm type
        if self.getInversionOption('algorithm')=='optimization':
            cmd.append('--optimization')
        elif self.getInversionOption('algorithm')=='bayesian':
            # Check is merlion is selected
            if self.getInversionOption('merlion water quality model'):
                cmd.append('--merlion')
            cmd.append('--probability')
            if self.getInversionOption('negative threshold') not in self.none_list:
                cmd.append('--meas-threshold='+ str(self.getInversionOption('negative threshold')))
            else:
                cmd.append('--meas-threshold=0.0')
        else: cmd.append('--optimization')
        #probability options
        #if self.getInversionOption('meas_threshold') not in self.none_list:
            #cmd.append('--meas-threshold=' + str(self.getInversionOption('meas_threshold')))
        if self.getInversionOption('measurement failure') not in self.none_list and self.getInversionOption('measurement failure')!=0.05:
            cmd.append('--meas-failure='+ str(self.getInversionOption('measurement failure')))
        if self.getInversionOption('confidence') not in self.none_list and self.getInversionOption('confidence')!=0.95:
            cmd.append('--prob-confidence='+ str(self.getInversionOption('confidence')))
        if self.getInversionOption('output impact nodes'):
            cmd.append('--output-impact-nodes')
        #check for the horizon
        #if self.getInversionOption('start inversion') not in self.none_list:
            #cmd.append('--start-inversion=' + str(self.getInversionOption('start inversion')))
        # Ignore Merlion Warnings
        if self.getInversionOption('ignore merlion warnings'):
            cmd.append('--ignore-merlion-warnings')
        cmd.append('--epanet-rpt-file='+prefix+'epanet')
        cmd.append('--merlion-save-file='+prefix+'merlion')
        #LOGIC VALIDATION
        assert(is_inp_file != is_wqm_file) # one and only one is true
        # The file defining the water quality model
        if is_inp_file:
            cmd.append('--inp='+ self.getNetworkOption('epanet file'))
        elif is_wqm_file:
            cmd.append('--wqm='+ self.getInversionOption('wqm file'))
        # Check for the measurement file
        assert(self.getMeasurementOption('grab samples') not in self.none_list)
        cmd.append(self.getMeasurementOption('grab samples'))
        logger.info("Launching inversionsim executable ...")
        logger.debug(cmd)
        out = self._get_prefixed_filename('inversionsim.out')
        sim_timelimit = None
        # Mirror the executable's stdout into a dedicated log file.
        sub_logger = logging.getLogger('wst.inversion.inversionsim.exec')
        sub_logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler(out, mode='w')
        sub_logger.addHandler(fh)
        p = pyutilib.subprocess.run(cmd,timelimit=sim_timelimit,stdout=pywst.common.problem.LoggingFile(sub_logger))
        if p[0]:
            msg = 'An error occured when running the inversionsim executable (return code %s)\nError Message: %s\nCommand: %s' % (p[0], p[1], cmd)
            logger.error(msg)
            raise RuntimeError(msg)
        # Parse the label map: each line is "<node name> <merlion id>";
        # return {merlion id: node name}.
        nodemap = {}
        f = open(label_map_file,'r')
        for line in f:
            t = line.split()
            nodemap[t[1]] = t[0]
        f.close()
        return nodemap
    def runCSArun(self,num_sensors, meas_step_sec, sim_stop_time):
        """Build and execute the ``csarun`` (contaminant source analysis)
        command line.

        Parameters are the sensor count, the measurement/quality timestep
        in seconds, and the simulation stop time; remaining options come
        from the configuration blocks. Raises RuntimeError when no INP file
        is given, when feasible nodes are requested (unsupported by CSA),
        or when the executable exits with a nonzero return code.
        """
        logger = logging.getLogger('wst.inversion.csarun')
        # Prepend all output file names with this
        prefix = self.getConfigureOption('output prefix')
        if prefix is None:
            prefix = ''
        cmd = ['csarun', '--output-prefix=' + prefix]
        is_inp_file = (self.getNetworkOption('epanet file') not in self.none_list)
        is_wqm_file = (self.getInversionOption('wqm file') not in self.none_list)
        if is_inp_file:
            cmd.append( '--inp='+self.getNetworkOption('epanet file') )
        else:
            msg = 'No INP file specified.'
            logger.error(msg)
            raise RuntimeError(msg)
        # Prepend all output file names with this
        cmd.append( '--num-sensors='+str(num_sensors) )
        cmd.append( '--meas-step-sec='+str(meas_step_sec) )
        cmd.append( '--qual-step-sec='+str(meas_step_sec) )
        cmd.append( '--sim-duration='+str(sim_stop_time) )
        meas_file_name = prefix + "csa_measurements"
        cmd.append( '--meas='+meas_file_name )
        sensor_file_name = prefix + "csa_sensors"
        cmd.append( '--sensors='+sensor_file_name )
        #check for the horizon
        if self.getInversionOption('horizon') not in self.none_list:
            cmd.append( '--horizon=' + str(self.getInversionOption('horizon')/60.0) ) # Converting to hours
        # Allowed node filename
        if self.getInversionOption('feasible nodes') not in self.none_list:
            msg = 'The CSA algorithm does not yet support feasible nodes option'
            logger.error(msg)
            raise RuntimeError(msg)
        # Measurement threshold
        if self.getInversionOption('negative threshold') not in self.none_list:
            if self.getInversionOption('negative threshold') > 0.0:
                logger.info('\nWARNING: The current CSA implementation only supports a negetive threshold of 0. Setting to 0. \n')
        # CSA always runs with a zero measurement threshold (see warning above).
        cmd.append( '--meas-threshold='+ str(0.0) )
        logger.info("Launching csarun executable ...")
        logger.debug(cmd)
        out = self._get_prefixed_filename('csarun.out')
        sim_timelimit = None
        # Mirror the executable's stdout into a dedicated log file.
        sub_logger = logging.getLogger('wst.inversion.csarun.exec')
        sub_logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler(out, mode='w')
        sub_logger.addHandler(fh)
        p = pyutilib.subprocess.run(cmd,timelimit=sim_timelimit,stdout=pywst.common.problem.LoggingFile(sub_logger))
        if (p[0]):
            msg = 'An error occured when running the csarun executable\nError Message: %s\nCommand: %s' % (p[1], cmd)
            logger.error(msg)
            raise RuntimeError(msg)
    def run(self, cmd_line_options=None):
        """Top-level driver for the source-inversion workflow.

        Validates the configuration, runs the appropriate backend
        (inversionsim for 'optimization'/'bayesian', csarun for 'csa'),
        post-processes the candidate events, writes the JSON/TSG/YML output
        files, launches the visualization subcommand and returns
        [Solution, json_file_wDir, tsg_file, num_events].

        NOTE(review): uses dict.has_key and xrange -- Python 2 only.
        """
        logger.info("WST inversion subcommand")
        logger.info("---------------------------")
        # set start time
        self.startTime = time.time()
        # validate input
        logger.info("Validating configuration file")
        self.validate()
        # Override commandline options
        if (cmd_line_options is not None) and \
           (cmd_line_options.inp_file is not None):
            self.setNetworkOption('epanet file', cmd_line_options.inp_file)
        if (cmd_line_options is not None) and \
           (cmd_line_options.measurements is not None):
            self.setMeasurementOption('grab samples', cmd_line_options.measurements)
        # Run C/C++ executables : inversionsim or runcsa
        if self.getInversionOption('algorithm') == 'csa':
            [num_sensors, meas_step_sec, sim_stop_time] = self.writeCSAInputFiles()
            self.runCSArun(num_sensors, meas_step_sec, sim_stop_time)
            Solution = self.readCSAOutputFiles(sim_stop_time, meas_step_sec)
            logger.debug(Solution)
        else:
            nodemap = self.runInversionsim()
        # Setup result vectors
        inversion_nodes = []
        objective_val = []
        if self.getInversionOption('algorithm') == 'optimization':
            # Check is merlion is selected
            if not self.getInversionOption('merlion water quality model'):
                msg = 'ERROR: The optimization based method requires using the Merlion water quality model. Please set to true.'
                logger.error(msg)
                raise RuntimeError(msg)
            solve_timelimit = None
            p = (1,"There was a problem with the 'formulation' or 'model format' options")
            cmd = None
            Solution = []
            allowed_nodes_set=set()
            # Translate user-specified feasible node names into Merlion
            # integer ids using the label map written by inversionsim.
            if self.getInversionOption('feasible nodes') not in self.none_list \
                    and len(open(self.getInversionOption('feasible nodes'),'r').readlines())!=0:
                label_map_file = self._get_prefixed_filename("MERLION_LABEL_MAP.txt")
                name_to_id={}
                f = open(label_map_file,'r')
                for line in f:
                    t = line.split()
                    name_to_id[t[0]] = t[1]
                f.close()
                for line in open(self.getInversionOption('feasible nodes'),'r'):
                    l=line.split()
                    for n_ in l:
                        if name_to_id.has_key(n_)!=True:
                            msg = 'ERROR: Nodename %s specified in %s is not part of the network' % (n_, self.getInversionOption('feasible nodes'))
                            logger.error(msg)
                            raise RuntimeError(msg)
                        allowed_nodes_set.add(int(name_to_id[n_]))
            #run pyomo or ampl
            self.setInversionOption('model format',self.getInversionOption('model format').upper())
            self.setInversionOption('formulation',self.getInversionOption('formulation').upper())
            #print self.getInversionOption('formulation')
            if self.getInversionOption('model format') == 'AMPL':
                exe = self.getConfigureOption('ampl executable')
                inp = self._get_prefixed_filename('ampl.run')
                out = self._get_prefixed_filename('ampl.out')
                results_file = ''
                if self.getInversionOption('formulation') in self.none_list or self.getInversionOption('formulation')=='LP_DISCRETE':
                    if self.getInversionOption('num injections') not in self.none_list \
                            and self.getInversionOption('num injections')!=1:
                        logger.info('\nWARNING: This model cannot handle more than one contamination injection.\tIt is recommended to use the MIP model.\n')
                    logger.info('Solving the reduce LP model ...')
                    results_file += self.createAMPLRunReduceLP(inp,allowed_nodes_set)
                elif self.getInversionOption('formulation') == 'MIP_DISCRETE_ND':
                    logger.info('Solving the reduce discrete MIP model with no decrease ...')
                    results_file += self.createAMPLRunReduceMIPnd(inp,allowed_nodes_set)
                elif self.getInversionOption('formulation') == 'MIP_DISCRETE':
                    logger.info('Solving the reduce discrete MIP model ...')
                    results_file += self.createAMPLRunReduceMIP(inp,allowed_nodes_set)
                elif self.getInversionOption('formulation') == 'MIP_DISCRETE_STEP':
                    if self.getInversionOption('num injections') not in self.none_list\
                            and self.getInversionOption('num injections') != 1:
                        logger.info('\nWARNING: This model cannot handle more than one contamination injection.\tIt is recommended to use the MIP model.\n')
                    logger.info('Solving the reduce STEP model ...')
                    results_file += self.createAMPLRunStep(inp,allowed_nodes_set)
                else:
                    msg = """Bad specification of the formulation option.\n
                                \tThe posibilities are:\n
                                \t1. LP_discrete\n
                                \t2. MIP_discrete\n
                                \t3. MIP_discrete_step\n
                                \t4. MIP_discrete_nd"""
                    logger.error(msg)
                    raise RuntimeError(msg)
                cmd = [exe,inp]
                logger.info('Launching AMPL ...')
                p = pyutilib.subprocess.run(cmd,timelimit=solve_timelimit,outfile=out)
                if (p[0]):
                    msg = 'An error occured when running the optimization problem\nError Message: %s\nCommand: %s' % (p[1], cmd)
                    logger.error(msg)
                    raise RuntimeError(msg)
                #try to load the results file
                logger.info('AMPL result file from inversion: ' + results_file)
                Solution=self.AMPLresultsReader(results_file)
                #stop_timing=time()
                #print 'AMPL Timing',stop_timing-start_timing
            elif self.getInversionOption('model format') == 'PYOMO':
                if self.getInversionOption('formulation') in self.none_list or self.getInversionOption('formulation')=='LP_DISCRETE':
                    if self.getInversionOption('num injections') not in self.none_list \
                            and self.getInversionOption('num injections')!=1:
                        msg = """\nWARNING: This model cannot handle more than one contamination injection.
                                                \tIt is recommended to use the MIP model.\n"""
                        logger.info(msg)
                    logger.info('Solving the reduce LP model ...')
                    Solution=self.runPYOMOReduceLP(allowed_nodes_set)
                elif self.getInversionOption('formulation') == 'MIP_DISCRETE_ND':
                    logger.info('Solving the MIP model ...')
                    Solution=self.runPYOMOReduceMIPnd(allowed_nodes_set)
                elif self.getInversionOption('formulation') == 'MIP_DISCRETE':
                    logger.info('Solving the MIP model ...')
                    Solution=self.runPYOMOReduceMIP(allowed_nodes_set)
                elif self.getInversionOption('formulation') == 'MIP_DISCRETE_STEP':
                    if self.getInversionOption('num injections') not in self.none_list\
                            and self.getInversionOption('num injections') != 1:
                        msg = """\nWARNING: This model cannot handle more than one contamination injection.
                                                \tIt is recommended to use the MIP model.\n"""
                        logger.info(msg)
                    logger.info('Solving the reduce STEP model ...')
                    Solution=self.runPYOMOStep(allowed_nodes_set)
                else:
                    msg = """Bad specification of the formulation option.\n
                                \tThe posibilities are:\n
                                \t1. LP_discrete\n
                                \t2. MIP_discrete\n
                                \t3. MIP_discrete_step\n
                                \t4. MIP_discrete_nd"""
                    logger.error(msg)
                    raise RuntimeError(msg)
            else:
                msg = """\nBad specification of the model format option.\n
                            \tThe posibilities are:\n
                            \t1. AMPL\n
                            \t2. PYOMO"""
                logger.info(msg)
            # Normalize Objective values
            obj_list = []
            Solution.sort()
            try:
                bigger=Solution[-1][0]
            except IndexError:
                msg = 'ERROR: The optimization solution does not contain any nodes'
                logger.error(msg)
                raise RuntimeError(msg)
            bigger=Solution[-1][0]
            if(bigger==0):
                bigger=1
            # Rescale objectives to [0, 1] and map Merlion ids back to
            # node names via nodemap.
            for i in xrange(0,len(Solution)):
                if len(Solution)>1:
                    Obj=float(1-Solution[i][0]/bigger)
                    #Obj=Solution[i][0]
                else:
                    Obj=1
                obj_list.append(Obj)
                #Solution[i][0]=Obj
                nodes_=[]
                for j in xrange(0,len(Solution[i][1])):
                    nodename=nodemap[Solution[i][1][j][0]]
                    Solution[i][1][j][0]=nodename
                    nodes_.append(nodename)
                #print nodes_,'\t',Obj
            top_obj = obj_list[0]
            assert top_obj != 0.0, "ERROR: Highest objective value is 0!"
            for i in xrange(0,len(Solution)):
                Solution[i][0]=obj_list[i]/top_obj
            if self.getInversionOption('candidate threshold') not in self.none_list:
                tao = float(self.getInversionOption('candidate threshold'))
            else:
                tao = 0.95
            #print Solution
            json_file = self._get_prefixed_filename('inversion.json')
            json_file_wDir = os.path.join(os.path.abspath(os.curdir),json_file)
            [num_events, objective_val, inversion_nodes] = self.printResults(Solution, tao, json_file_wDir)
            [tsg_file, likely_nodes_file] = self.writeProfiles(Solution, tao) # tsg or scn file
            logger.debug(tsg_file)
            #self.writeAllowedNodesFile('allowed.dat',Solution,tao)
        elif self.getInversionOption('algorithm') == 'bayesian':
            # Bayesian results were already written by inversionsim; just
            # load the JSON and extract objectives / node names.
            json_file = self._get_prefixed_filename('inversion.json')
            json_file_wDir = os.path.join(os.path.abspath(os.curdir),json_file)
            data_prob_results = open(json_file_wDir).read()
            Solution = json.loads(data_prob_results)
            for sol in Solution:
                objective_val.append(sol['Objective'])
                inversion_nodes.append(sol['Nodes'][0]['Name'])
            num_events = self.printResults(Solution, 0.0, json_file)
            [tsg_file, likely_nodes_file] = self.writeProfiles(Solution, 0.0) # tsg or scn file
            #self.writeAllowedNodesFile('allowed.dat',Solution,tao)
            tao = 1 # for visualization
        else:
            [json_file_wDir, tsg_file, num_events, likely_nodes_file, objective_val, inversion_nodes] = self.writeCSAresults(Solution)
            tao = 1 # for visualization
        # remove temporary files if debug = 0
        if self.opts['configure']['debug'] == 0:
            pyutilib.services.TempfileManager.clear_tempfiles()
        # write output file
        logfilename = logger.parent.handlers[0].baseFilename
        outfilename = logger.parent.handlers[0].baseFilename.replace('.log','.yml')
        visfilename = logger.parent.handlers[0].baseFilename.replace('.log','.html')
        visymlfilename = logger.parent.handlers[0].baseFilename.replace('.log','_vis.yml')
        # Write visualization YML file
        self.writeVisualizationFile(Solution, tao, outfilename, visfilename, visymlfilename)
        #build output vectors
        config = wst_config.output_config()
        module_blocks = ("general", "inversion")
        template_options = {
            'general':{
                'cpu time': round(time.time() - self.startTime,3),
                'directory': os.path.dirname(logfilename),
                'log file': os.path.basename(logfilename)},
            'inversion': {
                'tsg file': tsg_file,
                'likely nodes file': likely_nodes_file,
                'candidate nodes': inversion_nodes,
                'node likeliness': objective_val}}
        if outfilename != None:
            self.saveOutput(outfilename, config, module_blocks, template_options)
        # Run visualization
        cmd = ['wst', 'visualization', visymlfilename]
        p = pyutilib.subprocess.run(cmd) # logging information should not be printed to the screen
        # print solution to screen
        logger.info("\nWST normal termination")
        logger.info("---------------------------")
        logger.info("Directory: "+os.path.dirname(logfilename))
        logger.info("Results file: "+os.path.basename(outfilename))
        logger.info("Log file: "+os.path.basename(logfilename))
        #logger.info("Visualization file: "+os.path.basename(visfilename)+', '+os.path.basename(visymlfilename)+'\n')
        logger.info("Visualization Configuration file: " +os.path.basename(visymlfilename)+'\n')
        return [Solution, json_file_wDir, tsg_file, num_events]
    def AMPLresultsReader(self,filename=None,lower_strenght_value=1e-3):
        """Parse the AMPL inversion results file into
        [[objective, [[node_name, [[start, stop, strength], ...]], ...]], ...].

        The first line carries the timestep (in minutes); start/stop values
        are converted from timesteps to seconds. Non-positive strengths are
        clamped to ``lower_strenght_value``. (Python 2 only: relies on
        ``reader.next()`` and ``xrange``.)
        """
        if filename is None:
            filename = self._get_prefixed_filename('inversion_results.dat')
        reader=open(filename,'r')
        #In case we want to change from time to timesteps later...
        timestep=float(reader.readline().split()[1])
        unit_change=timestep*60
        #profile{start,stop,strength}
        #object{objective_val,list_of_Nodes{node_name,profile}}
        results=[]
        while True:
            try:
                columns=reader.next().split()
                if columns[0]=='Solution':
                    node_names=[]
                    profiles=[]
                    objective=float(columns[-1])
                    # Lines look like: "Solution <node> [<node> ...] <objective>"
                    if len(columns)>3:
                        for i in xrange(1,len(columns)-1):
                            node_names.append(columns[i])
                    else:
                        node_names.append(columns[1])
                    # One profile line per node: flat triplets of
                    # (start, stop, strength) values.
                    for j in xrange(0,len(node_names)):
                        str_profiles=reader.next().split()
                        profile=[];injection=[]
                        for k in xrange(0,len(str_profiles)):
                            injection.append(float(str_profiles[k]))
                            if len(injection)==3:
                                injection[0]=injection[0]*unit_change
                                injection[1]=injection[1]*unit_change
                                if injection[2]<=0:
                                    injection[2]=lower_strenght_value
                                profile.append(injection)
                                injection=[]
                        profiles.append(profile)
                    nodes=[[node_names[n],profiles[n]] for n in xrange(0,len(profiles))]
                    results.append([objective,nodes])
                    #print objective
            except StopIteration:
                break
        #print results
        return results
    def printResults(self, result_list, tao, file_name):
        """Filter candidate events by the threshold ``tao`` and, for the
        optimization algorithm, dump them as JSON to ``file_name``.

        Returns [num_events, objective_val, inversion_nodes] for the
        optimization algorithm, or just num_events for other algorithms.
        (The triple-quoted blocks below are disabled Python 2 console
        reports, kept for reference.)
        """
        if self.getInversionOption('algorithm') == 'optimization':
            inversion_nodes = []
            objective_val = []
            likely_events = [result for result in result_list if result[0] >= tao]
            '''
            print '\n*********************************************************************\n'
            print '\t\t\tInversion Results\n'
            if self.getInversionOption('num injections') not in self.none_list\
               and self.getInversionOption('num injections') != 1:
                flag=1;strl=''
                for node in result_list[0][1]:
                    strl+=node[0]
                    if flag<len(result_list[0][1]): strl+=','
                    flag+=1
                #print '\tMore likely injection nodes:\t\t\t',strl
            #else:
                #print'\tMore likely injection node:\t\t\t',result_list[0][1][0][0]
                #print'\tStart time of contaminant injection (s):\t',result_list[0][1][0][1][0][0]
            print'\tNumber of candidate events:\t\t\t',len(likely_events)
            print'\tInversion algorithm:\t\t\t\toptimization'
            print'\tInversion model:\t\t\t\t',self.getInversionOption('formulation')
            print'\tAML:\t\t\t\t\t\t',self.getInversionOption('model format')
            print'\tAllowed nodes in file:\t\t\t\t',self.getInversionOption('feasible nodes')
            print '\tDetailed results in:\t' + file_name +'\n'
            print '*********************************************************************\n'
            #
            '''
            if self.getInversionOption('num injections') not in self.none_list:
                num_inj = self.getInversionOption('num injections')
            else:
                num_inj = 1
            results_object = []
            # Build one JSON-serializable dict per event above the threshold.
            for result in result_list:
                if result[0] < tao:
                    continue
                tmp_results = dict()
                nodes_list = []
                for node_i in result[1]:
                    profile_list = []
                    tmp_node_dic = dict()
                    for injection in node_i[1]:
                        #print injection
                        profile_list.append(dict(Start=injection[0], Stop=injection[1], Strength=injection[2]))
                    tmp_node_dic['Name'] = node_i[0]
                    tmp_node_dic['Profile'] = profile_list
                    nodes_list.append(tmp_node_dic)
                tmp_results['Objective'] = result[0]
                objective_val.append(self.trunc(result[0],3))
                tmp_results['Nodes'] = nodes_list
                if num_inj > 1:
                    inversion_nodes.append([i['Name'] for i in nodes_list])
                else:
                    inversion_nodes.append(nodes_list[0]['Name'])
                tmp_results['CPU time'] = time.time() - self.startTime
                tmp_results['run date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                results_object.append(tmp_results)
            f = open(file_name, 'w')
            json.dump(results_object, f,indent=2)
            f.close()
            num_events = len(results_object)
            return [num_events, objective_val, inversion_nodes]
        else:
            num_events = len(result_list)
            '''
            print'\n*********************************************************************\n'
            print'\t\t\tInversion Results\n'
            print'\tNumber of candidate events:\t\t\t',num_events
            print'\tInversion algorithm:\t\t\t\tbayesian'
            print'\tAllowed nodes in file:\t\t\t\t',self.getInversionOption('feasible nodes')
            print'\tDetailed results in:\t' + file_name +'\n'
            print'*********************************************************************\n'
            '''
            return num_events
def validateExecutables(self):
amplExe = self.getConfigureOption('ampl executable')
pyomoExe = self.getConfigureOption('pyomo executable')
if amplExe is not None and not os.path.exists(amplExe):
if 'ampl' in self.defLocs.keys():
amplExe = self.defLocs['ampl']
elif amplExe is not None:
amplExe = os.path.split(amplExe)[1]
for p in os.sys.path:
f = os.path.join(p,amplExe)
if os.path.exists(f) and os.path.isfile(f):
amplExe = f
break
f = os.path.join(p,amplExe+'.exe')
if os.path.exists(f) and os.path.isfile(f):
amplExe = f
break
if pyomoExe is not None and not os.path.exists(pyomoExe):
if 'pyomo' in self.defLocs.keys():
pyomoExe = self.defLocs['pyomo']
elif pyomoExe is not None:
pyomoExe = os.path.split(pyomoExe)[1]
for p in os.sys.path:
f = os.path.join(p,pyomoExe)
if os.path.exists(f) and os.path.isfile(f):
pyomoExe = f
break
f = os.path.join(p,pyomoExe+'.exe')
if os.path.exists(f) and os.path.isfile(f):
pyomoExe = f
break
self.setConfigureOption('ampl executable',amplExe)
self.setConfigureOption('pyomo executable',pyomoExe)
    def validate(self):
        """Validate the configuration: resolve solver executables and
        default an empty output prefix to 'invsn'."""
        output_prefix = self.getConfigureOption('output prefix')
        self.validateExecutables()
        if output_prefix == '':
            output_prefix = 'invsn'
        self.setConfigureOption('output prefix',output_prefix)
        return
    def createAMPLRunReduceLP(self,filename=None,allowed_list=set([])):
        """Write an AMPL run file that solves the LP inversion node-by-node.

        The generated script loads models/ampl/inversion_LP.mod plus the
        prefixed CONC/INV_ROWS_* data files, then loops over the candidate
        impact nodes (intersected with S_ALLOWED_NODES when
        ``allowed_list`` is non-empty), fixing every other node's
        injection variables to zero, solving one LP per node, and
        appending that node's objective and injection profile to the
        prefixed 'inversion_results.dat'.

        filename     -- run file path; defaults to the prefixed 'ampl.run'.
        allowed_list -- optional collection of candidate node ids
                        (read-only; the mutable default is never mutated).

        Returns the path of the results file the AMPL script will write.
        """
        if filename is None:
            filename = self._get_prefixed_filename('ampl.run')
        # Model file shipped with the package, located relative to this module.
        ampl_model = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','ampl','inversion_LP.mod')
        fid = open(filename,'wt')
        fid.write('option presolve 0;\n')
        fid.write('option substout 0;\n')
        fid.write('\n')
        fid.write('# LP source inversion model\n')
        fid.write('model %s;\n'%ampl_model)
        fid.write('\n')
        fid.write('# Source inversion data\n')
        fid.write('data '+self._get_prefixed_filename('CONC.dat')+'\n')
        fid.write('data '+self._get_prefixed_filename('INV_ROWS_INDEX.dat')+'\n')
        fid.write('data '+self._get_prefixed_filename('INV_ROWS_VALS.dat')+'\n')
        results_file = self._get_prefixed_filename('inversion_results.dat')
        # Optional measurement thresholds are emitted only when configured.
        if self.getInversionOption('positive threshold') not in self.none_list:
            fid.write('let P_TH_POS := '+str(self.getInversionOption('positive threshold'))+';\n')
        if self.getInversionOption('negative threshold') not in self.none_list:
            fid.write('let P_TH_NEG := '+str(self.getInversionOption('negative threshold'))+';\n')
        fid.write('# Solve the problem\n')
        fid.write('option solver '+self.getSolverOption('type')+';\n')
        # HACK: Not sure what the correct label is for solvers other than
        # cplex and gurobi so I will throw an error if I encounter options.
        # The alternative is to ask the user for the solver executable and this
        # ampl specific label which would be weird. The solver configuration system
        # will likely be updated in the future so this should work for now.
        options_label = ''
        if self.getSolverOption('type') == 'cplexamp':
            options_label += 'cplex_options'
        elif self.getSolverOption('type') == 'gurobi_ampl':
            options_label += 'gurobi_options'
        if self.getSolverOption('options') not in self.none_list:
            if options_label != '':
                fid.write('option '+options_label+' \'')
                for (key,value) in self.getSolverOption('options').iteritems():
                    if value in self.none_list:
                        # this is the case where an option does not accept a value
                        fid.write(key+' ')
                    else:
                        fid.write(key+'='+str(value)+' ')
                fid.write('\';\n')
            else:
                # Unknown solver: options cannot be mapped to an AMPL option
                # label, so warn and drop them rather than guessing.
                print >> sys.stderr, ' '
                print >> sys.stderr, "WARNING: Solver options in AMPL are currently not handled for"
                print >> sys.stderr, "         the specified solver: ", self.getSolverOption('type')
                print >> sys.stderr, "         All solver options will be ignored."
                print >> sys.stderr, ' '
        #fid.write('option solver cplexamp;\n')
        #if self.getSolverOption('options') in self.none_list:
        #fid.write('option cplex_options \'mipdisplay=2\';\n')
        # The two branches below differ only in whether the AMPL node loop
        # is intersected with S_ALLOWED_NODES.
        if len(allowed_list)>0:
            fid.write('\n')
            fid.write('set S_ALLOWED_NODES;\n')
            fid.write('let S_ALLOWED_NODES :={')
            i=0
            for allowed_node in allowed_list:
                fid.write(str(allowed_node))
                if(i<len(allowed_list)-1):
                    fid.write(',')
                i+=1
            fid.write('};\n')
            fid.write('\n')
            fid.write('param flag;\n')
            fid.write('printf "Minutes_per_timestep %q\\n",P_MINUTES_PER_TIMESTEP>'+results_file+';\n')
            fid.write('for{n in {S_IMPACT_NODES inter S_ALLOWED_NODES}} \n')
            fid.write('{\n')
            fid.write('\tfor{nn in {S_IMPACT_NODES inter S_ALLOWED_NODES}}\n')
            fid.write('\t{\n')
            fid.write('\t\tunfix{t in S_IMPACT_TIMES[nn]} mn_tox_gpmin[nn,t];\n')
            fid.write('\t}\n')
            fid.write('\n')
            fid.write('\tfor{nn in S_IMPACT_NODES:nn!=n}\n')
            fid.write('\t{\n')
            fid.write('\t\tfix{t in S_IMPACT_TIMES[nn]} mn_tox_gpmin[nn,t]:=0;\n')
            fid.write('\t}\n')
            fid.write('\n')
        else:
            fid.write('\n')
            fid.write('param flag;\n')
            fid.write('printf "Minutes_per_timestep %q\\n",P_MINUTES_PER_TIMESTEP>'+results_file+';\n')
            fid.write('for{n in S_IMPACT_NODES} \n')
            fid.write('{\n')
            fid.write('\tfor{nn in S_IMPACT_NODES}\n')
            fid.write('\t{\n')
            fid.write('\t\tunfix{t in S_IMPACT_TIMES[nn]} mn_tox_gpmin[nn,t];\n')
            fid.write('\t}\n')
            fid.write('\n')
            fid.write('\tfor{nn in S_IMPACT_NODES:nn!=n}\n')
            fid.write('\t{\n')
            fid.write('\t\tfix{t in S_IMPACT_TIMES[nn]} mn_tox_gpmin[nn,t]:=0;\n')
            fid.write('\t}\n')
            fid.write('\n')
        # Common tail of the AMPL node loop: solve the single-node LP and
        # record its profile; an all-zero profile is reported as a single
        # zero entry in the last timestep (flag==0 case).
        fid.write('\tsolve;\n')
        fid.write('\tprintf "Solution\\t%q\\t%q\\n",n,OBJ >>'+results_file+';\n')
        fid.write('\tlet flag :=0;\n')
        fid.write('\tfor{t in S_IMPACT_TIMES[n]:t != first(S_IMPACT_TIMES[n])}\n')
        fid.write('\t{\n')
        fid.write('\t\tif mn_tox_gpmin[n,prev(t,S_IMPACT_TIMES[n])]>0 then\n')
        fid.write('\t\t{\n')
        fid.write('\t\t\tlet flag := 1;\n')
        fid.write('\t\t\tprintf "%q\\t%q\\t%q\\t",prev(t,S_IMPACT_TIMES[n]),t,mn_tox_gpmin[n,prev(t,S_IMPACT_TIMES[n])]>>'+results_file+';\n')
        fid.write('\t\t}\n')
        fid.write('\t}\n')
        fid.write('\tif flag==0 then\n')
        fid.write('\t{\n')
        fid.write('\t\tprintf "%q\\t%q\\t%q",(P_TIME_STEPS-1),P_TIME_STEPS,0>>'+results_file+';\n')
        fid.write('\t}\n')
        fid.write('\tprintf "\\n">>'+results_file+';\n')
        fid.write('}')
        fid.close()
        return results_file
    def runPYOMOReduceLP(self,allowed_list=set([]),lower_strenght_value=1e-3):
        """Solve the LP inversion with Pyomo, one solve per candidate node.

        Loads models/pyomo/inversion_LP and the prefixed temp data files
        (appending the configured positive/negative thresholds to the
        index data when set), then for each candidate node fixes every
        other node's injection variables to zero and solves the LP.

        allowed_list         -- optional node subset; it is intersected
                                with the model's S_IMPACT_NODES, and an
                                empty list means "all impact nodes".
        lower_strenght_value -- strength recorded for a node whose optimal
                                injection profile is identically zero.

        Returns a list of [objective, [[node_name, profile]]] entries,
        where profile is a list of [start_sec, stop_sec, strength]
        segments.
        """
        pyomo_module = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','pyomo','inversion_LP')
        pm = imp.load_source(os.path.basename(pyomo_module),pyomo_module+".py")
        model = pm.model
        dat1=self._get_prefixed_filename('CONC.dat',tempfile=True)
        dat2=self._get_prefixed_filename('INV_ROWS_INDEX.dat',tempfile=True)
        dat3=self._get_prefixed_filename('INV_ROWS_VALS.dat',tempfile=True)
        # Thresholds are appended to the (temporary) index data file so the
        # model picks them up through the normal DataPortal load below.
        if self.getInversionOption('positive threshold') not in self.none_list \
        or self.getInversionOption('negative threshold') not in self.none_list:
            with open(dat2, "a") as myfile:
                if self.getInversionOption('positive threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_POS :="+str(self.getInversionOption('positive threshold'))+";\n")
                if self.getInversionOption('negative threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_NEG :="+str(self.getInversionOption('negative threshold'))+";\n")
        modeldata=DataPortal()
        modeldata.load(model=model, filename=dat1)
        modeldata.load(model=model, filename=dat2)
        modeldata.load(model=model, filename=dat3)
        LPmod=model.create_instance(modeldata)
        opt=SolverFactory(self.getSolverOption('type'))
        if self.getSolverOption('options') not in self.none_list:
            for (key,val) in self.getSolverOption('options').iteritems():
                if val in self.none_list:
                    # this is the case where an option does not accept a value
                    opt.options[key] = ''
                else:
                    opt.options[key] = val
        #print "Node_Name objective value"
        Solution = []
        # Restrict candidates to nodes the model actually contains.
        allowed_list=[n for n in allowed_list if n in LPmod.S_IMPACT_NODES]
        loop_through=LPmod.S_IMPACT_NODES if len(allowed_list)==0 else allowed_list
        # Model times are in timesteps; convert to seconds for the output.
        unit_change=value(LPmod.P_MINUTES_PER_TIMESTEP)*60
        for n in loop_through:
            profile=[]
            # Unfix all candidates, then re-fix everything except node n to
            # zero so the LP only optimizes node n's injection profile.
            for nn in loop_through:
                for t in LPmod.S_IMPACT_TIMES[nn]:
                    LPmod.mn_tox_gpmin[nn,t].fixed=False
            for (nn,t) in LPmod.S_IMPACT:
                if nn != n:
                    LPmod.mn_tox_gpmin[nn,t].fixed=True
                    LPmod.mn_tox_gpmin[nn,t].value=0
            LPmod.preprocess()
            results = opt.solve(LPmod)
            #LPmod.load(results)
            times_minus_first=[time for time in LPmod.S_IMPACT_TIMES[n]]
            for tt in xrange(1,len(times_minus_first)):
                start=times_minus_first[tt-1]*unit_change
                end=times_minus_first[tt]*unit_change
                strength=value(LPmod.mn_tox_gpmin[n,times_minus_first[tt-1]])
                if strength>0:
                    profile.append([start,end,strength])
            # An all-zero solution is reported as one tiny injection in the
            # final timestep so downstream code always sees a segment.
            if len(profile)==0:
                start=(value(LPmod.P_TIME_STEPS)-1)*unit_change
                end=value(LPmod.P_TIME_STEPS)*unit_change
                profile.append([start,end,lower_strenght_value])
            Solution.append([value(LPmod.OBJ),[[str(n),profile]]])
        return Solution
    def createAMPLRunReduceMIPnd(self,filename=None,allowed_list=set([])):
        """Write an AMPL run file for the MIP inversion ('nd' model variant).

        Unlike the LP script, a single MIP (models/ampl/inversion_MIP_nd.mod)
        is solved in which binary variables y[n] select the injection
        node(s).  When ``allowed_list`` is non-empty, y is fixed to zero
        for every node outside the list.  The selected nodes, objective,
        and per-node injection profiles are printed to the prefixed
        'inversion_results.dat'.

        filename     -- run file path; defaults to the prefixed 'ampl.run'.
        allowed_list -- optional collection of candidate node ids.

        Returns the path of the results file the AMPL script will write.

        NOTE(review): nearly identical to createAMPLRunReduceMIP except for
        the model file loaded — candidate for consolidation.
        """
        if filename is None:
            filename = self._get_prefixed_filename('ampl.run')
        ampl_model = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','ampl','inversion_MIP_nd.mod')
        fid = open(filename,'wt')
        fid.write('option presolve 0;\n')
        fid.write('option substout 0;\n')
        fid.write('\n')
        fid.write('# MIP source inversion model\n')
        fid.write('model %s;\n'%ampl_model)
        fid.write('\n')
        fid.write('# Source inversion data\n')
        fid.write('data '+self._get_prefixed_filename('CONC.dat')+'\n')
        fid.write('data '+self._get_prefixed_filename('INV_ROWS_INDEX.dat')+'\n')
        fid.write('data '+self._get_prefixed_filename('INV_ROWS_VALS.dat')+'\n')
        fid.write('\n')
        # Optional thresholds and injection count are emitted only when set.
        if self.getInversionOption('positive threshold') not in self.none_list:
            fid.write('let P_TH_POS := '+str(self.getInversionOption('positive threshold'))+';\n')
        if self.getInversionOption('negative threshold') not in self.none_list:
            fid.write('let P_TH_NEG := '+str(self.getInversionOption('negative threshold'))+';\n')
        if self.getInversionOption('num injections') not in self.none_list:
            fid.write('let N_INJECTIONS :=' + str(self.getInversionOption('num injections'))+';\n')
        fid.write('# Solve the problem\n')
        fid.write('option solver '+self.getSolverOption('type')+';\n')
        # HACK: Not sure what the correct label is for solvers other than
        # cplex and gurobi so I will throw an error if I encounter options.
        # The alternative is to ask the user for the solver executable and this
        # ampl specific label which would be weird. The solver configuration system
        # will likely be updated in the future so this should work for now.
        options_label = ''
        if self.getSolverOption('type') == 'cplexamp':
            options_label += 'cplex_options'
        elif self.getSolverOption('type') == 'gurobi_ampl':
            options_label += 'gurobi_options'
        if self.getSolverOption('options') not in self.none_list:
            if options_label != '':
                fid.write('option '+options_label+' \'')
                for (key,value) in self.getSolverOption('options').iteritems():
                    if value in self.none_list:
                        # this is the case where an option does not accept a value
                        fid.write(key+' ')
                    else:
                        fid.write(key+'='+str(value)+' ')
                fid.write('\';\n')
            else:
                # Unknown solver: options cannot be mapped to an AMPL option
                # label, so warn and drop them rather than guessing.
                print >> sys.stderr, ' '
                print >> sys.stderr, "WARNING: Solver options in AMPL are currently not handled for"
                print >> sys.stderr, "         the specified solver: ", self.getSolverOption('type')
                print >> sys.stderr, "         All solver options will be ignored."
                print >> sys.stderr, ' '
        #fid.write('option solver cplexamp;\n')
        #if self.getSolverOption('options') is None:
        #fid.write('option cplex_options \'mipdisplay=2\';\n')
        if len(allowed_list)>0:
            fid.write('\n')
            fid.write('set S_ALLOWED_NODES;\n')
            fid.write('let S_ALLOWED_NODES :={')
            i=0
            for allowed_node in allowed_list:
                fid.write(str(allowed_node))
                if(i<len(allowed_list)-1):
                    fid.write(',')
                i+=1
            fid.write('};\n')
            fid.write('fix{n in S_IMPACT_NODES diff S_ALLOWED_NODES} y[n]:=0;\n')
        fid.write('\n')
        fid.write('solve;\n')
        results_file = self._get_prefixed_filename('inversion_results.dat')
        # Report the selected nodes, the objective, then one profile line
        # per selected node (zero profile written when flag stays 0).
        fid.write('printf "Minutes_per_timestep %q\\n",P_MINUTES_PER_TIMESTEP>'+results_file+';\n')
        fid.write('printf "Solution" >>'+results_file+';\n')
        fid.write('param flag;\n')
        fid.write('for{n in S_IMPACT_NODES:y[n]!=0}\n')
        fid.write('{\n')
        fid.write('\tprintf "\\t%q",n>>'+results_file+';\n')
        fid.write('}\n')
        fid.write('printf "\\t%q\\n",OBJ>>'+results_file+';\n')
        fid.write('for{n in S_IMPACT_NODES:y[n]!=0}\n')
        fid.write('{\n')
        fid.write('\tlet flag := 0;\n')
        fid.write('\tfor{t in S_IMPACT_TIMES[n]:t != first(S_IMPACT_TIMES[n])}\n')
        fid.write('\t{\n')
        fid.write('\t\tif mn_tox_gpmin[n,prev(t,S_IMPACT_TIMES[n])]>0 then\n')
        fid.write('\t\t{\n')
        fid.write('\t\tlet flag := 1;\n')
        fid.write('\t\t\tprintf "%q\\t%q\\t%q\\t",prev(t,S_IMPACT_TIMES[n]),t,mn_tox_gpmin[n,prev(t,S_IMPACT_TIMES[n])]>>'+results_file+';\n')
        fid.write('\t\t}\n')
        fid.write('\t}\n')
        # NOTE(review): this write has no trailing '\n' (the LP variant has
        # one) so 'if flag ==0 then' and the following '{' share a line in
        # the generated script — confirm this is intended AMPL formatting.
        fid.write('\tif flag ==0 then')
        fid.write('\t{\n')
        fid.write('\t\tprintf "%q\\t%q\\t%q\\t",(P_TIME_STEPS-1),P_TIME_STEPS,0>>'+results_file+';\n')
        fid.write('\t}\n')
        fid.write('\tprintf "\\n">>'+results_file+';\n')
        fid.write('}\n')
        fid.close()
        return results_file
    def createAMPLRunReduceMIP(self,filename=None,allowed_list=set([])):
        """Write an AMPL run file for the MIP source-inversion model.

        A single MIP (models/ampl/inversion_MIP.mod) is solved in which
        binary variables y[n] select the injection node(s).  When
        ``allowed_list`` is non-empty, y is fixed to zero for every node
        outside the list.  The selected nodes, objective, and per-node
        injection profiles are printed to the prefixed
        'inversion_results.dat'.

        filename     -- run file path; defaults to the prefixed 'ampl.run'.
        allowed_list -- optional collection of candidate node ids.

        Returns the path of the results file the AMPL script will write.

        NOTE(review): nearly identical to createAMPLRunReduceMIPnd except
        for the model file loaded — candidate for consolidation.
        """
        if filename is None:
            filename = self._get_prefixed_filename('ampl.run')
        ampl_model = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','ampl','inversion_MIP.mod')
        fid = open(filename,'wt')
        fid.write('option presolve 0;\n')
        fid.write('option substout 0;\n')
        fid.write('\n')
        fid.write('# MIP source inversion model\n')
        fid.write('model %s;\n'%ampl_model)
        fid.write('\n')
        fid.write('# Source inversion data\n')
        fid.write('data '+self._get_prefixed_filename('CONC.dat')+'\n')
        fid.write('data '+self._get_prefixed_filename('INV_ROWS_INDEX.dat')+'\n')
        fid.write('data '+self._get_prefixed_filename('INV_ROWS_VALS.dat')+'\n')
        fid.write('\n')
        # Optional thresholds and injection count are emitted only when set.
        if self.getInversionOption('positive threshold') not in self.none_list:
            fid.write('let P_TH_POS := '+str(self.getInversionOption('positive threshold'))+';\n')
        if self.getInversionOption('negative threshold') not in self.none_list:
            fid.write('let P_TH_NEG := '+str(self.getInversionOption('negative threshold'))+';\n')
        if self.getInversionOption('num injections') not in self.none_list:
            fid.write('let N_INJECTIONS :=' + str(self.getInversionOption('num injections'))+';\n')
        fid.write('# Solve the problem\n')
        fid.write('option solver '+self.getSolverOption('type')+';\n')
        # HACK: Not sure what the correct label is for solvers other than
        # cplex and gurobi so I will throw an error if I encounter options.
        # The alternative is to ask the user for the solver executable and this
        # ampl specific label which would be weird. The solver configuration system
        # will likely be updated in the future so this should work for now.
        options_label = ''
        if self.getSolverOption('type') == 'cplexamp':
            options_label += 'cplex_options'
        elif self.getSolverOption('type') == 'gurobi_ampl':
            options_label += 'gurobi_options'
        if self.getSolverOption('options') not in self.none_list:
            if options_label != '':
                fid.write('option '+options_label+' \'')
                for (key,value) in self.getSolverOption('options').iteritems():
                    if value in self.none_list:
                        # this is the case where an option does not accept a value
                        fid.write(key+' ')
                    else:
                        fid.write(key+'='+str(value)+' ')
                fid.write('\';\n')
            else:
                # Unknown solver: options cannot be mapped to an AMPL option
                # label, so warn and drop them rather than guessing.
                print >> sys.stderr, ' '
                print >> sys.stderr, "WARNING: Solver options in AMPL are currently not handled for"
                print >> sys.stderr, "         the specified solver: ", self.getSolverOption('type')
                print >> sys.stderr, "         All solver options will be ignored."
                print >> sys.stderr, ' '
        #fid.write('option solver cplexamp;\n')
        #if self.getSolverOption('options') is None:
        #fid.write('option cplex_options \'mipdisplay=2\';\n')
        if len(allowed_list)>0:
            fid.write('\n')
            fid.write('set S_ALLOWED_NODES;\n')
            fid.write('let S_ALLOWED_NODES :={')
            i=0
            for allowed_node in allowed_list:
                fid.write(str(allowed_node))
                if(i<len(allowed_list)-1):
                    fid.write(',')
                i+=1
            fid.write('};\n')
            fid.write('fix{n in S_IMPACT_NODES diff S_ALLOWED_NODES} y[n]:=0;\n')
        fid.write('\n')
        fid.write('solve;\n')
        results_file = self._get_prefixed_filename('inversion_results.dat')
        # Report the selected nodes, the objective, then one profile line
        # per selected node (zero profile written when flag stays 0).
        fid.write('printf "Minutes_per_timestep %q\\n",P_MINUTES_PER_TIMESTEP>'+results_file+';\n')
        fid.write('printf "Solution" >>'+results_file+';\n')
        fid.write('param flag;\n')
        fid.write('for{n in S_IMPACT_NODES:y[n]!=0}\n')
        fid.write('{\n')
        fid.write('\tprintf "\\t%q",n>>'+results_file+';\n')
        fid.write('}\n')
        fid.write('printf "\\t%q\\n",OBJ>>'+results_file+';\n')
        fid.write('for{n in S_IMPACT_NODES:y[n]!=0}\n')
        fid.write('{\n')
        fid.write('\tlet flag := 0;\n')
        fid.write('\tfor{t in S_IMPACT_TIMES[n]:t != first(S_IMPACT_TIMES[n])}\n')
        fid.write('\t{\n')
        fid.write('\t\tif mn_tox_gpmin[n,prev(t,S_IMPACT_TIMES[n])]>0 then\n')
        fid.write('\t\t{\n')
        fid.write('\t\tlet flag := 1;\n')
        fid.write('\t\t\tprintf "%q\\t%q\\t%q\\t",prev(t,S_IMPACT_TIMES[n]),t,mn_tox_gpmin[n,prev(t,S_IMPACT_TIMES[n])]>>'+results_file+';\n')
        fid.write('\t\t}\n')
        fid.write('\t}\n')
        # NOTE(review): this write has no trailing '\n' (the LP variant has
        # one) so 'if flag ==0 then' and the following '{' share a line in
        # the generated script — confirm this is intended AMPL formatting.
        fid.write('\tif flag ==0 then')
        fid.write('\t{\n')
        fid.write('\t\tprintf "%q\\t%q\\t%q\\t",(P_TIME_STEPS-1),P_TIME_STEPS,0>>'+results_file+';\n')
        fid.write('\t}\n')
        fid.write('\tprintf "\\n">>'+results_file+';\n')
        fid.write('}\n')
        fid.close()
        return results_file
    def runPYOMOReduceMIPnd(self,allowed_list=set([]),lower_strenght_value=1e-3):
        """Solve the MIP inversion ('nd' variant) with Pyomo.

        Builds models/pyomo/inversion_MIP_nd from the prefixed temp data
        files, appending the configured num-injections / threshold
        parameters to the data when set.  With the default single
        injection, one MIP is solved; otherwise alternative solutions are
        enumerated by repeatedly adding integer cuts until the objective
        degrades by more than 10% or 30 iterations are reached.

        allowed_list         -- optional candidate subset; y is fixed to
                                zero for nodes outside it.
        lower_strenght_value -- strength recorded for a selected node whose
                                injection profile is identically zero.

        Returns a list of [objective, [[node_name, profile], ...]] entries
        with profile segments of [start_sec, stop_sec, strength].

        NOTE(review): nearly identical to runPYOMOReduceMIP except for the
        model module loaded — candidate for consolidation.
        """
        pyomo_module = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','pyomo','inversion_MIP_nd')
        pm = imp.load_source(os.path.basename(pyomo_module),pyomo_module+".py")
        model = pm.model
        dat1=self._get_prefixed_filename('CONC.dat',tempfile=True)
        dat2=self._get_prefixed_filename('INV_ROWS_INDEX.dat',tempfile=True)
        dat3=self._get_prefixed_filename('INV_ROWS_VALS.dat',tempfile=True)
        # Optional parameters are appended to the temp index data file so
        # they flow in through the normal DataPortal load below.
        if self.getInversionOption('num injections') not in self.none_list\
        and self.getInversionOption('num injections') != 1:
            with open(dat2, "a") as myfile:
                myfile.write("\n")
                myfile.write("param N_INJECTIONS :="+str(self.getInversionOption('num injections'))+";\n")
        if self.getInversionOption('positive threshold') not in self.none_list \
        or self.getInversionOption('negative threshold') not in self.none_list:
            with open(dat2, "a") as myfile:
                if self.getInversionOption('positive threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_POS :="+str(self.getInversionOption('positive threshold'))+";\n")
                if self.getInversionOption('negative threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_NEG :="+str(self.getInversionOption('negative threshold'))+";\n")
        modeldata=DataPortal()
        modeldata.load(model=model, filename=dat1)
        modeldata.load(model=model, filename=dat2)
        modeldata.load(model=model, filename=dat3)
        MIPmodel=model.create_instance(modeldata)
        MIPmodel._defer_construction=False
        #opt=SolverFactory("cplex")
        opt=SolverFactory(self.getSolverOption('type'))
        if self.getSolverOption('options') not in self.none_list:
            for (key,val) in self.getSolverOption('options').iteritems():
                if val in self.none_list:
                    # this is the case where an option does not accept a value
                    opt.options[key] = ''
                else:
                    opt.options[key] = val
        # Restrict the binary selection variables to the allowed nodes.
        if len(allowed_list)!=0:
            to_be_fixed=[n for n in MIPmodel.S_IMPACT_NODES if n not in allowed_list]
            for n in to_be_fixed:
                MIPmodel.y[n].fixed=True
                MIPmodel.y[n].value=0
            MIPmodel.preprocess()
        Solution = []
        # Model times are in timesteps; convert to seconds for the output.
        unit_change=value(MIPmodel.P_MINUTES_PER_TIMESTEP)*60
        if self.getInversionOption('num injections') not in self.none_list\
        and self.getInversionOption('num injections') != 1:
            MIPmodel.N_INJECTIONS=self.getInversionOption('num injections')
            MIPmodel.preprocess()
            results = opt.solve(MIPmodel)
            #MIPmodel.load(results)
            # Track objectives across integer-cut iterations; stop once the
            # objective grows past (1+percentage) of the previous value.
            ObjVals = [value(MIPmodel.OBJ),value(MIPmodel.OBJ)]
            percentage=0.1
            i=0
            MAX=30
            while (ObjVals[-1]<=ObjVals[-2]*(1+percentage)) and i<MAX:
                cuts_on = []
                cuts_off = []
                results = opt.solve(MIPmodel)
                #MIPmodel.load(results)
                # Partition nodes by whether they were selected (y==1).
                for n in MIPmodel.S_IMPACT_NODES:
                    if int(round(MIPmodel.y[n].value)) == 1:
                        cuts_on.append(n)
                    else:
                        cuts_off.append(n)
                nodes_=[]
                for n in cuts_on:
                    profile=[]
                    times_minus_first=[time for time in MIPmodel.S_IMPACT_TIMES[n]]
                    for tt in xrange(1,len(times_minus_first)):
                        start=times_minus_first[tt-1]*unit_change
                        end=times_minus_first[tt]*unit_change
                        strength=value(MIPmodel.mn_tox_gpmin[n,times_minus_first[tt-1]])
                        if strength>0:profile.append([start,end,strength])
                    # All-zero profile: report a tiny final-step injection.
                    if len(profile)==0:
                        start=(value(MIPmodel.P_TIME_STEPS)-1)*unit_change
                        end=value(MIPmodel.P_TIME_STEPS)*unit_change
                        profile.append([start,end,lower_strenght_value])
                    nodes_.append([str(n),profile])
                # Add objective and potential injection nodes to the list
                Solution.append([float(ObjVals[-1]),nodes_])
                # Define rule for integer cut
                def int_cut_rule(m):
                    return sum( (1-m.y[r]) for (r) in cuts_on) + \
                           sum( m.y[r] for (r) in cuts_off) \
                           >= 1
                # Add new cut to the model
                setattr(MIPmodel,'int_cut_'+str(i), Constraint(rule=int_cut_rule))
                MIPmodel.preprocess()
                # determine new objective
                ObjVals.append(value(MIPmodel.OBJ))
                i+=1
        else:
            # Single-injection case: one solve, report the selected nodes.
            results = opt.solve(MIPmodel)
            #MIPmodel.load(results)
            nodes_=[]
            for n in MIPmodel.S_IMPACT_NODES:
                if int(round(MIPmodel.y[n].value)) == 1:
                    profile=[]
                    times_minus_first=[time for time in MIPmodel.S_IMPACT_TIMES[n]]
                    for tt in xrange(1,len(times_minus_first)):
                        start=times_minus_first[tt-1]*unit_change
                        end=times_minus_first[tt]*unit_change
                        strength=value(MIPmodel.mn_tox_gpmin[n,times_minus_first[tt-1]])
                        if strength>0:
                            profile.append([start,end,strength])
                    # All-zero profile: report a tiny final-step injection.
                    if len(profile)==0:
                        start=(value(MIPmodel.P_TIME_STEPS)-1)*unit_change
                        end=value(MIPmodel.P_TIME_STEPS)*unit_change
                        profile.append([start,end,lower_strenght_value])
                    nodes_.append([str(n),profile])
            Solution.append([float(value(MIPmodel.OBJ)),nodes_])
        return Solution
    def runPYOMOReduceMIP(self,allowed_list=set([]),lower_strenght_value=1e-3):
        """Solve the MIP source inversion with Pyomo.

        Builds models/pyomo/inversion_MIP from the prefixed temp data
        files, appending the configured num-injections / threshold
        parameters to the data when set.  With the default single
        injection, one MIP is solved; otherwise alternative solutions are
        enumerated by repeatedly adding integer cuts until the objective
        degrades by more than 10% or 30 iterations are reached.

        allowed_list         -- optional candidate subset; y is fixed to
                                zero for nodes outside it.
        lower_strenght_value -- strength recorded for a selected node whose
                                injection profile is identically zero.

        Returns a list of [objective, [[node_name, profile], ...]] entries
        with profile segments of [start_sec, stop_sec, strength].

        NOTE(review): nearly identical to runPYOMOReduceMIPnd except for
        the model module loaded — candidate for consolidation.
        """
        pyomo_module = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','pyomo','inversion_MIP')
        pm = imp.load_source(os.path.basename(pyomo_module),pyomo_module+".py")
        model = pm.model
        dat1=self._get_prefixed_filename('CONC.dat',tempfile=True)
        dat2=self._get_prefixed_filename('INV_ROWS_INDEX.dat',tempfile=True)
        dat3=self._get_prefixed_filename('INV_ROWS_VALS.dat',tempfile=True)
        # Optional parameters are appended to the temp index data file so
        # they flow in through the normal DataPortal load below.
        if self.getInversionOption('num injections') not in self.none_list\
        and self.getInversionOption('num injections') != 1:
            with open(dat2, "a") as myfile:
                myfile.write("\n")
                myfile.write("param N_INJECTIONS :="+str(self.getInversionOption('num injections'))+";\n")
        if self.getInversionOption('positive threshold') not in self.none_list \
        or self.getInversionOption('negative threshold') not in self.none_list:
            with open(dat2, "a") as myfile:
                if self.getInversionOption('positive threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_POS :="+str(self.getInversionOption('positive threshold'))+";\n")
                if self.getInversionOption('negative threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_NEG :="+str(self.getInversionOption('negative threshold'))+";\n")
        modeldata=DataPortal()
        modeldata.load(model=model, filename=dat1)
        modeldata.load(model=model, filename=dat2)
        modeldata.load(model=model, filename=dat3)
        MIPmodel=model.create_instance(modeldata)
        MIPmodel._defer_construction=False
        #opt=SolverFactory("cplex")
        opt=SolverFactory(self.getSolverOption('type'))
        if self.getSolverOption('options') not in self.none_list:
            for (key,val) in self.getSolverOption('options').iteritems():
                if val in self.none_list:
                    # this is the case where an option does not accept a value
                    opt.options[key] = ''
                else:
                    opt.options[key] = val
        # Restrict the binary selection variables to the allowed nodes.
        if len(allowed_list)!=0:
            to_be_fixed=[n for n in MIPmodel.S_IMPACT_NODES if n not in allowed_list]
            for n in to_be_fixed:
                MIPmodel.y[n].fixed=True
                MIPmodel.y[n].value=0
            MIPmodel.preprocess()
        Solution = []
        # Model times are in timesteps; convert to seconds for the output.
        unit_change=value(MIPmodel.P_MINUTES_PER_TIMESTEP)*60
        if self.getInversionOption('num injections') not in self.none_list\
        and self.getInversionOption('num injections') != 1:
            MIPmodel.N_INJECTIONS=self.getInversionOption('num injections')
            MIPmodel.preprocess()
            results = opt.solve(MIPmodel)
            #MIPmodel.load(results)
            # Track objectives across integer-cut iterations; stop once the
            # objective grows past (1+percentage) of the previous value.
            ObjVals = [value(MIPmodel.OBJ),value(MIPmodel.OBJ)]
            percentage=0.1
            i=0;MAX=30
            while (ObjVals[-1]<=ObjVals[-2]*(1+percentage)) and i<MAX:
                cuts_on = []
                cuts_off = []
                results = opt.solve(MIPmodel)
                #MIPmodel.load(results)
                # Partition nodes by whether they were selected (y==1).
                for n in MIPmodel.S_IMPACT_NODES:
                    if int(round(MIPmodel.y[n].value)) == 1:
                        cuts_on.append(n)
                    else:
                        cuts_off.append(n)
                nodes_=[]
                for n in cuts_on:
                    profile=[]
                    times_minus_first=[time for time in MIPmodel.S_IMPACT_TIMES[n]]
                    for tt in xrange(1,len(times_minus_first)):
                        start=times_minus_first[tt-1]*unit_change
                        end=times_minus_first[tt]*unit_change
                        strength=value(MIPmodel.mn_tox_gpmin[n,times_minus_first[tt-1]])
                        if strength>0:profile.append([start,end,strength])
                    # All-zero profile: report a tiny final-step injection.
                    if len(profile)==0:
                        start=(value(MIPmodel.P_TIME_STEPS)-1)*unit_change
                        end=value(MIPmodel.P_TIME_STEPS)*unit_change
                        profile.append([start,end,lower_strenght_value])
                    nodes_.append([str(n),profile])
                # Add objective and potential injection nodes to the list
                Solution.append([float(ObjVals[-1]),nodes_])
                # Define rule for integer cut
                def int_cut_rule(m):
                    return sum( (1-m.y[r]) for (r) in cuts_on) + \
                           sum( m.y[r] for (r) in cuts_off) \
                           >= 1
                # Add new cut to the model
                setattr(MIPmodel,'int_cut_'+str(i), Constraint(rule=int_cut_rule))
                MIPmodel.preprocess()
                # determine new objective
                ObjVals.append(value(MIPmodel.OBJ))
                i+=1
        else:
            # Single-injection case: one solve, report the selected nodes.
            results = opt.solve(MIPmodel)
            #MIPmodel.load(results)
            nodes_=[]
            for n in MIPmodel.S_IMPACT_NODES:
                if int(round(MIPmodel.y[n].value)) == 1:
                    profile=[]
                    times_minus_first=[time for time in MIPmodel.S_IMPACT_TIMES[n]]
                    for tt in xrange(1,len(times_minus_first)):
                        start=times_minus_first[tt-1]*unit_change
                        end=times_minus_first[tt]*unit_change
                        strength=value(MIPmodel.mn_tox_gpmin[n,times_minus_first[tt-1]])
                        if strength>0:
                            profile.append([start,end,strength])
                    # All-zero profile: report a tiny final-step injection.
                    if len(profile)==0:
                        start=(value(MIPmodel.P_TIME_STEPS)-1)*unit_change
                        end=value(MIPmodel.P_TIME_STEPS)*unit_change
                        profile.append([start,end,lower_strenght_value])
                    nodes_.append([str(n),profile])
            Solution.append([float(value(MIPmodel.OBJ)),nodes_])
        return Solution
    def createAMPLRunStep(self,filename=None,allowed_list=set([])):
        """Write an AMPL run file for the step-injection MIP inversion.

        The generated script loads models/ampl/MIP_step.mod plus the
        prefixed CONC/INV_ROWS_* data files, then loops over all impact
        nodes: for each node n it unfixes only n's binary step variables
        y[n,t], solves, locates the step's start time from the first
        change in y, and prints the objective and the (start, end,
        strength) line to the prefixed 'inversion_results.dat'.

        filename     -- run file path; defaults to the prefixed 'ampl.run'.
        allowed_list -- when non-empty, an S_ALLOWED_NODES set is written
                        into the script (NOTE(review): unlike the other
                        generators, the node loop below still iterates all
                        of S_IMPACT_NODES — confirm whether the set is
                        used by the model itself).

        Returns the path of the results file the AMPL script will write.
        """
        if filename is None:
            filename = self._get_prefixed_filename('ampl.run')
        ampl_model = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','ampl','MIP_step.mod')
        runfile = open(filename,'wt')
        runfile.write('option presolve 0;\n')
        runfile.write('option substout 0;\n')
        runfile.write('\n')
        runfile.write('# MIP step_inversion model\n')
        runfile.write('model %s;\n'%ampl_model)
        runfile.write('\n')
        runfile.write('# Source inversion data\n')
        runfile.write('data '+self._get_prefixed_filename('CONC.dat')+'\n')
        runfile.write('data '+self._get_prefixed_filename('INV_ROWS_INDEX.dat')+'\n')
        runfile.write('data '+self._get_prefixed_filename('INV_ROWS_VALS.dat')+'\n')
        results_file = self._get_prefixed_filename('inversion_results.dat')
        # Optional measurement thresholds are emitted only when configured.
        if self.getInversionOption('positive threshold') not in self.none_list:
            runfile.write('let P_TH_POS := '+str(self.getInversionOption('positive threshold'))+';\n')
        if self.getInversionOption('negative threshold') not in self.none_list:
            runfile.write('let P_TH_NEG := '+str(self.getInversionOption('negative threshold'))+';\n')
        if len(allowed_list)>0:
            runfile.write('\n')
            runfile.write('set S_ALLOWED_NODES;\n')
            runfile.write('let S_ALLOWED_NODES :={')
            i=0
            for allowed_node in allowed_list:
                runfile.write(str(allowed_node))
                if(i<len(allowed_list)-1):
                    runfile.write(',')
                i+=1
            runfile.write('};\n')
        runfile.write('\n')
        runfile.write('# Solve the problem\n')
        runfile.write('option solver '+self.getSolverOption('type')+';\n')
        # HACK: Not sure what the correct label is for solvers other than
        # cplex and gurobi so I will throw an error if I encounter options.
        # The alternative is to ask the user for the solver executable and this
        # ampl specific label which would be weird. The solver configuration system
        # will likely be updated in the future so this should work for now.
        options_label = ''
        if self.getSolverOption('type') == 'cplexamp':
            options_label += 'cplex_options'
        elif self.getSolverOption('type') == 'gurobi_ampl':
            options_label += 'gurobi_options'
        if self.getSolverOption('options') not in self.none_list:
            if options_label != '':
                runfile.write('option '+options_label+' \'')
                for (key,value) in self.getSolverOption('options').iteritems():
                    if value in self.none_list:
                        # this is the case where an option does not accept a value
                        runfile.write(key+' ')
                    else:
                        runfile.write(key+'='+str(value)+' ')
                runfile.write('\';\n')
            else:
                # Unknown solver: options cannot be mapped to an AMPL option
                # label, so warn and drop them rather than guessing.
                print >> sys.stderr, ' '
                print >> sys.stderr, "WARNING: Solver options in AMPL are currently not handled for"
                print >> sys.stderr, "         the specified solver: ", self.getSolverOption('type')
                print >> sys.stderr, "         All solver options will be ignored."
                print >> sys.stderr, ' '
        #runfile.write('option solver cplexamp;\n')
        #runfile.write('option cplex_options \'timing=2 mipdisplay=2\';\n\n')
        runfile.write('\n')
        # Per-node solve loop: the step start time is the first timestep at
        # which y[n,t] changes, or P_TIME_STEPS-1 when y is identically 0.
        runfile.write('printf "Timestep_minutes=\\t%q\\n",P_MINUTES_PER_TIMESTEP>'+results_file+';\n')
        runfile.write('param start_time;\n')
        runfile.write('for{n in S_IMPACT_NODES}\n')
        runfile.write('{\n')
        runfile.write('\tunfix {nn in S_IMPACT_NODES,t in S_ALL_TIMES} y[nn,t];\n')
        runfile.write('\tfix {nn in S_IMPACT_NODES,t in S_ALL_TIMES:nn!=n} y[nn,t]:=0;\n')
        runfile.write('\tsolve;\n')
        runfile.write('\tprintf "Solution\\t%q\\t%q\\n",n,OBJ>>'+results_file+';\n')
        runfile.write('\tif card({t in S_ALL_TIMES:y[n,t]!=0})=0 then let start_time := P_TIME_STEPS-1;\n')
        runfile.write('\telse\n')
        runfile.write('\t{\n')
        runfile.write('\t\tif y[n,first(S_ALL_TIMES)]==1 then let start_time := first(S_ALL_TIMES);\n')
        runfile.write('\t\telse\n')
        runfile.write('\t\t{\n')
        runfile.write('\t\t\tfor{t in S_ALL_TIMES:t!=first(S_ALL_TIMES)}\n')
        runfile.write('\t\t\t{\n')
        runfile.write('\t\t\t\tif y[n,t]!=y[n,prev(t)] then let start_time := t\n')
        runfile.write('\t\t\t}\n')
        runfile.write('\t\t}\n')
        runfile.write('\t}\n')
        runfile.write('\tprintf "%q\\t%q\\t%q\\n",start_time,P_TIME_STEPS,strength>>'+results_file+';\n')
        runfile.write('}\n')
        runfile.close()
        return results_file
    def runPYOMOStep(self,allowed_list=set([]),lower_strenght_value=1e-3):
        """Solve the step-injection MIP per candidate node with Pyomo.

        Loads models/pyomo/step_MIP and the prefixed temp data files
        (appending the configured thresholds when set).  For each
        candidate node (all impact nodes, or ``allowed_list`` intersected
        with them), only that node's binary variables y[n,t] are unfixed
        and the MIP is solved.  The single profile per node starts at the
        first timestep with y!=0 (or the last timestep when the solution
        is all zero) and always runs to the simulation end.

        allowed_list         -- optional candidate subset.
        lower_strenght_value -- strength substituted when the solved
                                injection strength is <= 0.

        Returns a list of [objective, [[node_name, [profile]]]] entries,
        where profile is a single [start_sec, stop_sec, strength] triple.
        """
        pyomo_module = os.path.join(os.path.dirname(os.path.abspath(__file__)),'models','pyomo','step_MIP')
        pm = imp.load_source(os.path.basename(pyomo_module),pyomo_module+".py")
        model = pm.model
        dat1=self._get_prefixed_filename('CONC.dat',tempfile=True)
        dat2=self._get_prefixed_filename('INV_ROWS_INDEX.dat',tempfile=True)
        dat3=self._get_prefixed_filename('INV_ROWS_VALS.dat',tempfile=True)
        # Thresholds are appended to the temp index data file so they flow
        # in through the normal DataPortal load below.
        if self.getInversionOption('positive threshold') not in self.none_list \
        or self.getInversionOption('negative threshold') not in self.none_list:
            with open(dat2, "a") as myfile:
                if self.getInversionOption('positive threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_POS :="+str(self.getInversionOption('positive threshold'))+";\n")
                if self.getInversionOption('negative threshold') not in self.none_list:
                    myfile.write("\n")
                    myfile.write("param P_TH_NEG :="+str(self.getInversionOption('negative threshold'))+";\n")
        modeldata=DataPortal()
        modeldata.load(model=model, filename=dat1)
        modeldata.load(model=model, filename=dat2)
        modeldata.load(model=model, filename=dat3)
        Stepmod=model.create_instance(modeldata)
        #opt=SolverFactory("cplex")
        opt=SolverFactory(self.getSolverOption('type'))
        if self.getSolverOption('options') not in self.none_list:
            for (key,val) in self.getSolverOption('options').iteritems():
                if val in self.none_list:
                    # this is the case where an option does not accept a value
                    opt.options[key] = ''
                else:
                    opt.options[key] = val
        #start_timing=time()
        Solution=[]
        # Restrict candidates to nodes the model actually contains.
        allowed_list=[n for n in allowed_list if n in Stepmod.S_IMPACT_NODES]
        loop_through=Stepmod.S_IMPACT_NODES if len(allowed_list)==0 else allowed_list
        # Model times are in timesteps; convert to seconds for the output.
        unit_change=value(Stepmod.P_MINUTES_PER_TIMESTEP)*60
        # Start with every binary variable fixed to zero; each iteration
        # below unfixes a single node's variables and re-fixes them after.
        for n in Stepmod.S_IMPACT_NODES:
            for t in Stepmod.S_ALL_TIMES:
                Stepmod.y[n,t].value = 0
                Stepmod.y[n,t].fixed = True
        for n in loop_through:
            #unfix binary variables
            for t in Stepmod.S_ALL_TIMES:
                Stepmod.y[n,t].fixed = False
            Stepmod.preprocess()
            solved_instance=opt.solve(Stepmod)
            #Stepmod.load(solved_instance)
            start_time=0
            # Step injections always run to the end of the simulation.
            stop_time=value(Stepmod.P_TIME_STEPS)*unit_change
            injection_strength=value(Stepmod.strength)
            timesteps=[ts for ts in Stepmod.S_ALL_TIMES if Stepmod.y[n,ts].value!=0]
            if len(timesteps)>0:
                start_time=timesteps[0]*unit_change
            else:
                # No active timestep: degenerate profile in the last step.
                start_time=(value(Stepmod.P_TIME_STEPS)-1)*unit_change
            if injection_strength<=0:
                injection_strength=lower_strenght_value
            profile=[start_time,stop_time,injection_strength]
            Solution.append([value(Stepmod.OBJ),[[str(n),[profile]]]])
            for t in Stepmod.S_ALL_TIMES:
                Stepmod.y[n,t].value = 0
                Stepmod.y[n,t].fixed = True
        #stop_timing=time()
        #print 'PYOMO timing',stop_timing-start_timing
        return Solution
# the profile output file is used by the grab sample code
def writeProfiles(self, results_list, tao):
output_prefix = self.getConfigureOption('output prefix')
if output_prefix in self.none_list:
output_prefix = ''
if self.getInversionOption('algorithm') == 'bayesian':
# the profile file is actually printed by the c++ code for the probablility algorithm
if output_prefix == '':
filename = 'profile.tsg'
impact_nodes_file = 'Likely_Nodes.dat'
else:
filename = output_prefix + '_profile.tsg'
impact_nodes_file = output_prefix + 'Likely_Nodes.dat'
else:
if output_prefix == '':
filename = 'profile.tsg'
impact_nodes_file = 'Likely_Nodes.dat'
else:
filename = output_prefix + 'profile.tsg'
impact_nodes_file = output_prefix + 'Likely_Nodes.dat'
#
profile = open(filename, 'w')
impact_nodes = open(impact_nodes_file, 'w')
#print tao
events = 0
for result in results_list:
if result[0] >= tao:
events += 1
for node in result[1]:
#print node
if self.getInversionOption('output impact nodes'):
impact_nodes.write(node[0] + '\n')
profile.write(node[0]+ '\tMASS\t' + str(node[1][0][2]) + '\t' + str(node[1][0][0]) + '\t' + str(node[1][0][1]) + '\n')
profile.close()
impact_nodes.close()
return filename, impact_nodes_file
def writeCSAInputFiles(self):
# Prefix
if self.getConfigureOption('output prefix') not in self.none_list:
output_prefix = self.getConfigureOption("output prefix")
else:
output_prefix = ""
# Read measurements
csa_measures = {}
if self.getMeasurementOption('grab samples') not in self.none_list:
meas_file_name = self.getMeasurementOption('grab samples')
meas_file = open(meas_file_name, "r")
for line in meas_file:
line = line.strip()
if len(line) == 0: continue
if line[0] == "#" : continue
[sensor, time, meas] = line.split()
if csa_measures.has_key(sensor):
csa_measures[sensor]['meas'].append(meas)
csa_measures[sensor]['time'].append(int(time))
csa_measures[sensor]['length'] += 1
else:
csa_measures[sensor] = {'meas':[meas], 'time':[int(time)], 'length': 1}
meas_file.close()
all_meas_length = [s['length'] for s in csa_measures.itervalues()]
meas_length = all_meas_length[0] # Used when writing meas file
assert max(all_meas_length) == min(all_meas_length) != 1, "INPUT ERROR: CSA algorithm does not support grabsamples. Make " + \
"sure the length of measurements from each sensor is the same. Also make sure there are more than 1 measurements."
else:
raise IOError("ERROR: Measurements file not specified.")
# Write CSA sensor file
csa_sensor_file = open(output_prefix+"csa_sensors", 'w')
for key in csa_measures.iterkeys():
csa_sensor_file.write(key+'\n')
csa_sensor_file.close()
# Write CSA Measurements file
csa_meas_file = open(output_prefix+"csa_measurements",'w')
for t in range(0,meas_length):
for sensor_ in csa_measures.itervalues():
csa_meas_file.write(sensor_['meas'][t]+'\t')
csa_meas_file.write('\n')
csa_meas_file.close()
# Calculate return values
num_sensors = len(csa_measures)
item = csa_measures.popitem()
meas_time_step_sec = item[1]['time'][1] - item[1]['time'][0]
sim_stop_time = (item[1]['time'][-1])/3600.0 #one step ahead
#logger.debug("SIM STOP TIME: ",str(sim_stop_time))
return [num_sensors, meas_time_step_sec, sim_stop_time]
    def readCSAOutputFiles(self, sim_stop_time, meas_step_sec):
        """Parse the CSA Smatrix output file into the common Solution format.

        sim_stop_time -- simulation stop time in hours; matched against the
                         time-stamp lines written in <prefix>Smatrix.txt.
        meas_step_sec -- measurement time step in seconds.

        Returns a one-element list [[1, all_nodes_with_obj]]; every candidate
        event is assigned the same objective of 1.

        Raises RuntimeError when the EPANET file cannot be loaded via
        pyepanet or a node id lookup fails.
        """
        if self.getConfigureOption('output prefix') not in self.none_list:
            output_prefix = self.getConfigureOption("output prefix")
        else:
            output_prefix = ""
        Smatrix = []
        matrix_file_name = output_prefix + "Smatrix.txt"
        matrix_file = open(matrix_file_name, 'r')
        sim_stop_time = round(sim_stop_time,2)
        time_found = False
        for line in matrix_file:
            # Every line after the one containing the stop time is matrix data.
            if time_found:
                Smatrix.append(line.split())
            # NOTE(review): substring match -- '1.5' also matches '11.5';
            # confirm the Smatrix time-stamp line format makes this safe.
            if str(sim_stop_time) in line.strip():
                time_found = True
        matrix_file.close()
        # Loop through each node and time and save solution
        horizon_sec = sim_stop_time*3600.0
        # Change value if specified (option is given in minutes)
        if self.getInversionOption('horizon') not in self.none_list:
            horizon_sec = self.getInversionOption('horizon')*60.0
        result_window_start_time = sim_stop_time*3600.0 - horizon_sec
        inj_stop_time_sec = sim_stop_time*3600.0 #Assumption
        inj_strength = 1000 #Assumption
        node_count = len(Smatrix)
        time_count = len(Smatrix[0])
        # Load epanet data to get node names
        try:
            enData = pyepanet.ENepanet()
            enData.ENopen(self.opts['network']['epanet file'],'tmp.rpt')
        except:
            msg = 'EPANET inp file not loaded using pyepanet'
            logger.error(msg)
            raise RuntimeError(msg)
        all_nodes_with_obj = []
        for i in range(node_count):
            try:
                # EPANET node indices are 1-based.
                node = enData.ENgetnodeid(i+1)
            except:
                msg = 'Pyepanet could not find node id'
                logger.error(msg)
                raise RuntimeError(msg)
            all_node_inj_profile = []
            for j in range(time_count):
                # Smatrix entries near 1 mark a likely injection at node i, time j.
                if float(Smatrix[i][j]) > 0.9:
                    inj_start_time_sec = result_window_start_time + j*meas_step_sec
                    inj_profile = [inj_start_time_sec,inj_stop_time_sec,inj_strength]
                    all_node_inj_profile.append(inj_profile)
            if len(all_node_inj_profile) > 0:
                all_nodes_with_obj.append([node,all_node_inj_profile])
        enData.ENclose()
        Solution = [[1, all_nodes_with_obj]] #All events are assumed to have the same objective = 1
        return Solution
    def writeCSAresults(self,Solution):
        """Serialize CSA inversion results to JSON, TSG, and likely-nodes files.

        Solution -- list of [objective, [[node_name, [profile, ...]], ...]]
                    entries, each profile being [start, stop, strength].

        Returns [json_path, tsg_filename, num_events, impact_nodes_file,
        objective_val, candidate_node_names], where num_events counts the
        nodes of the first result and every objective is reported as 1.
        """
        output_prefix = self.getConfigureOption('output prefix')
        if output_prefix not in self.none_list:
            json_file = self.getConfigureOption('output prefix') + '_inversion.json'
            tsg_filename = output_prefix + 'profile.tsg'
            impact_nodes_file = output_prefix + 'Likely_Nodes.dat'
        else:
            json_file = 'inversion.json'
            tsg_filename = 'profile.tsg'
            impact_nodes_file = 'Likely_Nodes.dat'
        json_file_wDir = os.path.join(os.path.abspath(os.curdir),json_file)
        wst_util.declare_tempfile(json_file_wDir)
        num_events = len(Solution[0][1])
        # The triple-quoted block below is a disabled console summary kept
        # for reference; it is a no-op string expression at runtime.
        '''
        print '\n*********************************************************************\n'
        print '\t\t\tInversion Results\n'
        print'\tNumber of candidate events:\t\t\t',num_events
        print'\tInversion algorithm:\t\t\t\tCSA'
        print '\tDetailed results in:\t' + json_file_wDir +'\n'
        print '*********************************************************************\n'
        '''
        #
        # Build the JSON-serializable results structure.
        inversion_nodes = []
        results_object = []
        for result in Solution:
            tmp_results = dict()
            nodes_list = []
            for node_i in result[1]:
                profile_list = []
                tmp_node_dic = dict()
                for injection in node_i[1]:
                    #print injection
                    profile_list.append(dict(Start=injection[0], Stop=injection[1], Strength=injection[2]))
                tmp_node_dic['Name'] = node_i[0]
                tmp_node_dic['Profile'] = profile_list
                nodes_list.append(tmp_node_dic)
            inversion_nodes.append([i['Name'] for i in nodes_list])
            tmp_results['Objective'] = result[0]
            tmp_results['Nodes'] = nodes_list
            tmp_results['run date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            results_object.append(tmp_results)
        f = open(json_file, 'w')
        json.dump(results_object, f,indent=2)
        f.close()
        # Print TSG and Likely Nodes File
        profile = open(tsg_filename, 'w')
        if self.getInversionOption('output impact nodes'):
            impact_nodes = open(impact_nodes_file, 'w')
        for result in Solution:
            for node in result[1]:
                #print node
                if self.getInversionOption('output impact nodes'):
                    impact_nodes.write(node[0] + '\n')
                # TSG line: <node> MASS <strength> <start> <stop>
                profile.write(node[0]+ '\tMASS\t' + str(node[1][0][2]) + '\t' + str(node[1][0][0]) + '\t' + str(node[1][0][1]) + '\n')
        profile.close()
        if self.getInversionOption('output impact nodes'):
            impact_nodes.close()
        # CSA reports a flat objective of 1 per candidate node.
        objective_val = [1 for i in inversion_nodes[0]]
        return [json_file_wDir, tsg_filename, num_events, impact_nodes_file, objective_val, inversion_nodes[0]]
def writeAllowedNodesFile(self,filename,results_list,tao):
fnodes=open(filename,'w')
for result in results_list:
if result[0]>=tao:
for node in result[1]:
fnodes.write(node[0]+ '\n')
fnodes.close()
def writeVisualizationFile(self, Solution, tao, yml_file, html_filename, yml_filename):
inp_file = os.path.abspath(self.opts['network']['epanet file'])
#
if self.getConfigureOption('output prefix') not in self.none_list:
#output_prefix = os.path.join('vis', os.path.basename(self.opts['configure']['output prefix']))
output_prefix = os.path.join(os.path.abspath(self.opts['configure']['output prefix']))
else:
output_prefix = ""
#
if inp_file and len(inp_file) > 0:
svg = inp2svg.inp2svg(inp_file)
#
sensor_list = []
source_list = []
#
if self.getMeasurementOption('grab samples') not in self.none_list:
sensor_file_name = self.getMeasurementOption('grab samples')
sensor_file = open(sensor_file_name, "r")
sensors = sensor_file.readlines()
for line in sensors:
line = line.strip()
if len(line) == 0: continue
if line[0] == "#" : continue
sensor = line.split()[0]
svg.addShapeOn("square", sensor, sc="#000099", sw=2, fo=0, fs=15)
sensor_list.append(sensor)
nlen1 = len(Solution)
bOpt = self.getInversionOption("algorithm") == "optimization"
bCSA = self.getInversionOption("algorithm") in ["csa","CSA"]
max_objective = 0
if bOpt or bCSA:
for i in range(0, nlen1):
objective = Solution[i][0]
max_objective = max(max_objective, objective)
for i in range(0, nlen1):
objective = Solution[i][0]
if objective < tao: continue
scale = objective / max_objective * 15
if len(Solution[i][1]) == 0:
pass
else:
node_name = Solution[i][1][0][0]
svg.addShapeOn("circle", node_name, fc="#aa0000", sc="#bb0000", sw=1, so=1, fs=scale)
source_list.append({"objective": objective / max_objective, "name": node_name})
else:
for i in range(0, nlen1):
objective = Solution[i]["Objective"]
max_objective = max(max_objective, objective)
for i in range(0, nlen1):
objective = Solution[i]["Objective"]
scale = objective / max_objective * 15
node_name = Solution[i]["Nodes"][0]["Name"]
svg.addShapeOn("circle", node_name, fc="#aa0000", sc="#bb0000", sw=1, so=1, fs=scale)
source_list.append({"objective": objective / max_objective, "name": node_name})
#
scale = 15 / max_objective if max_objective > 0 else 15
svg.setWidth(800)
svg.setNodeSize(3)
svg.setLinkSize(1)
#
svg.addLayer("Measurement locations")
svg.getLayer(0)["type" ] = svg.LAYER_TYPE_NODE
svg.getLayer(0)["shape" ] = "Square"
svg.getLayer(0)["fill color" ] = "#000099"
svg.getLayer(0)["fill opacity"] = 0
svg.getLayer(0)["line size" ] = 2
svg.getLayer(0)["line color" ] = "#000099"
svg.getLayer(0)["line opacity"] = 0.6
#
svg.addLayer("Possible source locations")
svg.getLayer(1)["type" ] = svg.LAYER_TYPE_NODE
svg.getLayer(1)["shape" ] = "Circle"
svg.getLayer(1)["fill color" ] = "#aa0000"
svg.getLayer(1)["fill opacity"] = 0.6
svg.getLayer(1)["line color" ] = "#aa0000"
svg.getLayer(1)["line size" ] = 1
svg.getLayer(1)["line opacity"] = 0.8
#
svg.setLegendColor("white")
svg.setBackgroundColor("white")
svg.setLegendXY(10,10)
svg.showLegend()
#svg.writeFile(html_filename)
else:
logger.info('EPANet file input requried for Visualization.')
inp_file = "<REQUIRED INPUT>"
#
f = open(yml_filename, "w")
f.write("# YML input file for custom Inversion visualization\n")
f.write("\n")
#
vis = {}
vis["network"] = {}
vis["network"]["epanet file"] = inp_file
#
vis["visualization"] = {}
vis["visualization"]["nodes"] = {}
vis["visualization"]["nodes"]["size"] = 3
vis["visualization"]["links"] = {}
vis["visualization"]["links"]["size"] = 1
#
vis["visualization"]["layers"] = []
#
vis["visualization"]["layers"].append({})
vis["visualization"]["layers"][0]["label" ] = "Measurement locations"
vis["visualization"]["layers"][0]["shape" ] = "Square"
vis["visualization"]["layers"][0]["fill"] = {}
vis["visualization"]["layers"][0]["fill"]["color" ] = "#000099"
vis["visualization"]["layers"][0]["fill"]["size" ] = 15
vis["visualization"]["layers"][0]["fill"]["opacity"] = 0
vis["visualization"]["layers"][0]["line"] = {}
vis["visualization"]["layers"][0]["line"]["color" ] = "#000099"
vis["visualization"]["layers"][0]["line"]["size" ] = 2
vis["visualization"]["layers"][0]["line"]["opacity"] = 0.6
vis["visualization"]["layers"][0]["locations" ] = []
sensor_list = list(set(sensor_list))
for sensor in sensor_list:
vis["visualization"]["layers"][0]["locations"].append(sensor)
#
vis["visualization"]["layers"].append({})
vis["visualization"]["layers"][1]["label" ] = "Possible source locations"
vis["visualization"]["layers"][1]["file" ] = yml_file
vis["visualization"]["layers"][1]["shape" ] = "Circle"
vis["visualization"]["layers"][1]["fill"] = {}
vis["visualization"]["layers"][1]["fill"]["size" ] = "['inversion']['node likeliness'][i] * " + str(scale)
vis["visualization"]["layers"][1]["fill"]["color" ] = "#aa0000"
vis["visualization"]["layers"][1]["fill"]["opacity"] = 0.6
vis["visualization"]["layers"][1]["line"] = {}
vis["visualization"]["layers"][1]["line"]["color" ] = "#aa0000"
vis["visualization"]["layers"][1]["line"]["size" ] = 1
vis["visualization"]["layers"][1]["line"]["opacity"] = 0.8
vis["visualization"]["layers"][1]["locations" ] = "['inversion']['candidate nodes'][i]"
#
vis["configure"] = {}
vis["configure"]["output prefix"] = output_prefix
#
yaml.dump(vis, f, default_flow_style=False)
return
# General Option SET functions
def setNetworkOption(self, name, value):
self.opts['network'][name] = value
return
def setMeasurementOption(self, name, value):
self.opts['measurements'][name] = value
return
def setInversionOption(self, name, value):
self.opts['inversion'][name] = value
return
def setConfigureOption(self, name, value):
self.opts['configure'][name] = value
return
# General Option GET functions
def getConfigureOption(self, name):
return self.opts['configure'][name]
def getInversionOption(self, name):
return self.opts['inversion'][name]
def getMeasurementOption(self, name):
return self.opts['measurements'][name]
def getNetworkOption(self, name):
return self.opts['network'][name]
def getSolverOption(self, name):
return self.opts['solver'][name]
def getInternalOption(self, name):
return self.opts['internal'][name]
| 90,995 | 1,899 | 24 |
48d5c5150e9eef962ead6d8f15a892e6c243fa11 | 170 | py | Python | dojo/templatetags/get_note_status.py | eaguade-bodas/django-DefectDojo | 6a6353367c289710f8f0be0ab8742bc64439195c | [
"BSD-3-Clause"
] | 1 | 2020-09-10T02:26:49.000Z | 2020-09-10T02:26:49.000Z | dojo/templatetags/get_note_status.py | eaguade-bodas/django-DefectDojo | 6a6353367c289710f8f0be0ab8742bc64439195c | [
"BSD-3-Clause"
] | 51 | 2020-06-06T00:29:00.000Z | 2022-03-10T23:14:37.000Z | dojo/templatetags/get_note_status.py | eaguade-bodas/django-DefectDojo | 6a6353367c289710f8f0be0ab8742bc64439195c | [
"BSD-3-Clause"
] | 1 | 2020-11-06T10:54:46.000Z | 2020-11-06T10:54:46.000Z | from django import template
register = template.Library()
@register.filter(name='get_public_notes')
| 21.25 | 41 | 0.788235 | from django import template
register = template.Library()
@register.filter(name='get_public_notes')
def get_public_notes(notes):
    """Return only the notes in the given queryset that are not private."""
    public_only = notes.filter(private=False)
    return public_only
| 46 | 0 | 22 |
3364177ce341fed85da4559f15a29921e8bc7cba | 9,965 | py | Python | baseline/tf/lm/training/utils.py | blester125/baseline | 4ad4147d4a88a42b309c6784a95b0b9f1faa2c60 | [
"Apache-2.0"
] | 1 | 2019-08-13T21:35:20.000Z | 2019-08-13T21:35:20.000Z | baseline/tf/lm/training/utils.py | blester125/baseline | 4ad4147d4a88a42b309c6784a95b0b9f1faa2c60 | [
"Apache-2.0"
] | null | null | null | baseline/tf/lm/training/utils.py | blester125/baseline | 4ad4147d4a88a42b309c6784a95b0b9f1faa2c60 | [
"Apache-2.0"
] | null | null | null | import os
import time
import numpy as np
import tensorflow as tf
from eight_mile.tf.layers import reload_checkpoint
from eight_mile.tf.optz import optimizer
from baseline.tf.tfy import TRAIN_FLAG, SET_TRAIN_FLAG
from baseline.train import Trainer, register_trainer
from baseline.model import create_model_for
from collections import OrderedDict
# Number of batches to prefetch if using tf.datasets
NUM_PREFETCH = 2
# The shuffle buffer
SHUF_BUF_SZ = 5000
_EVENT_FILE_GLOB_PATTERN = 'events.out.tfevents.*'
def _summaries(eval_dir):
    """Yields `tensorflow.Event` protos from event files in the eval dir.

    Args:
        eval_dir: Directory containing summary files with eval metrics.

    Yields:
        `tensorflow.Event` object read from the event files.
    """
    # A missing eval_dir is not an error: the generator simply yields nothing.
    if not tf.gfile.Exists(eval_dir):
        return
    pattern = os.path.join(eval_dir, _EVENT_FILE_GLOB_PATTERN)
    for event_file in tf.gfile.Glob(pattern):
        for summary_event in tf.train.summary_iterator(event_file):
            yield summary_event
def read_eval_metrics(eval_dir):
    """Helper to read eval metrics from eval summary files.

    Args:
        eval_dir: Directory containing summary files with eval metrics.

    Returns:
        An `OrderedDict` mapping global step to a `dict` of metric names and
        values, ordered by increasing step.
    """
    by_step = {}
    for event in _summaries(eval_dir):
        if not event.HasField('summary'):
            continue
        # Keep only scalar ('simple_value') summaries for this step.
        step_metrics = {
            value.tag: value.simple_value
            for value in event.summary.value
            if value.HasField('simple_value')
        }
        if step_metrics:
            by_step[event.step] = step_metrics
    return OrderedDict(sorted(by_step.items(), key=lambda item: item[0]))
def to_tensors(ts):
    """Convert a data feed into a tuple of `features` (`dict`) and `y` values

    This method is required to produce `tf.dataset`s from the input data feed.
    The special `ids` key is dropped; the `y` field is popped out of the
    features and returned separately as the labels.  (The previous docstring
    documented a `lengths_key` parameter and `_lengths` handling that this
    function does not implement.)

    :param ts: The data feed to convert -- a list of dicts whose values are
        sequences of per-sample arrays
    :return: A `tuple` of `features` (each value a stacked `int32` array)
        and `y` (labels)
    """
    # This is kind of a hack: 'ids' is bookkeeping, not a model feature.
    keys = [k for k in ts[0].keys() if k != 'ids']
    features = {k: [] for k in keys}
    for sample in ts:
        for k in keys:
            features[k].extend(sample[k])
    features = {k: np.stack(v).astype(np.int32) for k, v in features.items()}
    tgt = features.pop('y')
    return features, tgt
@register_trainer(task='lm', name='default')
class LanguageModelTrainerTf(Trainer):
    """A Trainer to use if not using eager mode

    The trainer can run in 2 modes: `dataset` and `feed_dict`. When the former, the graph is assumed to
    be connected by features attached to the input so the `feed_dict` will only be used to pass dropout information.
    When the latter, we will use the baseline DataFeed to read the object into the `feed_dict`

    This copy was missing `__init__`, `_num_toks` and `calc_metrics` (all
    still referenced from `train`/`test`) and carried a stray `@staticmethod`
    on `train`; they are restored here from the intact version of this class.
    """

    def __init__(self, model_params, **kwargs):
        """Wire the model, loss, optimizer and saver; optionally reload a checkpoint."""
        super().__init__()
        if type(model_params) is dict:
            self.model = create_model_for('lm', **model_params)
        else:
            self.model = model_params
        self.sess = self.model.sess
        self.loss = self.model.create_loss()
        self.test_loss = self.model.create_test_loss()
        self.global_step, self.train_op = optimizer(self.loss, colocate_gradients_with_ops=True, variables=self.model.trainable_variables, **kwargs)
        self.nsteps = kwargs.get('nsteps', 500)
        init = tf.compat.v1.global_variables_initializer()
        self.model.sess.run(init)
        saver = tf.compat.v1.train.Saver()
        self.model.set_saver(saver)
        checkpoint = kwargs.get('checkpoint')
        if checkpoint is not None:
            skip_blocks = kwargs.get('blocks_to_skip', ['OptimizeLoss'])
            reload_checkpoint(self.model.sess, checkpoint, skip_blocks)

    def checkpoint(self):
        """This method saves a checkpoint

        :return: None
        """
        checkpoint_dir = '{}-{}'.format("./tf-lm", os.getpid())
        self.model.saver.save(self.sess,
                              os.path.join(checkpoint_dir, 'lm'),
                              global_step=self.global_step,
                              write_meta_graph=False)

    def recover_last_checkpoint(self):
        """Recover the last saved checkpoint

        :return: None
        """
        checkpoint_dir = '{}-{}'.format("./tf-lm", os.getpid())
        latest = tf.train.latest_checkpoint(checkpoint_dir)
        self.model.saver.restore(self.model.sess, latest)

    @staticmethod
    def _num_toks(batch):
        # Token count of a batch = number of elements in the label tensor.
        return np.prod(batch['y'].shape)

    def train(self, ts, reporting_fns, dataset=True):
        """Train by looping over the steps

        For a `tf.dataset`-backed `fit_func`, we are using the previously wired `dataset`s
        in the model (and `dataset` is `True`). For `feed_dict`, we convert the ts samples
        to `feed_dict`s and hand them in one-by-one

        :param ts: The training set
        :param reporting_fns: A list of reporting hooks
        :param dataset: (`bool`) Are we using `tf.dataset`s
        :return: Metrics
        """
        epoch_loss = 0.0
        epoch_toks = 0
        if self.model.requires_state:
            state = self.model.sess.run(self.model.initial_state, self.model.make_input(ts[0], True))
        fetches = {
            "loss": self.loss,
            "train_op": self.train_op,
            "global_step": self.global_step
        }
        if self.model.requires_state:
            fetches["final_state"] = self.model.final_state
        start = time.time()
        self.nstep_start = start
        for batch_dict in ts:
            if dataset:
                feed_dict = {TRAIN_FLAG(): 1}
            else:
                feed_dict = self.model.make_input(batch_dict, True)
            # NOTE(review): this sess.run also executes train_op, so together
            # with the fetches run below the train op fires twice per batch,
            # and this first run ignores the recurrent state feeds -- confirm
            # this is intentional.
            _, global_step, lossv = self.sess.run([self.train_op, self.global_step, self.loss], feed_dict=feed_dict)
            # In Keras LSTM, the order is h first, c second, its the opposite in TF 1, however I dont think it
            # ends up mattering here
            if self.model.requires_state:
                for i, (s1, s2) in enumerate(self.model.initial_state):
                    feed_dict[s1] = state[i][0] #.c # 0
                    feed_dict[s2] = state[i][1] #.h # 1
            vals = self.model.sess.run(fetches, feed_dict)
            loss = vals["loss"]
            if self.model.requires_state:
                state = vals["final_state"]
            global_step = vals["global_step"]
            toks = self._num_toks(batch_dict)
            report_loss = loss * toks
            epoch_loss += report_loss
            epoch_toks += toks
            self.nstep_agg += report_loss
            self.nstep_div += toks
            if (global_step + 1) % self.nsteps == 0:
                metrics = self.calc_metrics(self.nstep_agg, self.nstep_div)
                self.report(
                    global_step + 1, metrics, self.nstep_start,
                    'Train', 'STEP', reporting_fns, self.nsteps
                )
                self.reset_nstep()
        metrics = self.calc_metrics(epoch_loss, epoch_toks)
        self.train_epochs += 1
        self.report(
            self.train_epochs, metrics, start,
            'Train', 'EPOCH', reporting_fns
        )
        return metrics

    def calc_metrics(self, agg, norm):
        # Adds perplexity on top of the base avg_loss metric.
        metrics = super().calc_metrics(agg, norm)
        metrics['perplexity'] = np.exp(metrics['avg_loss'])
        return metrics

    def test(self, vs, reporting_fns, phase, dataset=True):
        """Run an epoch of testing over the dataset

        If we are using a `tf.dataset`-based `fit_func`, we will just
        cycle the number of steps and let the `dataset` yield new batches.

        If we are using `feed_dict`s, we convert each batch from the `DataFeed`
        and pass that into TF as the `feed_dict`

        :param vs: A validation set
        :param reporting_fns: Reporting hooks
        :param phase: The phase of evaluation (`Test`, `Valid`)
        :param dataset: (`bool`) Are we using `tf.dataset`s
        :return: Metrics
        """
        total_loss = 0.0
        total_toks = 0
        epochs = 0
        if phase == 'Valid':
            self.valid_epochs += 1
            epochs = self.valid_epochs
        if self.model.requires_state:
            state = self.model.sess.run(self.model.initial_state, self.model.make_input(vs[0], False))
        fetches = {
            "loss": self.test_loss,
        }
        if self.model.requires_state:
            fetches["final_state"] = self.model.final_state
        start = time.time()
        for batch_dict in vs:
            feed_dict = {}
            if not dataset:
                feed_dict = self.model.make_input(batch_dict, False)
            # In Keras LSTM, the order is h first, c second, its the opposite in TF 1, however I dont think it
            # ends up mattering here
            if self.model.requires_state:
                for i, (s1, s2) in enumerate(self.model.initial_state):
                    feed_dict[s1] = state[i][0] # .c # 0
                    feed_dict[s2] = state[i][1] # .h # 1
            vals = self.model.sess.run(fetches, feed_dict)
            loss = vals["loss"]
            toks = self._num_toks(batch_dict)
            if self.model.requires_state:
                state = vals["final_state"]
            total_loss += loss * toks
            total_toks += toks
        metrics = self.calc_metrics(total_loss, total_toks)
        self.report(
            epochs, metrics, start,
            phase, 'EPOCH', reporting_fns
        )
        return metrics
| 36.636029 | 148 | 0.607627 | import os
import time
import numpy as np
import tensorflow as tf
from eight_mile.tf.layers import reload_checkpoint
from eight_mile.tf.optz import optimizer
from baseline.tf.tfy import TRAIN_FLAG, SET_TRAIN_FLAG
from baseline.train import Trainer, register_trainer
from baseline.model import create_model_for
from collections import OrderedDict
# Number of batches to prefetch if using tf.datasets
NUM_PREFETCH = 2
# The shuffle buffer
SHUF_BUF_SZ = 5000
_EVENT_FILE_GLOB_PATTERN = 'events.out.tfevents.*'
def _summaries(eval_dir):
    """Yields `tensorflow.Event` protos from event files in the eval dir.

    Args:
        eval_dir: Directory containing summary files with eval metrics.

    Yields:
        `tensorflow.Event` object read from the event files.
    """
    # A missing eval_dir is not an error: the generator simply yields nothing.
    if tf.gfile.Exists(eval_dir):
        # Glob for TF event files (see _EVENT_FILE_GLOB_PATTERN above).
        for event_file in tf.gfile.Glob(os.path.join(eval_dir, _EVENT_FILE_GLOB_PATTERN)):
            for event in tf.train.summary_iterator(event_file):
                yield event
def read_eval_metrics(eval_dir):
    """Helper to read eval metrics from eval summary files.

    Args:
        eval_dir: Directory containing summary files with eval metrics.

    Returns:
        A `dict` with global steps mapping to `dict` of metric names and
        values, ordered by increasing step (an `OrderedDict`).
    """
    eval_metrics_dict = {}
    for event in _summaries(eval_dir):
        if not event.HasField('summary'):
            continue
        metrics = {}
        # Only scalar ('simple_value') summaries are kept.
        for value in event.summary.value:
            if value.HasField('simple_value'):
                metrics[value.tag] = value.simple_value
        if metrics:
            eval_metrics_dict[event.step] = metrics
    return OrderedDict(sorted(eval_metrics_dict.items(), key=lambda t: t[0]))
def to_tensors(ts):
    """Convert a data feed into a tuple of `features` (`dict`) and `y` values

    This method is required to produce `tf.dataset`s from the input data feed.
    The special `ids` key is dropped; the `y` field is popped out of the
    features and returned separately as the labels.  (The previous docstring
    documented a `lengths_key` parameter and `_lengths` handling that this
    function does not implement.)

    :param ts: The data feed to convert -- a list of dicts whose values are
        sequences of per-sample arrays
    :return: A `tuple` of `features` (each value a stacked `int32` array)
        and `y` (labels)
    """
    # This is kind of a hack: 'ids' is bookkeeping, not a model feature.
    keys = [k for k in ts[0].keys() if k != 'ids']
    features = {k: [] for k in keys}
    for sample in ts:
        for k in keys:
            features[k].extend(sample[k])
    features = {k: np.stack(v).astype(np.int32) for k, v in features.items()}
    tgt = features.pop('y')
    return features, tgt
@register_trainer(task='lm', name='default')
class LanguageModelTrainerTf(Trainer):
    """A Trainer to use if not using eager mode

    The trainer can run in 2 modes: `dataset` and `feed_dict`. When the former, the graph is assumed to
    be connected by features attached to the input so the `feed_dict` will only be used to pass dropout information.
    When the latter, we will use the baseline DataFeed to read the object into the `feed_dict`
    """
    def __init__(self, model_params, **kwargs):
        # model_params may be a dict of params (a model is created) or an
        # already-built model instance.
        super().__init__()
        if type(model_params) is dict:
            self.model = create_model_for('lm', **model_params)
        else:
            self.model = model_params
        self.sess = self.model.sess
        self.loss = self.model.create_loss()
        self.test_loss = self.model.create_test_loss()
        self.global_step, self.train_op = optimizer(self.loss, colocate_gradients_with_ops=True, variables=self.model.trainable_variables, **kwargs)
        self.nsteps = kwargs.get('nsteps', 500)
        init = tf.compat.v1.global_variables_initializer()
        self.model.sess.run(init)
        saver = tf.compat.v1.train.Saver()
        self.model.set_saver(saver)
        checkpoint = kwargs.get('checkpoint')
        if checkpoint is not None:
            # Optionally restore weights, skipping optimizer variables.
            skip_blocks = kwargs.get('blocks_to_skip', ['OptimizeLoss'])
            reload_checkpoint(self.model.sess, checkpoint, skip_blocks)
    def checkpoint(self):
        """This method saves a checkpoint

        :return: None
        """
        checkpoint_dir = '{}-{}'.format("./tf-lm", os.getpid())
        self.model.saver.save(self.sess,
                              os.path.join(checkpoint_dir, 'lm'),
                              global_step=self.global_step,
                              write_meta_graph=False)
    def recover_last_checkpoint(self):
        """Recover the last saved checkpoint

        :return: None
        """
        checkpoint_dir = '{}-{}'.format("./tf-lm", os.getpid())
        latest = tf.train.latest_checkpoint(checkpoint_dir)
        self.model.saver.restore(self.model.sess, latest)
    @staticmethod
    def _num_toks(batch):
        # Token count of a batch = number of elements in the label tensor.
        return np.prod(batch['y'].shape)
    def train(self, ts, reporting_fns, dataset=True):
        """Train by looping over the steps

        For a `tf.dataset`-backed `fit_func`, we are using the previously wired `dataset`s
        in the model (and `dataset` is `True`). For `feed_dict`, we convert the ts samples
        to `feed_dict`s and hand them in one-by-one

        :param ts: The training set
        :param reporting_fns: A list of reporting hooks
        :param dataset: (`bool`) Are we using `tf.dataset`s
        :return: Metrics
        """
        epoch_loss = 0.0
        epoch_toks = 0
        if self.model.requires_state:
            state = self.model.sess.run(self.model.initial_state, self.model.make_input(ts[0], True))
        fetches = {
            "loss": self.loss,
            "train_op": self.train_op,
            "global_step": self.global_step
        }
        if self.model.requires_state:
            fetches["final_state"] = self.model.final_state
        start = time.time()
        self.nstep_start = start
        for batch_dict in ts:
            if dataset:
                feed_dict = {TRAIN_FLAG(): 1}
            else:
                feed_dict = self.model.make_input(batch_dict, True)
            # NOTE(review): this sess.run also executes train_op, so together
            # with the fetches run below the train op fires twice per batch,
            # and this first run ignores the recurrent state feeds -- confirm
            # this is intentional.
            _, global_step, lossv = self.sess.run([self.train_op, self.global_step, self.loss], feed_dict=feed_dict)
            # In Keras LSTM, the order is h first, c second, its the opposite in TF 1, however I dont think it
            # ends up mattering here
            if self.model.requires_state:
                for i, (s1, s2) in enumerate(self.model.initial_state):
                    feed_dict[s1] = state[i][0] #.c # 0
                    feed_dict[s2] = state[i][1] #.h # 1
            vals = self.model.sess.run(fetches, feed_dict)
            loss = vals["loss"]
            if self.model.requires_state:
                state = vals["final_state"]
            global_step = vals["global_step"]
            toks = self._num_toks(batch_dict)
            report_loss = loss * toks
            epoch_loss += report_loss
            epoch_toks += toks
            self.nstep_agg += report_loss
            self.nstep_div += toks
            if (global_step + 1) % self.nsteps == 0:
                # Periodic step-level reporting.
                metrics = self.calc_metrics(self.nstep_agg, self.nstep_div)
                self.report(
                    global_step + 1, metrics, self.nstep_start,
                    'Train', 'STEP', reporting_fns, self.nsteps
                )
                self.reset_nstep()
        metrics = self.calc_metrics(epoch_loss, epoch_toks)
        self.train_epochs += 1
        self.report(
            self.train_epochs, metrics, start,
            'Train', 'EPOCH', reporting_fns
        )
        return metrics
    def calc_metrics(self, agg, norm):
        # Adds perplexity on top of the base avg_loss metric.
        metrics = super().calc_metrics(agg, norm)
        metrics['perplexity'] = np.exp(metrics['avg_loss'])
        return metrics
    def test(self, vs, reporting_fns, phase, dataset=True):
        """Run an epoch of testing over the dataset

        If we are using a `tf.dataset`-based `fit_func`, we will just
        cycle the number of steps and let the `dataset` yield new batches.

        If we are using `feed_dict`s, we convert each batch from the `DataFeed`
        and pass that into TF as the `feed_dict`

        :param vs: A validation set
        :param reporting_fns: Reporting hooks
        :param phase: The phase of evaluation (`Test`, `Valid`)
        :param dataset: (`bool`) Are we using `tf.dataset`s
        :return: Metrics
        """
        total_loss = 0.0
        total_toks = 0
        epochs = 0
        if phase == 'Valid':
            self.valid_epochs += 1
            epochs = self.valid_epochs
        if self.model.requires_state:
            state = self.model.sess.run(self.model.initial_state, self.model.make_input(vs[0], False))
        fetches = {
            "loss": self.test_loss,
        }
        if self.model.requires_state:
            fetches["final_state"] = self.model.final_state
        start = time.time()
        for batch_dict in vs:
            feed_dict = {}
            if not dataset:
                feed_dict = self.model.make_input(batch_dict, False)
            # In Keras LSTM, the order is h first, c second, its the opposite in TF 1, however I dont think it
            # ends up mattering here
            if self.model.requires_state:
                for i, (s1, s2) in enumerate(self.model.initial_state):
                    feed_dict[s1] = state[i][0] # .c # 0
                    feed_dict[s2] = state[i][1] # .h # 1
            vals = self.model.sess.run(fetches, feed_dict)
            loss = vals["loss"]
            toks = self._num_toks(batch_dict)
            if self.model.requires_state:
                state = vals["final_state"]
            total_loss += loss * toks
            total_toks += toks
        metrics = self.calc_metrics(total_loss, total_toks)
        self.report(
            epochs, metrics, start,
            phase, 'EPOCH', reporting_fns
        )
        return metrics
69d92cd0b0c6e457b212537e440d34cf88d7ff61 | 464 | py | Python | sample/subscribe.py | aldnav/mcutie | 472a7433a4eaeff40770f3e4b006824b2b9e466e | [
"MIT"
] | null | null | null | sample/subscribe.py | aldnav/mcutie | 472a7433a4eaeff40770f3e4b006824b2b9e466e | [
"MIT"
] | null | null | null | sample/subscribe.py | aldnav/mcutie | 472a7433a4eaeff40770f3e4b006824b2b9e466e | [
"MIT"
] | null | null | null | import paho.mqtt.client as mqtt
if __name__ == '__main__':
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect('localhost', 1883, 60)
client.loop_forever()
| 23.2 | 65 | 0.681034 | import paho.mqtt.client as mqtt
def on_connect(client, userdata, flags, rc):
print('Connected. Result code: ', str(rc))
client.subscribe('mcutie/init')
def on_message(client, userdata, msg):
print('[PONG] Topic: ', msg.topic, '\nMessage:', msg.payload)
if __name__ == '__main__':
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect('localhost', 1883, 60)
client.loop_forever()
| 189 | 0 | 46 |
0f1281bdaa816f07ebc3c6f5aae1d47085a659e8 | 3,534 | py | Python | BFTServer.py | Jackustc/send-back | 6d243e554d72058007125555762be39ce8d1792c | [
"Apache-2.0"
] | null | null | null | BFTServer.py | Jackustc/send-back | 6d243e554d72058007125555762be39ce8d1792c | [
"Apache-2.0"
] | null | null | null | BFTServer.py | Jackustc/send-back | 6d243e554d72058007125555762be39ce8d1792c | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import parsingconfig
from jpype import *
import sys
import socket
import socks
from io import BytesIO
import struct
#Since we deliver message from java module to python module,
#I think it is ok to just use this socket function to directly
#deliver and process the message
#Need to figure out whether it is true.
if __name__ == '__main__':
    # Usage: python BFTServer.py <ReplicaID>
    if len(sys.argv[1:])<1:
        print "Use: python BFTServer.py <ReplicaID>"
        exit()
    replicaID = sys.argv[1]
    (n,f,host,baseport) = parsingconfig.readconfig() #Read in the config number of replicas, failures, host, and port number.
    # NOTE(review): connect_to_channel and listen_to_channel are not defined
    # in this copy of the module -- confirm they are provided elsewhere.
    sock = connect_to_channel(host,baseport,replicaID) #The parameters to connect_to_channel are (hostname,port,id)
    #original classpath:
    #classpath = "lib/commons-codec-1.5.jar:lib/core-0.1.4.jar:lib/netty-all-4.1.9.Final.jar:lib/slf4j-api-1.5.8.jar:lib/slf4j-jdk14-1.5.8.jar:bft-smart/bin/BFT-SMaRt.jar"
    #James. Changed classpath, specifically the path to BFT-SMaRt.jar. Commented out the original
    classpath = "lib/commons-codec-1.5.jar:lib/core-0.1.4.jar:lib/netty-all-4.1.9.Final.jar:lib/slf4j-api-1.5.8.jar:lib/slf4j-jdk14-1.5.8.jar:bin/BFT-SMaRt.jar"
    # Start the embedded JVM (jpype) so the BFT-SMaRt Java replica can run.
    startJVM(getDefaultJVMPath(),"-Djava.class.path=%s"%classpath)
    KVServerClass = JPackage("bftsmart.demo.keyvalue") #Create instance of KVServer class from the demo/keyvalue/KVServer.java class
    KVServerClass.KVServer.passArgs((replicaID,"1")) #James. TO DO: Change this call to include host and port number.
    listen_to_channel(sock,replicaID)
    # and you have to shutdown the VM at the end
    shutdownJVM()
| 37.595745 | 171 | 0.640351 | # coding: utf-8
import parsingconfig
from jpype import *
import sys
import socket
import socks
from io import BytesIO
import struct
#Since we deliver message from java module to python module,
#I think it is ok to just use this socket function to directly
#deliver and process the message
#Need to figure out whether it is true.
def connect_to_channel2(hostname, port, id):
    """Open an outgoing TCP connection to hostname, on port + id*10."""
    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    print('socket created')
    # Each replica talks on its own reply port, spaced 10 apart per replica id.
    target_port = int(port) + int(id) * 10
    client.connect((hostname, target_port))
    return client
def connect_to_channel(hostname, port, id):
    """Create a listening TCP server socket bound to hostname on port + id."""
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    print('socket created')
    listen_port = int(port) + int(id)
    # Bind to the configured hostname (was hard-coded to localhost originally).
    server.bind((hostname, listen_port))
    server.listen(1)
    return server
def listen_to_channel(sock,replicaID):
while 1:
conn,addr = sock.accept()
print "got a message..."
try:
buf = conn.recv(1024)
#print buf
tmp = BytesIO(buf)
sequence,cid,length = struct.unpack('>iii', tmp.read(12))
msg = tmp.read(length)
if msg=="Dummy Test Request":
print "good."
print "We have assigned sequence number ",sequence," for client ",cid, " and request ",msg
# send to client
# 设置ip和端口
host = socket.gethostname()
tag_conn = True
while tag_conn:
try:
mySocket = connect_to_channel2(host,3333,replicaID)
tag_conn = False
except: continue
print(11111)
mySocket.send('Hello World!'.encode())
#mySocket.close()
except:
print "may have got a not well-formatted message"
#TODO: Need to figure out why sometimes there are empty or not well-formatted messages
pass
if __name__ == '__main__':
    # Usage: python BFTServer.py <ReplicaID>
    if len(sys.argv[1:])<1:
        print "Use: python BFTServer.py <ReplicaID>"
        exit()
    replicaID = sys.argv[1]
    (n,f,host,baseport) = parsingconfig.readconfig() #Read in the config number of replicas, failures, host, and port number.
    sock = connect_to_channel(host,baseport,replicaID) #The parameters to connect_to_channel are (hostname,port,id)
    #original classpath:
    #classpath = "lib/commons-codec-1.5.jar:lib/core-0.1.4.jar:lib/netty-all-4.1.9.Final.jar:lib/slf4j-api-1.5.8.jar:lib/slf4j-jdk14-1.5.8.jar:bft-smart/bin/BFT-SMaRt.jar"
    #James. Changed classpath, specifically the path to BFT-SMaRt.jar. Commented out the original
    classpath = "lib/commons-codec-1.5.jar:lib/core-0.1.4.jar:lib/netty-all-4.1.9.Final.jar:lib/slf4j-api-1.5.8.jar:lib/slf4j-jdk14-1.5.8.jar:bin/BFT-SMaRt.jar"
    # Boot an embedded JVM (via jpype) so this process can drive the
    # BFT-SMaRt Java library directly.
    startJVM(getDefaultJVMPath(),"-Djava.class.path=%s"%classpath)
    KVServerClass = JPackage("bftsmart.demo.keyvalue") #Create instance of KVServer class from the demo/keyvalue/KVServer.java class
    KVServerClass.KVServer.passArgs((replicaID,"1")) #James. TO DO: Change this call to include host and port number.
    # Blocks forever serving messages; shutdownJVM only runs if the loop exits.
    listen_to_channel(sock,replicaID)
    # and you have to shutdown the VM at the end
    shutdownJVM()
| 1,832 | 0 | 68 |
86826bcb1cae1e53320f71bcde15dbef4233804b | 476 | py | Python | setup.py | maizy/flick-archive-extractor | da956b4966f89b07c30eb8d970fe0d21255acea8 | [
"Apache-2.0"
] | 4 | 2019-03-24T14:09:33.000Z | 2021-02-21T18:16:25.000Z | setup.py | maizy/flick-archive-extractor | da956b4966f89b07c30eb8d970fe0d21255acea8 | [
"Apache-2.0"
] | 4 | 2019-03-04T05:47:54.000Z | 2019-03-31T10:44:08.000Z | setup.py | maizy/flickr-archive-extractor | da956b4966f89b07c30eb8d970fe0d21255acea8 | [
"Apache-2.0"
] | null | null | null | from setuptools import setup
setup(
name='flickr-archive-extractor',
version='0.1.1',
install_requires=[],
tests_require=['nose>=1.3', 'pycodestyle'],
test_suite='nose.collector',
scripts=['flickr_archive_extractor.py'],
author='Nikita Kovaliov',
author_email='nikita@maizy.ru',
description='flickr archive extractor',
license='Apache License 2.0',
keywords='flickr',
url='https://github.com/maizy/flickr-archive-extractor',
)
| 28 | 60 | 0.684874 | from setuptools import setup
setup(
name='flickr-archive-extractor',
version='0.1.1',
install_requires=[],
tests_require=['nose>=1.3', 'pycodestyle'],
test_suite='nose.collector',
scripts=['flickr_archive_extractor.py'],
author='Nikita Kovaliov',
author_email='nikita@maizy.ru',
description='flickr archive extractor',
license='Apache License 2.0',
keywords='flickr',
url='https://github.com/maizy/flickr-archive-extractor',
)
| 0 | 0 | 0 |
61a98b492f9b3d93fec447bf972464cc0c8b252f | 2,833 | py | Python | src/wrap.py | rebryant/linear-domino-game | 543ee3289bc3f14d671dab4dc42d7bceedf1d1bd | [
"MIT"
] | null | null | null | src/wrap.py | rebryant/linear-domino-game | 543ee3289bc3f14d671dab4dc42d7bceedf1d1bd | [
"MIT"
] | null | null | null | src/wrap.py | rebryant/linear-domino-game | 543ee3289bc3f14d671dab4dc42d7bceedf1d1bd | [
"MIT"
] | null | null | null | #!/usr/bin/python
#####################################################################################
# Copyright (c) 2022 Marijn Heule, Randal E. Bryant, Carnegie Mellon University
# Last edit: March 23, 2022
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
########################################################################################
# Time the execution of a program. Force termination if that program exceeds a time limit
import sys
import subprocess
import datetime
import os.path
name = sys.argv[0]
arglist = sys.argv[1:]
run(name, arglist)
| 37.276316 | 100 | 0.642076 | #!/usr/bin/python
#####################################################################################
# Copyright (c) 2022 Marijn Heule, Randal E. Bryant, Carnegie Mellon University
# Last edit: March 23, 2022
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
########################################################################################
# Time the execution of a program. Force termination if that program exceeds a time limit
import sys
import subprocess
import datetime
import os.path
def usage(name):
    """Print command-line help for this wrapper script."""
    for line in ("Usage: %s TLIM PATH Args ..." % name,
                 "    TLIM: Runtime limit (in seconds)",
                 "    PATH: Path of executable program",
                 "    Args ... Arguments to pass to invoked program"):
        print(line)
def runprog(timelimit, path, arglist):
    """Run `path` with arguments `arglist`, enforcing a wall-clock limit.

    Prints the child's exit code and elapsed time on success.  If the limit
    of `timelimit` seconds is exceeded, the child is killed and this whole
    script exits with status 1.
    """
    alist = [path] + arglist
    start = datetime.datetime.now()
    p = subprocess.Popen(alist)
    try:
        p.wait(timeout=timelimit)
    except subprocess.TimeoutExpired:
        p.kill()
        p.wait()  # fix: reap the killed child so it does not linger as a zombie
        print("Execution of %s FAILED. Timed out after %d seconds" % (path, timelimit))
        sys.exit(1)
    delta = datetime.datetime.now() - start
    # fix: `delta.seconds + 1e-6 * delta.microseconds` ignored delta.days and
    # under-reported any run lasting a day or more; total_seconds() already
    # includes the fractional part.
    secs = delta.total_seconds()
    print("Program %s completed with exit code %d" % (path, p.returncode))
    print("Total time: %.3f seconds" % secs)
    return
def run(name, arglist):
    """Validate command-line arguments and dispatch to runprog.

    Expected arglist layout: [TLIM, PATH, prog-args...].  On any validation
    failure this prints a diagnostic plus the usage text and returns without
    running anything.
    """
    if len(arglist) < 2:
        usage(name)
        return
    try:
        timelimit = float(arglist[0])
    except (ValueError, TypeError):
        # fix: was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; only conversion failures belong here.
        print("Invalid time limit '%s'" % arglist[0])
        usage(name)
        return
    path = arglist[1]
    if not os.path.exists(path):
        print("Invalid path '%s'" % path)
        usage(name)
        return
    arglist = arglist[2:]
    runprog(timelimit, path, arglist)
# Script entry point: pass this script's name plus all CLI arguments to run().
name = sys.argv[0]
arglist = sys.argv[1:]
run(name, arglist)
| 1,168 | 0 | 73 |
7da25ba8a7a2c09dd2b88ed9b1122198233f010e | 1,886 | py | Python | flow/envs/bayesian_1_env.py | eugenevinitsky/bayesian_reasoning_traffic | de3c14f03fed9cab913bb692877851320a3b6843 | [
"MIT"
] | 2 | 2020-12-03T21:13:39.000Z | 2022-03-13T09:12:43.000Z | flow/envs/bayesian_1_env.py | eugenevinitsky/bayesian_reasoning_traffic | de3c14f03fed9cab913bb692877851320a3b6843 | [
"MIT"
] | null | null | null | flow/envs/bayesian_1_env.py | eugenevinitsky/bayesian_reasoning_traffic | de3c14f03fed9cab913bb692877851320a3b6843 | [
"MIT"
] | 1 | 2021-02-05T16:51:34.000Z | 2021-02-05T16:51:34.000Z | """Environment testing non_RL scenario 1 of the bayesian envs."""
import numpy as np
from gym.spaces.box import Box
from flow.core.rewards import desired_velocity
from flow.envs.ring.accel import AccelEnv
from flow.core.kernel import Kernel
ADDITIONAL_ENV_PARAMS = {
# maximum acceleration for autonomous vehicles, in m/s^2
'max_accel': 3,
# maximum deceleration for autonomous vehicles, in m/s^2
'max_decel': 3,
# desired velocity for all vehicles in the network, in m/s
'target_velocity': 10,
# specifies whether vehicles are to be sorted by position during a
# simulation step. If set to True, the environment parameter
# self.sorted_ids will return a list of all vehicles sorted in accordance
# with the environment
'sort_vehicles': False
}
class Bayesian1Env(AccelEnv):
"""Specify the starting positions and routes of 3 cars and 1 pedestrian"""
| 42.863636 | 123 | 0.695652 | """Environment testing non_RL scenario 1 of the bayesian envs."""
import numpy as np
from gym.spaces.box import Box
from flow.core.rewards import desired_velocity
from flow.envs.ring.accel import AccelEnv
from flow.core.kernel import Kernel
ADDITIONAL_ENV_PARAMS = {
# maximum acceleration for autonomous vehicles, in m/s^2
'max_accel': 3,
# maximum deceleration for autonomous vehicles, in m/s^2
'max_decel': 3,
# desired velocity for all vehicles in the network, in m/s
'target_velocity': 10,
# specifies whether vehicles are to be sorted by position during a
# simulation step. If set to True, the environment parameter
# self.sorted_ids will return a list of all vehicles sorted in accordance
# with the environment
'sort_vehicles': False
}
class Bayesian1Env(AccelEnv):
"""Specify the starting positions and routes of 3 cars and 1 pedestrian"""
def __init__(self, env_params, sim_params, network, simulator='traci'):
super().__init__(env_params, sim_params, network, simulator)
def setup_initial_state(self):
# generate starting position for the 3 vehicles in the network
start_pos, start_lanes = self.k.network.generate_starting_positions(
initial_config=self.initial_config,
num_vehicles=len(self.initial_ids))
# save the initial state. This is used in the _reset function
for i, veh_id in enumerate(self.initial_ids):
type_id = self.k.vehicle.get_type(veh_id)
pos = start_pos[i][1]
lane = start_lanes[i]
speed = self.k.vehicle.get_initial_speed(veh_id)
edge = start_pos[i][0]
depart_time = self.k.vehicle.get_depart_time(veh_id) # don't think this is being used..., yeah, it's not lol
self.initial_state[veh_id] = (type_id, edge, lane, pos, speed, depart_time)
| 931 | 0 | 53 |
8a7e1f1b10b99c9a1e434b86def1949ef288c9b6 | 46 | py | Python | lib/config.py | OrthoDex/railway-traffic-analysis | cf113b70320fa9787c10b753327ec3b7c1fa3dc9 | [
"MIT"
] | null | null | null | lib/config.py | OrthoDex/railway-traffic-analysis | cf113b70320fa9787c10b753327ec3b7c1fa3dc9 | [
"MIT"
] | null | null | null | lib/config.py | OrthoDex/railway-traffic-analysis | cf113b70320fa9787c10b753327ec3b7c1fa3dc9 | [
"MIT"
] | 1 | 2020-07-29T21:35:12.000Z | 2020-07-29T21:35:12.000Z | import pandas as pd
di_matrix = pd.DataFrame() | 23 | 26 | 0.782609 | import pandas as pd
di_matrix = pd.DataFrame() | 0 | 0 | 0 |
e3a92038c5f2e089481bd64f9820469a3905d57c | 1,439 | py | Python | conformalmapping/szmap.py | TorbenFricke/cmtoolkit | f1bf1ec191fd9b20e6edcd3385c8b9fee1d638ca | [
"BSD-3-Clause"
] | 16 | 2017-10-14T17:13:48.000Z | 2022-01-11T22:19:45.000Z | conformalmapping/szmap.py | TorbenFricke/cmtoolkit | f1bf1ec191fd9b20e6edcd3385c8b9fee1d638ca | [
"BSD-3-Clause"
] | 11 | 2015-05-11T08:02:42.000Z | 2020-05-21T16:13:45.000Z | conformalmapping/szmap.py | TorbenFricke/cmtoolkit | f1bf1ec191fd9b20e6edcd3385c8b9fee1d638ca | [
"BSD-3-Clause"
] | 3 | 2019-12-31T23:07:29.000Z | 2021-03-08T02:05:38.000Z | import numpy as np
from .conformalmap import ConformalMap
from .closedcurve import ClosedCurve
from .unitdisk import unitdisk
from .region import Region
from .szego import Szego, SzegoOpts
class SzMap(ConformalMap):
"""SzMap represents a Riemann map via the Szego kernel.
"""
def __init__(self, range=None, conformalCenter=0, **kwargs):
"""Create a new conformal map based on the Szego kernel
Parameters
----------
range : Region or ClosedCurve
an object that represents the range of the map
conformalCenter : complex
the conformal center (forward to the szego kernel)
"""
if isinstance(range, ClosedCurve):
range = Region(range)
if not range.issimplyconnected():
raise Exception('Region must be simply connected')
kwargs['range'] = range
kwargs['domain'] = unitdisk()
super(SzMap, self).__init__(**kwargs)
boundary = self.range.outer
# question, how to alter these?
szargs = SzegoOpts()
S = Szego(boundary, conformalCenter, szargs)
nF = szargs.numFourierPts
t = S.invtheta(2*np.pi*np.arange(nF)/float(nF))
c = np.fft.fft(boundary(t))/float(nF)
c = c[::-1]
self._kernel = S
self._coefficients = c
self._opts = szargs
| 29.367347 | 64 | 0.62057 | import numpy as np
from .conformalmap import ConformalMap
from .closedcurve import ClosedCurve
from .unitdisk import unitdisk
from .region import Region
from .szego import Szego, SzegoOpts
class SzMap(ConformalMap):
    """SzMap represents a Riemann map via the Szego kernel.
    """
    def __init__(self, range=None, conformalCenter=0, **kwargs):
        """Create a new conformal map based on the Szego kernel
        Parameters
        ----------
        range : Region or ClosedCurve
            an object that represents the range of the map
        conformalCenter : complex
            the conformal center (forward to the szego kernel)
        """
        # A bare closed curve is promoted to the region it encloses.
        if isinstance(range, ClosedCurve):
            range = Region(range)
        if not range.issimplyconnected():
            raise Exception('Region must be simply connected')
        # The map goes from the unit disk (domain) onto the given region.
        kwargs['range'] = range
        kwargs['domain'] = unitdisk()
        super(SzMap, self).__init__(**kwargs)
        boundary = self.range.outer
        # question, how to alter these?
        szargs = SzegoOpts()
        S = Szego(boundary, conformalCenter, szargs)
        nF = szargs.numFourierPts
        # Sample the boundary at nF parameter values; invtheta presumably maps
        # equally spaced angles back to curve parameters -- confirm against
        # Szego.invtheta.  The FFT of those samples (scaled by 1/nF) gives the
        # series coefficients of the map.
        t = S.invtheta(2*np.pi*np.arange(nF)/float(nF))
        c = np.fft.fft(boundary(t))/float(nF)
        # Reverse so the highest-order coefficient comes first, the ordering
        # np.polyval expects (see applyMap).
        c = c[::-1]
        self._kernel = S
        self._coefficients = c
        self._opts = szargs
    def applyMap(self, z):
        # Evaluate the truncated series at z; polyval takes highest power first.
        return np.polyval(self._coefficients, z)
| 50 | 0 | 27 |
98e83de2c9fbbf99c89027e53d12123d35a61f9c | 940 | py | Python | setup.py | bryankim96/deep-learning-gamma | cac4f2d90b6536eedf95c3a07ea73d4c7a69c5b7 | [
"BSD-3-Clause"
] | 38 | 2018-08-06T13:25:56.000Z | 2022-02-03T16:10:42.000Z | setup.py | bryankim96/deep-learning-gamma | cac4f2d90b6536eedf95c3a07ea73d4c7a69c5b7 | [
"BSD-3-Clause"
] | 88 | 2018-07-13T18:57:40.000Z | 2022-03-29T18:21:54.000Z | setup.py | bryankim96/deep-learning-gamma | cac4f2d90b6536eedf95c3a07ea73d4c7a69c5b7 | [
"BSD-3-Clause"
] | 45 | 2018-09-05T08:41:52.000Z | 2022-03-13T18:07:42.000Z | from setuptools import setup, find_packages
from os import path
from ctlearn.version import *
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(name='ctlearn',
version=get_version_pypi(),
author="CTLearn Team",
author_email="d.nieto@ucm.es",
description='Deep learning for analysis and classification of image data for Imaging Atmospheric Cherenkov Telescopes, especially the Cherenkov Telescope Array (CTA).',
long_description=long_description,
long_description_content_type='text/x-rst',
url='https://github.com/ctlearn-project/ctlearn',
license='BSD-3-Clause',
packages=['ctlearn'],
entry_points = {
'console_scripts': ['ctlearn=ctlearn.run_model:main'],
},
include_package_data=True,
dependencies=[],
dependency_links=[],
zip_safe=False)
| 34.814815 | 174 | 0.696809 | from setuptools import setup, find_packages
from os import path
from ctlearn.version import *
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(name='ctlearn',
version=get_version_pypi(),
author="CTLearn Team",
author_email="d.nieto@ucm.es",
description='Deep learning for analysis and classification of image data for Imaging Atmospheric Cherenkov Telescopes, especially the Cherenkov Telescope Array (CTA).',
long_description=long_description,
long_description_content_type='text/x-rst',
url='https://github.com/ctlearn-project/ctlearn',
license='BSD-3-Clause',
packages=['ctlearn'],
entry_points = {
'console_scripts': ['ctlearn=ctlearn.run_model:main'],
},
include_package_data=True,
dependencies=[],
dependency_links=[],
zip_safe=False)
| 0 | 0 | 0 |
cca09c07f76cb3d6524e8723ecb46d24ebebb62c | 443 | py | Python | 2_Python Advanced/7_Gui/19_messageDialogs.py | Arunken/PythonScripts | 702d0a3af7a9be3311f9da0afc5285d453f15484 | [
"Apache-2.0"
] | null | null | null | 2_Python Advanced/7_Gui/19_messageDialogs.py | Arunken/PythonScripts | 702d0a3af7a9be3311f9da0afc5285d453f15484 | [
"Apache-2.0"
] | 1 | 2021-06-02T00:58:47.000Z | 2021-06-02T00:58:47.000Z | 2_Python Advanced/7_Gui/19_messageDialogs.py | Arunken/PythonScripts | 702d0a3af7a9be3311f9da0afc5285d453f15484 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu May 31 23:54:45 2018
@author: SilverDoe
"""
from tkinter import messagebox
res = messagebox.askquestion('Message title','Message content')
res = messagebox.askyesno('Message title','Message content')
res = messagebox.askyesnocancel('Message title','Message content')
res = messagebox.askokcancel('Message title','Message content')
res = messagebox.askretrycancel('Message title','Message content') | 31.642857 | 66 | 0.749436 | # -*- coding: utf-8 -*-
"""
Created on Thu May 31 23:54:45 2018
@author: SilverDoe
"""
from tkinter import messagebox
res = messagebox.askquestion('Message title','Message content')
res = messagebox.askyesno('Message title','Message content')
res = messagebox.askyesnocancel('Message title','Message content')
res = messagebox.askokcancel('Message title','Message content')
res = messagebox.askretrycancel('Message title','Message content') | 0 | 0 | 0 |
2dfe04f1f5b9b5391e38f6ae6df6ac80facec113 | 7,908 | py | Python | diary/tests.py | pyprism/Diary | b14718c8b8fac6d900075b34a7190a91851159e8 | [
"MIT"
] | 1 | 2015-03-05T08:55:01.000Z | 2015-03-05T08:55:01.000Z | diary/tests.py | pyprism/Diary | b14718c8b8fac6d900075b34a7190a91851159e8 | [
"MIT"
] | 174 | 2016-04-14T13:53:06.000Z | 2022-02-10T07:17:56.000Z | diary/tests.py | pyprism/Diary | b14718c8b8fac6d900075b34a7190a91851159e8 | [
"MIT"
] | 2 | 2017-09-20T11:54:11.000Z | 2019-10-02T19:50:33.000Z | from django.test import TestCase, TransactionTestCase
from .models import Notes, Diary
from django.utils import timezone
from rest_framework.test import APIRequestFactory, APIClient
from django.contrib.auth.models import User
from freezegun import freeze_time
from rest_framework_jwt.views import obtain_jwt_token
import json
class ModelTest(TransactionTestCase):
"""
Test all models
"""
current_date_time = timezone.now()
reset_sequences = True
# class AuthTest(TestCase):
# """
# Test JWT auth (now I am thinking , do I really need this test ? :/ )
# """
# current_date_time = timezone.now()
#
# def setUp(self):
# User.objects.create_user('hiren', 'a@b.com', 'password')
# tag = Tag.objects.create(name="Test tag")
# Notes.objects.create(tag=tag, content="test content ", date=self.current_date_time)
# Diary.objects.create(tag=tag, title="Hello title", content="test content", date=self.current_date_time)
#
# self.factory = APIRequestFactory()
#
# def test_jwt_auth(self):
# request = self.factory.post('/api-token-auth/', {'username': 'hiren', 'password': 'password'})
# response = obtain_jwt_token(request)
# response.render()
# self.assertEqual(response.status_code, 200)
class NotesViewTest(TransactionTestCase):
"""
Test Notes View
"""
reset_sequences = True
current_date_time = timezone.now()
# current_date_time = "2017-01-14T00:00:00Z"
@freeze_time("2012-01-14")
@freeze_time("2012-01-14")
# @freeze_time("2012-01-14")
# def test_new_note_creation_works(self):
# response = self.client.post('/api/notes/', data={'tag': ["xyz"], 'iv': 'random', 'content': "New content",
# 'salt': 'sa', 'date': "2012-01-14T00:00:00"}, format="json")
# print(response.json())
# self.assertEqual(response.json(), {'id': 2, 'tag': [self.tag], 'iv': 'random', 'content': "New content",
# 'date': '2012-01-14T00:00:00',
# 'created_at': '2012-01-14T00:00:00',
# 'updated_at': '2012-01-14T00:00:00'})
#
# def test_deleting_note_works(self):
# #self.client.post('/api/notes/', data={'tag': [self.tag], 'iv': 'random', 'content': "New content !",
# # 'date': self.current_date_time})
# response = self.client.delete('/api/notes/1/')
# self.assertEqual(response.status_code, 204)
#
#
# class DiaryViewTest(TransactionTestCase):
# """
# Test Diary View
# """
# reset_sequences = True
# current_date_time = timezone.now()
#
# def setUp(self):
# self.client = APIClient()
# self.user = User.objects.create_user('hiren', 'a@b.com', 'password')
# self.client.force_authenticate(user=self.user)
# self.tag = "Test tag"
# Diary.objects.create(tag=self.tag, title="Hello title", content="test content", date=self.current_date_time)
#
# def test_login_works(self):
# response = self.client.get('/api/diary/')
# self.assertEqual(response.status_code, 200)
#
# self.client.logout()
# response = self.client.get('/api/diary/')
# self.assertEqual(response.status_code, 403)
#
# def test_return_correct_diary_object(self):
# response = self.client.get('/api/diary/1/')
# self.assertEqual(response.json(), {'content': 'test content', 'id': 1,
# 'tag': 1, 'title': 'Hello title', 'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')})
#
# def test_diary_update_works(self):
# response = self.client.patch('/api/diary/1/', data={'content': 'Updated content'})
# self.assertEqual(response.json(), {'content': 'Updated content', 'id': 1,
# 'tag': 1, 'title': 'Hello title', 'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')})
#
# def test_new_diary_creation_works(self):
# response = self.client.post('/api/diary/', data={'tag': self.tag.id, 'content': "New content",
# 'date': self.current_date_time, 'title': 'New Title'})
# self.assertEqual(response.json(), {'id': 2, 'tag': self.tag.id, 'content': "New content",
# 'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ'), 'title': 'New Title' })
#
# def test_deleting_diary_works(self):
# self.client.post('/api/diary/', data={'tag': self.tag.id, 'content': "New content !",
# 'date': self.current_date_time, 'title': 'Delete me :D '})
# response = self.client.delete('/api/diary/2/')
# self.assertEqual(response.status_code, 204)
| 45.710983 | 145 | 0.561077 | from django.test import TestCase, TransactionTestCase
from .models import Notes, Diary
from django.utils import timezone
from rest_framework.test import APIRequestFactory, APIClient
from django.contrib.auth.models import User
from freezegun import freeze_time
from rest_framework_jwt.views import obtain_jwt_token
import json
class ModelTest(TransactionTestCase):
    """
    Test all models
    """
    # Evaluated once at class-definition time and shared by every test method.
    current_date_time = timezone.now()
    # Reset auto-increment sequences between tests so object ids are predictable.
    reset_sequences = True
    def setUp(self):
        # Create one Notes row and one Diary row, both labelled with the same tag.
        self.tag = "Test tag"
        note = Notes.objects.create(content="test content ",
                                    iv="something random", date=self.current_date_time)
        note.tag.add(self.tag)
        diary = Diary.objects.create(title="Hello title", content="test content",
                                     iv="something random", date=self.current_date_time)
        diary.tag.add(self.tag)
    def test_notes_model(self):
        # Exactly the single note from setUp should exist.
        note_item = Notes.objects.all()
        self.assertEqual(note_item.count(), 1)
        # NOTE: the trailing space in "test content " is intentional -- it must
        # match the value created in setUp exactly.
        note_result = Notes.objects.get(content="test content ")
        self.assertEqual(note_result.content, "test content ")
        self.assertEqual(note_result.tag.names()[0], self.tag)
    def test_diary_model(self):
        # Exactly the single diary entry from setUp should exist.
        diary_item = Diary.objects.all()
        self.assertEqual(diary_item.count(), 1)
        diary_result = Diary.objects.get(title="Hello title")
        self.assertEqual(diary_result.title, "Hello title")
        self.assertEqual(diary_result.tag.names()[0], self.tag)
        self.assertEqual(diary_result.date, self.current_date_time)
# class AuthTest(TestCase):
# """
# Test JWT auth (now I am thinking , do I really need this test ? :/ )
# """
# current_date_time = timezone.now()
#
# def setUp(self):
# User.objects.create_user('hiren', 'a@b.com', 'password')
# tag = Tag.objects.create(name="Test tag")
# Notes.objects.create(tag=tag, content="test content ", date=self.current_date_time)
# Diary.objects.create(tag=tag, title="Hello title", content="test content", date=self.current_date_time)
#
# self.factory = APIRequestFactory()
#
# def test_jwt_auth(self):
# request = self.factory.post('/api-token-auth/', {'username': 'hiren', 'password': 'password'})
# response = obtain_jwt_token(request)
# response.render()
# self.assertEqual(response.status_code, 200)
class NotesViewTest(TransactionTestCase):
    """
    Test Notes View
    """
    # Reset auto-increment sequences so hard-coded ids like /api/notes/1/ stay valid.
    reset_sequences = True
    current_date_time = timezone.now()
    # current_date_time = "2017-01-14T00:00:00Z"
    @freeze_time("2012-01-14")
    def setUp(self):
        # Authenticate a test user and create one note the view tests rely on.
        self.client = APIClient()
        self.user = User.objects.create_user('hiren', 'a@b.com', 'password')
        self.client.force_authenticate(user=self.user)
        self.tag = "Test tag"
        note = Notes.objects.create(iv="random", content="test content", date=self.current_date_time)
        note.tag.add(self.tag)
    def test_login_works(self):
        # Authenticated requests succeed; after logout the API rejects them.
        response = self.client.get('/api/notes/')
        self.assertEqual(response.status_code, 200)
        self.client.logout()
        response = self.client.get('/api/notes/')
        self.assertEqual(response.status_code, 403)
    def test_return_correct_note(self):
        # created_at/updated_at are pinned by the @freeze_time on setUp.
        response = self.client.get('/api/notes/1/')
        self.assertEqual(response.json(), {'content': 'test content', 'id': 1,
                                           'tag': [self.tag], 'iv': "random",
                                           'salt': '',
                                           'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%f'),
                                           'created_at': '2012-01-14T00:00:00',
                                           'updated_at': '2012-01-14T00:00:00'})
    @freeze_time("2012-01-14")
    def test_note_update_works(self):
        # PATCH changes only the content; all other fields are unchanged.
        response = self.client.patch('/api/notes/1/', data={'content': 'Updated content'})
        self.assertEqual(response.json(), {'content': 'Updated content', 'id': 1,
                                           'tag': [self.tag], 'iv': 'random',
                                           'salt': '',
                                           'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%f'),
                                           'created_at': '2012-01-14T00:00:00',
                                           'updated_at': '2012-01-14T00:00:00'})
# @freeze_time("2012-01-14")
# def test_new_note_creation_works(self):
# response = self.client.post('/api/notes/', data={'tag': ["xyz"], 'iv': 'random', 'content': "New content",
# 'salt': 'sa', 'date': "2012-01-14T00:00:00"}, format="json")
# print(response.json())
# self.assertEqual(response.json(), {'id': 2, 'tag': [self.tag], 'iv': 'random', 'content': "New content",
# 'date': '2012-01-14T00:00:00',
# 'created_at': '2012-01-14T00:00:00',
# 'updated_at': '2012-01-14T00:00:00'})
#
# def test_deleting_note_works(self):
# #self.client.post('/api/notes/', data={'tag': [self.tag], 'iv': 'random', 'content': "New content !",
# # 'date': self.current_date_time})
# response = self.client.delete('/api/notes/1/')
# self.assertEqual(response.status_code, 204)
#
#
# class DiaryViewTest(TransactionTestCase):
# """
# Test Diary View
# """
# reset_sequences = True
# current_date_time = timezone.now()
#
# def setUp(self):
# self.client = APIClient()
# self.user = User.objects.create_user('hiren', 'a@b.com', 'password')
# self.client.force_authenticate(user=self.user)
# self.tag = "Test tag"
# Diary.objects.create(tag=self.tag, title="Hello title", content="test content", date=self.current_date_time)
#
# def test_login_works(self):
# response = self.client.get('/api/diary/')
# self.assertEqual(response.status_code, 200)
#
# self.client.logout()
# response = self.client.get('/api/diary/')
# self.assertEqual(response.status_code, 403)
#
# def test_return_correct_diary_object(self):
# response = self.client.get('/api/diary/1/')
# self.assertEqual(response.json(), {'content': 'test content', 'id': 1,
# 'tag': 1, 'title': 'Hello title', 'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')})
#
# def test_diary_update_works(self):
# response = self.client.patch('/api/diary/1/', data={'content': 'Updated content'})
# self.assertEqual(response.json(), {'content': 'Updated content', 'id': 1,
# 'tag': 1, 'title': 'Hello title', 'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')})
#
# def test_new_diary_creation_works(self):
# response = self.client.post('/api/diary/', data={'tag': self.tag.id, 'content': "New content",
# 'date': self.current_date_time, 'title': 'New Title'})
# self.assertEqual(response.json(), {'id': 2, 'tag': self.tag.id, 'content': "New content",
# 'date': self.current_date_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ'), 'title': 'New Title' })
#
# def test_deleting_diary_works(self):
# self.client.post('/api/diary/', data={'tag': self.tag.id, 'content': "New content !",
# 'date': self.current_date_time, 'title': 'Delete me :D '})
# response = self.client.delete('/api/diary/2/')
# self.assertEqual(response.status_code, 204)
| 2,744 | 0 | 187 |
1d744b283ba3a06f1f0cde6aa9463c07516ddc12 | 1,119 | py | Python | main.py | BradLyman/MafiaGame | c6f14a303b126886e8593ed91d034650cb7d82b0 | [
"MIT"
] | null | null | null | main.py | BradLyman/MafiaGame | c6f14a303b126886e8593ed91d034650cb7d82b0 | [
"MIT"
] | null | null | null | main.py | BradLyman/MafiaGame | c6f14a303b126886e8593ed91d034650cb7d82b0 | [
"MIT"
] | null | null | null | from gameInitialization import *
from getAndDoActions import *
from checkState import *
from voting import *
from displayAndPrompt import *
import Globals
teamInfoList = promptTeams()
teams = createTeams(teamInfoList)
roleList = []
for team in teams:
roleList += getListOfTeamRoles(team)
playerInfoList = getPlayerNames(roleList)
players = addPlayersToTeams(playerInfoList)
Day = False
for team in teams:
team.cleanUp()
cleanUpGlobalTraits(players)
while True:
alivePlayers = getSpecificPlayers(players, {'alive' : True})
if Day:
voteInfoList = getVotes(alivePlayers)
applyVotes(voteInfoList)
voteCountList = countVotes(alivePlayers)
displayVotes()
killedOff = killTopVoted(voteCountList)
displayKilledOff(killedOff)
Day = False
else:
actionInfoList = getActions(alivePlayers)
actionDict = primeActions(actionInfoList)
actionOrderPlayers = orderPlayers(actionDict)
doActions(actionOrderPlayers)
for team in teams:
team.cleanUp()
cleanUpGlobalTraits(players)
Day = True
winners = checkWinners(teams, players)
if winners:
print('The winners are:')
displayTeams(winners)
break | 26.642857 | 61 | 0.781948 | from gameInitialization import *
from getAndDoActions import *
from checkState import *
from voting import *
from displayAndPrompt import *
import Globals
# Game setup: prompt for teams and roles, then assign players to them.
teamInfoList = promptTeams()
teams = createTeams(teamInfoList)
roleList = []
for team in teams:
    roleList += getListOfTeamRoles(team)
playerInfoList = getPlayerNames(roleList)
players = addPlayersToTeams(playerInfoList)
# The game begins with a night phase (Day is False).
Day = False
for team in teams:
    team.cleanUp()
cleanUpGlobalTraits(players)
# Main loop: alternate day (voting) and night (actions) phases until
# checkWinners() reports a winning team.
while True:
    alivePlayers = getSpecificPlayers(players, {'alive' : True})
    if Day:
        # Day phase: collect votes and kill the top-voted player.
        voteInfoList = getVotes(alivePlayers)
        applyVotes(voteInfoList)
        voteCountList = countVotes(alivePlayers)
        displayVotes()
        killedOff = killTopVoted(voteCountList)
        displayKilledOff(killedOff)
        Day = False
    else:
        # Night phase: gather, prioritise and resolve role actions.
        actionInfoList = getActions(alivePlayers)
        actionDict = primeActions(actionInfoList)
        actionOrderPlayers = orderPlayers(actionDict)
        doActions(actionOrderPlayers)
    for team in teams:
        team.cleanUp()
    cleanUpGlobalTraits(players)
    # NOTE(review): Day is set to True unconditionally every iteration,
    # overriding the `Day = False` assigned at the end of the day branch —
    # verify the phases actually alternate as intended.
    Day = True
    winners = checkWinners(teams, players)
    if winners:
        print('The winners are:')
        displayTeams(winners)
break | 0 | 0 | 0 |
6520e33a48577a2b3b99bd3dc7570181bc28a9f8 | 3,740 | py | Python | universe.py | BhanuPrakashNani/physics-simulation | 7d7ad4bff654f4ad80dbc6a7ab254489d623658f | [
"MIT"
] | 7 | 2018-12-07T14:25:15.000Z | 2021-04-07T22:14:49.000Z | universe.py | BhanuPrakashNani/physics-simulation | 7d7ad4bff654f4ad80dbc6a7ab254489d623658f | [
"MIT"
] | 9 | 2018-12-07T18:11:29.000Z | 2018-12-22T09:39:39.000Z | universe.py | BhanuPrakashNani/physics-simulation | 7d7ad4bff654f4ad80dbc6a7ab254489d623658f | [
"MIT"
] | 22 | 2018-12-06T16:35:34.000Z | 2019-01-26T13:08:14.000Z | import math,random,pygame
from pygame.locals import *
def combineVectors(vector1, vector2):
    """Add two vectors, each given as an (angle, length) pair.

    Angles are measured from the positive y axis (x = sin, y = cos),
    matching the convention used throughout this file. Returns the
    resulting (angle, length) tuple.
    """
    angle_a, mag_a = vector1
    angle_b, mag_b = vector2
    # Convert both polar vectors to Cartesian components and sum them.
    total_x = math.sin(angle_a) * mag_a + math.sin(angle_b) * mag_b
    total_y = math.cos(angle_a) * mag_a + math.cos(angle_b) * mag_b
    # Convert the summed components back to (angle, length) form.
    combined_angle = 0.5 * math.pi - math.atan2(total_y, total_x)
    combined_length = math.hypot(total_x, total_y)
    return (combined_angle, combined_length)
# Set up Pygame variables
pygame.init()
BG_colour = (0,0,0)
particle_colour = (200,200,200)
(width, height) = (480, 360)
screen = pygame.display.set_mode((width, height))
# Seed the simulation with particles at random positions and random masses.
number_of_particles = 170
particles = []
for p in range(number_of_particles):
    mass = random.randint(1, 4)
    #mass = 1
    x = random.randrange(0, width)
    y = random.randrange(0, height)
    particles.append(Particle(x, y, mass))
# Main loop: apply pairwise gravity, move every particle, then redraw.
running = True
while running:
    for event in pygame.event.get():
        if event.type == QUIT:
            running = False
    screen.fill(BG_colour)
    # Each unordered pair interacts once; attract() returns the absorbed
    # particle when two collide, which is then removed from the list.
    for i in range(number_of_particles):
        j = i+1
        while j < number_of_particles:
            collide = particles[i].attract(particles[j])
            if collide != None:
                particles.remove(collide)
                number_of_particles -= 1
            else:
                j += 1
    for p in particles:
        p.move()
#        if p.size < 1:
#            screen.set_at((int(p.x), int(p.y)), particle_colour)
        if p.size < 2:
            pygame.draw.rect(screen, p.colour, (int(p.x), int(p.y), 2, 2))
        else:
            pygame.draw.circle(screen, p.colour, (int(p.x), int(p.y)), p.size, 0)
    pygame.display.flip()
# After the window closes, dump each surviving particle's state.
for p in particles:
    dx = math.sin(p.angle) * p.speed
    dy = math.cos(p.angle) * p.speed
    print ("(%d, %d)\t(dx=%f, dy=%f)\tmass = %d" % (p.x, p.y, dx, dy, p.mass))
| 30.655738 | 117 | 0.564706 | import math,random,pygame
from pygame.locals import *
def combineVectors(vector1, vector2):
    """Add two vectors, each given as an (angle, length) pair.

    Angles are measured from the positive y axis (x = sin, y = cos),
    matching the convention used throughout this file. Returns the
    resulting (angle, length) tuple.
    """
    angle_a, mag_a = vector1
    angle_b, mag_b = vector2
    # Convert both polar vectors to Cartesian components and sum them.
    total_x = math.sin(angle_a) * mag_a + math.sin(angle_b) * mag_b
    total_y = math.cos(angle_a) * mag_a + math.cos(angle_b) * mag_b
    # Convert the summed components back to (angle, length) form.
    combined_angle = 0.5 * math.pi - math.atan2(total_y, total_x)
    combined_length = math.hypot(total_x, total_y)
    return (combined_angle, combined_length)
class Particle():
    """A point mass in the 2D gravity simulation.

    Position is kept in screen coordinates (x, y); velocity is kept in
    polar form as (angle, speed), with angle measured from the +y axis
    as in combineVectors().
    """
    def __init__(self, x, y, mass=1):
        """Create a particle at (x, y) with the given mass, initially at rest."""
        self.x = x
        self.y = y
        self.mass = mass
        self.findRadius()
        self.speed = 0
        self.angle = 0
    def findRadius(self):
        """Derive radius, drawn size and colour from the current mass."""
        # Radius grows with the cube root of mass (constant-density body).
        self.radius = 0.4 * self.mass ** (1.0/3.0)
        self.size = int(self.radius)
        if self.size < 2:
            # Small particles: grey that brightens slightly with mass.
            self.colour = (100+self.mass, 100+self.mass, 100+self.mass)
        else:
            # Large (merged) particles are drawn yellow.
            self.colour = (255,255, 0)
    def move(self):
        """ Moves the particle based on its speed and direction """
        self.x += math.sin(self.angle) * self.speed
        self.y += math.cos(self.angle) * self.speed
    def attract(self, other):
        """Apply mutual gravity between self and other.

        If the particles touch they are merged into self and `other` is
        returned so the caller can remove it; otherwise both velocities
        are nudged toward each other and None is returned implicitly.
        """
        # NOTE(review): dx/dy are doubled, scaling both the separation and
        # the resulting force — presumably a deliberate tuning factor;
        # confirm before changing.
        dx = (self.x - other.x) * 2
        dy = (self.y - other.y) * 2
        dist = math.hypot(dx, dy)
        # Inverse-square attraction with gravitational constant 0.1.
        force = 0.1 * self.mass * other.mass / dist**2
        theta = 0.5 * math.pi - math.atan2(dy, dx)
        if dist < self.radius + other.radius:
            # Collision: merge into self at the mass-weighted position.
            total_mass = self.mass + other.mass
            self.x = (self.x * self.mass + other.x * other.mass) / total_mass
            self.y = (self.y * self.mass + other.y * other.mass) / total_mass
            # Scale each speed by its mass fraction, then add the velocities.
            self.speed = self.speed * self.mass / total_mass
            other.speed = other.speed * other.mass / total_mass
            (self.angle, self.speed) = combineVectors((self.angle, self.speed), (other.angle, other.speed))
            self.mass = total_mass
            self.findRadius()
            return other
        else:
            # No collision: accelerate each particle toward the other
            # (theta points from `other` toward `self`, hence the pi offset).
            (self.angle, self.speed) = combineVectors((self.angle, self.speed), (theta+math.pi, force/self.mass))
            (other.angle, other.speed) = combineVectors((other.angle, other.speed), (theta, force/other.mass))
# Set up Pygame variables
pygame.init()
BG_colour = (0,0,0)
particle_colour = (200,200,200)
(width, height) = (480, 360)
screen = pygame.display.set_mode((width, height))
# Seed the simulation with particles at random positions and random masses.
number_of_particles = 170
particles = []
for p in range(number_of_particles):
    mass = random.randint(1, 4)
    #mass = 1
    x = random.randrange(0, width)
    y = random.randrange(0, height)
    particles.append(Particle(x, y, mass))
# Main loop: apply pairwise gravity, move every particle, then redraw.
running = True
while running:
    for event in pygame.event.get():
        if event.type == QUIT:
            running = False
    screen.fill(BG_colour)
    # Each unordered pair interacts once; attract() returns the absorbed
    # particle when two collide, which is then removed from the list.
    for i in range(number_of_particles):
        j = i+1
        while j < number_of_particles:
            collide = particles[i].attract(particles[j])
            if collide != None:
                particles.remove(collide)
                number_of_particles -= 1
            else:
                j += 1
    for p in particles:
        p.move()
#        if p.size < 1:
#            screen.set_at((int(p.x), int(p.y)), particle_colour)
        if p.size < 2:
            pygame.draw.rect(screen, p.colour, (int(p.x), int(p.y), 2, 2))
        else:
            pygame.draw.circle(screen, p.colour, (int(p.x), int(p.y)), p.size, 0)
    pygame.display.flip()
# After the window closes, dump each surviving particle's state.
for p in particles:
    dx = math.sin(p.angle) * p.speed
    dy = math.cos(p.angle) * p.speed
    print ("(%d, %d)\t(dx=%f, dy=%f)\tmass = %d" % (p.x, p.y, dx, dy, p.mass))
| 387 | 1,414 | 23 |
1baf13c33c8af2896e5c701f2bcbe93cba9095ab | 1,740 | py | Python | plots/Nell_alpha.py | damonge/SNELL | 4bb276225fce8f535619d0f2133a19f3c42aa44f | [
"BSD-3-Clause"
] | 2 | 2020-05-07T03:22:37.000Z | 2021-02-19T14:34:42.000Z | plots/Nell_alpha.py | damonge/SNELL | 4bb276225fce8f535619d0f2133a19f3c42aa44f | [
"BSD-3-Clause"
] | 2 | 2020-04-28T11:13:10.000Z | 2021-06-08T12:20:25.000Z | plots/Nell_alpha.py | damonge/GWSN | 4bb276225fce8f535619d0f2133a19f3c42aa44f | [
"BSD-3-Clause"
] | 2 | 2020-05-07T03:22:43.000Z | 2021-12-05T15:41:05.000Z | import numpy as np
import schnell as snl
import matplotlib.pyplot as plt
from matplotlib import rc
# Use LaTeX-rendered Helvetica for all figure text.
rc('font', **{'family': 'sans-serif',
              'sans-serif': ['Helvetica']})
rc('text', usetex=True)
# Observation setup: 1 year of data, 63 Hz pivot frequency, HEALPix nside 64.
t_obs = 1
f_ref = 63
nside = 64
obs_time = t_obs*365*24*3600.
freqs = np.linspace(10., 1010., 101)
# Ground-based detector network. NOTE(review): positional args are
# presumably (name, latitude, longitude, orientation, noise file) —
# confirm against the schnell documentation.
dets = [snl.GroundDetector('Hanford', 46.4, -119.4, 171.8,
                           'data/aLIGO.txt'),
        snl.GroundDetector('Livingstone', 30.7, -90.8, 243.0,
                           'data/aLIGO.txt'),
        snl.GroundDetector('Virgo', 43.6, 10.5, 116.5,
                           'data/Virgo.txt'),
        snl.GroundDetector('KAGRA', 36.3, 137.2, 225.0,
                           'data/KAGRA.txt')]
# Compute N_ell for three spectral indices (0, 2/3, 3), auto-correlations off.
print("0")
mc = snl.MapCalculator(dets, f_pivot=f_ref,
                       spectral_index=0.)
nl_a0 = mc.get_N_ell(obs_time, freqs, nside, no_autos=True)
print("2/3")
mc = snl.MapCalculator(dets, f_pivot=f_ref,
                       spectral_index=2./3.)
nl_a2o3 = mc.get_N_ell(obs_time, freqs, nside, no_autos=True)
print("3")
mc = snl.MapCalculator(dets, f_pivot=f_ref,
                       spectral_index=3.)
nl_a3 = mc.get_N_ell(obs_time, freqs, nside, no_autos=True)
# Plot (ell + 1/2) * N_ell for all three cases on log-log axes and save.
ls = np.arange(3*nside)
plt.figure()
plt.plot(ls, (ls+0.5)*nl_a3, 'k--', label=r'$\alpha=3$')
plt.plot(ls, (ls+0.5)*nl_a2o3, 'k-', label=r'$\alpha=2/3$')
plt.plot(ls, (ls+0.5)*nl_a0, 'k:', label=r'$\alpha=0$')
plt.loglog()
plt.xlabel(r'$\ell$', fontsize=16)
plt.ylabel(r'$(\ell+1/2)\,N_\ell$', fontsize=16)
plt.ylim([3E-20, 1E-10])
plt.xlim([1, 100])
plt.legend(loc='upper left', fontsize='x-large', frameon=False)
plt.gca().tick_params(labelsize="large")
plt.savefig("Nell_alphas.pdf", bbox_inches='tight')
plt.show()
| 34.8 | 63 | 0.594253 | import numpy as np
import schnell as snl
import matplotlib.pyplot as plt
from matplotlib import rc
# Use LaTeX-rendered Helvetica for all figure text.
rc('font', **{'family': 'sans-serif',
              'sans-serif': ['Helvetica']})
rc('text', usetex=True)
# Observation setup: 1 year of data, 63 Hz pivot frequency, HEALPix nside 64.
t_obs = 1
f_ref = 63
nside = 64
obs_time = t_obs*365*24*3600.
freqs = np.linspace(10., 1010., 101)
# Ground-based detector network. NOTE(review): positional args are
# presumably (name, latitude, longitude, orientation, noise file) —
# confirm against the schnell documentation.
dets = [snl.GroundDetector('Hanford', 46.4, -119.4, 171.8,
                           'data/aLIGO.txt'),
        snl.GroundDetector('Livingstone', 30.7, -90.8, 243.0,
                           'data/aLIGO.txt'),
        snl.GroundDetector('Virgo', 43.6, 10.5, 116.5,
                           'data/Virgo.txt'),
        snl.GroundDetector('KAGRA', 36.3, 137.2, 225.0,
                           'data/KAGRA.txt')]
# Compute N_ell for three spectral indices (0, 2/3, 3), auto-correlations off.
print("0")
mc = snl.MapCalculator(dets, f_pivot=f_ref,
                       spectral_index=0.)
nl_a0 = mc.get_N_ell(obs_time, freqs, nside, no_autos=True)
print("2/3")
mc = snl.MapCalculator(dets, f_pivot=f_ref,
                       spectral_index=2./3.)
nl_a2o3 = mc.get_N_ell(obs_time, freqs, nside, no_autos=True)
print("3")
mc = snl.MapCalculator(dets, f_pivot=f_ref,
                       spectral_index=3.)
nl_a3 = mc.get_N_ell(obs_time, freqs, nside, no_autos=True)
# Plot (ell + 1/2) * N_ell for all three cases on log-log axes and save.
ls = np.arange(3*nside)
plt.figure()
plt.plot(ls, (ls+0.5)*nl_a3, 'k--', label=r'$\alpha=3$')
plt.plot(ls, (ls+0.5)*nl_a2o3, 'k-', label=r'$\alpha=2/3$')
plt.plot(ls, (ls+0.5)*nl_a0, 'k:', label=r'$\alpha=0$')
plt.loglog()
plt.xlabel(r'$\ell$', fontsize=16)
plt.ylabel(r'$(\ell+1/2)\,N_\ell$', fontsize=16)
plt.ylim([3E-20, 1E-10])
plt.xlim([1, 100])
plt.legend(loc='upper left', fontsize='x-large', frameon=False)
plt.gca().tick_params(labelsize="large")
plt.savefig("Nell_alphas.pdf", bbox_inches='tight')
plt.show()
| 0 | 0 | 0 |
a8b8cea006a63544d98c3081b12d05f97b89966d | 445 | py | Python | src/tools/template/models/users.py | Mimalef/paasta | 33c1abddc37e6ecf573beb3ea691a941b082d4c5 | [
"MIT"
] | null | null | null | src/tools/template/models/users.py | Mimalef/paasta | 33c1abddc37e6ecf573beb3ea691a941b082d4c5 | [
"MIT"
] | null | null | null | src/tools/template/models/users.py | Mimalef/paasta | 33c1abddc37e6ecf573beb3ea691a941b082d4c5 | [
"MIT"
] | null | null | null | from kernel.models.tools import Base, tool_engine
from sqlalchemy.orm import sessionmaker
# Engine bound to this tool's database.
# NOTE(review): `tool` is not defined in this module — presumably injected
# by the templating step that generates this file; confirm.
user_engine = tool_engine(tool)
# NOTE(review): the sessionmaker factory is immediately overwritten by the
# session instance it creates; only the session object survives.
user_session = sessionmaker(bind = user_engine)
user_session = user_session()
Base.metadata.create_all(user_engine) | 24.722222 | 59 | 0.777528 | from kernel.models.tools import Base, tool_engine
from sqlalchemy.orm import sessionmaker
# Engine bound to this tool's database.
# NOTE(review): `tool` is not defined in this module — presumably injected
# by the templating step that generates this file; confirm.
user_engine = tool_engine(tool)
# NOTE(review): the sessionmaker factory is immediately overwritten by the
# session instance it creates; only the session object survives.
user_session = sessionmaker(bind = user_engine)
user_session = user_session()
class User(Base):
    """ORM model for the `users` table (one row per known user id)."""
    # Class-scope import is unusual but kept as-is; Column/Integer are only
    # needed inside this class body.
    from sqlalchemy import Column, Integer
    __tablename__ = "users"
    # NOTE(review): no primary_key column is declared; SQLAlchemy requires
    # one to map a table — verify this model actually loads.
    user_id = Column(Integer, nullable = False, unique = True)
    def __init__(self, user_id):
        """Store the external user id this row represents."""
        self.user_id = user_id
Base.metadata.create_all(user_engine) | 32 | 149 | 23 |
0c8a10dbff93a82e081ee713c00650012c79c508 | 8,797 | py | Python | logistic_forecast.py | ivandebono/covid19 | 7ed8fc7fccbc380dd8c1f1f6b283b703b101c6bf | [
"MIT"
] | 2 | 2020-04-14T10:24:46.000Z | 2021-12-19T20:38:41.000Z | logistic_forecast.py | ivandebono/covid19 | 7ed8fc7fccbc380dd8c1f1f6b283b703b101c6bf | [
"MIT"
] | null | null | null | logistic_forecast.py | ivandebono/covid19 | 7ed8fc7fccbc380dd8c1f1f6b283b703b101c6bf | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
from datetime import timedelta
from datetime import datetime
from io import StringIO
from urllib import request as url_request
import os
import sys
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from scipy.optimize import curve_fit
# This stuff because pandas or matplot lib complained...
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
from scipy import stats,integrate
from scipy.optimize import curve_fit
from scipy import stats,integrate
def logistic(x, L, k, x0, y0):
    """
    General logistic (sigmoid) function.

    Args:
        x: float or array-like, the time variable.
        L: float, the curve's maximum value.
        k: float, the logistic growth rate (steepness of the curve).
        x0: float, the x-value of the sigmoid's midpoint.
        y0: float, vertical shift of the curve.
    """
    decay = np.exp(-k * (x - x0))
    return L / (1.0 + decay) + y0
def logistic_derivative(x, L, k, x0):
    """
    Bell-shaped (Gaussian-like) derivative of the general logistic.

    Args:
        x: float or array-like, the time variable.
        L: float, the curve's integral (area under the curve).
        k: float, the logistic growth rate (steepness).
        x0: float, the x-value at which the peak occurs.
    """
    decay = np.exp(-k * (x - x0))
    return k * L * decay / (1.0 + decay) ** 2
| 32.223443 | 119 | 0.615096 | import numpy as np
import pandas as pd
from datetime import timedelta
from datetime import datetime
from io import StringIO
from urllib import request as url_request
import os
import sys
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from scipy.optimize import curve_fit
# This stuff because pandas or matplot lib complained...
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
from scipy import stats,integrate
from scipy.optimize import curve_fit
from scipy import stats,integrate
def logistic(x, L, k, x0, y0):
    """
    General logistic (sigmoid) function.

    Args:
        x: float or array-like, the time variable.
        L: float, the curve's maximum value.
        k: float, the logistic growth rate (steepness of the curve).
        x0: float, the x-value of the sigmoid's midpoint.
        y0: float, vertical shift of the curve.
    """
    decay = np.exp(-k * (x - x0))
    return L / (1.0 + decay) + y0
def logistic_derivative(x, L, k, x0):
    """
    Bell-shaped (Gaussian-like) derivative of the general logistic.

    Args:
        x: float or array-like, the time variable.
        L: float, the curve's integral (area under the curve).
        k: float, the logistic growth rate (steepness).
        x0: float, the x-value at which the peak occurs.
    """
    decay = np.exp(-k * (x - x0))
    return k * L * decay / (1.0 + decay) ** 2
def fit_curve(curve, ydata, title, ylabel, last_date, coeff_std, do_imgs=False,plt_forecast=False,show_every=5):
    """Fit a logistic-family curve to a daily time series and plot the result.

    Args:
        curve: callable, either `logistic` or `logistic_derivative`
            (dispatched on the function's __name__).
        ydata: array-like, one observation per day, ending on `last_date`.
        title: str, figure title; also the file name stem when do_imgs=True.
        ylabel: str, y-axis label.
        last_date: datetime, date of the last entry in `ydata`.
        coeff_std: float, half-width of the uncertainty band in parameter
            standard deviations.
        do_imgs: bool, if True save the figure to plots/<title>.png.
        plt_forecast: bool, if True overlay the fitted curve and its band.
        show_every: int, show every n-th date tick on the x axis.

    Returns:
        (popt, perr): best-fit parameters and their standard errors,
        or -1 when `curve` is not recognised.
    """
    # Time in days relative to last_date: ..., -2, -1, 0.
    xdata = -np.flip(np.arange(len(ydata)))
    days_past = -2 # days beyond the start of the data to plot
    days_future = 40 # days after the end of the data to predict and plot
    #show_every = 3 # int value that defines how often to show a date in the x axis. (used not to clutter the axis)
    myFmt = mdates.DateFormatter('%d/%m') # date formatter for matplotlib
    # Full plotting axis: observed range padded into the past and future.
    total_xaxis = np.array(range(-len(ydata) + days_past, days_future)) + 1
    date_xdata = [last_date + timedelta(days=int(i)) for i in xdata]
    date_total_xaxis = [last_date + timedelta(days=int(i)) for i in total_xaxis]
    future_axis = total_xaxis[len(ydata) - days_past:]
    date_future_axis = [last_date + timedelta(days=int(i)) for i in future_axis]
    # Plotting
    fig, ax = plt.subplots(figsize=(15,8))
    ax.xaxis.set_major_formatter(myFmt)
    fig.autofmt_xdate()
    start = (len(ydata) - days_past - 1) % show_every
    ax.set_xticks(date_total_xaxis[start::show_every])
    ax.set_xlabel('Date')
    ax.set_ylabel(ylabel)
    ax.set_title(title + ': ' + str(last_date.strftime("%d-%m-%Y")))
    ax.grid(True)
    # Initial guess and box bounds per curve type; parameter order matches
    # params_names.
    if curve.__name__ == 'logistic':
        p0_1=ydata[-1]
        p0=[p0_1, 0.5, 1, 0]
        bounds=([0, 0, -100, 0], [2*p0_1, 10, 100, 1])
        #bounds=([0,0,-np.inf,0],[np.inf,np.inf,np.inf,np.inf])
        params_names = ['L', 'k', 'x0', 'y0']
    elif curve.__name__ == 'logistic_derivative':
        p0_1=3*max(ydata)
        p0=[p0_1, 0.5, 1]
        bounds=([0, 0, -100], [10*p0_1, 10, 100])
        #bounds=([0,0,-np.inf],[np.inf,np.inf,np.inf])
        params_names = ['L', 'k', 'x0']
    else:
        print('this curve is unknown')
        return -1
    popt, pcov = curve_fit(curve, xdata, ydata, p0=p0, bounds=bounds,maxfev=20000)
    # Report the best-fit parameters.
    print(title)
    descr = ' fit: '
    for i, param in enumerate(params_names):
        descr = descr + "{}={:.3f}".format(param, popt[i])
        if i < len(params_names) - 1:
            descr = descr + ', '
    print(descr)
    # 1-sigma parameter errors from the covariance diagonal; the shaded band
    # is drawn at +/- coeff_std of them.
    perr = np.sqrt(np.diag(pcov))
    print('perr',perr)
    pworst = popt + coeff_std*perr
    pbest = popt - coeff_std*perr
    # Plotting
    # fig, ax = plt.subplots(figsize=(15,8))
    # ax.xaxis.set_major_formatter(myFmt)
    # fig.autofmt_xdate()
    total_xaxis = np.array(range(-len(ydata) + days_past, days_future)) + 1
    date_total_xaxis = [last_date + timedelta(days=int(i)) for i in total_xaxis]
    date_xdata = [last_date + timedelta(days=int(i)) for i in xdata]
    future_axis = total_xaxis[len(ydata) - days_past:]
    date_future_axis = [last_date + timedelta(days=int(i)) for i in future_axis]
    #print('pbest',pbest)
    #print('pworst',pworst)
    if plt_forecast==True:
        ax.plot(date_total_xaxis, curve(total_xaxis, *popt), 'g-', label='prediction')
        ax.fill_between(date_future_axis, curve(future_axis, *pbest), curve(future_axis, *pworst),
                    facecolor='red', alpha=0.2, label='std')
    #print('Integral=',np.trapz(curve(total_xaxis, *popt)))
    # Observed data points over the fitted curve.
    ax.scatter(date_xdata, ydata, color='blue', label='real data',s=8)
    ax.plot(date_xdata, ydata, color='blue',alpha=0.5)
    # start = (len(ydata) - days_past - 1) % show_every
    # ax.set_xticks(date_total_xaxis[start::show_every])
    # ax.set_xlabel('Date')
    # ax.set_ylabel(ylabel)
    # ax.set_title(title + ': ' + str(last_date.strftime("%d-%m-%Y")))
    ax.legend(loc='upper left')
    # ax.grid(True)
    fig=plt.gcf()
    plt.show()
    if do_imgs:
        fig.savefig('plots/'+ title + '.png', dpi=200)
    return popt, perr
def fit_curve_evolution(curve, ydata1, title, ylabel, last_date, coeff_std, do_imgs=False,plt_forecast=False,
                        daily=True,days_past = -2,days_future = 40,show_every = 3,day_start=30):
    """Show how the fitted curve evolves as more daily data become available.

    Refits `curve` on every growing prefix ydata1[:j] for
    j = day_start .. len(ydata1), drawing one curve per prefix,
    colour-coded (YlGn colormap) by the number of days of data used.

    Args:
        curve: callable, `logistic` or `logistic_derivative`.
        ydata1: array-like, full daily series ending on `last_date`.
        title, ylabel, last_date, coeff_std, do_imgs, plt_forecast: as in
            fit_curve().
        daily: bool, shifts the colorbar limits by one day when True.
        days_past / days_future: plot padding before/after the data range.
        show_every: int, show every n-th date tick on the x axis.
        day_start: int, smallest prefix length that gets fitted.

    Returns:
        None on success, -1 when `curve` is not recognised.
    """
    # Time in days relative to last_date for the full series.
    xdata = -np.flip(np.arange(len(ydata1)))
    #days_past = -2 # days beyond the start of the data to plot
    #days_future = 40 # days after the end of the data to predict and plot
    #show_every = 3 # int value that defines how often to show a date in the x axis. (used not to clutter the axis)
    myFmt = mdates.DateFormatter('%d/%m') # date formatter for matplotlib
    total_xaxis = np.array(range(-len(ydata1) + days_past, days_future)) + 1
    date_xdata = [last_date + timedelta(days=int(i)) for i in xdata]
    date_total_xaxis = [last_date + timedelta(days=int(i)) for i in total_xaxis]
    future_axis = total_xaxis[len(ydata1) - days_past:]
    date_future_axis = [last_date + timedelta(days=int(i)) for i in future_axis]
    # Plotting
    fig, ax = plt.subplots(figsize=(15,8))
    ax.xaxis.set_major_formatter(myFmt)
    fig.autofmt_xdate()
    start = (len(ydata1) - days_past - 1) % show_every
    ax.set_xticks(date_total_xaxis[start::show_every])
    ax.set_xlabel('Date')
    ax.set_ylabel(ylabel)
    ax.set_title(title + ': ' + str(last_date.strftime("%d-%m-%Y")))
    ax.grid(True)
    # One colour per fitted prefix, sampled evenly from the YlGn colormap.
    coln=len(np.arange(day_start,len(ydata1)+1))
    colors = plt.cm.YlGn(np.linspace(0,1,coln))
    for j in np.arange(day_start,len(ydata1)+1):
        #print('STEP',j)
        # Fit on the first j days only.
        ydata=ydata1[:j]
        xdata = -np.flip(np.arange(len(ydata)))
        #print(len(xdata),len(ydata))
        # Initial guess and box bounds per curve type (see fit_curve()).
        if curve.__name__ == 'logistic':
            p0_1=ydata[-1]
            p0=[p0_1, 0.5, 1, 0]
            bounds=([0, 0, -100, 0], [2*p0_1, 10, 100, 1])
            #bounds=([0,0,-np.inf,0],[np.inf,np.inf,np.inf,np.inf])
            params_names = ['L', 'k', 'x0', 'y0']
        elif curve.__name__ == 'logistic_derivative':
            p0_1=3*max(ydata)
            p0=[p0_1, 0.5, 1]
            bounds=([0, 0, -100], [10*p0_1, 10, 100])
            #bounds=([0,0,-np.inf],[np.inf,np.inf,np.inf])
            params_names = ['L', 'k', 'x0']
        else:
            print('this curve is unknown')
            return -1
        popt, pcov = curve_fit(curve, xdata, ydata, p0=p0, bounds=bounds,maxfev=20000)
        perr = np.sqrt(np.diag(pcov))
        pworst = popt + coeff_std*perr
        pbest = popt - coeff_std*perr
        total_xaxis = np.array(list(range(-len(ydata) + days_past, days_future))) + 1
        if plt_forecast==True:
            ax.plot(date_total_xaxis[:len(total_xaxis)], curve(total_xaxis, *popt), color=colors[j-day_start])
            #ax.fill_between(date_future_axis, curve(future_axis, *pbest), curve(future_axis, *pworst),
            #facecolor='red', alpha=0.2)
        #print('Integral=',np.trapz(curve(total_xaxis, *popt)))
    # Colorbar mapping curve colour to the number of days of data used.
    if daily:
        sm = plt.cm.ScalarMappable(cmap='YlGn', norm=plt.Normalize(vmin=day_start+1, vmax=len(ydata1)+1))
    else:
        sm = plt.cm.ScalarMappable(cmap='YlGn', norm=plt.Normalize(vmin=day_start, vmax=len(ydata1)))
    plt.colorbar(sm,label='No. of days in data')
    # Observed data over the family of fitted curves.
    ax.scatter(date_xdata, ydata1, color='blue', label='real data',s=8)
    ax.plot(date_xdata, ydata1, color='blue',alpha=1.0)
    ax.legend(loc='upper left')
    #plt.ylim(bottom=0, top=1.1*max(ydata1))
    fig=plt.gcf()
    plt.show()
    if do_imgs:
        fig.savefig('plots/evolution'+ title + '.png', dpi=200)
    return
| 7,316 | 0 | 47 |
cf4ae205540f9495fc6c25a0d7ed538dc95cc1ed | 615 | py | Python | clinica/utils/epi.py | Raelag0112/clinica | d301b1abfdf4d3b62dc4b329622340795ae51ef8 | [
"MIT"
] | 1 | 2020-06-08T15:27:55.000Z | 2020-06-08T15:27:55.000Z | clinica/utils/epi.py | Raelag0112/clinica | d301b1abfdf4d3b62dc4b329622340795ae51ef8 | [
"MIT"
] | null | null | null | clinica/utils/epi.py | Raelag0112/clinica | d301b1abfdf4d3b62dc4b329622340795ae51ef8 | [
"MIT"
] | null | null | null | # coding: utf-8
def bids_dir_to_fsl_dir(bids_dir):
    """
    Converts BIDS PhaseEncodingDirection parameters (i,j,k,i-,j-,k-) to
    FSL direction (x,y,z,x-,y-,z-).

    Args:
        bids_dir: BIDS phase-encoding direction; matching is case-insensitive.

    Returns:
        str: The corresponding FSL direction.

    Raises:
        RuntimeError: If `bids_dir` is not one of the six valid BIDS values.
    """
    # BIDS axis label -> FSL axis label; a trailing '-' flips the polarity.
    bids_to_fsl = {
        "i": "x",
        "i-": "x-",
        "j": "y",
        "j-": "y-",
        "k": "z",
        "k-": "z-",
    }
    fsl_dir = bids_to_fsl.get(bids_dir.lower())
    if fsl_dir is None:
        # Fixed: the previous message listed FSL values (x,y,z,...) although
        # the *input* must be a BIDS value; also removed the unreachable
        # `return` that followed the unconditional raise.
        raise RuntimeError(
            f"PhaseEncodingDirection {bids_dir.lower()} is unknown, "
            f"it should be a value in (i,j,k,i-,j-,k-)"
        )
    return fsl_dir
| 21.964286 | 97 | 0.528455 | # coding: utf-8
def bids_dir_to_fsl_dir(bids_dir):
    """
    Converts BIDS PhaseEncodingDirection parameters (i,j,k,i-,j-,k-) to
    FSL direction (x,y,z,x-,y-,z-).

    Args:
        bids_dir: BIDS phase-encoding direction; matching is case-insensitive.

    Returns:
        str: The corresponding FSL direction.

    Raises:
        RuntimeError: If `bids_dir` is not one of the six valid BIDS values.
    """
    # BIDS axis label -> FSL axis label; a trailing '-' flips the polarity.
    bids_to_fsl = {
        "i": "x",
        "i-": "x-",
        "j": "y",
        "j-": "y-",
        "k": "z",
        "k-": "z-",
    }
    fsl_dir = bids_to_fsl.get(bids_dir.lower())
    if fsl_dir is None:
        # Fixed: the previous message listed FSL values (x,y,z,...) although
        # the *input* must be a BIDS value; also removed the unreachable
        # `return` that followed the unconditional raise.
        raise RuntimeError(
            f"PhaseEncodingDirection {bids_dir.lower()} is unknown, "
            f"it should be a value in (i,j,k,i-,j-,k-)"
        )
    return fsl_dir
| 0 | 0 | 0 |
84490ad221e4b6de786c2d265df85bd59684f75c | 145 | py | Python | tokenizer/__init__.py | skorani/tokenizer | ee80488875d9e774c58528d49eb3454fbd99b053 | [
"MIT"
] | 10 | 2020-02-11T19:43:59.000Z | 2021-09-28T15:14:51.000Z | tokenizer/__init__.py | skorani/tokenizer | ee80488875d9e774c58528d49eb3454fbd99b053 | [
"MIT"
] | 2 | 2020-02-11T00:18:15.000Z | 2020-02-12T12:38:27.000Z | tokenizer/__init__.py | skorani/tokenizer | ee80488875d9e774c58528d49eb3454fbd99b053 | [
"MIT"
] | 3 | 2020-02-12T08:47:27.000Z | 2020-04-15T20:07:38.000Z | import logging as __logging
from ._tokenizer import Tokenizer
# Install a NullHandler on this package's logger so importing applications
# don't see "No handler could be found" warnings unless they configure
# logging themselves (standard library-package practice).
__logging.getLogger(f"pizza_nlp.{__name__}").addHandler(__logging.NullHandler())
| 24.166667 | 80 | 0.82069 | import logging as __logging
from ._tokenizer import Tokenizer
# Install a NullHandler on this package's logger so importing applications
# don't see "No handler could be found" warnings unless they configure
# logging themselves (standard library-package practice).
__logging.getLogger(f"pizza_nlp.{__name__}").addHandler(__logging.NullHandler())
| 0 | 0 | 0 |
1f80949a23a59577f897d8c63d05809efc1850f5 | 3,479 | py | Python | High-Rated-app-on-play-store/code.py | karunakar9869/ga-learner-dsmp-repo | 597e02bc1682cfd48172fee0d67a22e2274b6822 | [
"MIT"
] | null | null | null | High-Rated-app-on-play-store/code.py | karunakar9869/ga-learner-dsmp-repo | 597e02bc1682cfd48172fee0d67a22e2274b6822 | [
"MIT"
] | null | null | null | High-Rated-app-on-play-store/code.py | karunakar9869/ga-learner-dsmp-repo | 597e02bc1682cfd48172fee0d67a22e2274b6822 | [
"MIT"
] | null | null | null | # --------------
#Importing header files
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
#Code starts here
#Code ends here
# NOTE(review): `path` is not defined in this file — presumably injected by
# the hosting platform before execution; confirm.
data=pd.read_csv(path)
#Plotting histogram of Rating
data['Rating'].plot(kind='hist')
plt.show()
#Subsetting the dataframe based on `Rating` column
# Keep only valid ratings (app ratings are on a 0-5 scale).
data=data[data['Rating']<=5]
#Plotting histogram of Rating
data['Rating'].plot(kind='hist')
# --------------
# code starts here
# code ends here
# Missing-value report: absolute count and percentage per column.
total_null=data.isnull().sum()
total_null
k=[]
for i in range (0,len(total_null)):
    s=(total_null[i]/len(data))*100
    k.append(s)
k
percent_null=pd.Series(k,total_null.index)
percent_null
missing_data=pd.DataFrame({'Total':total_null,'Percent':percent_null})
missing_data
# Drop all rows containing missing values, then rebuild the same report.
data=data.dropna()
total_null_1=data.isnull().sum()
total_null_1
r=[]
for i in range (0,len(total_null_1)):
    t=(total_null_1[i]/len(data))*100
    r.append(t)
r
percent_null_1=pd.Series(r,total_null_1.index)
percent_null_1
missing_data_1=pd.DataFrame({'Total':total_null_1,'Percent':percent_null_1})
missing_data_1
# --------------
#Code starts here
#Code ends here
# Box plot of Rating for every app Category.
g=sns.catplot(x="Category",y="Rating",data=data, kind="box", height=10)
g.set_xticklabels(rotation=90)
g.set_titles('Rating vs Category [BoxPlot]')
# --------------
#Importing header files
from sklearn.preprocessing import MinMaxScaler, LabelEncoder
#Code starts here
#Code ends here
# Clean the Installs column ('1,000+' -> 1000) so it can be treated as numeric.
data['Installs']=data['Installs'].str.replace(',','')
#Removing `+` from the column
# NOTE(review): '+' may be interpreted as a regex by str.replace depending
# on the pandas version; consider passing regex=False — verify.
data['Installs']=data['Installs'].str.replace('+','')
#Converting the column to `int` datatype
data['Installs'] = data['Installs'].astype(int)
#Creating a label encoder object
le=LabelEncoder()
#Label encoding the column to reduce the effect of a large range of values
data['Installs']=le.fit_transform(data['Installs'])
#Setting figure size
plt.figure(figsize = (10,10))
#Plotting Regression plot between Rating and Installs
sns.regplot(x="Installs", y="Rating", color = 'teal',data=data)
#Setting the title of the plot
plt.title('Rating vs Installs[RegPlot]',size = 20)
#Code ends here
# --------------
#Code starts here
#Code ends here
# Convert Price from '$x.yz' strings to floats and plot against Rating.
data['Price'].value_counts()
data['Price']=data['Price'].str.replace('$','').astype(float)
sns.regplot(x='Price',y='Rating',data=data)
plt.figure(figsize=(10,10))
plt.title('Rating vs Price [RegPlot]',size=20)
# --------------
#Code starts here
#Code ends here
print( len(data['Genres'].unique()) , "genres")
#Splitting the column to include only the first genre of each app
data['Genres'] = data['Genres'].str.split(';').str[0]
#Grouping Genres and Rating
gr_mean=data[['Genres', 'Rating']].groupby(['Genres'], as_index=False).mean()
print(gr_mean.describe())
#Sorting the grouped dataframe by Rating
gr_mean=gr_mean.sort_values('Rating')
# Lowest- and highest-rated genres after sorting.
print(gr_mean.head(1))
print(gr_mean.tail(1))
# --------------
#Code starts here
#Code ends here
# Age of each app's last update relative to the newest update in the dataset.
data['Last Updated'] = pd.to_datetime(data['Last Updated'])
#Creating new column having `Last Updated` in days
data['Last Updated Days'] = (data['Last Updated'].max()-data['Last Updated'] ).dt.days
#Setting the size of the figure
plt.figure(figsize = (10,10))
#Plotting a regression plot between `Rating` and `Last Updated Days`
sns.regplot(x="Last Updated Days", y="Rating", color = 'lightpink',data=data )
#Setting the title of the plot
plt.title('Rating vs Last Updated [RegPlot]',size = 20)
#Code ends here
| 15.126087 | 87 | 0.688416 | # --------------
#Importing header files
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
#Code starts here
#Code ends here
# NOTE(review): `path` is not defined in this file — presumably injected by
# the hosting platform before execution; confirm.
data=pd.read_csv(path)
#Plotting histogram of Rating
data['Rating'].plot(kind='hist')
plt.show()
#Subsetting the dataframe based on `Rating` column
# Keep only valid ratings (app ratings are on a 0-5 scale).
data=data[data['Rating']<=5]
#Plotting histogram of Rating
data['Rating'].plot(kind='hist')
# --------------
# code starts here
# code ends here
# Missing-value report: absolute count and percentage per column.
total_null=data.isnull().sum()
total_null
k=[]
for i in range (0,len(total_null)):
    s=(total_null[i]/len(data))*100
    k.append(s)
k
percent_null=pd.Series(k,total_null.index)
percent_null
missing_data=pd.DataFrame({'Total':total_null,'Percent':percent_null})
missing_data
# Drop all rows containing missing values, then rebuild the same report.
data=data.dropna()
total_null_1=data.isnull().sum()
total_null_1
r=[]
for i in range (0,len(total_null_1)):
    t=(total_null_1[i]/len(data))*100
    r.append(t)
r
percent_null_1=pd.Series(r,total_null_1.index)
percent_null_1
missing_data_1=pd.DataFrame({'Total':total_null_1,'Percent':percent_null_1})
missing_data_1
# --------------
#Code starts here
#Code ends here
# Box plot of Rating for every app Category.
g=sns.catplot(x="Category",y="Rating",data=data, kind="box", height=10)
g.set_xticklabels(rotation=90)
g.set_titles('Rating vs Category [BoxPlot]')
# --------------
#Importing header files
from sklearn.preprocessing import MinMaxScaler, LabelEncoder
#Code starts here
#Code ends here
# Clean the Installs column ('1,000+' -> 1000) so it can be treated as numeric.
data['Installs']=data['Installs'].str.replace(',','')
#Removing `+` from the column
# NOTE(review): '+' may be interpreted as a regex by str.replace depending
# on the pandas version; consider passing regex=False — verify.
data['Installs']=data['Installs'].str.replace('+','')
#Converting the column to `int` datatype
data['Installs'] = data['Installs'].astype(int)
#Creating a label encoder object
le=LabelEncoder()
#Label encoding the column to reduce the effect of a large range of values
data['Installs']=le.fit_transform(data['Installs'])
#Setting figure size
plt.figure(figsize = (10,10))
#Plotting Regression plot between Rating and Installs
sns.regplot(x="Installs", y="Rating", color = 'teal',data=data)
#Setting the title of the plot
plt.title('Rating vs Installs[RegPlot]',size = 20)
#Code ends here
# --------------
#Code starts here
#Code ends here
# Convert Price from '$x.yz' strings to floats and plot against Rating.
data['Price'].value_counts()
data['Price']=data['Price'].str.replace('$','').astype(float)
sns.regplot(x='Price',y='Rating',data=data)
plt.figure(figsize=(10,10))
plt.title('Rating vs Price [RegPlot]',size=20)
# --------------
#Code starts here
#Code ends here
print( len(data['Genres'].unique()) , "genres")
#Splitting the column to include only the first genre of each app
data['Genres'] = data['Genres'].str.split(';').str[0]
#Grouping Genres and Rating
gr_mean=data[['Genres', 'Rating']].groupby(['Genres'], as_index=False).mean()
print(gr_mean.describe())
#Sorting the grouped dataframe by Rating
gr_mean=gr_mean.sort_values('Rating')
# Lowest- and highest-rated genres after sorting.
print(gr_mean.head(1))
print(gr_mean.tail(1))
# --------------
#Code starts here
#Code ends here
# Age of each app's last update relative to the newest update in the dataset.
data['Last Updated'] = pd.to_datetime(data['Last Updated'])
#Creating new column having `Last Updated` in days
data['Last Updated Days'] = (data['Last Updated'].max()-data['Last Updated'] ).dt.days
#Setting the size of the figure
plt.figure(figsize = (10,10))
#Plotting a regression plot between `Rating` and `Last Updated Days`
sns.regplot(x="Last Updated Days", y="Rating", color = 'lightpink',data=data )
#Setting the title of the plot
plt.title('Rating vs Last Updated [RegPlot]',size = 20)
#Code ends here
| 0 | 0 | 0 |
6abe5c857acd38226ebe7f2fa4cf07165b0b50c5 | 12,425 | py | Python | tests/neptune/test_project.py | janbolle/neptune-client | 33b1876b361d9a7184f557d7bd6e016cb08bd59f | [
"Apache-2.0"
] | null | null | null | tests/neptune/test_project.py | janbolle/neptune-client | 33b1876b361d9a7184f557d7bd6e016cb08bd59f | [
"Apache-2.0"
] | null | null | null | tests/neptune/test_project.py | janbolle/neptune-client | 33b1876b361d9a7184f557d7bd6e016cb08bd59f | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2019, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import sys
import unittest
from random import randint
import ntpath
import pandas as pd
from mock import MagicMock, patch
from munch import Munch
from neptune.exceptions import NeptuneNoExperimentContextException
from neptune.experiments import Experiment
from neptune.model import LeaderboardEntry
from neptune.projects import Project
from tests.neptune.api_objects_factory import a_registered_project_member, an_invited_project_member
from tests.neptune.project_test_fixture import some_exp_entry_dto, some_exp_entry_row
from tests.neptune.random_utils import a_string, a_string_list, a_uuid_string
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| 36.014493 | 120 | 0.661408 | #
# Copyright (c) 2019, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os.path
import sys
import unittest
from random import randint
import ntpath
import pandas as pd
from mock import MagicMock, patch
from munch import Munch
from neptune.exceptions import NeptuneNoExperimentContextException
from neptune.experiments import Experiment
from neptune.model import LeaderboardEntry
from neptune.projects import Project
from tests.neptune.api_objects_factory import a_registered_project_member, an_invited_project_member
from tests.neptune.project_test_fixture import some_exp_entry_dto, some_exp_entry_row
from tests.neptune.random_utils import a_string, a_string_list, a_uuid_string
class TestProject(unittest.TestCase):
    """Tests for neptune.projects.Project against a mocked backend.

    Covers member listing, experiment queries, leaderboard construction,
    the experiment context stack, and source-file upload path handling.
    """

    def setUp(self):
        """Build a Project around a MagicMock backend; remember the cwd."""
        super(TestProject, self).setUp()
        self.backend = MagicMock()
        self.project = Project(backend=self.backend, internal_id=a_uuid_string(), namespace=a_string(), name=a_string())
        # Some tests chdir into tests/neptune; keep the starting directory.
        self.current_directory = os.getcwd()

    def tearDown(self):
        # revert initial directory after changing location in tests
        os.chdir(self.current_directory)

    def test_get_members(self):
        """get_members() returns registered member names; invited ones are dropped."""
        # given
        member_usernames = [a_string() for _ in range(0, 2)]
        members = [a_registered_project_member(username) for username in member_usernames]
        # and
        self.backend.get_project_members.return_value = members + [an_invited_project_member()]
        # when
        fetched_member_usernames = self.project.get_members()
        # then
        self.backend.get_project_members.assert_called_once_with(self.project.internal_id)
        # and
        self.assertEqual(member_usernames, fetched_member_usernames)

    def test_get_experiments_with_no_params(self):
        """get_experiments() without filters queries the whole leaderboard."""
        # given
        leaderboard_entries = [MagicMock() for _ in range(0, 2)]
        self.backend.get_leaderboard_entries.return_value = leaderboard_entries
        # when
        experiments = self.project.get_experiments()
        # then
        self.backend.get_leaderboard_entries.assert_called_once_with(
            project=self.project,
            ids=None,
            states=None, owners=None, tags=None,
            min_running_time=None)
        # and
        expected_experiments = [Experiment(self.backend, self.project, entry.id, entry.internal_id)
                                for entry in leaderboard_entries]
        self.assertEqual(expected_experiments, experiments)

    def test_get_experiments_with_scalar_params(self):
        """Scalar filter values are wrapped into single-element lists."""
        # given
        leaderboard_entries = [MagicMock() for _ in range(0, 2)]
        self.backend.get_leaderboard_entries.return_value = leaderboard_entries
        # and
        params = dict(
            id=a_string(),
            state='succeeded', owner=a_string(), tag=a_string(),
            min_running_time=randint(1, 100))
        # when
        experiments = self.project.get_experiments(**params)
        # then
        expected_params = dict(
            project=self.project,
            ids=[params['id']],
            states=[params['state']], owners=[params['owner']], tags=[params['tag']],
            min_running_time=params['min_running_time']
        )
        self.backend.get_leaderboard_entries.assert_called_once_with(**expected_params)
        # and
        expected_experiments = [Experiment(self.backend, self.project, entry.id, entry.internal_id)
                                for entry in leaderboard_entries]
        self.assertEqual(expected_experiments, experiments)

    def test_get_experiments_with_list_params(self):
        """List filter values are passed through to the backend unchanged."""
        # given
        leaderboard_entries = [MagicMock() for _ in range(0, 2)]
        self.backend.get_leaderboard_entries.return_value = leaderboard_entries
        # and
        params = dict(
            id=a_string_list(),
            state=['succeeded', 'failed'], owner=a_string_list(), tag=a_string_list(),
            min_running_time=randint(1, 100))
        # when
        experiments = self.project.get_experiments(**params)
        # then
        expected_params = dict(
            project=self.project,
            ids=params['id'],
            states=params['state'], owners=params['owner'], tags=params['tag'],
            min_running_time=params['min_running_time']
        )
        self.backend.get_leaderboard_entries.assert_called_once_with(**expected_params)
        # and
        expected_experiments = [Experiment(self.backend, self.project, entry.id, entry.internal_id)
                                for entry in leaderboard_entries]
        self.assertEqual(expected_experiments, experiments)

    def test_get_leaderboard(self):
        """get_leaderboard() builds a DataFrame with columns in canonical order."""
        # given
        self.backend.get_leaderboard_entries.return_value = [LeaderboardEntry(some_exp_entry_dto)]
        # when
        leaderboard = self.project.get_leaderboard()
        # then
        self.backend.get_leaderboard_entries.assert_called_once_with(
            project=self.project,
            ids=None,
            states=None, owners=None, tags=None,
            min_running_time=None)
        # and
        expected_data = {0: some_exp_entry_row}
        expected_leaderboard = pd.DataFrame.from_dict(data=expected_data, orient='index')
        expected_leaderboard = expected_leaderboard.reindex(
            # pylint: disable=protected-access
            self.project._sort_leaderboard_columns(expected_leaderboard.columns), axis='columns')
        self.assertTrue(leaderboard.equals(expected_leaderboard))

    def test_sort_leaderboard_columns(self):
        """Columns sort as: fixed metadata, then channel/parameter/property groups."""
        # given
        columns_in_expected_order = [
            'id', 'name', 'created', 'finished', 'owner',
            'notes', 'size', 'tags',
            'channel_abc', 'channel_def',
            'parameter_abc', 'parameter_def',
            'property_abc', 'property_def'
        ]
        # when
        # pylint: disable=protected-access
        sorted_columns = self.project._sort_leaderboard_columns(reversed(columns_in_expected_order))
        # then
        self.assertEqual(columns_in_expected_order, sorted_columns)

    def test_full_id(self):
        """full_id is 'namespace/name'."""
        # expect
        self.assertEqual(self.project.namespace + '/' + self.project.name, self.project.full_id)

    def test_to_string(self):
        """str() renders as Project(full_id)."""
        # expect
        self.assertEqual('Project({})'.format(self.project.full_id), str(self.project))

    def test_repr(self):
        """repr() matches str()."""
        # expect
        self.assertEqual('Project({})'.format(self.project.full_id), repr(self.project))

    # pylint: disable=protected-access
    def test_get_current_experiment_from_stack(self):
        """_push_new_experiment makes the pushed experiment the current one."""
        # given
        experiment = Munch(internal_id=a_uuid_string())
        # when
        self.project._push_new_experiment(experiment)
        # then
        self.assertEqual(self.project._get_current_experiment(), experiment)

    # pylint: disable=protected-access
    def test_pop_experiment_from_stack(self):
        """Removing the top experiment restores the previous one as current."""
        # given
        first_experiment = Munch(internal_id=a_uuid_string())
        second_experiment = Munch(internal_id=a_uuid_string())
        # and
        self.project._push_new_experiment(first_experiment)
        # when
        self.project._push_new_experiment(second_experiment)
        # then
        self.assertEqual(self.project._get_current_experiment(), second_experiment)
        # and
        self.project._remove_stopped_experiment(second_experiment)
        # and
        self.assertEqual(self.project._get_current_experiment(), first_experiment)

    # pylint: disable=protected-access
    def test_empty_stack(self):
        """An empty experiment stack raises NeptuneNoExperimentContextException."""
        # expect
        with self.assertRaises(NeptuneNoExperimentContextException):
            self.project._get_current_experiment()

    def test_create_experiment_with_relative_upload_sources(self):
        """Relative glob patterns are resolved against the common root."""
        # skip if
        if sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 5):
            self.skipTest("not supported in this Python version")
        # given
        os.chdir('tests/neptune')
        # and
        anExperiment = MagicMock()
        self.backend.create_experiment.return_value = anExperiment
        # when
        self.project.create_experiment(upload_source_files=[
            "test_project.*",
            "../../*.md"
        ])
        # then
        self.backend.upload_source_code.assert_called_once()
        source_target_pairs_targets = [
            target_p for source_p, target_p in self.backend.upload_source_code.call_args[0][1]
        ]
        self.assertTrue(
            set(source_target_pairs_targets) == {"CODE_OF_CONDUCT.md", "README.md", "tests/neptune/test_project.py"}
        )

    def test_create_experiment_with_absolute_upload_sources(self):
        """Absolute paths yield the same targets as equivalent relative ones."""
        # skip if
        if sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 5):
            self.skipTest("not supported in this Python version")
        # given
        os.chdir('tests/neptune')
        # and
        anExperiment = MagicMock()
        self.backend.create_experiment.return_value = anExperiment
        # when
        self.project.create_experiment(upload_source_files=[
            os.path.abspath('test_project.py'),
            "../../*.md"
        ])
        # then
        self.backend.upload_source_code.assert_called_once()
        source_target_pairs_targets = [
            target_p for source_p, target_p in self.backend.upload_source_code.call_args[0][1]
        ]
        self.assertTrue(
            set(source_target_pairs_targets) == {"CODE_OF_CONDUCT.md", "README.md", "tests/neptune/test_project.py"}
        )

    def test_create_experiment_with_upload_single_sources(self):
        """A single relative file keeps its bare name as the upload target."""
        # given
        os.chdir('tests/neptune')
        # and
        anExperiment = MagicMock()
        self.backend.create_experiment.return_value = anExperiment
        # when
        self.project.create_experiment(upload_source_files=[
            'test_project.py'
        ])
        # then
        self.backend.upload_source_code.assert_called_once()
        source_target_pairs_targets = [
            target_p for source_p, target_p in self.backend.upload_source_code.call_args[0][1]
        ]
        self.assertTrue(
            set(source_target_pairs_targets) == {"test_project.py"}
        )

    def test_create_experiment_with_common_path_below_current_directory(self):
        """Targets keep the subdirectory prefix when globbing below the cwd."""
        # given
        anExperiment = MagicMock()
        self.backend.create_experiment.return_value = anExperiment
        # when
        self.project.create_experiment(upload_source_files=[
            'tests/neptune/*.*'
        ])
        # then
        self.backend.upload_source_code.assert_called_once()
        source_target_pairs_targets = [
            target_p for source_p, target_p in self.backend.upload_source_code.call_args[0][1]
        ]
        self.assertTrue(
            all(target_p.startswith('tests/neptune/') for target_p in source_target_pairs_targets)
        )

    @patch('neptune.internal.utils.source_code.glob', new=lambda path: [path.replace('*', 'file.txt')])
    @patch('neptune.projects.os.path', new=ntpath)
    @patch('neptune.internal.storage.storage_utils.os.sep', new=ntpath.sep)
    def test_create_experiment_with_upload_sources_from_multiple_drives_on_windows(self):
        """Windows paths from different drives are normalised to forward slashes."""
        # given
        anExperiment = MagicMock()
        # and
        self.backend.create_experiment.return_value = anExperiment
        # when
        self.project.create_experiment(upload_source_files=[
            'c:\\test1\\*',
            'd:\\test2\\*'
        ])
        # then
        self.backend.upload_source_code.assert_called_once()
        source_target_pairs_targets = [
            target_p for source_p, target_p in self.backend.upload_source_code.call_args[0][1]
        ]
        self.assertTrue(
            set(source_target_pairs_targets) == {'c:/test1/file.txt', 'd:/test2/file.txt'}
        )
# Run this test module's suite directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
| 10,262 | 876 | 23 |
004179b2b46dcc8a6902c537aa3c3381f7c65123 | 658 | py | Python | adts/functions.py | jasonsbarr/python-adts | 0bf079062b1356b2a7e6f044afa0a932340aac65 | [
"MIT"
] | null | null | null | adts/functions.py | jasonsbarr/python-adts | 0bf079062b1356b2a7e6f044afa0a932340aac65 | [
"MIT"
] | null | null | null | adts/functions.py | jasonsbarr/python-adts | 0bf079062b1356b2a7e6f044afa0a932340aac65 | [
"MIT"
] | null | null | null | from typing import Any
py_map = map
py_type = type | 20.5625 | 69 | 0.62614 | from typing import Any
def hasmethod(obj: Any, meth: str) -> bool:
    """Return True if *obj* has a callable attribute named *meth*.

    BUG FIX: the original evaluated the expression but never returned it,
    so the function always returned None (falsy) regardless of *obj*.
    """
    return hasattr(obj, meth) and callable(getattr(obj, meth))
py_map = map
def map(fn, data):
    """ADT-aware ``map``.

    Dispatches to ``data.__type__.map`` when the data carries a ``__type__``
    with a ``map`` method (per ``hasmethod``); otherwise falls back to the
    builtin map, kept as ``py_map``.
    """
    data_type = getattr(data, "__type__", None)
    if data_type is not None and hasmethod(data_type, "map"):
        return data_type.map(fn, data)
    return py_map(fn, data)
def compose2(f, g):
return lambda x: g(f(x))
def compose(*fns):
    """Compose any number of callables into one.

    NOTE: functions are applied right-to-left with respect to the argument
    order, i.e. ``compose(f, g)(x) == f(g(x))`` — this matches the original
    fold over ``compose2``.  With no functions it is the identity.
    """
    def composed(value):
        for fn in reversed(fns):
            value = fn(value)
        return value
    return composed
def pipe(val, *fns):
    """Thread ``val`` through ``fns``.

    NOTE: preserves the original's ordering, which applies the functions
    right-to-left: ``pipe(v, f, g) == f(g(v))`` (same as ``compose``).
    """
    for fn in reversed(fns):
        val = fn(val)
    return val
py_type = type
def type(*args):
    """ADT-aware replacement for the builtin ``type``.

    With a single argument that carries a ``__type__`` attribute, return that
    ADT type; otherwise defer to the builtin (kept as ``py_type``).

    BUG FIX / robustness: the arity is now checked before indexing ``args``,
    so calling with no arguments raises the builtin's TypeError instead of
    an IndexError.
    """
    if len(args) == 1 and hasattr(args[0], "__type__"):
        return args[0].__type__
    return py_type(*args)
27156196462441a53debecb5d878ddc0123acc21 | 5,057 | py | Python | VueConsole.py | fatalkiller/Teeko | c89ae47e4d3f88d921497964c2e2c32cb682621c | [
"MIT"
] | null | null | null | VueConsole.py | fatalkiller/Teeko | c89ae47e4d3f88d921497964c2e2c32cb682621c | [
"MIT"
] | null | null | null | VueConsole.py | fatalkiller/Teeko | c89ae47e4d3f88d921497964c2e2c32cb682621c | [
"MIT"
] | null | null | null | from Model import *
import MinMax
import AlphaBeta
import parameters
import threading
import sys
from random import randint
from time import sleep
# Lancer le combat d'ia
model = Model(1, 1)
model.pMax_ias[1] = int(sys.argv[1]) # pMax j1
model.eval_ias[1] = int(sys.argv[2]) # eval j1
model.pMax_ias[2] = int(sys.argv[3]) # pMax j2
model.eval_ias[2] = int(sys.argv[4]) # eval j2
game = VueConsole(model)
game.ia_vs_ia()
| 33.713333 | 149 | 0.477358 | from Model import *
import MinMax
import AlphaBeta
import parameters
import threading
import sys
from random import randint
from time import sleep
class VueConsole:
    """Console (terminal) view for a Teeko game.

    Draws the 5x5 board as ASCII art, reads human moves from stdin, and can
    drive a fully automated AI-vs-AI match using the MinMax/AlphaBeta search
    routines selected through ``parameters`` and the model's AI settings.
    """

    def __init__(self, model):
        # All game state (board, current player, winner flag, AI settings)
        # lives in the model; the view only renders and collects input.
        self.model = model

    def affiche_plateau(self):
        """Clear the screen (with newlines) and draw the board.

        'O' marks player 1's pawns and 'X' marks player 2's.
        """
        print("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n")
        affichage = "-------------------------\n" \
                    "| / | 0 | 1 | 2 | 3 | 4 |\n" \
                    "-------------------------\n"
        for j in range(5):
            affichage += "| " + str(j) + " |"
            for i in range(5):
                sym = " "
                if self.model.plateau[i][j] == 1:
                    sym = "O"
                if self.model.plateau[i][j] == 2:
                    sym = "X"
                affichage += " " + sym + " |"
            affichage += "\n" \
                         "-------------------------\n"
        print(affichage)

    def change_tour(self):
        """Hand the turn to the other player and redraw the board."""
        self.model.change_tour()
        self.affiche_plateau()

    def tour_de_jeu(self):
        """Main human game loop: placement phase, then movement phase."""
        self.affiche_plateau()
        # Placement phase: 8 placements in total (4 pawns per player).
        for i in range(0, 8):
            self.pose_pion()
            self.change_tour()
            if self.model.gagnant:
                break
        # Movement phase: alternate moves until someone wins.
        while not self.model.gagnant:
            self.deplace_pion()
            self.change_tour()

    def pose_pion(self):
        """Ask the current player for an empty cell and place a pawn there."""
        while True:
            posX = int(
                input('Veuillez rentrez la colonne du pion à posé (entre 0 et 4) : \n'))
            posY = int(
                input('Veuillez rentrez la ligne du pion à posé (entre 0 et 4) : \n'))
            if 0 <= posX < 5 and 0 <= posY < 5 and self.model.plateau[posX][posY] == 0:
                self.model.plateau[posX][posY] = self.model.tour
                break
            print('mauvaise position')

    def deplace_pion(self):
        """Ask the current player for a pawn and a direction, then move it.

        Directions 1..8 run clockwise starting at the top-left diagonal.
        """
        while True:
            posX = int(
                input('Veuillez rentrez la colonne du pion à déplacer (entre 0 et 4) : \n'))
            posY = int(
                input('Veuillez rentrez la ligne du pion à déplacer (entre 0 et 4) : \n'))
            if posX < 0 or posX > 4 or posY < 0 or posY > 4:
                print("numéro de ligne est colonne entre 0 et 4 ! \n")
            elif self.model.plateau[posX][posY] == self.model.tour:
                break
            else:
                print("Aucun pion de votre couleur à cette position ! \n")
        self.model.plateau[posX][posY] = 0
        while True:
            # BUG FIX: reset the candidate destination on every attempt.
            # Previously a rejected attempt leaked its coordinates into the
            # next one (e.g. a failed diagonal followed by "down" produced
            # another, unintended diagonal move).
            posXA = posX
            posYA = posY
            deplacement = int(input(
                'Veuillez rentrez le deplacement du pion (entre 1 et 8 dans le sens des aiguilles d une montre en partant du haut à gauche) : \n'))
            if deplacement == 1:
                posXA = posX-1
                posYA = posY-1
            elif deplacement == 2:
                posYA = posY-1
            elif deplacement == 3:
                posXA = posX+1
                posYA = posY-1
            elif deplacement == 4:
                posXA = posX+1
            elif deplacement == 5:
                posXA = posX+1
                posYA = posY+1
            elif deplacement == 6:
                posYA = posY+1
            elif deplacement == 7:
                posXA = posX-1
                posYA = posY+1
            elif deplacement == 8:
                posXA = posX-1
            else:
                print("Deplacement entre (1 et 8 uniquement)")
                continue
            if 0 <= posXA < 5 and 0 <= posYA < 5 and self.model.plateau[posXA][posYA] == 0:
                # BUG FIX: was `self.tour`, which does not exist on the view
                # (AttributeError); the current player is held by the model.
                self.model.plateau[posXA][posYA] = self.model.tour
                break

    def affiche_gagnant(self):
        """Flag the game as finished and announce the winning player."""
        self.model.gagnant = True
        # BUG FIX: was `self.tour` — the view has no such attribute.
        print("Le joueur " + str(self.model.tour) + " à gagné \n")

    def ia_vs_ia(self):
        """Run a fully automated match between the two configured AIs."""
        # Place a first pawn at random to start the game.
        x = randint(0, 4)
        y = randint(0, 4)
        self.model.pose_pion(x, y)
        self.affiche_plateau()
        # Play until there is a winner.
        while not self.model.gagnant:
            # Pick the search routine: alpha-beta pruning when enabled.
            if parameters.elagage:
                functarget = AlphaBeta.min_max
            else:
                functarget = MinMax.min_max
            pMax = self.model.pMax_ias[self.model.tour]
            eval_enable = self.model.eval_ias[self.model.tour]
            # Run the AI search in a worker thread...
            t = threading.Thread(
                target=functarget, args=(self.model, pMax, eval_enable))
            t.start()
            # ...and wait for it to pick (and apply) a move.
            t.join()
            # Refresh the board between moves.
            self.affiche_plateau()
            sleep(1)
# Launch the AI-vs-AI match: build a model and take the two players' search
# depths and evaluation choices from the command line.
model = Model(1, 1)
model.pMax_ias[1] = int(sys.argv[1]) # search depth (pMax) for player 1
model.eval_ias[1] = int(sys.argv[2]) # evaluation function id for player 1
model.pMax_ias[2] = int(sys.argv[3]) # search depth (pMax) for player 2
model.eval_ias[2] = int(sys.argv[4]) # evaluation function id for player 2
game = VueConsole(model)
game.ia_vs_ia()
| 4,372 | -4 | 255 |
223c4d335d8ee9e899cdbaa7b6a65436c2e9dda3 | 5,074 | py | Python | implementation/attackers/BlackBoxAttacker.py | sprkrd/UPC-MAI-ISP | b06e9370d3c8df374a8a776b7c2e45b0a83387e8 | [
"MIT"
] | null | null | null | implementation/attackers/BlackBoxAttacker.py | sprkrd/UPC-MAI-ISP | b06e9370d3c8df374a8a776b7c2e45b0a83387e8 | [
"MIT"
] | null | null | null | implementation/attackers/BlackBoxAttacker.py | sprkrd/UPC-MAI-ISP | b06e9370d3c8df374a8a776b7c2e45b0a83387e8 | [
"MIT"
] | null | null | null | import numpy as np
import torch
from torch.autograd import Variable
from .Attacker import Attacker
from .WhiteBoxAttacker import PGDAttack
from ..models.PixelLevelTransferN import PixelLevelTransferN
from ..utils import clamp_to_valid_img, tform1, tform2, retrieve_image
if __name__ == "__main__":
# gan_attacker = GANAttack(None, None)
from ..models.ResNet18 import pretrained_res18
from ..models.ModelWrapper import ModelWrapper
from skimage.io import imread, imshow
from ..utils import heat_map
import matplotlib.pyplot as plt
import numpy as np
# quick test
model = pretrained_res18()
model_att = pretrained_res18(which=3)
wrapper = ModelWrapper(model)
attack = PGDAttackBB(model, epsilon=0.03, k=5)
# img = imread("../../data-augmentation/banknotes_augmented/val/img_10_76_98.jpg")
# img = imread("data-augmentation/banknotes_augmented/test/img_5_90_10.jpg")
# img = imread("../../data-augmentation/banknotes_augmented/test/img_20_133_100.jpg")
# img = imread("../../data-augmentation/banknotes_augmented/val/img_50_71_2.jpg")
img = imread("data-augmentation/banknotes_augmented_small/test/img_10_100_1.jpg")
img, p1 = wrapper(img, True)
img_pert, p2 = attack(model_att, img, True)
# print(p1)
# print(p2)
plt.subplot(2,2,1)
plt.imshow(img)
plt.gca().axes.get_xaxis().set_visible(False)
plt.gca().axes.get_yaxis().set_visible(False)
plt.title("Original image")
plt.subplot(2,2,2)
plt.imshow(img_pert)
plt.gca().axes.get_xaxis().set_visible(False)
plt.gca().axes.get_yaxis().set_visible(False)
plt.title("Perturbed image (undirected PGD)")
plt.subplot(2,2,3)
plt.title("Probability distribution")
plt.bar([1.5,2.5,3.5,4.5], p1, tick_label=["5", "10", "20", "50"], log=True)
plt.xlabel("Banknote")
plt.subplot(2,2,4)
plt.title("Probability distribution")
plt.bar([1.5,2.5,3.5,4.5], p2, tick_label=["5", "10", "20", "50"], log=True)
plt.xlabel("Banknote")
plt.tight_layout()
plt.show()
plt.close()
# diff = np.abs(img_pert.astype(np.float) - img)
# diff = np.mean(diff, 2)
# min_ = np.min(diff)
# max_ = np.max(diff)
# diff = (diff-min_)/(max_-min_)
# plt.imshow(diff, cmap=plt.get_cmap("hot"))
# plt.title("Normalized differences (maximum diff: {:.00f})".format(max_))
# plt.gca().axes.get_xaxis().set_visible(False)
# plt.gca().axes.get_yaxis().set_visible(False)
# plt.colorbar()
# plt.tight_layout()
# plt.show()
heatmap = heat_map(img, img_pert)
plt.imshow(heatmap)
plt.show()
| 34.753425 | 115 | 0.669492 | import numpy as np
import torch
from torch.autograd import Variable
from .Attacker import Attacker
from .WhiteBoxAttacker import PGDAttack
from ..models.PixelLevelTransferN import PixelLevelTransferN
from ..utils import clamp_to_valid_img, tform1, tform2, retrieve_image
class BlackBoxAttacker(Attacker):
    """Base class for black-box adversarial attacks.

    Subclasses implement :meth:`attack`, which perturbs an input batch
    without access to the target model's gradients.
    """

    def __init__(self, attack_shape):
        Attacker.__init__(self, attack_shape)

    def attack(self, input_data):
        """Return a perturbed version of ``input_data`` (subclass hook)."""
        raise NotImplementedError

    def feedback(self, last_corrects):
        """Optional hook to adapt the attack from classifier feedback."""
        raise NotImplementedError

    def __call__(self, model, img, return_img=False):
        """Run an undirected attack on ``img`` against ``model``.

        Returns the classifier's output PDF and, when ``return_img`` is
        True, also the perturbed image.
        """
        softmax = torch.nn.Softmax(1)
        clean = tform2(tform1(img))
        perturbed = Variable(self.attack(clean.data), volatile=True)
        pdf = softmax(model(perturbed)).data.tolist()[0]
        if not return_img:
            return pdf
        return retrieve_image(perturbed), pdf
class PGDAttackBB(BlackBoxAttacker):
    """Black-box PGD: runs white-box PGD against a surrogate model."""

    def __init__(self, model, attack_shape=None, epsilon=0.03, a=0.01, k=40):
        super(PGDAttackBB, self).__init__(attack_shape)
        # Surrogate model whose gradients stand in for the target's.
        self.model = model
        self.attacker = PGDAttack(epsilon=epsilon, a=a, k=k)

    def attack(self, input_data):
        """Perturb ``input_data`` with PGD toward the surrogate's own label."""
        surrogate_out = self.model(Variable(input_data, volatile=True))
        _, predicted = torch.max(surrogate_out.data, 1)
        return self.attacker.attack(self.model, input_data, predicted)
class GANAttack(BlackBoxAttacker):
    """Perturbs images with a pre-trained pixel-level transfer network."""

    def __init__(self, attack_shape=None, intensity=0.2):
        BlackBoxAttacker.__init__(self, attack_shape)
        generator = PixelLevelTransferN(in_channels=3, out_channels=3, intensity=intensity)
        # Load the pre-trained generator weights from disk.
        generator.load_state_dict(
            torch.load('implementation/models/pretrained/gan_attacker.pkl'))
        self.attacker = generator

    def attack(self, input_data):
        """Add the generator's output to the batch and clamp to a valid image."""
        batch = Variable(input_data, volatile=True)
        perturbed = (batch + self.attacker(batch)).data
        return clamp_to_valid_img(perturbed)
class WhiteNoiseAttack(BlackBoxAttacker):
    """Adds i.i.d. uniform noise in [-intensity, intensity] to the input."""

    def __init__(self, attack_shape=None, intensity=0.2):
        BlackBoxAttacker.__init__(self, attack_shape)
        self.intensity = intensity

    def attack(self, input_data):
        """Return the batch plus uniform noise, clamped to a valid image."""
        shape = np.array(tuple(input_data.shape))
        uniform = np.random.uniform(low=-self.intensity, high=self.intensity, size=shape)
        noisy = input_data + torch.from_numpy(uniform).float()
        return clamp_to_valid_img(noisy)
# Manual smoke test: attack a pre-trained ResNet18 with black-box PGD and
# visualise the clean vs. perturbed predictions plus a difference heat map.
# NOTE(review): the relative imports below only resolve when this file is
# executed as a module inside its package (python -m ...) — confirm usage.
if __name__ == "__main__":
    # gan_attacker = GANAttack(None, None)
    from ..models.ResNet18 import pretrained_res18
    from ..models.ModelWrapper import ModelWrapper
    from skimage.io import imread, imshow
    from ..utils import heat_map
    import matplotlib.pyplot as plt
    import numpy as np
    # quick test
    model = pretrained_res18()
    model_att = pretrained_res18(which=3)
    wrapper = ModelWrapper(model)
    attack = PGDAttackBB(model, epsilon=0.03, k=5)
    # img = imread("../../data-augmentation/banknotes_augmented/val/img_10_76_98.jpg")
    # img = imread("data-augmentation/banknotes_augmented/test/img_5_90_10.jpg")
    # img = imread("../../data-augmentation/banknotes_augmented/test/img_20_133_100.jpg")
    # img = imread("../../data-augmentation/banknotes_augmented/val/img_50_71_2.jpg")
    img = imread("data-augmentation/banknotes_augmented_small/test/img_10_100_1.jpg")
    # Clean prediction, then the attacked prediction on the same image.
    img, p1 = wrapper(img, True)
    img_pert, p2 = attack(model_att, img, True)
    # print(p1)
    # print(p2)
    plt.subplot(2,2,1)
    plt.imshow(img)
    plt.gca().axes.get_xaxis().set_visible(False)
    plt.gca().axes.get_yaxis().set_visible(False)
    plt.title("Original image")
    plt.subplot(2,2,2)
    plt.imshow(img_pert)
    plt.gca().axes.get_xaxis().set_visible(False)
    plt.gca().axes.get_yaxis().set_visible(False)
    plt.title("Perturbed image (undirected PGD)")
    plt.subplot(2,2,3)
    plt.title("Probability distribution")
    plt.bar([1.5,2.5,3.5,4.5], p1, tick_label=["5", "10", "20", "50"], log=True)
    plt.xlabel("Banknote")
    plt.subplot(2,2,4)
    plt.title("Probability distribution")
    plt.bar([1.5,2.5,3.5,4.5], p2, tick_label=["5", "10", "20", "50"], log=True)
    plt.xlabel("Banknote")
    plt.tight_layout()
    plt.show()
    plt.close()
    # diff = np.abs(img_pert.astype(np.float) - img)
    # diff = np.mean(diff, 2)
    # min_ = np.min(diff)
    # max_ = np.max(diff)
    # diff = (diff-min_)/(max_-min_)
    # plt.imshow(diff, cmap=plt.get_cmap("hot"))
    # plt.title("Normalized differences (maximum diff: {:.00f})".format(max_))
    # plt.gca().axes.get_xaxis().set_visible(False)
    # plt.gca().axes.get_yaxis().set_visible(False)
    # plt.colorbar()
    # plt.tight_layout()
    # plt.show()
    # Heat map of where the perturbation changed the image.
    heatmap = heat_map(img, img_pert)
    plt.imshow(heatmap)
    plt.show()
7aa7309223f60a28d54ea0802eb6a957ffa46a02 | 10,113 | py | Python | train_cifar_tiny_imagenet.py | verafeldman/force | 2f725128ec68a2ba721d11f319b7c202a1f57cbc | [
"MIT"
] | null | null | null | train_cifar_tiny_imagenet.py | verafeldman/force | 2f725128ec68a2ba721d11f319b7c202a1f57cbc | [
"MIT"
] | null | null | null | train_cifar_tiny_imagenet.py | verafeldman/force | 2f725128ec68a2ba721d11f319b7c202a1f57cbc | [
"MIT"
] | null | null | null | '''
FORCE
Copyright (c) 2020-present NAVER Corp.
MIT license
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from tensorboardX import SummaryWriter
from ignite.engine import create_supervised_evaluator
from ignite.metrics import Accuracy, Loss
from pruning.pruning_algos import iterative_pruning
from experiments.experiments import *
from pruning.mask_networks import apply_prune_mask
import os
import argparse
import random
# from IPython import embed
LOG_INTERVAL = 20
REPEAT_WITH_DIFFERENT_SEED = 3 # Number of initialize-prune-train trials (minimum of 1)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# New additions
args = parseArgs()
if __name__ == '__main__':
# Randomly pick a random seed for the experiment
# Multiply the number of seeds to be sampled by 300 so there is wide range of seeds
seeds = list(range(300 * REPEAT_WITH_DIFFERENT_SEED))
random.shuffle(seeds)
for seed in seeds[:REPEAT_WITH_DIFFERENT_SEED]:
train(seed)
| 44.550661 | 108 | 0.564323 | '''
FORCE
Copyright (c) 2020-present NAVER Corp.
MIT license
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from tensorboardX import SummaryWriter
from ignite.engine import create_supervised_evaluator
from ignite.metrics import Accuracy, Loss
from pruning.pruning_algos import iterative_pruning
from experiments.experiments import *
from pruning.mask_networks import apply_prune_mask
import os
import argparse
import random
# from IPython import embed
def parseArgs():
    """Parse the command-line options for CIFAR / Tiny-ImageNet training."""
    p = argparse.ArgumentParser(
        description="Training CIFAR / Tiny-Imagenet.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    add = p.add_argument
    add("--pruning_factor", type=float, default=0.01, dest="pruning_factor",
        help='Fraction of connections after pruning')
    add("--prune_method", type=int, default=3, dest="prune_method",
        help="""Which pruning method to use:
                             1->Iter SNIP
                             2->GRASP-It
                             3->FORCE (default). """)
    add("--dataset", type=str, default='CIFAR10',
        dest="dataset_name", help='Dataset to train on')
    add("--network_name", type=str, default='resnet50', dest="network_name",
        help='Model to train')
    add("--num_steps", type=int, default=10,
        help='Number of steps to use with iterative pruning')
    add("--mode", type=str, default='exp',
        help='Mode of creating the iterative pruning steps one of "linear" or "exp".')
    add("--num_batches", type=int, default=1,
        help='''Number of batches to be used when computing the gradient.
                                If set to -1 they will be averaged over the whole dataset.''')
    add("--save_interval", type=int, default=50,
        dest="save_interval", help="Number of epochs between model checkpoints.")
    add("--save_loc", type=str, default='saved_models/',
        dest="save_loc", help='Path where to save the model')
    add("--opt", type=str, default='sgd',
        dest="optimiser",
        help='Choice of optimisation algorithm')
    add("--saved_model_name", type=str, default="cnn.model",
        dest="saved_model_name", help="Filename of the pre-trained model")
    add("--frac-train-data", type=float, default=0.9, dest="frac_data_for_train",
        help='Fraction of data used for training (only applied in CIFAR)')
    add("--init", type=str, default='normal_kaiming',
        help='Which initialization method to use')
    add("--in_planes", type=int, default=64,
        help='''Number of input planes in Resnet. Afterwards they duplicate after
                        each conv with stride 2 as usual.''')
    return p.parse_args()
# How often (in batches) training progress is logged.
LOG_INTERVAL = 20
REPEAT_WITH_DIFFERENT_SEED = 3 # Number of initialize-prune-train trials (minimum of 1)
# Make cuDNN deterministic so runs with the same manual seed are reproducible.
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
# Train on the GPU when one is available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# New additions
# Command-line options are parsed once at import time and used as a global.
args = parseArgs()
def train(seed):
    """Run one initialise -> prune-at-init -> train cycle for ``seed``.

    Uses the global ``args`` (parsed at import time) to pick the network,
    dataset, pruning method and output locations.  Metrics are logged to
    TensorBoard under ``runs/`` and checkpoints saved under ``args.save_loc``.
    """
    # Set manual seed
    torch.manual_seed(seed)
    # Build network/optimiser/scheduler/data loaders for the requested
    # architecture + dataset combination.
    # NOTE(review): if network_name matches none of these branches, `net`
    # (and friends) stay unbound and the code below raises UnboundLocalError.
    if 'resnet' in args.network_name:
        stable_resnet = False
        if 'stable' in args.network_name:
            stable_resnet = True
        if 'CIFAR' in args.dataset_name:
            [net, optimiser, lr_scheduler,
             train_loader, val_loader,
             test_loader, loss, EPOCHS] = resnet_cifar_experiment(device, args.network_name,
                                                                  args.dataset_name, args.optimiser,
                                                                  args.frac_data_for_train,
                                                                  stable_resnet, args.in_planes)
        elif 'tiny_imagenet' in args.dataset_name:
            [net, optimiser, lr_scheduler,
             train_loader, val_loader,
             test_loader, loss, EPOCHS] = resnet_tiny_imagenet_experiment(device, args.network_name,
                                                                          args.dataset_name, args.in_planes)
    elif 'vgg' in args.network_name or 'VGG' in args.network_name:
        if 'tiny_imagenet' in args.dataset_name:
            [net, optimiser, lr_scheduler,
             train_loader, val_loader,
             test_loader, loss, EPOCHS] = vgg_tiny_imagenet_experiment(device, args.network_name,
                                                                       args.dataset_name)
        else:
            [net, optimiser, lr_scheduler,
             train_loader, val_loader,
             test_loader, loss, EPOCHS] = vgg_cifar_experiment(device, args.network_name,
                                                               args.dataset_name, args.frac_data_for_train)
    elif 'mobilenet' in args.network_name:
        if 'tiny_imagenet' in args.dataset_name:
            [net, optimiser, lr_scheduler,
             train_loader, val_loader,
             test_loader, loss, EPOCHS] = mobilenet_tiny_imagenet_experiment(device, args.dataset_name)
        else:
            [net, optimiser, lr_scheduler,
             train_loader, val_loader,
             test_loader, loss, EPOCHS] = mobilenet_cifar_experiment(device, args.dataset_name,
                                                                     args.frac_data_for_train)
    # Spread the model over all available GPUs.
    if torch.cuda.device_count() > 1:
        net = torch.nn.DataParallel(net)
        print("Using {} GPUs".format(torch.cuda.device_count()))
    # Initialize network
    # Re-initialise all conv/linear weights with the chosen scheme so the
    # pruning criterion is computed on a controlled initialisation.
    for layer in net.modules():
        if isinstance(layer, nn.Conv2d) or isinstance(layer, nn.Linear):
            if args.init == 'normal_kaiming':
                nn.init.kaiming_normal_(layer.weight, nonlinearity='relu')
            elif args.init == 'normal_kaiming_fout':
                nn.init.kaiming_normal_(layer.weight, nonlinearity='relu', mode='fan_out')
            elif args.init == 'normal_xavier':
                nn.init.xavier_normal_(layer.weight)
            elif args.init == 'orthogonal':
                nn.init.orthogonal_(layer.weight)
            else:
                raise ValueError(f"Unrecognised initialisation parameter {args.init}")
    ############################################################################
    ####################        Pruning at init         ########################
    ############################################################################
    # pruning_factor is the fraction of weights KEPT; 1 means no pruning.
    pruning_factor = args.pruning_factor
    keep_masks=[]
    if pruning_factor != 1:
        print(f'Pruning network iteratively for {args.num_steps} steps')
        keep_masks = iterative_pruning(net, train_loader, device, pruning_factor,
                                       prune_method=args.prune_method,
                                       num_steps=args.num_steps,
                                       mode=args.mode, num_batches=args.num_batches)
        apply_prune_mask(net, keep_masks)
    # NOTE(review): `filename` appears unused in this function.
    filename = f'iter_prun_{args.num_steps}'
    ############################################################################
    ####################          Training              ########################
    ############################################################################
    # Ignite evaluator computing accuracy and cross-entropy on a loader.
    evaluator = create_supervised_evaluator(net, {
        'accuracy': Accuracy(),
        'cross_entropy': Loss(loss)
    }, device)
    # Encode the full experiment configuration into the run/checkpoint name.
    run_name = (args.network_name + '_' + args.dataset_name + '_spars' +
                str(1 - pruning_factor) + '_variant' + str(args.prune_method) +
                '_train-frac' + str(args.frac_data_for_train) +
                f'_steps{args.num_steps}_{args.mode}' + f'_{args.init}' +
                f'_batch{args.num_batches}' + f'_rseed_{seed}')
    writer_name= 'runs/' + run_name
    writer = SummaryWriter(writer_name)
    iterations = 0
    for epoch in range(0, EPOCHS):
        # NOTE(review): the scheduler is stepped before the epoch's training
        # pass; with torch >= 1.1 the recommended order is train-then-step —
        # confirm this ordering is intentional.
        lr_scheduler.step()
        train_loss = train_cross_entropy(epoch, net, train_loader, optimiser, device,
                                         writer, LOG_INTERVAL=20)
        iterations +=len(train_loader)
        evaluator.run(train_loader)
        metrics = evaluator.state.metrics
        avg_accuracy = metrics['accuracy']
        writer.add_scalar("train/accuracy", avg_accuracy, epoch)
        # Evaluate
        evaluator.run(test_loader)
        metrics = evaluator.state.metrics
        # Save history
        avg_accuracy = metrics['accuracy']
        avg_cross_entropy = metrics['cross_entropy']
        writer.add_scalar("test/loss", avg_cross_entropy, epoch)
        writer.add_scalar("test/accuracy", avg_accuracy, epoch)
        # Save model checkpoints
        # Checkpoint every `save_interval` epochs and at the final epoch.
        if (epoch + 1) % args.save_interval == 0:
            if not os.path.exists(args.save_loc):
                os.makedirs(args.save_loc)
            save_name = args.save_loc + run_name + '_cross_entropy_' + str(epoch + 1) + '.model'
            torch.save(net.state_dict(), save_name)
        elif (epoch + 1) == EPOCHS:
            if not os.path.exists(args.save_loc):
                os.makedirs(args.save_loc)
            save_name = args.save_loc + run_name + '_cross_entropy_' + str(epoch + 1) + '.model'
            torch.save(net.state_dict(), save_name)
# Run one full initialise-prune-train cycle per sampled random seed.
if __name__ == '__main__':
    # Randomly pick a random seed for the experiment
    # Multiply the number of seeds to be sampled by 300 so there is wide range of seeds
    seeds = list(range(300 * REPEAT_WITH_DIFFERENT_SEED))
    random.shuffle(seeds)
    for seed in seeds[:REPEAT_WITH_DIFFERENT_SEED]:
        train(seed)
| 8,951 | 0 | 46 |
2e36c5466cf37ca5a540f4ca58cec704533951b3 | 9,055 | py | Python | graph.py | mozillakab/various-kab-processing | 095de54a12b5ca9293778761c14b967249ccc3d8 | [
"Apache-2.0"
] | null | null | null | graph.py | mozillakab/various-kab-processing | 095de54a12b5ca9293778761c14b967249ccc3d8 | [
"Apache-2.0"
] | null | null | null | graph.py | mozillakab/various-kab-processing | 095de54a12b5ca9293778761c14b967249ccc3d8 | [
"Apache-2.0"
] | null | null | null | import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.pyplot as plt1
import pylab
G = nx.MultiDiGraph()
#list of kabyle tags
tags=[]
i=0
#extraction du tableau des tags
for ligne in open("c:/tal/tagspos.txt",encoding='utf-8'):
a=ligne.replace('\n',"")
if (i!=0):
b=(a,0,())
tags.append(b)
i=i+1
edges=[] # Edges list
#this function renders the tag index in the tags kab array
regexp ='[-A-Zḍčǧḥṛṣẓṭţɣɛ« ».,:1-9a-z]+/[A-Z]+' # regular expression to retreive the couple (tagged wor/tag)
text=""
#Construction du texte global
first=0
for ligne in open("c:/tal/corpus-kab.txt",encoding='utf-8'):
if (first!=0):
text=text+ligne
first=1
text=text.replace('\n'," ")
text=text.replace(" "," ")
text=text.replace(" "," ")
text=text.replace("\ufeff","")
a=text.split(" ")
i=0
start=0
b=''
while i<len(a)-1:
iii=b
#récupérer la paire mot tag
b=a[i].split("/") #split a couple
#print (b[1])
try:
tuplea=tags[index_of_tag(b[1])] #look for the index of the tag
except:
print (b,iii,'here',b)
exit()
#print (tuple)
number=tuplea[1]+1#increment the tag count
tuple_tag=tuplea[2]
list_a=list(tuple_tag)
if b[1]=='NMP':
list_a.append(b[0])
else:
list_a.append(b[0].lower())
#print (list_a)
tuple_tag=tuple(list_a)
tags[index_of_tag(b[1])]=(tuplea[0],number,tuple_tag)# update une tag count
c=a[i+1].split("/") # this is for the last couple word/tag
if (len(c)!=2):
print (b,c,'moins de deux',a[i-1])
exit()
if(start==0) and (i==0): # the first start edge : First word in the text or the first edge after a dot
G.add_edges_from([('Start',b[1])], weight=0)
edges.append(('Start->'+b[1],1))
G.add_edges_from([(b[1],c[1])], weight=0) # and create an edge betwen the dot and the previous tags
edges.append((b[1]+'->'+c[1],1))
start=1
#print ('start')
elif (start==0):
try:
G.add_edges_from([('Start',c[1])], weight=0) # edge start -> next word after a dot .
start=1
edges.append(('Start->'+c[1],1))
except:
print(c,b,iii)
exit()
elif (c[1]=='.'):
G.add_edges_from([(c[1],'Stop')], weight=0) # when a dot is found, create an end
edges.append((c[1]+'->Stop',1))
G.add_edges_from([(b[1],c[1])], weight=0) # and create an edge betwen the dot and the previous tags
edges.append((b[1]+'->'+c[1],1))
start=0
else:
G.add_edges_from([(b[1],c[1])], weight=0) # create and edge between two neighbours
edges.append((b[1]+'->'+c[1],1))
i=i+1
# this is for the last tag. We will increment its occurence
try:
tuplea=tags[index_of_tag(c[1])]
except:
print (c[1])
exit()
number=tuplea[1]+1
tuple_tag=tuplea[2]
list_a=list(tuple_tag)
list_a.append(c[0])
tuple_tag=tuple(list_a)
try:
tags[index_of_tag(c[1])]=(tuplea[0],number,tuple_tag)
except:
print (c[1])
exit()
#print (tags)
val_map = {}
values = [val_map.get(node, 0.45) for node in G.nodes()]
edge_labels=dict([((u,v,),d['weight'])
for u,v,d in G.edges(data=True)])
red_edges = [('Start','NMC'),('NMC','Stop')]
edge_colors = ['black' if not edge in red_edges else 'black' for edge in G.edges()]
pos=nx.spring_layout(G)
options = {
'node_color': 'blue',
'node_size': 800,
'width': 1,
'arrowstyle': '-|>',
'arrowsize': 13,
}
color_map = []
j=0
for node in G:
#print (node)
if str(node) =='Start' or str(node) =='Stop':
color_map.append('blue')
elif (len(str(node))>=4):
color_map.append('olive')
elif (len(str(node))==3):
color_map.append('yellow')
elif (len(str(node))==2):
color_map.append('purple')
else:
color_map.append('red')
j=j+1
nx.draw(G,pos, node_color = color_map, node_size=1500,edge_color=edge_colors,edge_cmap=plt.cm.Reds)
#nx.draw_networkx_labels()
#networkx.draw_networkx_labels(graph,node_positions,font_size=16)
#nx.coloring.greedy_color(G, strategy='largest_first')
#nx.draw_networkx(G, arrows=True, **options)
#print (words)i
j=0
labels={}
for i in G.nodes:
labels[i]=i
nx.draw_networkx_labels(G,pos,labels,font_size=16)
pylab.axis('off')
pylab.show()
# calculate the occurences of grammatical classes ant show them on histogram
x = np.arange(len(tags))
valeurs=[]
symbols=[]
i=0
while i< len (tags):
if (tags[i][1] != 0):
valeurs.append(tags[i][1])
symbols.append(tags[i][0])
i=i+1
x = np.arange(len(valeurs))
plt.bar(x, height= valeurs)
##
plt.xticks(x+.5, symbols);
plt.ylabel('Timeḍriwt/Tiseqqaṛ')
plt.xlabel('Ismilen inejrumen')
plt.show()
#calculate probabilities
edges_probabilities=[]
edges_probabilities=[[x,edges.count(x)] for x in set(edges)]
for i in edges_probabilities:
edges_probabilities[edges_probabilities.index(i)]=(i[0],i[1]/len(edges))
#print(i[0][0],'+',i[1])
x = np.arange(len(tags))
valeurs=[]
symbols=[]
i=0
while i< len (edges_probabilities):
if (edges_probabilities[i][1] != 0):
valeurs.append(edges_probabilities[i][1]*100)
symbols.append(edges_probabilities[i][0][0])
i=i+1
x = np.arange(len(valeurs))
plt.bar(x, height= valeurs)
plt.xticks(x+.1, symbols);
plt.ylabel('Probabilité')
plt.xlabel('Transitions')
plt.show()
#print ('yes')
#calcul de la matrice de probabilité
probablilities = []
line=[]
l=0
for i in tags:
k=0
line=[]
for j in tags:
line.append(0)
k=k+1
probablilities.append(line)
l=l+1
x=0
for j in edges_probabilities:
x=a
a=j[0][0].split("->")
#print (j,'-> ',index_of_tag(a[0]))# print (j[1])
try:
probablilities[index_of_tag(a[0])][index_of_tag(a[1])]=j[1]
except:
print (x,a,a[0],'->',a[1],j[1])
exit()
for i in probablilities:
k=0
x=0
for j in i:
x=j+x
#print (x)
#######begin cloud
tags1=[]
i=0
for ligne in open("c:/tal/tagspos.txt",encoding='utf-8'):
a=ligne.replace('\n',"")
if (i!=0):
tags1.append(a)
i=i+1
x=[]
y=[]
for i in tags1:
x.append(0)
y.append(0)
#this function renders the tag index in the tags kab array
for i in edges_probabilities:
h=i[0][0]
j=h.split('->')
x[index_of_tag1(j[0],tags1)]=x[index_of_tag1(j[0],tags1)]+1
y[index_of_tag1(j[1],tags1)]=y[index_of_tag1(j[1],tags1)]+1
plt1.scatter(x,y,s=10)
plt1.title('Asigna n waggazen : ismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(x[index_of_tag1(j[0],tags1)]*10000)
y1.append(x[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(x[index_of_tag1(j[0],tags1)]*10000)
y1.append(y[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(y[index_of_tag1(j[0],tags1)]*10000)
y1.append(x[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(y[index_of_tag1(j[0],tags1)]*10000)
y1.append(y[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
print(i[1])
j=h[0].split('->')
## print(j)
x1.append(y[index_of_tag1(j[0],tags1)]*10000)
y1.append(i[1]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
| 22.413366 | 109 | 0.573385 | import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.pyplot as plt1
import pylab
G = nx.MultiDiGraph()
#list of kabyle tags
tags=[]
i=0
#extraction du tableau des tags
for ligne in open("c:/tal/tagspos.txt",encoding='utf-8'):
a=ligne.replace('\n',"")
if (i!=0):
b=(a,0,())
tags.append(b)
i=i+1
edges=[] # Edges list
#this function renders the tag index in the tags kab array
def index_of_tag(tag, tag_table=None):
    """Return the index of *tag* in the tag table, or ``None`` when absent.

    Parameters
    ----------
    tag : str
        Grammatical tag symbol to look up (e.g. ``'NMC'``).
    tag_table : list of tuple, optional
        Sequence of ``(symbol, count, words)`` entries. Defaults to the
        module-level ``tags`` list, so existing callers are unchanged.

    Returns
    -------
    int or None
        Position of the first entry whose symbol equals *tag*, or ``None``
        on a miss (callers wrap the subsequent ``tags[None]`` lookup in
        try/except, so the explicit ``None`` preserves that behavior).
    """
    if tag_table is None:
        tag_table = tags  # module-level table built from tagspos.txt
    # enumerate replaces the manual while-loop index bookkeeping
    for position, entry in enumerate(tag_table):
        if entry[0] == tag:
            return position
    return None
regexp ='[-A-Zḍčǧḥṛṣẓṭţɣɛ« ».,:1-9a-z]+/[A-Z]+' # regular expression to retreive the couple (tagged wor/tag)
text=""
#Construction du texte global
first=0
for ligne in open("c:/tal/corpus-kab.txt",encoding='utf-8'):
if (first!=0):
text=text+ligne
first=1
text=text.replace('\n'," ")
text=text.replace(" "," ")
text=text.replace(" "," ")
text=text.replace("\ufeff","")
a=text.split(" ")
i=0
start=0
b=''
while i<len(a)-1:
iii=b
#récupérer la paire mot tag
b=a[i].split("/") #split a couple
#print (b[1])
try:
tuplea=tags[index_of_tag(b[1])] #look for the index of the tag
except:
print (b,iii,'here',b)
exit()
#print (tuple)
number=tuplea[1]+1#increment the tag count
tuple_tag=tuplea[2]
list_a=list(tuple_tag)
if b[1]=='NMP':
list_a.append(b[0])
else:
list_a.append(b[0].lower())
#print (list_a)
tuple_tag=tuple(list_a)
tags[index_of_tag(b[1])]=(tuplea[0],number,tuple_tag)# update une tag count
c=a[i+1].split("/") # this is for the last couple word/tag
if (len(c)!=2):
print (b,c,'moins de deux',a[i-1])
exit()
if(start==0) and (i==0): # the first start edge : First word in the text or the first edge after a dot
G.add_edges_from([('Start',b[1])], weight=0)
edges.append(('Start->'+b[1],1))
G.add_edges_from([(b[1],c[1])], weight=0) # and create an edge betwen the dot and the previous tags
edges.append((b[1]+'->'+c[1],1))
start=1
#print ('start')
elif (start==0):
try:
G.add_edges_from([('Start',c[1])], weight=0) # edge start -> next word after a dot .
start=1
edges.append(('Start->'+c[1],1))
except:
print(c,b,iii)
exit()
elif (c[1]=='.'):
G.add_edges_from([(c[1],'Stop')], weight=0) # when a dot is found, create an end
edges.append((c[1]+'->Stop',1))
G.add_edges_from([(b[1],c[1])], weight=0) # and create an edge betwen the dot and the previous tags
edges.append((b[1]+'->'+c[1],1))
start=0
else:
G.add_edges_from([(b[1],c[1])], weight=0) # create and edge between two neighbours
edges.append((b[1]+'->'+c[1],1))
i=i+1
# this is for the last tag. We will increment its occurence
try:
tuplea=tags[index_of_tag(c[1])]
except:
print (c[1])
exit()
number=tuplea[1]+1
tuple_tag=tuplea[2]
list_a=list(tuple_tag)
list_a.append(c[0])
tuple_tag=tuple(list_a)
try:
tags[index_of_tag(c[1])]=(tuplea[0],number,tuple_tag)
except:
print (c[1])
exit()
#print (tags)
val_map = {}
values = [val_map.get(node, 0.45) for node in G.nodes()]
edge_labels=dict([((u,v,),d['weight'])
for u,v,d in G.edges(data=True)])
red_edges = [('Start','NMC'),('NMC','Stop')]
edge_colors = ['black' if not edge in red_edges else 'black' for edge in G.edges()]
pos=nx.spring_layout(G)
options = {
'node_color': 'blue',
'node_size': 800,
'width': 1,
'arrowstyle': '-|>',
'arrowsize': 13,
}
color_map = []
j=0
for node in G:
#print (node)
if str(node) =='Start' or str(node) =='Stop':
color_map.append('blue')
elif (len(str(node))>=4):
color_map.append('olive')
elif (len(str(node))==3):
color_map.append('yellow')
elif (len(str(node))==2):
color_map.append('purple')
else:
color_map.append('red')
j=j+1
nx.draw(G,pos, node_color = color_map, node_size=1500,edge_color=edge_colors,edge_cmap=plt.cm.Reds)
#nx.draw_networkx_labels()
#networkx.draw_networkx_labels(graph,node_positions,font_size=16)
#nx.coloring.greedy_color(G, strategy='largest_first')
#nx.draw_networkx(G, arrows=True, **options)
#print (words)i
j=0
labels={}
for i in G.nodes:
labels[i]=i
nx.draw_networkx_labels(G,pos,labels,font_size=16)
pylab.axis('off')
pylab.show()
# calculate the occurences of grammatical classes ant show them on histogram
x = np.arange(len(tags))
valeurs=[]
symbols=[]
i=0
while i< len (tags):
if (tags[i][1] != 0):
valeurs.append(tags[i][1])
symbols.append(tags[i][0])
i=i+1
x = np.arange(len(valeurs))
plt.bar(x, height= valeurs)
##
plt.xticks(x+.5, symbols);
plt.ylabel('Timeḍriwt/Tiseqqaṛ')
plt.xlabel('Ismilen inejrumen')
plt.show()
#calculate probabilities
edges_probabilities=[]
edges_probabilities=[[x,edges.count(x)] for x in set(edges)]
for i in edges_probabilities:
edges_probabilities[edges_probabilities.index(i)]=(i[0],i[1]/len(edges))
#print(i[0][0],'+',i[1])
x = np.arange(len(tags))
valeurs=[]
symbols=[]
i=0
while i< len (edges_probabilities):
if (edges_probabilities[i][1] != 0):
valeurs.append(edges_probabilities[i][1]*100)
symbols.append(edges_probabilities[i][0][0])
i=i+1
x = np.arange(len(valeurs))
plt.bar(x, height= valeurs)
plt.xticks(x+.1, symbols);
plt.ylabel('Probabilité')
plt.xlabel('Transitions')
plt.show()
#print ('yes')
#calcul de la matrice de probabilité
probablilities = []
line=[]
l=0
for i in tags:
k=0
line=[]
for j in tags:
line.append(0)
k=k+1
probablilities.append(line)
l=l+1
x=0
for j in edges_probabilities:
x=a
a=j[0][0].split("->")
#print (j,'-> ',index_of_tag(a[0]))# print (j[1])
try:
probablilities[index_of_tag(a[0])][index_of_tag(a[1])]=j[1]
except:
print (x,a,a[0],'->',a[1],j[1])
exit()
for i in probablilities:
k=0
x=0
for j in i:
x=j+x
#print (x)
#######begin cloud
tags1=[]
i=0
for ligne in open("c:/tal/tagspos.txt",encoding='utf-8'):
a=ligne.replace('\n',"")
if (i!=0):
tags1.append(a)
i=i+1
x=[]
y=[]
for i in tags1:
x.append(0)
y.append(0)
#this function renders the tag index in the tags kab array
def index_of_tag1(tag, tags1):
    """Locate *tag* in the flat tag list *tags1*.

    Returns the position of the first match. When the tag is not present,
    the tag itself is returned unchanged (original contract — callers may
    then fail with a TypeError when using it as a list index).
    """
    for position, candidate in enumerate(tags1):
        if candidate == tag:
            return position
    return tag
for i in edges_probabilities:
h=i[0][0]
j=h.split('->')
x[index_of_tag1(j[0],tags1)]=x[index_of_tag1(j[0],tags1)]+1
y[index_of_tag1(j[1],tags1)]=y[index_of_tag1(j[1],tags1)]+1
plt1.scatter(x,y,s=10)
plt1.title('Asigna n waggazen : ismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(x[index_of_tag1(j[0],tags1)]*10000)
y1.append(x[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(x[index_of_tag1(j[0],tags1)]*10000)
y1.append(y[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(y[index_of_tag1(j[0],tags1)]*10000)
y1.append(x[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
j=h[0].split('->')
## print(j)
x1.append(y[index_of_tag1(j[0],tags1)]*10000)
y1.append(y[index_of_tag1(j[1],tags1)]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
x1=[]
y1=[]
for i in edges_probabilities:
h=i[0]
print(i[1])
j=h[0].split('->')
## print(j)
x1.append(y[index_of_tag1(j[0],tags1)]*10000)
y1.append(i[1]*10000)
plt1.scatter(x1,y1,s=5)
plt1.title('Asigna n waggazen : Tiyugiwin n yismilen n tjerrumt')
plt1.xlabel('x')
plt1.ylabel('y')
plt1.show()
| 315 | 0 | 46 |
e6c99295a7450a8c324a0e9f429484bf58772218 | 648 | py | Python | api/models.py | 11pawan11/ehealth | 4385881e43b441937a9b6f90d7d00122ae568c35 | [
"MIT"
] | null | null | null | api/models.py | 11pawan11/ehealth | 4385881e43b441937a9b6f90d7d00122ae568c35 | [
"MIT"
] | 1 | 2021-03-08T09:13:02.000Z | 2021-03-08T09:13:02.000Z | api/models.py | 11pawan11/ehealth | 4385881e43b441937a9b6f90d7d00122ae568c35 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
| 29.454545 | 47 | 0.615741 | from django.db import models
# Create your models here.
class Disease(models.Model):
    """Model holding one record of six string values (50 chars max each)."""

    # Six free-text value columns, capped at 50 characters apiece.
    value_1 = models.CharField(max_length=50)
    value_2 = models.CharField(max_length=50)
    value_3 = models.CharField(max_length=50)
    value_4 = models.CharField(max_length=50)
    value_5 = models.CharField(max_length=50)
    value_6 = models.CharField(max_length=50)

    def to_dict(self):
        """Return the six values as a plain dict keyed by field name."""
        return {name: getattr(self, name)
                for name in ('value_1', 'value_2', 'value_3',
                             'value_4', 'value_5', 'value_6')}
| 244 | 323 | 22 |
c98379981c7cf88b65c84d6ed643c218e954d9dd | 963 | py | Python | tests/test_cli.py | upciti/debops | a767528cccada778116748ec0fa702a39df3a6e2 | [
"MIT"
] | null | null | null | tests/test_cli.py | upciti/debops | a767528cccada778116748ec0fa702a39df3a6e2 | [
"MIT"
] | null | null | null | tests/test_cli.py | upciti/debops | a767528cccada778116748ec0fa702a39df3a6e2 | [
"MIT"
] | null | null | null | import os
import pytest
from typer.testing import CliRunner
from ops2deb.cli import app
runner = CliRunner()
@pytest.mark.parametrize("args", [[], ["-v"], ["-v", "-e", "10"]])
| 31.064516 | 87 | 0.751817 | import os
import pytest
from typer.testing import CliRunner
from ops2deb.cli import app
runner = CliRunner()
def test_app_should_exit_with_error_when_subcommand_does_not_exist():
    """Invoking an unknown subcommand must yield a non-zero exit code."""
    outcome = runner.invoke(app, ["not-a-subcommand"], catch_exceptions=False)
    assert outcome.exit_code != 0
def test_app_should_exit_with_error_when_option_does_not_exist():
    """Invoking an unknown option must yield a non-zero exit code."""
    outcome = runner.invoke(app, ["--not-an-option"], catch_exceptions=False)
    assert outcome.exit_code != 0
def test_app_should_exit_with_0_when_help_option_is_used():
    """--help must print usage and exit successfully."""
    outcome = runner.invoke(app, ["--help"], catch_exceptions=False)
    assert outcome.exit_code == 0
@pytest.mark.parametrize("args", [[], ["-v"], ["-v", "-e", "10"]])
def test_app_should_call_default_subcommand_when_no_subcommand_is_used(args, tmp_path):
    """Without a subcommand, the default one runs against OPS2DEB_CONFIG."""
    os.environ["OPS2DEB_CONFIG"] = str(tmp_path)
    outcome = runner.invoke(app, args, catch_exceptions=False)
    assert "Path points to a directory: " in outcome.stdout
| 688 | 0 | 91 |
52d0436ebff9c1f25974378cbb7e42eedcedfb40 | 3,237 | py | Python | config.py | juanmc2005/continual-cross-lingual-nlu | ce2a01ddaa8754404f3f6b5b0fe81953c8a6951f | [
"MIT"
] | null | null | null | config.py | juanmc2005/continual-cross-lingual-nlu | ce2a01ddaa8754404f3f6b5b0fe81953c8a6951f | [
"MIT"
] | null | null | null | config.py | juanmc2005/continual-cross-lingual-nlu | ce2a01ddaa8754404f3f6b5b0fe81953c8a6951f | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2021 Université Paris-Saclay
# Copyright (c) 2021 Laboratoire national de métrologie et d'essais (LNE)
# Copyright (c) 2021 CNRS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pathlib import Path
from typing import Union, Text
import yaml
from box import Box
from utils import fix_seed
class Config:
    """
    Dot-based access to configuration parameters saved in a YAML file.
    """

    def __init__(self, file: Union[Path, Text]):
        """
        Load and normalize the parameters from the YAML *file*.

        The YAML content is wrapped in a ``Box`` so nested keys are
        reachable with attribute access; missing keys resolve to ``None``.
        If no path is given in the YAML file for seqeval, the package
        name is used instead (may require an internet connection when
        loaded later).
        """
        # get a Box object from the YAML file
        with open(str(file), 'r') as ymlfile:
            cfg = Box(yaml.safe_load(ymlfile), default_box=True, default_box_attr=None)
        # manually populate the current Config object with the Box object (since Box inheritance fails)
        for key in cfg.keys():
            setattr(self, key, getattr(cfg, key))
        # resolve seqeval config into a name or a path
        seqeval_path = getattr(self, "seqeval_path", None)
        self.seqeval_path = seqeval_path if seqeval_path is not None else 'seqeval'
        self.dataset.path = Path(self.dataset.path)
        # Don't lowercase if the corresponding attribute is not defined in config.yml
        self.dataset.do_lowercase = getattr(self.dataset, 'do_lowercase', False)
        # YAML may deliver numbers such as 5e-5 as strings: coerce to float.
        # isinstance replaces the `type(val) != float` anti-pattern.
        for float_var in ["dropout", "learning_rate", "slot_loss_coeff"]:
            val = getattr(self.train, float_var)
            if not isinstance(val, float):
                setattr(self.train, float_var, float(val))
        assert self.train.validation_metric in ["intent_acc", "slot_f1", "loss"], "Unrecognized validation metric"
        # Optional attributes default to None when absent from config.yml.
        self.train.num_workers = getattr(self.train, "num_workers", None)
        self.train.seed = getattr(self.train, "seed", None)
        # Make runs reproducible when a seed is specified.
        if self.train.seed is not None:
            fix_seed(self.train.seed)
#
# Copyright (c) 2021 Université Paris-Saclay
# Copyright (c) 2021 Laboratoire national de métrologie et d'essais (LNE)
# Copyright (c) 2021 CNRS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from pathlib import Path
from typing import Union, Text
import yaml
from box import Box
from utils import fix_seed
class Config:
    """
    Dot-based access to configuration parameters saved in a YAML file.
    """

    def __init__(self, file: Union[Path, Text]):
        """
        Load and normalize the parameters from the YAML *file*.

        The YAML content is wrapped in a ``Box`` so nested keys are
        reachable with attribute access; missing keys resolve to ``None``.
        If no path is given in the YAML file for seqeval, the package
        name is used instead (may require an internet connection when
        loaded later).
        """
        # get a Box object from the YAML file
        with open(str(file), 'r') as ymlfile:
            cfg = Box(yaml.safe_load(ymlfile), default_box=True, default_box_attr=None)
        # manually populate the current Config object with the Box object (since Box inheritance fails)
        for key in cfg.keys():
            setattr(self, key, getattr(cfg, key))
        # resolve seqeval config into a name or a path
        seqeval_path = getattr(self, "seqeval_path", None)
        self.seqeval_path = seqeval_path if seqeval_path is not None else 'seqeval'
        self.dataset.path = Path(self.dataset.path)
        # Don't lowercase if the corresponding attribute is not defined in config.yml
        self.dataset.do_lowercase = getattr(self.dataset, 'do_lowercase', False)
        # YAML may deliver numbers such as 5e-5 as strings: coerce to float.
        # isinstance replaces the `type(val) != float` anti-pattern.
        for float_var in ["dropout", "learning_rate", "slot_loss_coeff"]:
            val = getattr(self.train, float_var)
            if not isinstance(val, float):
                setattr(self.train, float_var, float(val))
        assert self.train.validation_metric in ["intent_acc", "slot_f1", "loss"], "Unrecognized validation metric"
        # Optional attributes default to None when absent from config.yml.
        self.train.num_workers = getattr(self.train, "num_workers", None)
        self.train.seed = getattr(self.train, "seed", None)
        # Make runs reproducible when a seed is specified.
        if self.train.seed is not None:
            fix_seed(self.train.seed)
614fab382bfc743156cd28a4a6d92567c097f939 | 1,642 | py | Python | tests/test_launcher.py | cr1pt/pypyteer | b3aade3741b385f2e1dde600b501776f1f5e8479 | [
"MIT"
] | null | null | null | tests/test_launcher.py | cr1pt/pypyteer | b3aade3741b385f2e1dde600b501776f1f5e8479 | [
"MIT"
] | null | null | null | tests/test_launcher.py | cr1pt/pypyteer | b3aade3741b385f2e1dde600b501776f1f5e8479 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from pyppeteer.launcher import Launcher
from pyppeteer.chromium_downloader import chromium_excutable
| 33.510204 | 79 | 0.6419 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from pyppeteer.launcher import Launcher
from pyppeteer.chromium_downloader import chromium_excutable
class TestLauncher(unittest.TestCase):
    """Unit tests for the chrome argument list assembled by Launcher."""

    def setUp(self):
        # Flags that must be present whenever headless mode is active.
        self.headless_options = [
            '--headless',
            '--disable-gpu',
            '--hide-scrollbars',
            '--mute-audio',
        ]

    def check_default_args(self, launcher):
        """Assert all headless flags and a --user-data-dir flag are present."""
        for flag in self.headless_options:
            self.assertIn(flag, launcher.chrome_args)
        has_profile_dir = any(
            arg.startswith('--user-data-dir') for arg in launcher.chrome_args
        )
        self.assertTrue(has_profile_dir)

    def test_no_option(self):
        launcher = Launcher()
        self.check_default_args(launcher)
        self.assertEqual(launcher.exec, str(chromium_excutable()))

    def test_disable_headless(self):
        launcher = Launcher({'headless': False})
        for flag in self.headless_options:
            self.assertNotIn(flag, launcher.chrome_args)

    def test_executable(self):
        launcher = Launcher({'executablePath': '/path/to/chrome'})
        self.assertEqual(launcher.exec, '/path/to/chrome')

    def test_args(self):
        launcher = Launcher({'args': ['--some-args']})
        self.check_default_args(launcher)
        self.assertIn('--some-args', launcher.chrome_args)

    def test_user_data_dir(self):
        launcher = Launcher({'args': ['--user-data-dir=/path/to/profile']})
        self.check_default_args(launcher)
        self.assertIn('--user-data-dir=/path/to/profile', launcher.chrome_args)
        self.assertIsNone(launcher._tmp_user_data_dir)
| 1,247 | 17 | 211 |