column           type            min    max
commit           stringlengths   40     40
old_file         stringlengths   4      118
new_file         stringlengths   4      118
old_contents     stringlengths   0      2.94k
new_contents     stringlengths   1      4.43k
subject          stringlengths   15     444
message          stringlengths   16     3.45k
lang             stringclasses   1 value
license          stringclasses   13 values
repos            stringlengths   5      43.2k
prompt           stringlengths   17     4.58k
response         stringlengths   1      4.43k
prompt_tagged    stringlengths   58     4.62k
response_tagged  stringlengths   1      4.43k
text             stringlengths   132    7.29k
text_tagged      stringlengths   173    7.33k
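In the raw dump, the last six columns (prompt through text_tagged) repeat the base fields verbatim, so each record below lists every field once. How the derived columns are assembled is visible from the rows themselves; here is a minimal reconstruction sketch. The <commit_before>/<commit_msg>/<commit_after> tags and the field order come straight from the dump, while any whitespace between the concatenated fields is an assumption (all old_contents values in this section are empty, so separators cannot be observed).

def build_derived_columns(row):
    # prompt pairs the pre-commit file contents with the commit message;
    # response is the post-commit contents.
    prompt = row["old_contents"] + row["message"]
    response = row["new_contents"]
    # The *_tagged variants wrap the same fields in explicit markers.
    prompt_tagged = ("<commit_before>" + row["old_contents"] +
                     "<commit_msg>" + row["message"] +
                     "<commit_after>")
    response_tagged = row["new_contents"]
    return {
        "prompt": prompt,
        "response": response,
        "prompt_tagged": prompt_tagged,
        "response_tagged": response_tagged,
        "text": prompt + response,
        "text_tagged": prompt_tagged + response_tagged,
    }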
532e84e2dedb83c2584fcd4122f0f3f905413b63
data/vcfClean.py
data/vcfClean.py
#!/usr/bin/env python2.7

"""The 1000 Genomes VCF files have some SNPs which are not congruent
with the fasta sequence. This script simply deletes such cases so they
don't cause trouble down the road
"""

import argparse, sys, os, os.path, random, subprocess, shutil, itertools
from Bio import SeqIO
from Bio.Seq import Seq


def parse_args(args):
    parser = argparse.ArgumentParser(description=__doc__,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("in_vcf", type=str, help="Input vcf file")
    parser.add_argument("in_fa", type=str, help="Input fa")
    args = args[1:]
    options = parser.parse_args(args)
    return options


def main(args):
    options = parse_args(args)
    in_vcf = open(options.in_vcf, "r")
    in_fa = open(options.in_fa)

    records = list(SeqIO.parse(in_fa, "fasta"))
    assert len(records) == 1
    fa_seq = records[0]

    skip_count = 0
    rec_count = 0

    while True:
        line = in_vcf.readline()
        if line != "":
            skip = False
            if line[0] != "#":
                rec_count += 1
                toks = line.split()
                assert fa_seq.name == toks[0]
                assert len(toks) > 3
                vcf_ref = toks[3]
                start = int(toks[1]) - 1
                fa_ref = ""
                for i in xrange(len(vcf_ref)):
                    fa_ref += str(fa_seq[start + i])
                if fa_ref.upper() != vcf_ref.upper():
                    skip = True
                    skip_count += 1
                    sys.stderr.write("Skipping VCF variant at {} with ref {} "
                                     "because it does not match fasta {}\n".format(
                                         toks[1], vcf_ref, fa_ref))
            if not skip:
                sys.stdout.write(line)
        else:
            break

    sys.stderr.write("Skipped {} out of {} records".format(skip_count, rec_count))
    in_vcf.close()
    in_fa.close()

if __name__ == "__main__":
    sys.exit(main(sys.argv))
Add vcf clean script to take out variants from vcf that dont match fasta
Add vcf clean script to take out variants from vcf that dont match fasta
Python
mit
glennhickey/vg2sg,glennhickey/vg2sg,glennhickey/vg2sg,glennhickey/vg2sg
e75860ada9b64054c9f45e1af2ddf485e5efb6d0
latlng2utm/detect-utm-zone.py
latlng2utm/detect-utm-zone.py
# modified code from https://pcjericks.github.io/py-gdalogr-cookbook/
# vector_layers.html

import os
from osgeo import ogr
from sys import argv
import math

script, daShapefile = argv
# /Users/jbranigan/Documents/phila-city_limits_shp


def check_latlng(bbox):
    for i in bbox:
        if i < -180 or i > 180:
            failure('This file is already projected.')


def check_width(bbox):
    width = bbox[1] - bbox[0]
    if width > 3:
        failure('This file is too many degrees wide for UTM')


def get_zone(coord):
    # print 'zone function on ', coord
    # There are 60 longitudinal projection zones numbered 1 to 60 starting at 180W
    # So that's -180 = 1, -174 = 2, -168 = 3
    zone = ((coord - -180) / 6.0)
    return math.ceil(zone)


def get_bbox(daShapefile):
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataSource = driver.Open(daShapefile, 0)  # 0 means read, 1 means write
    # Check to see if shapefile is found.
    if dataSource is None:
        print 'Could not open %s' % (daShapefile)
    else:
        print 'Opened %s' % (daShapefile)
        layer = dataSource.GetLayer()
        bbox = layer.GetExtent()
        return bbox


def failure(why):
    print why
    raise SystemExit

bbox = get_bbox(daShapefile)
latlng = check_latlng(bbox)
width = check_width(bbox)
bbox_center = ((bbox[1] - bbox[0]) / 2) + bbox[0]
utmzone = get_zone(bbox_center)
print 'The UTM zone is: %d' % utmzone
Add script that detects a shapefile UTM zone
Add script that detects a shapefile UTM zone
Python
mit
jbranigan/geo-scripts-python
2998801fa337a6f89093380d5d05c5299e3617b1
migrations/versions/f092dbba3026_.py
migrations/versions/f092dbba3026_.py
"""empty message Revision ID: f092dbba3026 Revises: ad8c63f1135e Create Date: 2018-11-20 13:01:18.328112 """ # revision identifiers, used by Alembic. revision = 'f092dbba3026' down_revision = 'ad8c63f1135e' from alembic import op import sqlalchemy as sa def upgrade(): # commands auto generated by Alembic - please adjust! ### op.add_column('users', sa.Column('email_id', sa.String(length=128), nullable=True)) op.create_unique_constraint(op.f('uq_users_email_id'), 'users', ['email_id']) # end Alembic commands ### def downgrade(): # commands auto generated by Alembic - please adjust! ### op.drop_constraint(op.f('uq_users_email_id'), 'users', type_='unique') op.drop_column('users', 'email_id') # end Alembic commands ###
Add new field for email - migration file
Add new field for email - migration file
Python
mit
CSC-IT-Center-for-Science/pouta-blueprints,CSC-IT-Center-for-Science/pouta-blueprints,CSC-IT-Center-for-Science/pouta-blueprints,CSC-IT-Center-for-Science/pouta-blueprints
a926e216426c66dd93ea2e2ac5bad4aedd0c13b9
lintcode/Easy/174_Remove_nth_Node_from_End_of_List.py
lintcode/Easy/174_Remove_nth_Node_from_End_of_List.py
""" Definition of ListNode class ListNode(object): def __init__(self, val, next=None): self.val = val self.next = next """ class Solution: """ @param head: The first node of linked list. @param n: An integer. @return: The head of linked list. """ def removeNthFromEnd(self, head, n): # write your code here # Solution 1 # temp = head # arr = [] # nodes = [] # while (temp): # arr.append(temp.val) # temp = temp.next # arr = arr[:len(arr) - n] + arr[len(arr) - n + 1:] # for i in arr: # nodes.append(ListNode(i)) # for i in range(len(nodes) - 1): # nodes[i].next = nodes[i + 1] # return nodes[0] if len(nodes) != 0 else None # Solution 2 dummy = ListNode(0) dummy.next = head tmp = dummy for i in range(n): head = head.next while (head is not None): head = head.next tmp = tmp.next tmp.next = tmp.next.next return dummy.next
Add solution to lintcode question 174
Add solution to lintcode question 174
Python
mit
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
13fb6dda7ba7dfc63a54cf8368216c6dfe3d20c9
src/scripts/create_conll.py
src/scripts/create_conll.py
#!/usr/bin/env python3

"""Create a CoNLL corpus from FrameNet fulltext data tokens

This CoNLL corpus will then be lemmatized using WordNet, and parsed using
TurboParser.
"""

from pathlib import Path
from xml.etree import ElementTree as ET
import os

from nltk.corpus import wordnet

from paths import FRAMENET_FULLTEXT

xmlns = 'http://framenet.icsi.berkeley.edu'

conll_dir = Path(FRAMENET_FULLTEXT).parents[1] / 'framenet_turpobarsed'
os.makedirs(str(conll_dir), exist_ok=True)

for fulltext_filename in Path(FRAMENET_FULLTEXT).glob('*.xml'):
    fulltext_xml = ET.ElementTree(file=str(fulltext_filename))
    conll_file = open(str(conll_dir / (fulltext_filename.stem + '.conll')), 'w')
    for sentence in fulltext_xml.findall('{{{}}}sentence'.format(xmlns)):
        word_id = 1
        sentence_text = sentence.find('{{{}}}text'.format(xmlns)).text
        for word_label in sentence.findall('{{{0}}}annotationSet/{{{0}}}layer[@name="PENN"]/{{{0}}}label'.format(xmlns)):
            start = int(word_label.get('start'))
            end = int(word_label.get('end'))
            word = sentence_text[start:end+1]
            morphy_lemma = wordnet.morphy(word.lower())
            lemma = morphy_lemma if morphy_lemma is not None else word
            print('\t'.join([str(word_id), word, lemma] + ['_'] * 7), file=conll_file)
            word_id += 1
        print(file=conll_file)

print('Wrote files in {}'.format(str(conll_dir)))
Add script to create CoNLL corpus from FN fulltext
Add script to create CoNLL corpus from FN fulltext
Python
agpl-3.0
aymara/knowledgesrl,aymara/knowledgesrl
523447ef1bd8c0af785c382975ef61c0a374a833
testinfra/mon/test_ossec_ruleset.py
testinfra/mon/test_ossec_ruleset.py
import re

alert_level_regex = re.compile(r"Level: '(\d+)'")


def test_grsec_denied_rwx_mapping_produces_alert(Command, Sudo):
    """Check that a denied RWX mmapping produces an OSSEC alert"""
    test_alert = ("Feb 10 23:34:40 app kernel: [ 124.188641] grsec: denied "
                  "RWX mmap of <anonymous mapping> by /usr/sbin/apache2"
                  "[apache2:1328] uid/euid:33/33 gid/egid:33/33, parent "
                  "/usr/sbin/apache2[apache2:1309] uid/euid:0/0 gid/egid:0/0")
    with Sudo():
        c = Command('echo "{}" | /var/ossec/bin/ossec-logtest'.format(
            test_alert))
        # Level 7 alert should be triggered by rule 100101
        assert "Alert to be generated" in c.stderr
        alert_level = alert_level_regex.findall(c.stderr)[0]
        assert alert_level == "7"
Add initial automated test for OSSEC alert using ossec-logtest
Add initial automated test for OSSEC alert using ossec-logtest Verify that the log event describes in PR #871 (grsec denying RWX mmap) produces an OSSEC alert of level 7. Note: The rule added in PR #871 was later reverted, which is why current SecureDrop produces OSSEC alerts for this kind of log event.
Python
agpl-3.0
ehartsuyker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,heartsucker/securedrop,heartsucker/securedrop,conorsch/securedrop,conorsch/securedrop,conorsch/securedrop,conorsch/securedrop,ehartsuyker/securedrop,micahflee/securedrop,garrettr/securedrop,garrettr/securedrop,conorsch/securedrop,micahflee/securedrop,micahflee/securedrop,ehartsuyker/securedrop,garrettr/securedrop,heartsucker/securedrop,garrettr/securedrop,ehartsuyker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,micahflee/securedrop
f5fb79cc637aa0305b4cbea144330208d2da0378
tests/test_cmd.py
tests/test_cmd.py
from tornado.ioloop import IOLoop

from tests.helper import ExternalVersionTestCase


class CMDTest(ExternalVersionTestCase):
    def get_new_ioloop(self):
        return IOLoop.instance()

    def test_cmd(self):
        self.assertEqual(self.sync_get_version("example", {"cmd": "echo Meow"}), "Meow")
Add a testcase for Command
Add a testcase for Command
Python
mit
lilydjwg/nvchecker
5b70b4a04fe7d6eb9d5836bef297647dee85124a
zephyr/management/commands/create_user.py
zephyr/management/commands/create_user.py
import sys

from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from django.utils.timezone import now
from django.core import validators

from zephyr.models import Realm, do_create_user
from zephyr.views import do_send_message
from zephyr.lib.initial_password import initial_password


class Command(BaseCommand):
    help = "Create the specified user with a default initial password."

    def handle(self, *args, **options):
        try:
            email, full_name = args
            try:
                validators.validate_email(email)
            except ValidationError:
                raise CommandError("Invalid email address.")
        except ValueError:
            if len(args) != 0:
                raise CommandError("Either specify an email and full name "
                                   "as two parameters, or specify no parameters for "
                                   "interactive user creation.")
            else:
                while True:
                    email = raw_input("Email: ")
                    try:
                        validators.validate_email(email)
                        break
                    except ValidationError:
                        print >> sys.stderr, "Invalid email address."
                full_name = raw_input("Full name: ")

        try:
            realm = Realm.objects.get(domain=email.split('@')[-1])
        except Realm.DoesNotExist:
            raise CommandError("Realm does not exist.")

        try:
            do_create_user(email, initial_password(email), realm, full_name,
                           email.split('@')[0])
        except IntegrityError:
            raise CommandError("User already exists.")
Introduce new manage.py command which creates users with default passwords.
Introduce new manage.py command which creates users with default passwords. (imported from commit ba5ed9cb6ee91435b184845019391e5dc38fc3aa)
Python
apache-2.0
Gabriel0402/zulip,j831/zulip,swinghu/zulip,sup95/zulip,tiansiyuan/zulip,krtkmj/zulip,yuvipanda/zulip,Batterfii/zulip,kou/zulip,ashwinirudrappa/zulip,Batterfii/zulip,levixie/zulip,he15his/zulip,dotcool/zulip,kokoar/zulip,KJin99/zulip,lfranchi/zulip,itnihao/zulip,grave-w-grave/zulip,vaidap/zulip,niftynei/zulip,rht/zulip,sup95/zulip,dxq-git/zulip,hj3938/zulip,wangdeshui/zulip,mansilladev/zulip,praveenaki/zulip,wweiradio/zulip,arpitpanwar/zulip,cosmicAsymmetry/zulip,levixie/zulip,esander91/zulip,aps-sids/zulip,reyha/zulip,peiwei/zulip,voidException/zulip,zhaoweigg/zulip,dawran6/zulip,hengqujushi/zulip,brockwhittaker/zulip,eeshangarg/zulip,ryansnowboarder/zulip,jphilipsen05/zulip,lfranchi/zulip,Qgap/zulip,bitemyapp/zulip,zhaoweigg/zulip,esander91/zulip,bssrdf/zulip,bastianh/zulip,brainwane/zulip,ApsOps/zulip,blaze225/zulip,deer-hope/zulip,so0k/zulip,mansilladev/zulip,codeKonami/zulip,niftynei/zulip,bluesea/zulip,mohsenSy/zulip,kaiyuanheshang/zulip,Frouk/zulip,xuanhan863/zulip,vakila/zulip,bluesea/zulip,m1ssou/zulip,hj3938/zulip,Juanvulcano/zulip,ufosky-server/zulip,developerfm/zulip,ApsOps/zulip,sonali0901/zulip,dxq-git/zulip,zwily/zulip,paxapy/zulip,dotcool/zulip,rishig/zulip,JanzTam/zulip,ryansnowboarder/zulip,wdaher/zulip,jonesgithub/zulip,tbutter/zulip,kou/zulip,mansilladev/zulip,seapasulli/zulip,peiwei/zulip,Cheppers/zulip,pradiptad/zulip,JanzTam/zulip,aakash-cr7/zulip,aakash-cr7/zulip,peguin40/zulip,jeffcao/zulip,dhcrzf/zulip,reyha/zulip,hengqujushi/zulip,Qgap/zulip,Cheppers/zulip,jerryge/zulip,bluesea/zulip,mdavid/zulip,xuxiao/zulip,zachallaun/zulip,joshisa/zulip,bluesea/zulip,timabbott/zulip,ApsOps/zulip,arpitpanwar/zulip,shrikrishnaholla/zulip,mansilladev/zulip,bitemyapp/zulip,jainayush975/zulip,christi3k/zulip,avastu/zulip,tiansiyuan/zulip,he15his/zulip,krtkmj/zulip,LAndreas/zulip,synicalsyntax/zulip,kou/zulip,zwily/zulip,themass/zulip,nicholasbs/zulip,amallia/zulip,jainayush975/zulip,shaunstanislaus/zulip,vabs22/zulip,souravbadami/zulip,bitemyapp/zulip,yuvipanda/zulip,Jianchun1/zulip,zofuthan/zulip,suxinde2009/zulip,jimmy54/zulip,susansls/zulip,jeffcao/zulip,dhcrzf/zulip,karamcnair/zulip,ryansnowboarder/zulip,KingxBanana/zulip,jackrzhang/zulip,hustlzp/zulip,jeffcao/zulip,so0k/zulip,Galexrt/zulip,codeKonami/zulip,schatt/zulip,hayderimran7/zulip,MariaFaBella85/zulip,Drooids/zulip,DazWorrall/zulip,AZtheAsian/zulip,Drooids/zulip,tiansiyuan/zulip,cosmicAsymmetry/zulip,tdr130/zulip,aliceriot/zulip,suxinde2009/zulip,thomasboyt/zulip,amallia/zulip,alliejones/zulip,swinghu/zulip,voidException/zulip,codeKonami/zulip,lfranchi/zulip,Frouk/zulip,amanharitsh123/zulip,cosmicAsymmetry/zulip,aps-sids/zulip,JanzTam/zulip,aliceriot/zulip,brainwane/zulip,zorojean/zulip,rht/zulip,hustlzp/zulip,natanovia/zulip,ashwinirudrappa/zulip,schatt/zulip,amyliu345/zulip,verma-varsha/zulip,tiansiyuan/zulip,hayderimran7/zulip,guiquanz/zulip,adnanh/zulip,suxinde2009/zulip,susansls/zulip,firstblade/zulip,voidException/zulip,RobotCaleb/zulip,timabbott/zulip,babbage/zulip,TigorC/zulip,dattatreya303/zulip,amyliu345/zulip,aliceriot/zulip,KingxBanana/zulip,Gabriel0402/zulip,schatt/zulip,jonesgithub/zulip,johnny9/zulip,esander91/zulip,mahim97/zulip,tdr130/zulip,seapasulli/zulip,mahim97/zulip,jrowan/zulip,rishig/zulip,xuxiao/zulip,willingc/zulip,aps-sids/zulip,zwily/zulip,amanharitsh123/zulip,avastu/zulip,showell/zulip,amallia/zulip,dxq-git/zulip,punchagan/zulip,saitodisse/zulip,reyha/zulip,stamhe/zulip,akuseru/zulip,punchagan/zulip,dotcool/zulip,alliejones/zulip,deer-hope/zulip,vaidap/zulip,gigawhitlocks/zulip,glovebx/zulip,u
fosky-server/zulip,brainwane/zulip,dwrpayne/zulip,natanovia/zulip,gkotian/zulip,wangdeshui/zulip,Galexrt/zulip,vabs22/zulip,susansls/zulip,easyfmxu/zulip,karamcnair/zulip,moria/zulip,technicalpickles/zulip,armooo/zulip,brainwane/zulip,shubhamdhama/zulip,JPJPJPOPOP/zulip,schatt/zulip,eeshangarg/zulip,bssrdf/zulip,zwily/zulip,qq1012803704/zulip,schatt/zulip,jackrzhang/zulip,esander91/zulip,timabbott/zulip,ericzhou2008/zulip,xuxiao/zulip,MayB/zulip,littledogboy/zulip,DazWorrall/zulip,deer-hope/zulip,peiwei/zulip,ryanbackman/zulip,willingc/zulip,susansls/zulip,levixie/zulip,gigawhitlocks/zulip,proliming/zulip,christi3k/zulip,hustlzp/zulip,souravbadami/zulip,aakash-cr7/zulip,swinghu/zulip,Suninus/zulip,KJin99/zulip,thomasboyt/zulip,TigorC/zulip,susansls/zulip,nicholasbs/zulip,ApsOps/zulip,xuanhan863/zulip,shrikrishnaholla/zulip,peiwei/zulip,DazWorrall/zulip,noroot/zulip,Drooids/zulip,swinghu/zulip,zacps/zulip,dotcool/zulip,alliejones/zulip,easyfmxu/zulip,timabbott/zulip,amanharitsh123/zulip,rht/zulip,samatdav/zulip,hackerkid/zulip,atomic-labs/zulip,bastianh/zulip,zachallaun/zulip,arpitpanwar/zulip,ericzhou2008/zulip,bluesea/zulip,j831/zulip,pradiptad/zulip,Cheppers/zulip,peiwei/zulip,eeshangarg/zulip,eeshangarg/zulip,jonesgithub/zulip,zofuthan/zulip,PhilSk/zulip,hackerkid/zulip,moria/zulip,zulip/zulip,ahmadassaf/zulip,eeshangarg/zulip,dnmfarrell/zulip,MayB/zulip,mansilladev/zulip,glovebx/zulip,eastlhu/zulip,Batterfii/zulip,AZtheAsian/zulip,hj3938/zulip,littledogboy/zulip,Qgap/zulip,LeeRisk/zulip,susansls/zulip,dhcrzf/zulip,mohsenSy/zulip,AZtheAsian/zulip,vakila/zulip,voidException/zulip,proliming/zulip,MariaFaBella85/zulip,ufosky-server/zulip,ryansnowboarder/zulip,ikasumiwt/zulip,technicalpickles/zulip,jimmy54/zulip,jeffcao/zulip,firstblade/zulip,bssrdf/zulip,tommyip/zulip,praveenaki/zulip,wangdeshui/zulip,tdr130/zulip,Qgap/zulip,brockwhittaker/zulip,codeKonami/zulip,levixie/zulip,Suninus/zulip,Jianchun1/zulip,so0k/zulip,sonali0901/zulip,hackerkid/zulip,kou/zulip,itnihao/zulip,jessedhillon/zulip,LeeRisk/zulip,rishig/zulip,peguin40/zulip,bowlofstew/zulip,krtkmj/zulip,seapasulli/zulip,developerfm/zulip,jrowan/zulip,JPJPJPOPOP/zulip,zofuthan/zulip,firstblade/zulip,vaidap/zulip,cosmicAsymmetry/zulip,TigorC/zulip,grave-w-grave/zulip,ipernet/zulip,technicalpickles/zulip,zacps/zulip,samatdav/zulip,sup95/zulip,babbage/zulip,isht3/zulip,firstblade/zulip,SmartPeople/zulip,codeKonami/zulip,LAndreas/zulip,RobotCaleb/zulip,synicalsyntax/zulip,jessedhillon/zulip,wavelets/zulip,bluesea/zulip,kaiyuanheshang/zulip,bastianh/zulip,gigawhitlocks/zulip,kaiyuanheshang/zulip,natanovia/zulip,voidException/zulip,arpith/zulip,kokoar/zulip,gkotian/zulip,sonali0901/zulip,ashwinirudrappa/zulip,so0k/zulip,Drooids/zulip,calvinleenyc/zulip,zulip/zulip,ahmadassaf/zulip,yocome/zulip,Juanvulcano/zulip,fw1121/zulip,alliejones/zulip,umkay/zulip,eastlhu/zulip,dxq-git/zulip,souravbadami/zulip,EasonYi/zulip,shaunstanislaus/zulip,hafeez3000/zulip,gkotian/zulip,timabbott/zulip,vabs22/zulip,zofuthan/zulip,calvinleenyc/zulip,zofuthan/zulip,suxinde2009/zulip,sonali0901/zulip,dwrpayne/zulip,yocome/zulip,Galexrt/zulip,tbutter/zulip,rht/zulip,thomasboyt/zulip,arpitpanwar/zulip,zulip/zulip,nicholasbs/zulip,RobotCaleb/zulip,kokoar/zulip,tbutter/zulip,jeffcao/zulip,ericzhou2008/zulip,dattatreya303/zulip,joyhchen/zulip,dwrpayne/zulip,amyliu345/zulip,wweiradio/zulip,jonesgithub/zulip,jimmy54/zulip,dwrpayne/zulip,stamhe/zulip,huangkebo/zulip,paxapy/zulip,Juanvulcano/zulip,wdaher/zulip,yocome/zulip,udxxabp/zulip,fw1121/zulip,MayB/zulip,Batterfii/zul
ip,eastlhu/zulip,Galexrt/zulip,atomic-labs/zulip,synicalsyntax/zulip,ikasumiwt/zulip,themass/zulip,jeffcao/zulip,sharmaeklavya2/zulip,zorojean/zulip,he15his/zulip,mansilladev/zulip,yocome/zulip,ApsOps/zulip,paxapy/zulip,udxxabp/zulip,tommyip/zulip,avastu/zulip,wweiradio/zulip,mdavid/zulip,Suninus/zulip,kaiyuanheshang/zulip,dawran6/zulip,littledogboy/zulip,tommyip/zulip,sonali0901/zulip,noroot/zulip,wweiradio/zulip,huangkebo/zulip,adnanh/zulip,jimmy54/zulip,christi3k/zulip,Juanvulcano/zulip,jackrzhang/zulip,hengqujushi/zulip,souravbadami/zulip,LAndreas/zulip,PhilSk/zulip,dnmfarrell/zulip,dnmfarrell/zulip,dotcool/zulip,showell/zulip,stamhe/zulip,KingxBanana/zulip,hustlzp/zulip,brockwhittaker/zulip,ashwinirudrappa/zulip,christi3k/zulip,Cheppers/zulip,souravbadami/zulip,developerfm/zulip,Galexrt/zulip,calvinleenyc/zulip,vakila/zulip,vikas-parashar/zulip,jphilipsen05/zulip,zhaoweigg/zulip,wavelets/zulip,MariaFaBella85/zulip,umkay/zulip,amanharitsh123/zulip,tdr130/zulip,bastianh/zulip,joshisa/zulip,SmartPeople/zulip,jackrzhang/zulip,yuvipanda/zulip,mohsenSy/zulip,kaiyuanheshang/zulip,PaulPetring/zulip,gigawhitlocks/zulip,shubhamdhama/zulip,KJin99/zulip,levixie/zulip,xuanhan863/zulip,ryanbackman/zulip,joyhchen/zulip,jimmy54/zulip,kou/zulip,armooo/zulip,luyifan/zulip,brockwhittaker/zulip,tommyip/zulip,codeKonami/zulip,akuseru/zulip,dxq-git/zulip,bastianh/zulip,mdavid/zulip,joshisa/zulip,krtkmj/zulip,hafeez3000/zulip,Vallher/zulip,pradiptad/zulip,AZtheAsian/zulip,rishig/zulip,deer-hope/zulip,swinghu/zulip,samatdav/zulip,PhilSk/zulip,suxinde2009/zulip,Diptanshu8/zulip,seapasulli/zulip,andersk/zulip,shubhamdhama/zulip,Galexrt/zulip,joyhchen/zulip,Batterfii/zulip,isht3/zulip,wangdeshui/zulip,armooo/zulip,ericzhou2008/zulip,shaunstanislaus/zulip,glovebx/zulip,Frouk/zulip,ahmadassaf/zulip,shaunstanislaus/zulip,stamhe/zulip,themass/zulip,atomic-labs/zulip,Gabriel0402/zulip,johnnygaddarr/zulip,shubhamdhama/zulip,EasonYi/zulip,Drooids/zulip,xuanhan863/zulip,amanharitsh123/zulip,j831/zulip,andersk/zulip,noroot/zulip,kokoar/zulip,amyliu345/zulip,karamcnair/zulip,vaidap/zulip,synicalsyntax/zulip,bastianh/zulip,glovebx/zulip,zacps/zulip,EasonYi/zulip,dattatreya303/zulip,bowlofstew/zulip,johnny9/zulip,ahmadassaf/zulip,levixie/zulip,guiquanz/zulip,johnnygaddarr/zulip,praveenaki/zulip,jackrzhang/zulip,itnihao/zulip,Gabriel0402/zulip,jrowan/zulip,paxapy/zulip,dattatreya303/zulip,shubhamdhama/zulip,bowlofstew/zulip,christi3k/zulip,xuanhan863/zulip,EasonYi/zulip,vaidap/zulip,Suninus/zulip,deer-hope/zulip,verma-varsha/zulip,zulip/zulip,huangkebo/zulip,joyhchen/zulip,mahim97/zulip,huangkebo/zulip,alliejones/zulip,bowlofstew/zulip,he15his/zulip,calvinleenyc/zulip,kaiyuanheshang/zulip,dwrpayne/zulip,yuvipanda/zulip,Gabriel0402/zulip,bowlofstew/zulip,blaze225/zulip,Frouk/zulip,Juanvulcano/zulip,seapasulli/zulip,gkotian/zulip,ashwinirudrappa/zulip,timabbott/zulip,eeshangarg/zulip,jimmy54/zulip,yocome/zulip,peguin40/zulip,stamhe/zulip,Jianchun1/zulip,ryanbackman/zulip,proliming/zulip,LeeRisk/zulip,dawran6/zulip,dwrpayne/zulip,luyifan/zulip,qq1012803704/zulip,m1ssou/zulip,Batterfii/zulip,lfranchi/zulip,hengqujushi/zulip,reyha/zulip,Cheppers/zulip,j831/zulip,tiansiyuan/zulip,willingc/zulip,paxapy/zulip,technicalpickles/zulip,Vallher/zulip,thomasboyt/zulip,fw1121/zulip,zachallaun/zulip,arpitpanwar/zulip,glovebx/zulip,stamhe/zulip,levixie/zulip,Diptanshu8/zulip,wavelets/zulip,cosmicAsymmetry/zulip,ipernet/zulip,firstblade/zulip,bssrdf/zulip,ryansnowboarder/zulip,bitemyapp/zulip,ashwinirudrappa/zulip,niftynei/zulip,easyfmxu/zuli
p,synicalsyntax/zulip,PhilSk/zulip,zachallaun/zulip,punchagan/zulip,xuxiao/zulip,showell/zulip,brockwhittaker/zulip,themass/zulip,adnanh/zulip,rishig/zulip,vakila/zulip,rishig/zulip,isht3/zulip,natanovia/zulip,kou/zulip,verma-varsha/zulip,Vallher/zulip,glovebx/zulip,bssrdf/zulip,ryanbackman/zulip,brainwane/zulip,zachallaun/zulip,AZtheAsian/zulip,punchagan/zulip,suxinde2009/zulip,ufosky-server/zulip,karamcnair/zulip,codeKonami/zulip,kokoar/zulip,vikas-parashar/zulip,adnanh/zulip,zwily/zulip,zwily/zulip,Drooids/zulip,schatt/zulip,jessedhillon/zulip,armooo/zulip,ApsOps/zulip,avastu/zulip,developerfm/zulip,qq1012803704/zulip,jackrzhang/zulip,jrowan/zulip,tbutter/zulip,Cheppers/zulip,showell/zulip,tdr130/zulip,wweiradio/zulip,DazWorrall/zulip,zacps/zulip,brainwane/zulip,fw1121/zulip,adnanh/zulip,vabs22/zulip,timabbott/zulip,sup95/zulip,niftynei/zulip,wdaher/zulip,mansilladev/zulip,mahim97/zulip,jrowan/zulip,hj3938/zulip,willingc/zulip,lfranchi/zulip,johnny9/zulip,samatdav/zulip,jonesgithub/zulip,blaze225/zulip,PaulPetring/zulip,dawran6/zulip,guiquanz/zulip,wweiradio/zulip,calvinleenyc/zulip,grave-w-grave/zulip,zofuthan/zulip,vikas-parashar/zulip,hafeez3000/zulip,udxxabp/zulip,arpith/zulip,udxxabp/zulip,andersk/zulip,Suninus/zulip,umkay/zulip,yuvipanda/zulip,krtkmj/zulip,LeeRisk/zulip,vabs22/zulip,littledogboy/zulip,he15his/zulip,niftynei/zulip,wavelets/zulip,themass/zulip,RobotCaleb/zulip,adnanh/zulip,tommyip/zulip,huangkebo/zulip,qq1012803704/zulip,cosmicAsymmetry/zulip,showell/zulip,hengqujushi/zulip,fw1121/zulip,fw1121/zulip,noroot/zulip,joshisa/zulip,littledogboy/zulip,Jianchun1/zulip,ikasumiwt/zulip,arpith/zulip,zhaoweigg/zulip,Suninus/zulip,thomasboyt/zulip,luyifan/zulip,technicalpickles/zulip,kokoar/zulip,gigawhitlocks/zulip,saitodisse/zulip,aakash-cr7/zulip,zorojean/zulip,ufosky-server/zulip,johnnygaddarr/zulip,andersk/zulip,dattatreya303/zulip,wangdeshui/zulip,sharmaeklavya2/zulip,tbutter/zulip,ericzhou2008/zulip,xuxiao/zulip,proliming/zulip,LAndreas/zulip,joshisa/zulip,m1ssou/zulip,dattatreya303/zulip,hayderimran7/zulip,jerryge/zulip,Qgap/zulip,seapasulli/zulip,akuseru/zulip,sharmaeklavya2/zulip,babbage/zulip,gigawhitlocks/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,peguin40/zulip,zacps/zulip,wdaher/zulip,mohsenSy/zulip,udxxabp/zulip,peguin40/zulip,firstblade/zulip,alliejones/zulip,KJin99/zulip,mohsenSy/zulip,andersk/zulip,brainwane/zulip,joyhchen/zulip,Diptanshu8/zulip,ikasumiwt/zulip,mahim97/zulip,esander91/zulip,bowlofstew/zulip,hengqujushi/zulip,yocome/zulip,verma-varsha/zulip,Gabriel0402/zulip,karamcnair/zulip,showell/zulip,adnanh/zulip,sup95/zulip,jerryge/zulip,itnihao/zulip,noroot/zulip,amallia/zulip,reyha/zulip,grave-w-grave/zulip,jainayush975/zulip,shrikrishnaholla/zulip,sharmaeklavya2/zulip,karamcnair/zulip,ikasumiwt/zulip,arpith/zulip,jeffcao/zulip,akuseru/zulip,alliejones/zulip,jessedhillon/zulip,ericzhou2008/zulip,mohsenSy/zulip,jphilipsen05/zulip,praveenaki/zulip,jphilipsen05/zulip,niftynei/zulip,umkay/zulip,firstblade/zulip,armooo/zulip,shubhamdhama/zulip,RobotCaleb/zulip,atomic-labs/zulip,synicalsyntax/zulip,zorojean/zulip,saitodisse/zulip,jessedhillon/zulip,AZtheAsian/zulip,karamcnair/zulip,jainayush975/zulip,jackrzhang/zulip,dawran6/zulip,bluesea/zulip,wweiradio/zulip,eastlhu/zulip,wavelets/zulip,hackerkid/zulip,bitemyapp/zulip,DazWorrall/zulip,KJin99/zulip,zhaoweigg/zulip,praveenaki/zulip,umkay/zulip,TigorC/zulip,itnihao/zulip,zachallaun/zulip,babbage/zulip,Gabriel0402/zulip,easyfmxu/zulip,luyifan/zulip,bitemyapp/zulip,yocome/zulip,johnny9/zulip,MariaFaBella85/zulip,avas
tu/zulip,Cheppers/zulip,seapasulli/zulip,jphilipsen05/zulip,technicalpickles/zulip,dhcrzf/zulip,ipernet/zulip,jessedhillon/zulip,nicholasbs/zulip,saitodisse/zulip,calvinleenyc/zulip,zacps/zulip,hustlzp/zulip,fw1121/zulip,eastlhu/zulip,avastu/zulip,hayderimran7/zulip,tdr130/zulip,jonesgithub/zulip,aliceriot/zulip,dhcrzf/zulip,dotcool/zulip,pradiptad/zulip,gkotian/zulip,peiwei/zulip,sharmaeklavya2/zulip,jerryge/zulip,ryanbackman/zulip,dnmfarrell/zulip,akuseru/zulip,hayderimran7/zulip,christi3k/zulip,vikas-parashar/zulip,isht3/zulip,luyifan/zulip,verma-varsha/zulip,eeshangarg/zulip,stamhe/zulip,aakash-cr7/zulip,zorojean/zulip,shubhamdhama/zulip,tommyip/zulip,dotcool/zulip,Jianchun1/zulip,dhcrzf/zulip,kou/zulip,samatdav/zulip,jerryge/zulip,aakash-cr7/zulip,atomic-labs/zulip,swinghu/zulip,Qgap/zulip,bowlofstew/zulip,jainayush975/zulip,vikas-parashar/zulip,JanzTam/zulip,samatdav/zulip,hayderimran7/zulip,hengqujushi/zulip,mdavid/zulip,yuvipanda/zulip,pradiptad/zulip,zulip/zulip,johnny9/zulip,proliming/zulip,SmartPeople/zulip,shrikrishnaholla/zulip,vakila/zulip,amallia/zulip,ryansnowboarder/zulip,hafeez3000/zulip,Suninus/zulip,Jianchun1/zulip,hustlzp/zulip,Vallher/zulip,blaze225/zulip,peiwei/zulip,LAndreas/zulip,suxinde2009/zulip,PhilSk/zulip,rht/zulip,easyfmxu/zulip,souravbadami/zulip,Diptanshu8/zulip,synicalsyntax/zulip,avastu/zulip,ryansnowboarder/zulip,punchagan/zulip,KJin99/zulip,shrikrishnaholla/zulip,ahmadassaf/zulip,tiansiyuan/zulip,tbutter/zulip,ipernet/zulip,dwrpayne/zulip,blaze225/zulip,KJin99/zulip,moria/zulip,LeeRisk/zulip,voidException/zulip,luyifan/zulip,lfranchi/zulip,krtkmj/zulip,rht/zulip,ryanbackman/zulip,itnihao/zulip,vikas-parashar/zulip,sup95/zulip,natanovia/zulip,willingc/zulip,dhcrzf/zulip,peguin40/zulip,guiquanz/zulip,tiansiyuan/zulip,tdr130/zulip,atomic-labs/zulip,hackerkid/zulip,jonesgithub/zulip,LAndreas/zulip,so0k/zulip,huangkebo/zulip,isht3/zulip,jessedhillon/zulip,TigorC/zulip,vakila/zulip,bitemyapp/zulip,JPJPJPOPOP/zulip,Galexrt/zulip,ikasumiwt/zulip,hayderimran7/zulip,ipernet/zulip,swinghu/zulip,PaulPetring/zulip,kokoar/zulip,PaulPetring/zulip,nicholasbs/zulip,ApsOps/zulip,m1ssou/zulip,zorojean/zulip,johnnygaddarr/zulip,andersk/zulip,zhaoweigg/zulip,Diptanshu8/zulip,DazWorrall/zulip,zulip/zulip,zachallaun/zulip,bastianh/zulip,grave-w-grave/zulip,brockwhittaker/zulip,hafeez3000/zulip,PaulPetring/zulip,esander91/zulip,arpitpanwar/zulip,guiquanz/zulip,aps-sids/zulip,jphilipsen05/zulip,thomasboyt/zulip,mahim97/zulip,umkay/zulip,amallia/zulip,johnnygaddarr/zulip,aliceriot/zulip,developerfm/zulip,kaiyuanheshang/zulip,gkotian/zulip,babbage/zulip,j831/zulip,amallia/zulip,hackerkid/zulip,JPJPJPOPOP/zulip,bssrdf/zulip,wdaher/zulip,themass/zulip,ufosky-server/zulip,andersk/zulip,hafeez3000/zulip,qq1012803704/zulip,PaulPetring/zulip,jainayush975/zulip,wangdeshui/zulip,xuanhan863/zulip,JanzTam/zulip,vaidap/zulip,Diptanshu8/zulip,ipernet/zulip,arpith/zulip,Batterfii/zulip,rht/zulip,wdaher/zulip,atomic-labs/zulip,glovebx/zulip,littledogboy/zulip,hackerkid/zulip,umkay/zulip,willingc/zulip,shaunstanislaus/zulip,amanharitsh123/zulip,qq1012803704/zulip,praveenaki/zulip,LeeRisk/zulip,tbutter/zulip,JanzTam/zulip,EasonYi/zulip,Juanvulcano/zulip,johnnygaddarr/zulip,Vallher/zulip,Vallher/zulip,KingxBanana/zulip,Vallher/zulip,arpitpanwar/zulip,KingxBanana/zulip,hustlzp/zulip,ashwinirudrappa/zulip,eastlhu/zulip,vabs22/zulip,joshisa/zulip,TigorC/zulip,arpith/zulip,babbage/zulip,Frouk/zulip,m1ssou/zulip,isht3/zulip,DazWorrall/zulip,hafeez3000/zulip,proliming/zulip,vakila/zulip,Frouk/zulip,ufos
ky-server/zulip,Qgap/zulip,bssrdf/zulip,mdavid/zulip,armooo/zulip,saitodisse/zulip,voidException/zulip,blaze225/zulip,ericzhou2008/zulip,MayB/zulip,aps-sids/zulip,jimmy54/zulip,udxxabp/zulip,aliceriot/zulip,hj3938/zulip,PaulPetring/zulip,willingc/zulip,easyfmxu/zulip,johnnygaddarr/zulip,udxxabp/zulip,gkotian/zulip,pradiptad/zulip,mdavid/zulip,itnihao/zulip,babbage/zulip,dxq-git/zulip,rishig/zulip,gigawhitlocks/zulip,punchagan/zulip,RobotCaleb/zulip,SmartPeople/zulip,akuseru/zulip,amyliu345/zulip,pradiptad/zulip,deer-hope/zulip,joyhchen/zulip,akuseru/zulip,ipernet/zulip,noroot/zulip,MariaFaBella85/zulip,schatt/zulip,wangdeshui/zulip,guiquanz/zulip,zwily/zulip,xuxiao/zulip,moria/zulip,aps-sids/zulip,shrikrishnaholla/zulip,m1ssou/zulip,jerryge/zulip,hj3938/zulip,ahmadassaf/zulip,developerfm/zulip,zhaoweigg/zulip,LAndreas/zulip,easyfmxu/zulip,eastlhu/zulip,PhilSk/zulip,moria/zulip,natanovia/zulip,paxapy/zulip,dnmfarrell/zulip,praveenaki/zulip,thomasboyt/zulip,hj3938/zulip,natanovia/zulip,shrikrishnaholla/zulip,huangkebo/zulip,xuanhan863/zulip,RobotCaleb/zulip,JPJPJPOPOP/zulip,m1ssou/zulip,wavelets/zulip,technicalpickles/zulip,krtkmj/zulip,EasonYi/zulip,sonali0901/zulip,armooo/zulip,showell/zulip,guiquanz/zulip,zorojean/zulip,amyliu345/zulip,MayB/zulip,nicholasbs/zulip,jerryge/zulip,j831/zulip,dnmfarrell/zulip,ikasumiwt/zulip,saitodisse/zulip,aps-sids/zulip,themass/zulip,Frouk/zulip,xuxiao/zulip,luyifan/zulip,MariaFaBella85/zulip,MayB/zulip,he15his/zulip,moria/zulip,Drooids/zulip,zulip/zulip,jrowan/zulip,qq1012803704/zulip,grave-w-grave/zulip,tommyip/zulip,SmartPeople/zulip,reyha/zulip,dawran6/zulip,sharmaeklavya2/zulip,dxq-git/zulip,KingxBanana/zulip,punchagan/zulip,shaunstanislaus/zulip,JanzTam/zulip,deer-hope/zulip,EasonYi/zulip,littledogboy/zulip,LeeRisk/zulip,MariaFaBella85/zulip,so0k/zulip,mdavid/zulip,proliming/zulip,yuvipanda/zulip,johnny9/zulip,dnmfarrell/zulip,joshisa/zulip,shaunstanislaus/zulip,SmartPeople/zulip,noroot/zulip,wdaher/zulip,verma-varsha/zulip,lfranchi/zulip,saitodisse/zulip,esander91/zulip,developerfm/zulip,zofuthan/zulip,johnny9/zulip,so0k/zulip,moria/zulip,MayB/zulip,he15his/zulip,aliceriot/zulip,wavelets/zulip,nicholasbs/zulip
Introduce new manage.py command which creates users with default passwords. (imported from commit ba5ed9cb6ee91435b184845019391e5dc38fc3aa)
import sys

from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from django.utils.timezone import now
from django.core import validators

from zephyr.models import Realm, do_create_user
from zephyr.views import do_send_message
from zephyr.lib.initial_password import initial_password

class Command(BaseCommand):
    help = "Create the specified user with a default initial password."

    def handle(self, *args, **options):
        try:
            email, full_name = args
            try:
                validators.validate_email(email)
            except ValidationError:
                raise CommandError("Invalid email address.")
        except ValueError:
            if len(args) != 0:
                raise CommandError("Either specify an email and full name " + \
                                   "as two parameters, or specify no parameters for " + \
                                   "interactive user creation.")
                return 1
            else:
                while True:
                    email = raw_input("Email: ")
                    try:
                        validators.validate_email(email)
                        break
                    except ValidationError:
                        print >> sys.stderr, "Invalid email address."
                full_name = raw_input("Full name: ")

        try:
            realm = Realm.objects.get(domain=email.split('@')[-1])
        except Realm.DoesNotExist:
            raise CommandError("Realm does not exist.")

        try:
            do_create_user(email, initial_password(email), realm, full_name,
                           email.split('@')[0])
        except IntegrityError:
            raise CommandError("User already exists.")

<commit_before><commit_msg>Introduce new manage.py command which creates users with default passwords. (imported from commit ba5ed9cb6ee91435b184845019391e5dc38fc3aa)<commit_after>

import sys

from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from django.utils.timezone import now
from django.core import validators

from zephyr.models import Realm, do_create_user
from zephyr.views import do_send_message
from zephyr.lib.initial_password import initial_password

class Command(BaseCommand):
    help = "Create the specified user with a default initial password."

    def handle(self, *args, **options):
        try:
            email, full_name = args
            try:
                validators.validate_email(email)
            except ValidationError:
                raise CommandError("Invalid email address.")
        except ValueError:
            if len(args) != 0:
                raise CommandError("Either specify an email and full name " + \
                                   "as two parameters, or specify no parameters for " + \
                                   "interactive user creation.")
                return 1
            else:
                while True:
                    email = raw_input("Email: ")
                    try:
                        validators.validate_email(email)
                        break
                    except ValidationError:
                        print >> sys.stderr, "Invalid email address."
                full_name = raw_input("Full name: ")

        try:
            realm = Realm.objects.get(domain=email.split('@')[-1])
        except Realm.DoesNotExist:
            raise CommandError("Realm does not exist.")

        try:
            do_create_user(email, initial_password(email), realm, full_name,
                           email.split('@')[0])
        except IntegrityError:
            raise CommandError("User already exists.")

Introduce new manage.py command which creates users with default passwords. (imported from commit ba5ed9cb6ee91435b184845019391e5dc38fc3aa)
import sys

from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from django.utils.timezone import now
from django.core import validators

from zephyr.models import Realm, do_create_user
from zephyr.views import do_send_message
from zephyr.lib.initial_password import initial_password

class Command(BaseCommand):
    help = "Create the specified user with a default initial password."

    def handle(self, *args, **options):
        try:
            email, full_name = args
            try:
                validators.validate_email(email)
            except ValidationError:
                raise CommandError("Invalid email address.")
        except ValueError:
            if len(args) != 0:
                raise CommandError("Either specify an email and full name " + \
                                   "as two parameters, or specify no parameters for " + \
                                   "interactive user creation.")
                return 1
            else:
                while True:
                    email = raw_input("Email: ")
                    try:
                        validators.validate_email(email)
                        break
                    except ValidationError:
                        print >> sys.stderr, "Invalid email address."
                full_name = raw_input("Full name: ")

        try:
            realm = Realm.objects.get(domain=email.split('@')[-1])
        except Realm.DoesNotExist:
            raise CommandError("Realm does not exist.")

        try:
            do_create_user(email, initial_password(email), realm, full_name,
                           email.split('@')[0])
        except IntegrityError:
            raise CommandError("User already exists.")

<commit_before><commit_msg>Introduce new manage.py command which creates users with default passwords. (imported from commit ba5ed9cb6ee91435b184845019391e5dc38fc3aa)<commit_after>
import sys

from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from django.utils.timezone import now
from django.core import validators

from zephyr.models import Realm, do_create_user
from zephyr.views import do_send_message
from zephyr.lib.initial_password import initial_password

class Command(BaseCommand):
    help = "Create the specified user with a default initial password."

    def handle(self, *args, **options):
        try:
            email, full_name = args
            try:
                validators.validate_email(email)
            except ValidationError:
                raise CommandError("Invalid email address.")
        except ValueError:
            if len(args) != 0:
                raise CommandError("Either specify an email and full name " + \
                                   "as two parameters, or specify no parameters for " + \
                                   "interactive user creation.")
                return 1
            else:
                while True:
                    email = raw_input("Email: ")
                    try:
                        validators.validate_email(email)
                        break
                    except ValidationError:
                        print >> sys.stderr, "Invalid email address."
                full_name = raw_input("Full name: ")

        try:
            realm = Realm.objects.get(domain=email.split('@')[-1])
        except Realm.DoesNotExist:
            raise CommandError("Realm does not exist.")

        try:
            do_create_user(email, initial_password(email), realm, full_name,
                           email.split('@')[0])
        except IntegrityError:
            raise CommandError("User already exists.")
04a46726e95b42d3566456338df04c222d5a2863
core/main.py
core/main.py
import time

import RPi.GPIO as io

from urllib.parse import urlencode
from urllib.request import Request, urlopen

io.setmode(io.BCM)

contact_pin = 7
vibration_pin = 11
sound_pin = 13
pins_array = [contact_pin, vibration_pin, sound_pin]

url = 'http://localhost:8000/triggers'

def setup_door():
    # So, that input pin uses the extra argument (pull_up_down=io.PUD_UP).
    # This activates an internal resistor that makes the input HIGH
    # (pulled-up) unless something stronger (like a switch connecting it
    # to GND) pulls it LOW.
    io.setup(contact_pin, io.IN, pull_up_down=io.PUD_UP)

def setup_vibration():
    io.setup(vibration_pin, io.IN)

def setup_sound():
    io.setup(sound_pin, io.IN)

def main():
    setup_door()
    setup_vibration()
    setup_sound()

    while True:
        for pin in pins_array:
            if io.input(pin):
                sensor_id = pins_array.index(pin) + 1
        time.sleep(0.5)

def trigger_sensor(sensor_args):
    request = Request(url, urlencode(sensor_args).encode())

main()
Add some pseudo core for the core
Add some pseudo core for the core
Python
mit
arnaudoff/watcher,arnaudoff/watcher,arnaudoff/watcher
Add some pseudo core for the core
import time

import RPi.GPIO as io

from urllib.parse import urlencode
from urllib.request import Request, urlopen

io.setmode(io.BCM)

contact_pin = 7
vibration_pin = 11
sound_pin = 13
pins_array = [contact_pin, vibration_pin, sound_pin]

url = 'http://localhost:8000/triggers'

def setup_door():
    # So, that input pin uses the extra argument (pull_up_down=io.PUD_UP).
    # This activates an internal resistor that makes the input HIGH
    # (pulled-up) unless something stronger (like a switch connecting it
    # to GND) pulls it LOW.
    io.setup(contact_pin, io.IN, pull_up_down=io.PUD_UP)

def setup_vibration():
    io.setup(vibration_pin, io.IN)

def setup_sound():
    io.setup(sound_pin, io.IN)

def main():
    setup_door()
    setup_vibration()
    setup_sound()

    while True:
        for pin in pins_array:
            if io.input(pin):
                sensor_id = pins_array.index(pin) + 1
        time.sleep(0.5)

def trigger_sensor(sensor_args):
    request = Request(url, urlencode(sensor_args).encode())

main()

<commit_before><commit_msg>Add some pseudo core for the core<commit_after>

import time

import RPi.GPIO as io

from urllib.parse import urlencode
from urllib.request import Request, urlopen

io.setmode(io.BCM)

contact_pin = 7
vibration_pin = 11
sound_pin = 13
pins_array = [contact_pin, vibration_pin, sound_pin]

url = 'http://localhost:8000/triggers'

def setup_door():
    # So, that input pin uses the extra argument (pull_up_down=io.PUD_UP).
    # This activates an internal resistor that makes the input HIGH
    # (pulled-up) unless something stronger (like a switch connecting it
    # to GND) pulls it LOW.
    io.setup(contact_pin, io.IN, pull_up_down=io.PUD_UP)

def setup_vibration():
    io.setup(vibration_pin, io.IN)

def setup_sound():
    io.setup(sound_pin, io.IN)

def main():
    setup_door()
    setup_vibration()
    setup_sound()

    while True:
        for pin in pins_array:
            if io.input(pin):
                sensor_id = pins_array.index(pin) + 1
        time.sleep(0.5)

def trigger_sensor(sensor_args):
    request = Request(url, urlencode(sensor_args).encode())

main()

Add some pseudo core for the core
import time

import RPi.GPIO as io

from urllib.parse import urlencode
from urllib.request import Request, urlopen

io.setmode(io.BCM)

contact_pin = 7
vibration_pin = 11
sound_pin = 13
pins_array = [contact_pin, vibration_pin, sound_pin]

url = 'http://localhost:8000/triggers'

def setup_door():
    # So, that input pin uses the extra argument (pull_up_down=io.PUD_UP).
    # This activates an internal resistor that makes the input HIGH
    # (pulled-up) unless something stronger (like a switch connecting it
    # to GND) pulls it LOW.
    io.setup(contact_pin, io.IN, pull_up_down=io.PUD_UP)

def setup_vibration():
    io.setup(vibration_pin, io.IN)

def setup_sound():
    io.setup(sound_pin, io.IN)

def main():
    setup_door()
    setup_vibration()
    setup_sound()

    while True:
        for pin in pins_array:
            if io.input(pin):
                sensor_id = pins_array.index(pin) + 1
        time.sleep(0.5)

def trigger_sensor(sensor_args):
    request = Request(url, urlencode(sensor_args).encode())

main()

<commit_before><commit_msg>Add some pseudo core for the core<commit_after>
import time

import RPi.GPIO as io

from urllib.parse import urlencode
from urllib.request import Request, urlopen

io.setmode(io.BCM)

contact_pin = 7
vibration_pin = 11
sound_pin = 13
pins_array = [contact_pin, vibration_pin, sound_pin]

url = 'http://localhost:8000/triggers'

def setup_door():
    # So, that input pin uses the extra argument (pull_up_down=io.PUD_UP).
    # This activates an internal resistor that makes the input HIGH
    # (pulled-up) unless something stronger (like a switch connecting it
    # to GND) pulls it LOW.
    io.setup(contact_pin, io.IN, pull_up_down=io.PUD_UP)

def setup_vibration():
    io.setup(vibration_pin, io.IN)

def setup_sound():
    io.setup(sound_pin, io.IN)

def main():
    setup_door()
    setup_vibration()
    setup_sound()

    while True:
        for pin in pins_array:
            if io.input(pin):
                sensor_id = pins_array.index(pin) + 1
        time.sleep(0.5)

def trigger_sensor(sensor_args):
    request = Request(url, urlencode(sensor_args).encode())

main()
628e571d3c29f806ac98154f68c428a05c43e8e2
gaphor/diagram/tests/test_inlineeditors.py
gaphor/diagram/tests/test_inlineeditors.py
import pytest
from gaphas.painter import BoundingBoxPainter
from gaphas.view import GtkView

from gaphor import UML
from gaphor.diagram.inlineeditors import named_item_inline_editor
from gaphor.diagram.painter import ItemPainter
from gaphor.diagram.selection import Selection

@pytest.fixture
def view(diagram):
    view = GtkView(model=diagram, selection=Selection())
    view._qtree.resize((-100, -100, 400, 400))
    item_painter = ItemPainter(view.selection)
    view.painter = item_painter
    view.bounding_box_painter = BoundingBoxPainter(item_painter)
    return view

def test_named_item_inline_editor_with_element(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.ClassItem, subject=element_factory.create(UML.Class)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_with_line(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.DependencyItem, subject=element_factory.create(UML.Dependency)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_without_item(diagram, element_factory, view):
    item = diagram.create(UML.classes.DependencyItem)
    result = named_item_inline_editor(item, view)
    assert result is False
Add some inline editor tests
Add some inline editor tests
Python
lgpl-2.1
amolenaar/gaphor,amolenaar/gaphor
Add some inline editor tests
import pytest
from gaphas.painter import BoundingBoxPainter
from gaphas.view import GtkView

from gaphor import UML
from gaphor.diagram.inlineeditors import named_item_inline_editor
from gaphor.diagram.painter import ItemPainter
from gaphor.diagram.selection import Selection

@pytest.fixture
def view(diagram):
    view = GtkView(model=diagram, selection=Selection())
    view._qtree.resize((-100, -100, 400, 400))
    item_painter = ItemPainter(view.selection)
    view.painter = item_painter
    view.bounding_box_painter = BoundingBoxPainter(item_painter)
    return view

def test_named_item_inline_editor_with_element(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.ClassItem, subject=element_factory.create(UML.Class)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_with_line(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.DependencyItem, subject=element_factory.create(UML.Dependency)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_without_item(diagram, element_factory, view):
    item = diagram.create(UML.classes.DependencyItem)
    result = named_item_inline_editor(item, view)
    assert result is False

<commit_before><commit_msg>Add some inline editor tests<commit_after>

import pytest
from gaphas.painter import BoundingBoxPainter
from gaphas.view import GtkView

from gaphor import UML
from gaphor.diagram.inlineeditors import named_item_inline_editor
from gaphor.diagram.painter import ItemPainter
from gaphor.diagram.selection import Selection

@pytest.fixture
def view(diagram):
    view = GtkView(model=diagram, selection=Selection())
    view._qtree.resize((-100, -100, 400, 400))
    item_painter = ItemPainter(view.selection)
    view.painter = item_painter
    view.bounding_box_painter = BoundingBoxPainter(item_painter)
    return view

def test_named_item_inline_editor_with_element(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.ClassItem, subject=element_factory.create(UML.Class)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_with_line(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.DependencyItem, subject=element_factory.create(UML.Dependency)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_without_item(diagram, element_factory, view):
    item = diagram.create(UML.classes.DependencyItem)
    result = named_item_inline_editor(item, view)
    assert result is False

Add some inline editor tests
import pytest
from gaphas.painter import BoundingBoxPainter
from gaphas.view import GtkView

from gaphor import UML
from gaphor.diagram.inlineeditors import named_item_inline_editor
from gaphor.diagram.painter import ItemPainter
from gaphor.diagram.selection import Selection

@pytest.fixture
def view(diagram):
    view = GtkView(model=diagram, selection=Selection())
    view._qtree.resize((-100, -100, 400, 400))
    item_painter = ItemPainter(view.selection)
    view.painter = item_painter
    view.bounding_box_painter = BoundingBoxPainter(item_painter)
    return view

def test_named_item_inline_editor_with_element(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.ClassItem, subject=element_factory.create(UML.Class)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_with_line(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.DependencyItem, subject=element_factory.create(UML.Dependency)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_without_item(diagram, element_factory, view):
    item = diagram.create(UML.classes.DependencyItem)
    result = named_item_inline_editor(item, view)
    assert result is False

<commit_before><commit_msg>Add some inline editor tests<commit_after>
import pytest
from gaphas.painter import BoundingBoxPainter
from gaphas.view import GtkView

from gaphor import UML
from gaphor.diagram.inlineeditors import named_item_inline_editor
from gaphor.diagram.painter import ItemPainter
from gaphor.diagram.selection import Selection

@pytest.fixture
def view(diagram):
    view = GtkView(model=diagram, selection=Selection())
    view._qtree.resize((-100, -100, 400, 400))
    item_painter = ItemPainter(view.selection)
    view.painter = item_painter
    view.bounding_box_painter = BoundingBoxPainter(item_painter)
    return view

def test_named_item_inline_editor_with_element(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.ClassItem, subject=element_factory.create(UML.Class)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_with_line(diagram, element_factory, view):
    item = diagram.create(
        UML.classes.DependencyItem, subject=element_factory.create(UML.Dependency)
    )
    view.selection.hovered_item = item
    result = named_item_inline_editor(item, view)
    assert result is True

def test_named_item_inline_editor_without_item(diagram, element_factory, view):
    item = diagram.create(UML.classes.DependencyItem)
    result = named_item_inline_editor(item, view)
    assert result is False
9c11b014e1dc5e93cf3df2b074b27ceab1edf0ef
tests/test_connection_timeout.py
tests/test_connection_timeout.py
# -*- coding: utf-8 -*-

import time

import pytest

import pymssql

def test_connect_timeout():
    for to in range(2,20,2):
        t = time.time()
        try:
            pymssql.connect(server="www.google.com", port=81, user='username',
                            password='password', login_timeout=to)
        except pymssql.OperationalError:
            pass
        t = time.time() - t
        #print(to, t)
        assert t == pytest.approx(to, 1)
Add test for connection timeout.
Add test for connection timeout.
Python
lgpl-2.1
pymssql/pymssql,pymssql/pymssql
Add test for connection timeout.
# -*- coding: utf-8 -*-

import time

import pytest

import pymssql

def test_connect_timeout():
    for to in range(2,20,2):
        t = time.time()
        try:
            pymssql.connect(server="www.google.com", port=81, user='username',
                            password='password', login_timeout=to)
        except pymssql.OperationalError:
            pass
        t = time.time() - t
        #print(to, t)
        assert t == pytest.approx(to, 1)

<commit_before><commit_msg>Add test for connection timeout.<commit_after>

# -*- coding: utf-8 -*-

import time

import pytest

import pymssql

def test_connect_timeout():
    for to in range(2,20,2):
        t = time.time()
        try:
            pymssql.connect(server="www.google.com", port=81, user='username',
                            password='password', login_timeout=to)
        except pymssql.OperationalError:
            pass
        t = time.time() - t
        #print(to, t)
        assert t == pytest.approx(to, 1)

Add test for connection timeout.
# -*- coding: utf-8 -*-

import time

import pytest

import pymssql

def test_connect_timeout():
    for to in range(2,20,2):
        t = time.time()
        try:
            pymssql.connect(server="www.google.com", port=81, user='username',
                            password='password', login_timeout=to)
        except pymssql.OperationalError:
            pass
        t = time.time() - t
        #print(to, t)
        assert t == pytest.approx(to, 1)

<commit_before><commit_msg>Add test for connection timeout.<commit_after>
# -*- coding: utf-8 -*-

import time

import pytest

import pymssql

def test_connect_timeout():
    for to in range(2,20,2):
        t = time.time()
        try:
            pymssql.connect(server="www.google.com", port=81, user='username',
                            password='password', login_timeout=to)
        except pymssql.OperationalError:
            pass
        t = time.time() - t
        #print(to, t)
        assert t == pytest.approx(to, 1)
c6113680e79ba076e4de06396ba75c18db4c98d0
scratch_test.py
scratch_test.py
from bluesky import RunEngine

RE = RunEngine({})

from ophyd.sim import det
from bluesky.plans import count
from suitcase.jsonl import Serializer

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()
Add temporary scratch test for dev.
TMP: Add temporary scratch test for dev.
Python
bsd-3-clause
ericdill/databroker,ericdill/databroker
TMP: Add temporary scratch test for dev.
from bluesky import RunEngine

RE = RunEngine({})

from ophyd.sim import det
from bluesky.plans import count
from suitcase.jsonl import Serializer

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

<commit_before><commit_msg>TMP: Add temporary scratch test for dev.<commit_after>

from bluesky import RunEngine

RE = RunEngine({})

from ophyd.sim import det
from bluesky.plans import count
from suitcase.jsonl import Serializer

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

TMP: Add temporary scratch test for dev.
from bluesky import RunEngine

RE = RunEngine({})

from ophyd.sim import det
from bluesky.plans import count
from suitcase.jsonl import Serializer

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

<commit_before><commit_msg>TMP: Add temporary scratch test for dev.<commit_after>
from bluesky import RunEngine

RE = RunEngine({})

from ophyd.sim import det
from bluesky.plans import count
from suitcase.jsonl import Serializer

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()

serializer = Serializer('')
RE(count([det]), serializer)
serializer.close()
1ff0b5fe7651b836ac9010b2970ad6b45a0739e7
tools/verify_simulator_result.py
tools/verify_simulator_result.py
#! /usr/bin/env python

import os
import os.path
import sys
import json

success = 0

def _extractGenome(header):
    headers = header.split('|')
    genome = headers[4].strip()
    pos = genome.find(",")
    if pos > 0:
        genome = genome[:pos]
    pos = genome.find(" chromosome")
    if pos > 0:
        genome = genome[:pos]
    return genome

def _parseClassified(path):
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        if ctype == "CLASSIFIED":
            r = result[0]
            #print "> " + ctype + "\t" + r['header'] + "\t" + str(r['score'])
            genome = _extractGenome(r['header'])
            source = _extractGenome(query_header)
            if genome == source:
                global success
                success = success + 1
    f.close()

def _parseVague(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "VAGUE":
            print "> "
            for r in result:
                print ctype + "\t" + r['header'] + "\t" + str(r['score'])
        """
    f.close()

def _parseUnknown(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "UNKNOWN":
            print "> " + ctype + query_header
        """
    f.close()

def _parse(path):
    print "parsing", path
    _parseClassified(path);
    _parseVague(path);
    _parseUnknown(path);

def parse(path):
    if os.path.isdir(path):
        for p in os.listdir(path):
            _parse(os.path.join(path, p))
    else:
        _parse(path)

    print "total successful classification =", success

def main():
    parse(sys.argv[1])

if __name__ == "__main__":
    main()
Add simulator result parser tool
Add simulator result parser tool
Python
apache-2.0
iychoi/biospectra,iychoi/biospectra,iychoi/biospectra
Add simulator result parser tool
#! /usr/bin/env python

import os
import os.path
import sys
import json

success = 0

def _extractGenome(header):
    headers = header.split('|')
    genome = headers[4].strip()
    pos = genome.find(",")
    if pos > 0:
        genome = genome[:pos]
    pos = genome.find(" chromosome")
    if pos > 0:
        genome = genome[:pos]
    return genome

def _parseClassified(path):
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        if ctype == "CLASSIFIED":
            r = result[0]
            #print "> " + ctype + "\t" + r['header'] + "\t" + str(r['score'])
            genome = _extractGenome(r['header'])
            source = _extractGenome(query_header)
            if genome == source:
                global success
                success = success + 1
    f.close()

def _parseVague(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "VAGUE":
            print "> "
            for r in result:
                print ctype + "\t" + r['header'] + "\t" + str(r['score'])
        """
    f.close()

def _parseUnknown(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "UNKNOWN":
            print "> " + ctype + query_header
        """
    f.close()

def _parse(path):
    print "parsing", path
    _parseClassified(path);
    _parseVague(path);
    _parseUnknown(path);

def parse(path):
    if os.path.isdir(path):
        for p in os.listdir(path):
            _parse(os.path.join(path, p))
    else:
        _parse(path)

    print "total successful classification =", success

def main():
    parse(sys.argv[1])

if __name__ == "__main__":
    main()

<commit_before><commit_msg>Add simulator result parser tool<commit_after>

#! /usr/bin/env python

import os
import os.path
import sys
import json

success = 0

def _extractGenome(header):
    headers = header.split('|')
    genome = headers[4].strip()
    pos = genome.find(",")
    if pos > 0:
        genome = genome[:pos]
    pos = genome.find(" chromosome")
    if pos > 0:
        genome = genome[:pos]
    return genome

def _parseClassified(path):
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        if ctype == "CLASSIFIED":
            r = result[0]
            #print "> " + ctype + "\t" + r['header'] + "\t" + str(r['score'])
            genome = _extractGenome(r['header'])
            source = _extractGenome(query_header)
            if genome == source:
                global success
                success = success + 1
    f.close()

def _parseVague(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "VAGUE":
            print "> "
            for r in result:
                print ctype + "\t" + r['header'] + "\t" + str(r['score'])
        """
    f.close()

def _parseUnknown(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "UNKNOWN":
            print "> " + ctype + query_header
        """
    f.close()

def _parse(path):
    print "parsing", path
    _parseClassified(path);
    _parseVague(path);
    _parseUnknown(path);

def parse(path):
    if os.path.isdir(path):
        for p in os.listdir(path):
            _parse(os.path.join(path, p))
    else:
        _parse(path)

    print "total successful classification =", success

def main():
    parse(sys.argv[1])

if __name__ == "__main__":
    main()

Add simulator result parser tool
#! /usr/bin/env python

import os
import os.path
import sys
import json

success = 0

def _extractGenome(header):
    headers = header.split('|')
    genome = headers[4].strip()
    pos = genome.find(",")
    if pos > 0:
        genome = genome[:pos]
    pos = genome.find(" chromosome")
    if pos > 0:
        genome = genome[:pos]
    return genome

def _parseClassified(path):
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        if ctype == "CLASSIFIED":
            r = result[0]
            #print "> " + ctype + "\t" + r['header'] + "\t" + str(r['score'])
            genome = _extractGenome(r['header'])
            source = _extractGenome(query_header)
            if genome == source:
                global success
                success = success + 1
    f.close()

def _parseVague(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "VAGUE":
            print "> "
            for r in result:
                print ctype + "\t" + r['header'] + "\t" + str(r['score'])
        """
    f.close()

def _parseUnknown(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "UNKNOWN":
            print "> " + ctype + query_header
        """
    f.close()

def _parse(path):
    print "parsing", path
    _parseClassified(path);
    _parseVague(path);
    _parseUnknown(path);

def parse(path):
    if os.path.isdir(path):
        for p in os.listdir(path):
            _parse(os.path.join(path, p))
    else:
        _parse(path)

    print "total successful classification =", success

def main():
    parse(sys.argv[1])

if __name__ == "__main__":
    main()

<commit_before><commit_msg>Add simulator result parser tool<commit_after>
#! /usr/bin/env python

import os
import os.path
import sys
import json

success = 0

def _extractGenome(header):
    headers = header.split('|')
    genome = headers[4].strip()
    pos = genome.find(",")
    if pos > 0:
        genome = genome[:pos]
    pos = genome.find(" chromosome")
    if pos > 0:
        genome = genome[:pos]
    return genome

def _parseClassified(path):
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        if ctype == "CLASSIFIED":
            r = result[0]
            #print "> " + ctype + "\t" + r['header'] + "\t" + str(r['score'])
            genome = _extractGenome(r['header'])
            source = _extractGenome(query_header)
            if genome == source:
                global success
                success = success + 1
    f.close()

def _parseVague(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "VAGUE":
            print "> "
            for r in result:
                print ctype + "\t" + r['header'] + "\t" + str(r['score'])
        """
    f.close()

def _parseUnknown(path):
    count=0
    f = open(path)
    for line in f:
        j = json.loads(line)
        query = j['query']
        query_header = j['query_header']
        result = j['result']
        ctype = j['type']
        """
        if ctype == "UNKNOWN":
            print "> " + ctype + query_header
        """
    f.close()

def _parse(path):
    print "parsing", path
    _parseClassified(path);
    _parseVague(path);
    _parseUnknown(path);

def parse(path):
    if os.path.isdir(path):
        for p in os.listdir(path):
            _parse(os.path.join(path, p))
    else:
        _parse(path)

    print "total successful classification =", success

def main():
    parse(sys.argv[1])

if __name__ == "__main__":
    main()
f3d00e7f0b76db522e55ed0f1988dfdf7e5cf3ff
bin/contig_statistics.py
bin/contig_statistics.py
#! /usr/bin/env python3

import argparse
import statistics

parser = argparse.ArgumentParser(description='Input Newick tree.')

parser.add_argument(
    '-k',
    type=int,
    metavar='int',
    dest='k',
    required=True,
    help='k-mer length',
)

parser.add_argument(
    '-f','--fai',
    type=str,
    metavar='str',
    dest='fai_fn',
    required=True,
    help='Fasta index (.fai).',
)

args = parser.parse_args()

lengths=[]

with open(args.fai_fn) as file:
    for x in file:
        c = x.split()
        lengths.append(int(c[1]))

lengths.sort()

contig_nb=len(lengths)
len_total=sum(lengths)
len_mean=statistics.mean(lengths)
len_stdev=statistics.stdev(lengths)
len_median=statistics.median(lengths)
kmer_occ=len_total - contig_nb * (args.k - 1)

print("Number of contigs: {}".format(contig_nb))
print("Total length: {}".format(len_total))
print("Average length: {}".format(len_mean))
print(" ..st. dev: {}".format(len_stdev))
print("Median length: {}".format(len_median))
print("Number of k-mer occurencies: {}".format(kmer_occ))
Add script for contig statistics for FAI
Add script for contig statistics for FAI Former-commit-id: e7b46625514f8db9b99d474ad181629e483ecbf7
Python
mit
karel-brinda/prophyle,karel-brinda/prophyle,karel-brinda/prophyle,karel-brinda/prophyle
Add script for contig statistics for FAI Former-commit-id: e7b46625514f8db9b99d474ad181629e483ecbf7
#! /usr/bin/env python3

import argparse
import statistics

parser = argparse.ArgumentParser(description='Input Newick tree.')

parser.add_argument(
    '-k',
    type=int,
    metavar='int',
    dest='k',
    required=True,
    help='k-mer length',
)

parser.add_argument(
    '-f','--fai',
    type=str,
    metavar='str',
    dest='fai_fn',
    required=True,
    help='Fasta index (.fai).',
)

args = parser.parse_args()

lengths=[]

with open(args.fai_fn) as file:
    for x in file:
        c = x.split()
        lengths.append(int(c[1]))

lengths.sort()

contig_nb=len(lengths)
len_total=sum(lengths)
len_mean=statistics.mean(lengths)
len_stdev=statistics.stdev(lengths)
len_median=statistics.median(lengths)
kmer_occ=len_total - contig_nb * (args.k - 1)

print("Number of contigs: {}".format(contig_nb))
print("Total length: {}".format(len_total))
print("Average length: {}".format(len_mean))
print(" ..st. dev: {}".format(len_stdev))
print("Median length: {}".format(len_median))
print("Number of k-mer occurencies: {}".format(kmer_occ))

<commit_before><commit_msg>Add script for contig statistics for FAI Former-commit-id: e7b46625514f8db9b99d474ad181629e483ecbf7<commit_after>

#! /usr/bin/env python3

import argparse
import statistics

parser = argparse.ArgumentParser(description='Input Newick tree.')

parser.add_argument(
    '-k',
    type=int,
    metavar='int',
    dest='k',
    required=True,
    help='k-mer length',
)

parser.add_argument(
    '-f','--fai',
    type=str,
    metavar='str',
    dest='fai_fn',
    required=True,
    help='Fasta index (.fai).',
)

args = parser.parse_args()

lengths=[]

with open(args.fai_fn) as file:
    for x in file:
        c = x.split()
        lengths.append(int(c[1]))

lengths.sort()

contig_nb=len(lengths)
len_total=sum(lengths)
len_mean=statistics.mean(lengths)
len_stdev=statistics.stdev(lengths)
len_median=statistics.median(lengths)
kmer_occ=len_total - contig_nb * (args.k - 1)

print("Number of contigs: {}".format(contig_nb))
print("Total length: {}".format(len_total))
print("Average length: {}".format(len_mean))
print(" ..st. dev: {}".format(len_stdev))
print("Median length: {}".format(len_median))
print("Number of k-mer occurencies: {}".format(kmer_occ))

Add script for contig statistics for FAI
Former-commit-id: e7b46625514f8db9b99d474ad181629e483ecbf7
#! /usr/bin/env python3

import argparse
import statistics

parser = argparse.ArgumentParser(description='Input Newick tree.')

parser.add_argument(
    '-k',
    type=int,
    metavar='int',
    dest='k',
    required=True,
    help='k-mer length',
)

parser.add_argument(
    '-f','--fai',
    type=str,
    metavar='str',
    dest='fai_fn',
    required=True,
    help='Fasta index (.fai).',
)

args = parser.parse_args()

lengths=[]

with open(args.fai_fn) as file:
    for x in file:
        c = x.split()
        lengths.append(int(c[1]))

lengths.sort()

contig_nb=len(lengths)
len_total=sum(lengths)
len_mean=statistics.mean(lengths)
len_stdev=statistics.stdev(lengths)
len_median=statistics.median(lengths)
kmer_occ=len_total - contig_nb * (args.k - 1)

print("Number of contigs: {}".format(contig_nb))
print("Total length: {}".format(len_total))
print("Average length: {}".format(len_mean))
print(" ..st. dev: {}".format(len_stdev))
print("Median length: {}".format(len_median))
print("Number of k-mer occurencies: {}".format(kmer_occ))

<commit_before><commit_msg>Add script for contig statistics for FAI Former-commit-id: e7b46625514f8db9b99d474ad181629e483ecbf7<commit_after>
#! /usr/bin/env python3

import argparse
import statistics

parser = argparse.ArgumentParser(description='Input Newick tree.')

parser.add_argument(
    '-k',
    type=int,
    metavar='int',
    dest='k',
    required=True,
    help='k-mer length',
)

parser.add_argument(
    '-f','--fai',
    type=str,
    metavar='str',
    dest='fai_fn',
    required=True,
    help='Fasta index (.fai).',
)

args = parser.parse_args()

lengths=[]

with open(args.fai_fn) as file:
    for x in file:
        c = x.split()
        lengths.append(int(c[1]))

lengths.sort()

contig_nb=len(lengths)
len_total=sum(lengths)
len_mean=statistics.mean(lengths)
len_stdev=statistics.stdev(lengths)
len_median=statistics.median(lengths)
kmer_occ=len_total - contig_nb * (args.k - 1)

print("Number of contigs: {}".format(contig_nb))
print("Total length: {}".format(len_total))
print("Average length: {}".format(len_mean))
print(" ..st. dev: {}".format(len_stdev))
print("Median length: {}".format(len_median))
print("Number of k-mer occurencies: {}".format(kmer_occ))
7ef79a8e434593612a223be0f40173639acc8a1c
scripts/read_in_lines.py
scripts/read_in_lines.py
from image_process.hough_transform import get_hough_lines
from lines.line import Point, LineSegment
import matplotlib.pyplot as plt
import os

image_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data',
                          'hough_test', 'Test_Set_1', 'PNGs', 'C(C)C(CCCC)(C)C.png')

lines = get_hough_lines(image_path)

line_segment_list = []
for line_seg in lines:
    point_1 = Point(*line_seg[0])
    point_2 = Point(*line_seg[1])
    new_line = LineSegment([point_1, point_2])
    line_segment_list.append(new_line)

l1 = line_segment_list[0]
l2 = line_segment_list[10]

print l1.pts
print l2.pts
print l1.getDifference(l2)
print l1.m
print l1.b

plt.figure()
plt.plot([l1.pts[0].x, l1.pts[1].x], [l1.pts[0].y, l1.pts[1].y])
plt.plot([l2.pts[0].x, l2.pts[1].x], [l2.pts[0].y, l2.pts[1].y])

deltas = []
thetas = []
for line_segs in line_segment_list:
    if line_segs == l1:
        continue
    diffs = l1.getDifference(line_segs)
    deltas.append(diffs[0])
    thetas.append(diffs[1])

plt.figure()
plt.plot(deltas, thetas, 'o')
plt.show()
Add Script for Testing Line Segment Features
Add Script for Testing Line Segment Features
Python
mit
Molecular-Image-Recognition/Molecular-Image-Recognition
Add Script for Testing Line Segment Features
from image_process.hough_transform import get_hough_lines
from lines.line import Point, LineSegment
import matplotlib.pyplot as plt
import os

image_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data',
                          'hough_test', 'Test_Set_1', 'PNGs', 'C(C)C(CCCC)(C)C.png')

lines = get_hough_lines(image_path)

line_segment_list = []
for line_seg in lines:
    point_1 = Point(*line_seg[0])
    point_2 = Point(*line_seg[1])
    new_line = LineSegment([point_1, point_2])
    line_segment_list.append(new_line)

l1 = line_segment_list[0]
l2 = line_segment_list[10]

print l1.pts
print l2.pts
print l1.getDifference(l2)
print l1.m
print l1.b

plt.figure()
plt.plot([l1.pts[0].x, l1.pts[1].x], [l1.pts[0].y, l1.pts[1].y])
plt.plot([l2.pts[0].x, l2.pts[1].x], [l2.pts[0].y, l2.pts[1].y])

deltas = []
thetas = []
for line_segs in line_segment_list:
    if line_segs == l1:
        continue
    diffs = l1.getDifference(line_segs)
    deltas.append(diffs[0])
    thetas.append(diffs[1])

plt.figure()
plt.plot(deltas, thetas, 'o')
plt.show()

<commit_before><commit_msg>Add Script for Testing Line Segment Features<commit_after>

from image_process.hough_transform import get_hough_lines
from lines.line import Point, LineSegment
import matplotlib.pyplot as plt
import os

image_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data',
                          'hough_test', 'Test_Set_1', 'PNGs', 'C(C)C(CCCC)(C)C.png')

lines = get_hough_lines(image_path)

line_segment_list = []
for line_seg in lines:
    point_1 = Point(*line_seg[0])
    point_2 = Point(*line_seg[1])
    new_line = LineSegment([point_1, point_2])
    line_segment_list.append(new_line)

l1 = line_segment_list[0]
l2 = line_segment_list[10]

print l1.pts
print l2.pts
print l1.getDifference(l2)
print l1.m
print l1.b

plt.figure()
plt.plot([l1.pts[0].x, l1.pts[1].x], [l1.pts[0].y, l1.pts[1].y])
plt.plot([l2.pts[0].x, l2.pts[1].x], [l2.pts[0].y, l2.pts[1].y])

deltas = []
thetas = []
for line_segs in line_segment_list:
    if line_segs == l1:
        continue
    diffs = l1.getDifference(line_segs)
    deltas.append(diffs[0])
    thetas.append(diffs[1])

plt.figure()
plt.plot(deltas, thetas, 'o')
plt.show()

Add Script for Testing Line Segment Features
from image_process.hough_transform import get_hough_lines
from lines.line import Point, LineSegment
import matplotlib.pyplot as plt
import os

image_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data',
                          'hough_test', 'Test_Set_1', 'PNGs', 'C(C)C(CCCC)(C)C.png')

lines = get_hough_lines(image_path)

line_segment_list = []
for line_seg in lines:
    point_1 = Point(*line_seg[0])
    point_2 = Point(*line_seg[1])
    new_line = LineSegment([point_1, point_2])
    line_segment_list.append(new_line)

l1 = line_segment_list[0]
l2 = line_segment_list[10]

print l1.pts
print l2.pts
print l1.getDifference(l2)
print l1.m
print l1.b

plt.figure()
plt.plot([l1.pts[0].x, l1.pts[1].x], [l1.pts[0].y, l1.pts[1].y])
plt.plot([l2.pts[0].x, l2.pts[1].x], [l2.pts[0].y, l2.pts[1].y])

deltas = []
thetas = []
for line_segs in line_segment_list:
    if line_segs == l1:
        continue
    diffs = l1.getDifference(line_segs)
    deltas.append(diffs[0])
    thetas.append(diffs[1])

plt.figure()
plt.plot(deltas, thetas, 'o')
plt.show()

<commit_before><commit_msg>Add Script for Testing Line Segment Features<commit_after>
from image_process.hough_transform import get_hough_lines
from lines.line import Point, LineSegment
import matplotlib.pyplot as plt
import os

image_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data',
                          'hough_test', 'Test_Set_1', 'PNGs', 'C(C)C(CCCC)(C)C.png')

lines = get_hough_lines(image_path)

line_segment_list = []
for line_seg in lines:
    point_1 = Point(*line_seg[0])
    point_2 = Point(*line_seg[1])
    new_line = LineSegment([point_1, point_2])
    line_segment_list.append(new_line)

l1 = line_segment_list[0]
l2 = line_segment_list[10]

print l1.pts
print l2.pts
print l1.getDifference(l2)
print l1.m
print l1.b

plt.figure()
plt.plot([l1.pts[0].x, l1.pts[1].x], [l1.pts[0].y, l1.pts[1].y])
plt.plot([l2.pts[0].x, l2.pts[1].x], [l2.pts[0].y, l2.pts[1].y])

deltas = []
thetas = []
for line_segs in line_segment_list:
    if line_segs == l1:
        continue
    diffs = l1.getDifference(line_segs)
    deltas.append(diffs[0])
    thetas.append(diffs[1])

plt.figure()
plt.plot(deltas, thetas, 'o')
plt.show()
aead6b8dab78056e0034ef21ea82d9151123f8a3
scripts/create_shop_sequences.py
scripts/create_shop_sequences.py
#!/usr/bin/env python
"""Create article and order sequences for a party's shop.

:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""

import click

from byceps.services.shop.sequence.models import Purpose
from byceps.services.shop.sequence.service import create_party_sequence
from byceps.util.system import get_config_filename_from_env_or_exit

from bootstrap.util import app_context
from bootstrap.validators import validate_party

@click.command()
@click.argument('party', callback=validate_party)
@click.argument('article_prefix')
@click.argument('order_prefix')
def execute(party, article_prefix, order_prefix):
    create_party_sequence(party.id, Purpose.article, article_prefix)
    create_party_sequence(party.id, Purpose.order, order_prefix)

    click.secho('Done.', fg='green')

if __name__ == '__main__':
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()
Add script to create party sequences for a shop
Add script to create party sequences for a shop
Python
bsd-3-clause
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
Add script to create party sequences for a shop
#!/usr/bin/env python
"""Create article and order sequences for a party's shop.

:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""

import click

from byceps.services.shop.sequence.models import Purpose
from byceps.services.shop.sequence.service import create_party_sequence
from byceps.util.system import get_config_filename_from_env_or_exit

from bootstrap.util import app_context
from bootstrap.validators import validate_party

@click.command()
@click.argument('party', callback=validate_party)
@click.argument('article_prefix')
@click.argument('order_prefix')
def execute(party, article_prefix, order_prefix):
    create_party_sequence(party.id, Purpose.article, article_prefix)
    create_party_sequence(party.id, Purpose.order, order_prefix)

    click.secho('Done.', fg='green')

if __name__ == '__main__':
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()

<commit_before><commit_msg>Add script to create party sequences for a shop<commit_after>

#!/usr/bin/env python
"""Create article and order sequences for a party's shop.

:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""

import click

from byceps.services.shop.sequence.models import Purpose
from byceps.services.shop.sequence.service import create_party_sequence
from byceps.util.system import get_config_filename_from_env_or_exit

from bootstrap.util import app_context
from bootstrap.validators import validate_party

@click.command()
@click.argument('party', callback=validate_party)
@click.argument('article_prefix')
@click.argument('order_prefix')
def execute(party, article_prefix, order_prefix):
    create_party_sequence(party.id, Purpose.article, article_prefix)
    create_party_sequence(party.id, Purpose.order, order_prefix)

    click.secho('Done.', fg='green')

if __name__ == '__main__':
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()

Add script to create party sequences for a shop
#!/usr/bin/env python
"""Create article and order sequences for a party's shop.

:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""

import click

from byceps.services.shop.sequence.models import Purpose
from byceps.services.shop.sequence.service import create_party_sequence
from byceps.util.system import get_config_filename_from_env_or_exit

from bootstrap.util import app_context
from bootstrap.validators import validate_party

@click.command()
@click.argument('party', callback=validate_party)
@click.argument('article_prefix')
@click.argument('order_prefix')
def execute(party, article_prefix, order_prefix):
    create_party_sequence(party.id, Purpose.article, article_prefix)
    create_party_sequence(party.id, Purpose.order, order_prefix)

    click.secho('Done.', fg='green')

if __name__ == '__main__':
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()

<commit_before><commit_msg>Add script to create party sequences for a shop<commit_after>
#!/usr/bin/env python
"""Create article and order sequences for a party's shop.

:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""

import click

from byceps.services.shop.sequence.models import Purpose
from byceps.services.shop.sequence.service import create_party_sequence
from byceps.util.system import get_config_filename_from_env_or_exit

from bootstrap.util import app_context
from bootstrap.validators import validate_party

@click.command()
@click.argument('party', callback=validate_party)
@click.argument('article_prefix')
@click.argument('order_prefix')
def execute(party, article_prefix, order_prefix):
    create_party_sequence(party.id, Purpose.article, article_prefix)
    create_party_sequence(party.id, Purpose.order, order_prefix)

    click.secho('Done.', fg='green')

if __name__ == '__main__':
    config_filename = get_config_filename_from_env_or_exit()
    with app_context(config_filename):
        execute()
c8f53c175b948b771c7d1eee7d1156f76287ca97
vezilka/lib/pygments_rst.py
vezilka/lib/pygments_rst.py
# -*- coding: utf-8 -*-
"""
    The Pygments reStructuredText directive
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    This fragment is a Docutils_ 0.4 directive that renders source code
    (to HTML only, currently) via Pygments.

    To use it, adjust the options below and copy the code into a module
    that you import on initialization.  The code then automatically
    registers a ``sourcecode`` directive that you can use instead of
    normal code blocks like this::

        .. sourcecode:: python

            My code goes here.

    If you want to have different code styles, e.g. one with line numbers
    and one without, add formatters with their names in the VARIANTS dict
    below.  You can invoke them instead of the DEFAULT one by using a
    directive option::

        .. sourcecode:: python
            :linenos:

            My code goes here.

    Look at the `directive documentation`_ to get all the gory details.

    .. _Docutils: http://docutils.sf.net/
    .. _directive documentation:
       http://docutils.sourceforge.net/docs/howto/rst-directives.html

    :copyright: 2007 by Georg Brandl.
    :license: BSD, see LICENSE for more details.
"""

# Options
# ~~~~~~~

# Set to True if you want inline CSS styles instead of classes
INLINESTYLES = True

from pygments.formatters import HtmlFormatter

# The default formatter
DEFAULT = HtmlFormatter(noclasses=INLINESTYLES)

# Add name -> formatter pairs for every variant you want to use
VARIANTS = {
    'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
}

from docutils import nodes
from docutils.parsers.rst import directives

from pygments import highlight
from pygments.lexers import get_lexer_by_name, TextLexer

def pygments_directive(name, arguments, options, content, lineno,
                       content_offset, block_text, state, state_machine):
    try:
        lexer = get_lexer_by_name(arguments[0])
    except ValueError:
        # no lexer found - use the text one instead of an exception
        lexer = TextLexer()
    # take an arbitrary option if more than one is given
    formatter = options and VARIANTS[options.keys()[0]] or DEFAULT
    parsed = highlight(u'\n'.join(content), lexer, formatter)
    return [nodes.raw('', parsed, format='html')]
pygments_directive.arguments = (1, 0, 1)
pygments_directive.content = 1
pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS])

directives.register_directive('sourcecode', pygments_directive)
Add pygments rst support plugin (from zine)
Add pygments rst support plugin (from zine)
Python
mit
gdamjan/vezilka
Add pygments rst support plugin (from zine)
# -*- coding: utf-8 -*- """ The Pygments reStructuredText directive ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This fragment is a Docutils_ 0.4 directive that renders source code (to HTML only, currently) via Pygments. To use it, adjust the options below and copy the code into a module that you import on initialization. The code then automatically registers a ``sourcecode`` directive that you can use instead of normal code blocks like this:: .. sourcecode:: python My code goes here. If you want to have different code styles, e.g. one with line numbers and one without, add formatters with their names in the VARIANTS dict below. You can invoke them instead of the DEFAULT one by using a directive option:: .. sourcecode:: python :linenos: My code goes here. Look at the `directive documentation`_ to get all the gory details. .. _Docutils: http://docutils.sf.net/ .. _directive documentation: http://docutils.sourceforge.net/docs/howto/rst-directives.html :copyright: 2007 by Georg Brandl. :license: BSD, see LICENSE for more details. """ # Options # ~~~~~~~ # Set to True if you want inline CSS styles instead of classes INLINESTYLES = True from pygments.formatters import HtmlFormatter # The default formatter DEFAULT = HtmlFormatter(noclasses=INLINESTYLES) # Add name -> formatter pairs for every variant you want to use VARIANTS = { 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), } from docutils import nodes from docutils.parsers.rst import directives from pygments import highlight from pygments.lexers import get_lexer_by_name, TextLexer def pygments_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): try: lexer = get_lexer_by_name(arguments[0]) except ValueError: # no lexer found - use the text one instead of an exception lexer = TextLexer() # take an arbitrary option if more than one is given formatter = options and VARIANTS[options.keys()[0]] or DEFAULT parsed = highlight(u'\n'.join(content), lexer, formatter) return [nodes.raw('', parsed, format='html')] pygments_directive.arguments = (1, 0, 1) pygments_directive.content = 1 pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) directives.register_directive('sourcecode', pygments_directive)
<commit_before><commit_msg>Add pygments rst support plugin (from zine)<commit_after>
# -*- coding: utf-8 -*- """ The Pygments reStructuredText directive ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This fragment is a Docutils_ 0.4 directive that renders source code (to HTML only, currently) via Pygments. To use it, adjust the options below and copy the code into a module that you import on initialization. The code then automatically registers a ``sourcecode`` directive that you can use instead of normal code blocks like this:: .. sourcecode:: python My code goes here. If you want to have different code styles, e.g. one with line numbers and one without, add formatters with their names in the VARIANTS dict below. You can invoke them instead of the DEFAULT one by using a directive option:: .. sourcecode:: python :linenos: My code goes here. Look at the `directive documentation`_ to get all the gory details. .. _Docutils: http://docutils.sf.net/ .. _directive documentation: http://docutils.sourceforge.net/docs/howto/rst-directives.html :copyright: 2007 by Georg Brandl. :license: BSD, see LICENSE for more details. """ # Options # ~~~~~~~ # Set to True if you want inline CSS styles instead of classes INLINESTYLES = True from pygments.formatters import HtmlFormatter # The default formatter DEFAULT = HtmlFormatter(noclasses=INLINESTYLES) # Add name -> formatter pairs for every variant you want to use VARIANTS = { 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), } from docutils import nodes from docutils.parsers.rst import directives from pygments import highlight from pygments.lexers import get_lexer_by_name, TextLexer def pygments_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): try: lexer = get_lexer_by_name(arguments[0]) except ValueError: # no lexer found - use the text one instead of an exception lexer = TextLexer() # take an arbitrary option if more than one is given formatter = options and VARIANTS[options.keys()[0]] or DEFAULT parsed = highlight(u'\n'.join(content), lexer, formatter) return [nodes.raw('', parsed, format='html')] pygments_directive.arguments = (1, 0, 1) pygments_directive.content = 1 pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) directives.register_directive('sourcecode', pygments_directive)
Add pygments rst support plugin (from zine)# -*- coding: utf-8 -*- """ The Pygments reStructuredText directive ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This fragment is a Docutils_ 0.4 directive that renders source code (to HTML only, currently) via Pygments. To use it, adjust the options below and copy the code into a module that you import on initialization. The code then automatically registers a ``sourcecode`` directive that you can use instead of normal code blocks like this:: .. sourcecode:: python My code goes here. If you want to have different code styles, e.g. one with line numbers and one without, add formatters with their names in the VARIANTS dict below. You can invoke them instead of the DEFAULT one by using a directive option:: .. sourcecode:: python :linenos: My code goes here. Look at the `directive documentation`_ to get all the gory details. .. _Docutils: http://docutils.sf.net/ .. _directive documentation: http://docutils.sourceforge.net/docs/howto/rst-directives.html :copyright: 2007 by Georg Brandl. :license: BSD, see LICENSE for more details. """ # Options # ~~~~~~~ # Set to True if you want inline CSS styles instead of classes INLINESTYLES = True from pygments.formatters import HtmlFormatter # The default formatter DEFAULT = HtmlFormatter(noclasses=INLINESTYLES) # Add name -> formatter pairs for every variant you want to use VARIANTS = { 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), } from docutils import nodes from docutils.parsers.rst import directives from pygments import highlight from pygments.lexers import get_lexer_by_name, TextLexer def pygments_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): try: lexer = get_lexer_by_name(arguments[0]) except ValueError: # no lexer found - use the text one instead of an exception lexer = TextLexer() # take an arbitrary option if more than one is given formatter = options and VARIANTS[options.keys()[0]] or DEFAULT parsed = highlight(u'\n'.join(content), lexer, formatter) return [nodes.raw('', parsed, format='html')] pygments_directive.arguments = (1, 0, 1) pygments_directive.content = 1 pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) directives.register_directive('sourcecode', pygments_directive)
<commit_before><commit_msg>Add pygments rst support plugin (from zine)<commit_after># -*- coding: utf-8 -*- """ The Pygments reStructuredText directive ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This fragment is a Docutils_ 0.4 directive that renders source code (to HTML only, currently) via Pygments. To use it, adjust the options below and copy the code into a module that you import on initialization. The code then automatically registers a ``sourcecode`` directive that you can use instead of normal code blocks like this:: .. sourcecode:: python My code goes here. If you want to have different code styles, e.g. one with line numbers and one without, add formatters with their names in the VARIANTS dict below. You can invoke them instead of the DEFAULT one by using a directive option:: .. sourcecode:: python :linenos: My code goes here. Look at the `directive documentation`_ to get all the gory details. .. _Docutils: http://docutils.sf.net/ .. _directive documentation: http://docutils.sourceforge.net/docs/howto/rst-directives.html :copyright: 2007 by Georg Brandl. :license: BSD, see LICENSE for more details. """ # Options # ~~~~~~~ # Set to True if you want inline CSS styles instead of classes INLINESTYLES = True from pygments.formatters import HtmlFormatter # The default formatter DEFAULT = HtmlFormatter(noclasses=INLINESTYLES) # Add name -> formatter pairs for every variant you want to use VARIANTS = { 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), } from docutils import nodes from docutils.parsers.rst import directives from pygments import highlight from pygments.lexers import get_lexer_by_name, TextLexer def pygments_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): try: lexer = get_lexer_by_name(arguments[0]) except ValueError: # no lexer found - use the text one instead of an exception lexer = TextLexer() # take an arbitrary option if more than one is given formatter = options and VARIANTS[options.keys()[0]] or DEFAULT parsed = highlight(u'\n'.join(content), lexer, formatter) return [nodes.raw('', parsed, format='html')] pygments_directive.arguments = (1, 0, 1) pygments_directive.content = 1 pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) directives.register_directive('sourcecode', pygments_directive)
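The directive's docstring above describes installation; to exercise it end to end, the fragment can be saved as a module and imported (the import is what registers ``sourcecode`` with docutils), then fed some reST. A minimal smoke-test sketch, assuming docutils and pygments are installed:

# Minimal smoke test for the sourcecode directive registered above.
# Importing the fragment module first is what makes the directive available.
from docutils.core import publish_string

rst = """\
.. sourcecode:: python

   print 'hello'
"""

html = publish_string(rst, writer_name='html')  # rendered HTML with highlighted code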
22656ecba485c56b654cf4b57b2906a7f0e7effc
mapApp/migrations/0005_auto_20150819_1829.py
mapApp/migrations/0005_auto_20150819_1829.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('mapApp', '0004_auto_20150806_1426'), ] operations = [ migrations.CreateModel( name='Weather', fields=[ ('incident', models.OneToOneField(primary_key=True, serialize=False, to='mapApp.Incident')), ('temperature_c', models.FloatField()), ('visibility_km', models.FloatField()), ('windspeed_kmh', models.FloatField()), ('precip_mmh', models.FloatField()), ('precip_prob', models.FloatField()), ('sunrise_time', models.DateTimeField()), ('sunset_time', models.DateTimeField()), ('dawn', models.BooleanField()), ('dusk', models.BooleanField()), ('wind_dir_deg', models.FloatField()), ('wind_dir_str', models.CharField(max_length=5)), ('black_ice_risk', models.BooleanField()), ('summary', models.CharField(max_length=250)), ], ), migrations.RemoveField( model_name='incident', name='weather', ), ]
Migrate database to add Weather model and delete weather field from Incident model
Migrate database to add Weather model and delete weather field from Incident model
Python
mit
SPARLab/BikeMaps,SPARLab/BikeMaps,SPARLab/BikeMaps
Migrate database to add Weather model and delete weather field from Incident model
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('mapApp', '0004_auto_20150806_1426'), ] operations = [ migrations.CreateModel( name='Weather', fields=[ ('incident', models.OneToOneField(primary_key=True, serialize=False, to='mapApp.Incident')), ('temperature_c', models.FloatField()), ('visibility_km', models.FloatField()), ('windspeed_kmh', models.FloatField()), ('precip_mmh', models.FloatField()), ('precip_prob', models.FloatField()), ('sunrise_time', models.DateTimeField()), ('sunset_time', models.DateTimeField()), ('dawn', models.BooleanField()), ('dusk', models.BooleanField()), ('wind_dir_deg', models.FloatField()), ('wind_dir_str', models.CharField(max_length=5)), ('black_ice_risk', models.BooleanField()), ('summary', models.CharField(max_length=250)), ], ), migrations.RemoveField( model_name='incident', name='weather', ), ]
<commit_before><commit_msg>Migrate database to add Weather model and delete weather field from Incident model<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('mapApp', '0004_auto_20150806_1426'), ] operations = [ migrations.CreateModel( name='Weather', fields=[ ('incident', models.OneToOneField(primary_key=True, serialize=False, to='mapApp.Incident')), ('temperature_c', models.FloatField()), ('visibility_km', models.FloatField()), ('windspeed_kmh', models.FloatField()), ('precip_mmh', models.FloatField()), ('precip_prob', models.FloatField()), ('sunrise_time', models.DateTimeField()), ('sunset_time', models.DateTimeField()), ('dawn', models.BooleanField()), ('dusk', models.BooleanField()), ('wind_dir_deg', models.FloatField()), ('wind_dir_str', models.CharField(max_length=5)), ('black_ice_risk', models.BooleanField()), ('summary', models.CharField(max_length=250)), ], ), migrations.RemoveField( model_name='incident', name='weather', ), ]
Migrate database to add Weather model and delete weather field from Incident model# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('mapApp', '0004_auto_20150806_1426'),
    ]

    operations = [
        migrations.CreateModel(
            name='Weather',
            fields=[
                ('incident', models.OneToOneField(primary_key=True, serialize=False, to='mapApp.Incident')),
                ('temperature_c', models.FloatField()),
                ('visibility_km', models.FloatField()),
                ('windspeed_kmh', models.FloatField()),
                ('precip_mmh', models.FloatField()),
                ('precip_prob', models.FloatField()),
                ('sunrise_time', models.DateTimeField()),
                ('sunset_time', models.DateTimeField()),
                ('dawn', models.BooleanField()),
                ('dusk', models.BooleanField()),
                ('wind_dir_deg', models.FloatField()),
                ('wind_dir_str', models.CharField(max_length=5)),
                ('black_ice_risk', models.BooleanField()),
                ('summary', models.CharField(max_length=250)),
            ],
        ),
        migrations.RemoveField(
            model_name='incident',
            name='weather',
        ),
    ]
<commit_before><commit_msg>Migrate database to add Weather model and delete weather field from Incident model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('mapApp', '0004_auto_20150806_1426'),
    ]

    operations = [
        migrations.CreateModel(
            name='Weather',
            fields=[
                ('incident', models.OneToOneField(primary_key=True, serialize=False, to='mapApp.Incident')),
                ('temperature_c', models.FloatField()),
                ('visibility_km', models.FloatField()),
                ('windspeed_kmh', models.FloatField()),
                ('precip_mmh', models.FloatField()),
                ('precip_prob', models.FloatField()),
                ('sunrise_time', models.DateTimeField()),
                ('sunset_time', models.DateTimeField()),
                ('dawn', models.BooleanField()),
                ('dusk', models.BooleanField()),
                ('wind_dir_deg', models.FloatField()),
                ('wind_dir_str', models.CharField(max_length=5)),
                ('black_ice_risk', models.BooleanField()),
                ('summary', models.CharField(max_length=250)),
            ],
        ),
        migrations.RemoveField(
            model_name='incident',
            name='weather',
        ),
    ]
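The migration's CreateModel block doubles as a record of the model it targets; read off those fields, the corresponding Django model would look roughly like this (a hypothetical reconstruction; the project's actual models.py may differ in ordering and options):

# Hypothetical Weather model read off the migration's CreateModel fields.
# The real mapApp/models.py may differ; this is only for orientation.
from django.db import models


class Weather(models.Model):
    incident = models.OneToOneField('mapApp.Incident', primary_key=True)
    temperature_c = models.FloatField()
    visibility_km = models.FloatField()
    windspeed_kmh = models.FloatField()
    precip_mmh = models.FloatField()
    precip_prob = models.FloatField()
    sunrise_time = models.DateTimeField()
    sunset_time = models.DateTimeField()
    dawn = models.BooleanField()
    dusk = models.BooleanField()
    wind_dir_deg = models.FloatField()
    wind_dir_str = models.CharField(max_length=5)
    black_ice_risk = models.BooleanField()
    summary = models.CharField(max_length=250)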
7219cea7d4480739e39ac3e4c3c431002b786241
vumi/workers/vas2nets/workers.py
vumi/workers/vas2nets/workers.py
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
Test worker (replies with received content) for vas2nets transport.
Test worker (replies with received content) for vas2nets transport.
Python
bsd-3-clause
harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi,TouK/vumi
Test worker (replies with received content) for vas2nets transport.
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
<commit_before><commit_msg>Test worker (replies with received content) for vas2nets transport.<commit_after>
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
Test worker (replies with received content) for vas2nets transport.# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
<commit_before><commit_msg>Test worker (replies with received content) for vas2nets transport.<commit_after># -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
6342fb6890ad4966b655ccfcea1b585dac155729
modules/testing/test_database_reference.py
modules/testing/test_database_reference.py
#!/usr/bin/env python """ Created by: Lee Bergstrand (2017) Description: A simple unittest for testing the database reference module. """ import unittest from modules.database_reference import parse_database_references class TestDatabaseReference(unittest.TestCase): """A unit testing class for testing the database_reference.py module. To be called by nosetests.""" def test_parse_database_references(self): """Test that database reference rows can be parsed.""" database_reference = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] reference = parse_database_references(database_reference) self.assertEqual(len(reference), 1) first_reference = reference[0] self.assertEqual(first_reference.database_name, 'IUBMB') self.assertEqual(first_reference.record_title, 'Methane Biosynthesis') self.assertEqual(first_reference.record_ids, ['misc', 'methane']) def test_parse_multiple_database_references(self): """Test that database reference rows consisting of multiple references can be parsed.""" database_references = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('DC', 'Coenzyme F420 hydrogenase'), ('DR', 'IUBMB; single; 112981;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] references = parse_database_references(database_references) self.assertEqual(len(references), 2)
Add automated tests for database references.
Add automated tests for database references.
Python
apache-2.0
LeeBergstrand/pygenprop
Add automated tests for database references.
#!/usr/bin/env python """ Created by: Lee Bergstrand (2017) Description: A simple unittest for testing the database reference module. """ import unittest from modules.database_reference import parse_database_references class TestDatabaseReference(unittest.TestCase): """A unit testing class for testing the database_reference.py module. To be called by nosetests.""" def test_parse_database_references(self): """Test that database reference rows can be parsed.""" database_reference = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] reference = parse_database_references(database_reference) self.assertEqual(len(reference), 1) first_reference = reference[0] self.assertEqual(first_reference.database_name, 'IUBMB') self.assertEqual(first_reference.record_title, 'Methane Biosynthesis') self.assertEqual(first_reference.record_ids, ['misc', 'methane']) def test_parse_multiple_database_references(self): """Test that database reference rows consisting of multiple references can be parsed.""" database_references = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('DC', 'Coenzyme F420 hydrogenase'), ('DR', 'IUBMB; single; 112981;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] references = parse_database_references(database_references) self.assertEqual(len(references), 2)
<commit_before><commit_msg>Add automated tests for database references.<commit_after>
#!/usr/bin/env python """ Created by: Lee Bergstrand (2017) Description: A simple unittest for testing the database reference module. """ import unittest from modules.database_reference import parse_database_references class TestDatabaseReference(unittest.TestCase): """A unit testing class for testing the database_reference.py module. To be called by nosetests.""" def test_parse_database_references(self): """Test that database reference rows can be parsed.""" database_reference = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] reference = parse_database_references(database_reference) self.assertEqual(len(reference), 1) first_reference = reference[0] self.assertEqual(first_reference.database_name, 'IUBMB') self.assertEqual(first_reference.record_title, 'Methane Biosynthesis') self.assertEqual(first_reference.record_ids, ['misc', 'methane']) def test_parse_multiple_database_references(self): """Test that database reference rows consisting of multiple references can be parsed.""" database_references = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('DC', 'Coenzyme F420 hydrogenase'), ('DR', 'IUBMB; single; 112981;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] references = parse_database_references(database_references) self.assertEqual(len(references), 2)
Add automated tests for database references.#!/usr/bin/env python """ Created by: Lee Bergstrand (2017) Description: A simple unittest for testing the database reference module. """ import unittest from modules.database_reference import parse_database_references class TestDatabaseReference(unittest.TestCase): """A unit testing class for testing the database_reference.py module. To be called by nosetests.""" def test_parse_database_references(self): """Test that database reference rows can be parsed.""" database_reference = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] reference = parse_database_references(database_reference) self.assertEqual(len(reference), 1) first_reference = reference[0] self.assertEqual(first_reference.database_name, 'IUBMB') self.assertEqual(first_reference.record_title, 'Methane Biosynthesis') self.assertEqual(first_reference.record_ids, ['misc', 'methane']) def test_parse_multiple_database_references(self): """Test that database reference rows consisting of multiple references can be parsed.""" database_references = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('DC', 'Coenzyme F420 hydrogenase'), ('DR', 'IUBMB; single; 112981;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] references = parse_database_references(database_references) self.assertEqual(len(references), 2)
<commit_before><commit_msg>Add automated tests for database references.<commit_after>#!/usr/bin/env python """ Created by: Lee Bergstrand (2017) Description: A simple unittest for testing the database reference module. """ import unittest from modules.database_reference import parse_database_references class TestDatabaseReference(unittest.TestCase): """A unit testing class for testing the database_reference.py module. To be called by nosetests.""" def test_parse_database_references(self): """Test that database reference rows can be parsed.""" database_reference = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] reference = parse_database_references(database_reference) self.assertEqual(len(reference), 1) first_reference = reference[0] self.assertEqual(first_reference.database_name, 'IUBMB') self.assertEqual(first_reference.record_title, 'Methane Biosynthesis') self.assertEqual(first_reference.record_ids, ['misc', 'methane']) def test_parse_multiple_database_references(self): """Test that database reference rows consisting of multiple references can be parsed.""" database_references = [ ('RL', 'EMBO J. 2001;20:6561-6569.'), ('DC', 'Methane Biosynthesis'), ('DR', 'IUBMB; misc; methane;'), ('DC', 'Coenzyme F420 hydrogenase'), ('DR', 'IUBMB; single; 112981;'), ('CC', ' Coenzyme F420 (a 7,8-didemethyl-8-hydroxy 5-deazaflavin) is found in') ] references = parse_database_references(database_references) self.assertEqual(len(references), 2)
3aa6135d8104cac474326d915f290ecbb5014fde
tests/block/test_block_3.py
tests/block/test_block_3.py
import lz4.block import pytest test_data = [ (b'a' * 1024 * 1024), ] @pytest.fixture( params=test_data, ids=[ 'data' + str(i) for i in range(len(test_data)) ] ) def data(request): return request.param def test_block_decompress_mem_usage(data): tracemalloc = pytest.importorskip('tracemalloc') tracemalloc.start() compressed = lz4.block.compress(data) prev_snapshot = None for i in range(1000): decompressed = lz4.block.decompress(compressed) # noqa: F841 if i % 100 == 0: snapshot = tracemalloc.take_snapshot() if prev_snapshot: stats = snapshot.compare_to(prev_snapshot, 'lineno') assert stats[0].size_diff < (1024 * 4) prev_snapshot = snapshot
Add test which checks for growing memory usage for block funcs
Add test which checks for growing memory usage for block funcs
Python
bsd-3-clause
python-lz4/python-lz4,python-lz4/python-lz4
Add test which checks for growing memory usage for block funcs
import lz4.block import pytest test_data = [ (b'a' * 1024 * 1024), ] @pytest.fixture( params=test_data, ids=[ 'data' + str(i) for i in range(len(test_data)) ] ) def data(request): return request.param def test_block_decompress_mem_usage(data): tracemalloc = pytest.importorskip('tracemalloc') tracemalloc.start() compressed = lz4.block.compress(data) prev_snapshot = None for i in range(1000): decompressed = lz4.block.decompress(compressed) # noqa: F841 if i % 100 == 0: snapshot = tracemalloc.take_snapshot() if prev_snapshot: stats = snapshot.compare_to(prev_snapshot, 'lineno') assert stats[0].size_diff < (1024 * 4) prev_snapshot = snapshot
<commit_before><commit_msg>Add test which checks for growing memory usage for block funcs<commit_after>
import lz4.block import pytest test_data = [ (b'a' * 1024 * 1024), ] @pytest.fixture( params=test_data, ids=[ 'data' + str(i) for i in range(len(test_data)) ] ) def data(request): return request.param def test_block_decompress_mem_usage(data): tracemalloc = pytest.importorskip('tracemalloc') tracemalloc.start() compressed = lz4.block.compress(data) prev_snapshot = None for i in range(1000): decompressed = lz4.block.decompress(compressed) # noqa: F841 if i % 100 == 0: snapshot = tracemalloc.take_snapshot() if prev_snapshot: stats = snapshot.compare_to(prev_snapshot, 'lineno') assert stats[0].size_diff < (1024 * 4) prev_snapshot = snapshot
Add test which checks for growing memory usage for block funcsimport lz4.block import pytest test_data = [ (b'a' * 1024 * 1024), ] @pytest.fixture( params=test_data, ids=[ 'data' + str(i) for i in range(len(test_data)) ] ) def data(request): return request.param def test_block_decompress_mem_usage(data): tracemalloc = pytest.importorskip('tracemalloc') tracemalloc.start() compressed = lz4.block.compress(data) prev_snapshot = None for i in range(1000): decompressed = lz4.block.decompress(compressed) # noqa: F841 if i % 100 == 0: snapshot = tracemalloc.take_snapshot() if prev_snapshot: stats = snapshot.compare_to(prev_snapshot, 'lineno') assert stats[0].size_diff < (1024 * 4) prev_snapshot = snapshot
<commit_before><commit_msg>Add test which checks for growing memory usage for block funcs<commit_after>import lz4.block import pytest test_data = [ (b'a' * 1024 * 1024), ] @pytest.fixture( params=test_data, ids=[ 'data' + str(i) for i in range(len(test_data)) ] ) def data(request): return request.param def test_block_decompress_mem_usage(data): tracemalloc = pytest.importorskip('tracemalloc') tracemalloc.start() compressed = lz4.block.compress(data) prev_snapshot = None for i in range(1000): decompressed = lz4.block.decompress(compressed) # noqa: F841 if i % 100 == 0: snapshot = tracemalloc.take_snapshot() if prev_snapshot: stats = snapshot.compare_to(prev_snapshot, 'lineno') assert stats[0].size_diff < (1024 * 4) prev_snapshot = snapshot
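The test's core trick is tracemalloc's snapshot diffing. Stripped of the lz4 specifics, the pattern looks like this (Python 3.4+ standard library only; the allocation is a stand-in for the work under test):

# The tracemalloc snapshot-diff idiom used in the test above, in isolation.
import tracemalloc

tracemalloc.start()
baseline = tracemalloc.take_snapshot()

junk = [b'x' * 100 for _ in range(1000)]  # stand-in for the code under test

snapshot = tracemalloc.take_snapshot()
for stat in snapshot.compare_to(baseline, 'lineno')[:3]:
    print(stat)  # largest per-source-line allocation growth since the baseline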
bd94cd20c3021e7fef470963b67164f431ec86fa
PyFVCOM/current_tools.py
PyFVCOM/current_tools.py
""" Tools to work with current data. Reuses some functions from tide_tools. """ from tide_tools import addHarmonicResults, getObservedData, getObservedMetadata, getHarmonics, TAPPy def scalar2vector(direction, speed): """ Convert arrays of two scalars into the corresponding vector components. This is mainly meant to be used to convert direction and speed to the u and v velocity components. Parameters ---------- direction, speed : ndarray Arrays of direction (degrees) and speed (any units). Returns ------- u, v : ndarray Arrays of the u and v components of the speed and direction in units of speed. """ u = np.sin(np.deg2rad(direction)) * speed v = np.cos(np.deg2rad(direction)) * speed return u, v def vector2scalar(u, v): """ Convert two vector components into the scalar values. Mainly used for converting u and v velocity components into direction and speed. Parameters ---------- u, v : ndarray Arrays of (optionally time, space (vertical and horizontal) varying) u and v vectors. Returns ------- direction, speed : ndarray Arrays of direction (degrees) and speed (u and v units). """ direction = np.rad2deg(np.arctan2(u, v)) speed = np.sqrt(u**2 + v**2) return direction, speed
Add new tools for working with current data.
Add new tools for working with current data.
Python
mit
pwcazenave/PyFVCOM
Add new tools for working with current data.
""" Tools to work with current data. Reuses some functions from tide_tools. """ from tide_tools import addHarmonicResults, getObservedData, getObservedMetadata, getHarmonics, TAPPy def scalar2vector(direction, speed): """ Convert arrays of two scalars into the corresponding vector components. This is mainly meant to be used to convert direction and speed to the u and v velocity components. Parameters ---------- direction, speed : ndarray Arrays of direction (degrees) and speed (any units). Returns ------- u, v : ndarray Arrays of the u and v components of the speed and direction in units of speed. """ u = np.sin(np.deg2rad(direction)) * speed v = np.cos(np.deg2rad(direction)) * speed return u, v def vector2scalar(u, v): """ Convert two vector components into the scalar values. Mainly used for converting u and v velocity components into direction and speed. Parameters ---------- u, v : ndarray Arrays of (optionally time, space (vertical and horizontal) varying) u and v vectors. Returns ------- direction, speed : ndarray Arrays of direction (degrees) and speed (u and v units). """ direction = np.rad2deg(np.arctan2(u, v)) speed = np.sqrt(u**2 + v**2) return direction, speed
<commit_before><commit_msg>Add new tools for working with current data.<commit_after>
""" Tools to work with current data. Reuses some functions from tide_tools. """ from tide_tools import addHarmonicResults, getObservedData, getObservedMetadata, getHarmonics, TAPPy def scalar2vector(direction, speed): """ Convert arrays of two scalars into the corresponding vector components. This is mainly meant to be used to convert direction and speed to the u and v velocity components. Parameters ---------- direction, speed : ndarray Arrays of direction (degrees) and speed (any units). Returns ------- u, v : ndarray Arrays of the u and v components of the speed and direction in units of speed. """ u = np.sin(np.deg2rad(direction)) * speed v = np.cos(np.deg2rad(direction)) * speed return u, v def vector2scalar(u, v): """ Convert two vector components into the scalar values. Mainly used for converting u and v velocity components into direction and speed. Parameters ---------- u, v : ndarray Arrays of (optionally time, space (vertical and horizontal) varying) u and v vectors. Returns ------- direction, speed : ndarray Arrays of direction (degrees) and speed (u and v units). """ direction = np.rad2deg(np.arctan2(u, v)) speed = np.sqrt(u**2 + v**2) return direction, speed
Add new tools for working with current data."""
Tools to work with current data. Reuses some functions from tide_tools.

"""

import numpy as np

from tide_tools import addHarmonicResults, getObservedData, getObservedMetadata, getHarmonics, TAPPy


def scalar2vector(direction, speed):
    """
    Convert arrays of two scalars into the corresponding vector components.
    This is mainly meant to be used to convert direction and speed to the u
    and v velocity components.

    Parameters
    ----------

    direction, speed : ndarray
        Arrays of direction (degrees) and speed (any units).

    Returns
    -------

    u, v : ndarray
        Arrays of the u and v components of the speed and direction in units
        of speed.

    """

    u = np.sin(np.deg2rad(direction)) * speed
    v = np.cos(np.deg2rad(direction)) * speed

    return u, v


def vector2scalar(u, v):
    """
    Convert two vector components into the scalar values. Mainly used for
    converting u and v velocity components into direction and speed.

    Parameters
    ----------

    u, v : ndarray
        Arrays of (optionally time, space (vertical and horizontal) varying)
        u and v vectors.

    Returns
    -------

    direction, speed : ndarray
        Arrays of direction (degrees) and speed (u and v units).

    """

    direction = np.rad2deg(np.arctan2(u, v))
    speed = np.sqrt(u**2 + v**2)

    return direction, speed
<commit_before><commit_msg>Add new tools for working with current data.<commit_after>"""
Tools to work with current data. Reuses some functions from tide_tools.

"""

import numpy as np

from tide_tools import addHarmonicResults, getObservedData, getObservedMetadata, getHarmonics, TAPPy


def scalar2vector(direction, speed):
    """
    Convert arrays of two scalars into the corresponding vector components.
    This is mainly meant to be used to convert direction and speed to the u
    and v velocity components.

    Parameters
    ----------

    direction, speed : ndarray
        Arrays of direction (degrees) and speed (any units).

    Returns
    -------

    u, v : ndarray
        Arrays of the u and v components of the speed and direction in units
        of speed.

    """

    u = np.sin(np.deg2rad(direction)) * speed
    v = np.cos(np.deg2rad(direction)) * speed

    return u, v


def vector2scalar(u, v):
    """
    Convert two vector components into the scalar values. Mainly used for
    converting u and v velocity components into direction and speed.

    Parameters
    ----------

    u, v : ndarray
        Arrays of (optionally time, space (vertical and horizontal) varying)
        u and v vectors.

    Returns
    -------

    direction, speed : ndarray
        Arrays of direction (degrees) and speed (u and v units).

    """

    direction = np.rad2deg(np.arctan2(u, v))
    speed = np.sqrt(u**2 + v**2)

    return direction, speed
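A quick round-trip check of the two helpers, assuming the module above is importable as current_tools and NumPy is installed (values are illustrative):

# Round-trip sanity check for scalar2vector / vector2scalar.
import numpy as np

from current_tools import scalar2vector, vector2scalar

direction = np.array([0.0, 90.0, 180.0])  # degrees
speed = np.array([1.0, 2.0, 0.5])

u, v = scalar2vector(direction, speed)
direction2, speed2 = vector2scalar(u, v)
# direction2 matches direction (modulo 360) and speed2 matches speed,
# up to floating point error.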
789e6c43c03d9d0c238919fd85a48f4d9b37cabd
classify_vgg.py
classify_vgg.py
# Xiang Xiang (eglxiang@gmail.com), March 2016, MIT license. import numpy as np import cv2 import caffe import time img = caffe.io.load_image( "ak.png" ) img = img[:,:,::-1]*255.0 # convert RGB->BGR avg = np.array([129.1863,104.7624,93.5940]) img = img - avg # subtract mean (numpy takes care of dimensions :) img = img.transpose((2,0,1)) img = img[None,:] # add singleton dimension caffe.set_mode_cpu() net = caffe.Net("VGG_FACE_deploy.prototxt","VGG_FACE.caffemodel", caffe.TEST) start_time = time.time() out = net.forward_all( data = img ) elapsed_time = time.time() - start_time print elapsed_time
Add Python script for running VGG_Face demo.
Add Python script for running VGG_Face demo.
Python
mit
eglxiang/vgg_face,eglxiang/vgg_face,eglxiang/vgg_face
Add Python script for running VGG_Face demo.
# Xiang Xiang (eglxiang@gmail.com), March 2016, MIT license. import numpy as np import cv2 import caffe import time img = caffe.io.load_image( "ak.png" ) img = img[:,:,::-1]*255.0 # convert RGB->BGR avg = np.array([129.1863,104.7624,93.5940]) img = img - avg # subtract mean (numpy takes care of dimensions :) img = img.transpose((2,0,1)) img = img[None,:] # add singleton dimension caffe.set_mode_cpu() net = caffe.Net("VGG_FACE_deploy.prototxt","VGG_FACE.caffemodel", caffe.TEST) start_time = time.time() out = net.forward_all( data = img ) elapsed_time = time.time() - start_time print elapsed_time
<commit_before><commit_msg>Add Python script for running VGG_Face demo.<commit_after>
# Xiang Xiang (eglxiang@gmail.com), March 2016, MIT license. import numpy as np import cv2 import caffe import time img = caffe.io.load_image( "ak.png" ) img = img[:,:,::-1]*255.0 # convert RGB->BGR avg = np.array([129.1863,104.7624,93.5940]) img = img - avg # subtract mean (numpy takes care of dimensions :) img = img.transpose((2,0,1)) img = img[None,:] # add singleton dimension caffe.set_mode_cpu() net = caffe.Net("VGG_FACE_deploy.prototxt","VGG_FACE.caffemodel", caffe.TEST) start_time = time.time() out = net.forward_all( data = img ) elapsed_time = time.time() - start_time print elapsed_time
Add Python script for running VGG_Face demo.# Xiang Xiang (eglxiang@gmail.com), March 2016, MIT license. import numpy as np import cv2 import caffe import time img = caffe.io.load_image( "ak.png" ) img = img[:,:,::-1]*255.0 # convert RGB->BGR avg = np.array([129.1863,104.7624,93.5940]) img = img - avg # subtract mean (numpy takes care of dimensions :) img = img.transpose((2,0,1)) img = img[None,:] # add singleton dimension caffe.set_mode_cpu() net = caffe.Net("VGG_FACE_deploy.prototxt","VGG_FACE.caffemodel", caffe.TEST) start_time = time.time() out = net.forward_all( data = img ) elapsed_time = time.time() - start_time print elapsed_time
<commit_before><commit_msg>Add Python script for running VGG_Face demo.<commit_after># Xiang Xiang (eglxiang@gmail.com), March 2016, MIT license. import numpy as np import cv2 import caffe import time img = caffe.io.load_image( "ak.png" ) img = img[:,:,::-1]*255.0 # convert RGB->BGR avg = np.array([129.1863,104.7624,93.5940]) img = img - avg # subtract mean (numpy takes care of dimensions :) img = img.transpose((2,0,1)) img = img[None,:] # add singleton dimension caffe.set_mode_cpu() net = caffe.Net("VGG_FACE_deploy.prototxt","VGG_FACE.caffemodel", caffe.TEST) start_time = time.time() out = net.forward_all( data = img ) elapsed_time = time.time() - start_time print elapsed_time
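The script only times the forward pass, but forward_all returns a dict of output blobs, so the prediction can be read back as well. A sketch, assuming the deploy prototxt's final softmax layer is named 'prob' (conventional for VGG_FACE, but worth verifying against the file):

# Hypothetical follow-on: read the prediction back out of the forward pass.
# Assumes VGG_FACE_deploy.prototxt names its softmax output layer 'prob'.
probs = out['prob'][0]   # class probabilities for the single input image
best = probs.argmax()
print best, probs[best]  # Python 2 print, matching the script above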
bc857e06d5f464647bb0297006975f4132dbea8d
link_analysis/experiment_parser_temp_big.py
link_analysis/experiment_parser_temp_big.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import sys from string_message import StringMessage from link_analyzer import Analyzer from link import Link if __name__ == "__main__": link = None new_link = False files = [range(8), range(8), range(8), range(8)] tx_id = 0 for tx in range(4): for rx in range(8): if tx == rx: continue files[tx][rx] = open('../../hauser_data/big2/{}_to_{}.txt'.format(tx, rx), 'w') for arg in sys.argv[1:]: with open(arg, 'r') as linkfile: for line in linkfile: try: message = StringMessage(line) if message.is_transmission: tx_id = message['id'] for rx_id in xrange(4,8): files[tx_id][rx_id].write(line) elif message.is_reception: files[tx_id][message['id']].write(line) except: print line
Add log splitter for 8mote experiment.
Add log splitter for 8mote experiment.
Python
bsd-2-clause
salkinium/bachelor,salkinium/bachelor,salkinium/bachelor
Add log splitter for 8mote experiment.
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import sys from string_message import StringMessage from link_analyzer import Analyzer from link import Link if __name__ == "__main__": link = None new_link = False files = [range(8), range(8), range(8), range(8)] tx_id = 0 for tx in range(4): for rx in range(8): if tx == rx: continue files[tx][rx] = open('../../hauser_data/big2/{}_to_{}.txt'.format(tx, rx), 'w') for arg in sys.argv[1:]: with open(arg, 'r') as linkfile: for line in linkfile: try: message = StringMessage(line) if message.is_transmission: tx_id = message['id'] for rx_id in xrange(4,8): files[tx_id][rx_id].write(line) elif message.is_reception: files[tx_id][message['id']].write(line) except: print line
<commit_before><commit_msg>Add log splitter for 8mote experiment.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import sys from string_message import StringMessage from link_analyzer import Analyzer from link import Link if __name__ == "__main__": link = None new_link = False files = [range(8), range(8), range(8), range(8)] tx_id = 0 for tx in range(4): for rx in range(8): if tx == rx: continue files[tx][rx] = open('../../hauser_data/big2/{}_to_{}.txt'.format(tx, rx), 'w') for arg in sys.argv[1:]: with open(arg, 'r') as linkfile: for line in linkfile: try: message = StringMessage(line) if message.is_transmission: tx_id = message['id'] for rx_id in xrange(4,8): files[tx_id][rx_id].write(line) elif message.is_reception: files[tx_id][message['id']].write(line) except: print line
Add log splitter for 8mote experiment.#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import sys from string_message import StringMessage from link_analyzer import Analyzer from link import Link if __name__ == "__main__": link = None new_link = False files = [range(8), range(8), range(8), range(8)] tx_id = 0 for tx in range(4): for rx in range(8): if tx == rx: continue files[tx][rx] = open('../../hauser_data/big2/{}_to_{}.txt'.format(tx, rx), 'w') for arg in sys.argv[1:]: with open(arg, 'r') as linkfile: for line in linkfile: try: message = StringMessage(line) if message.is_transmission: tx_id = message['id'] for rx_id in xrange(4,8): files[tx_id][rx_id].write(line) elif message.is_reception: files[tx_id][message['id']].write(line) except: print line
<commit_before><commit_msg>Add log splitter for 8mote experiment.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2014, Niklas Hauser # All rights reserved. # # The file is part of my bachelor thesis and is released under the 3-clause BSD # license. See the file `LICENSE` for the full license governing this code. # ----------------------------------------------------------------------------- import sys from string_message import StringMessage from link_analyzer import Analyzer from link import Link if __name__ == "__main__": link = None new_link = False files = [range(8), range(8), range(8), range(8)] tx_id = 0 for tx in range(4): for rx in range(8): if tx == rx: continue files[tx][rx] = open('../../hauser_data/big2/{}_to_{}.txt'.format(tx, rx), 'w') for arg in sys.argv[1:]: with open(arg, 'r') as linkfile: for line in linkfile: try: message = StringMessage(line) if message.is_transmission: tx_id = message['id'] for rx_id in xrange(4,8): files[tx_id][rx_id].write(line) elif message.is_reception: files[tx_id][message['id']].write(line) except: print line
b0a0154b93cf536ece580ba96fdf3251788c29dc
src/tests/test_reduce_framerate.py
src/tests/test_reduce_framerate.py
try: from unittest import mock except ImportError: import mock from sensor_msgs.msg import Image from reduce_framerate import FramerateReducer def test_callback(): r = FramerateReducer() with mock.patch.object(r, "image_publisher", autospec=True) as mock_pub: for i in range(16): r.frame_callback(Image()) assert mock_pub.publish.call_count == 2 # At 0 and 15
Write test for reduced framerate.
Write test for reduced framerate.
Python
mit
masasin/spirit,masasin/spirit
Write test for reduced framerate.
try: from unittest import mock except ImportError: import mock from sensor_msgs.msg import Image from reduce_framerate import FramerateReducer def test_callback(): r = FramerateReducer() with mock.patch.object(r, "image_publisher", autospec=True) as mock_pub: for i in range(16): r.frame_callback(Image()) assert mock_pub.publish.call_count == 2 # At 0 and 15
<commit_before><commit_msg>Write test for reduced framerate.<commit_after>
try: from unittest import mock except ImportError: import mock from sensor_msgs.msg import Image from reduce_framerate import FramerateReducer def test_callback(): r = FramerateReducer() with mock.patch.object(r, "image_publisher", autospec=True) as mock_pub: for i in range(16): r.frame_callback(Image()) assert mock_pub.publish.call_count == 2 # At 0 and 15
Write test for reduced framerate.try: from unittest import mock except ImportError: import mock from sensor_msgs.msg import Image from reduce_framerate import FramerateReducer def test_callback(): r = FramerateReducer() with mock.patch.object(r, "image_publisher", autospec=True) as mock_pub: for i in range(16): r.frame_callback(Image()) assert mock_pub.publish.call_count == 2 # At 0 and 15
<commit_before><commit_msg>Write test for reduced framerate.<commit_after>try: from unittest import mock except ImportError: import mock from sensor_msgs.msg import Image from reduce_framerate import FramerateReducer def test_callback(): r = FramerateReducer() with mock.patch.object(r, "image_publisher", autospec=True) as mock_pub: for i in range(16): r.frame_callback(Image()) assert mock_pub.publish.call_count == 2 # At 0 and 15
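The assertion pins down the contract: across frames 0 through 15 only frames 0 and 15 are published, i.e. one frame in every fifteen is forwarded. A FramerateReducer consistent with that contract might look like the following sketch (hypothetical; the real reduce_framerate.py, topic name, and node setup are not shown in this record):

# Hypothetical FramerateReducer matching the test's expectations.
import rospy
from sensor_msgs.msg import Image


class FramerateReducer(object):
    def __init__(self):
        # Topic name is made up for illustration.
        self.image_publisher = rospy.Publisher('image_reduced', Image, queue_size=1)
        self._count = 0

    def frame_callback(self, frame):
        # Forward one frame in every fifteen and drop the rest,
        # so frames 0 and 15 of a 16-frame burst get published.
        if self._count % 15 == 0:
            self.image_publisher.publish(frame)
        self._count += 1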
3a436f9345a8b6773ddac4198b33be563b66113d
Tools/set_source_default_svnprops.py
Tools/set_source_default_svnprops.py
#
# Run in source directory to change all file eol-styles
#
import os, sys, os.path

pj = os.path.join

def isSourceFile(fname):
    if fname in ["SConstruct","SConscript","build.info"]:
        return True
    ext = os.path.splitext(fname)[1]
    return ext in [".c",".cpp",".h",".inl",".ins",".fcd",".yy",".ll",".py"]

def isWindowsFile(fname):
    ext = os.path.splitext(fname)[1]
    return ext in [".dsp",".dsw"]

source_files = []
msvs_files = []

for root, dirs, files in os.walk('.'):
    source_files.extend([pj(root,f) for f in files if isSourceFile(f)])
    msvs_files.extend([pj(root,f) for f in files if isWindowsFile(f)])

print "Source files: "
for f in source_files:
    print f

print "Windows files: "
for f in msvs_files:
    print f

print "Setting eol-styles"
for f in source_files:
    cmd = "svn propset svn:eol-style native %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"

print "Setting keywords=Id"
for f in source_files:
    cmd = "svn propset svn:keywords Id %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"
Add script to set default svn properties on source files.
Add script to set default svn properties on source files. git-svn-id: 6c7e10cdde2c115f53707ba8ec4efe535f92d362@263 4683daeb-ad0f-0410-a623-93161e962ae5
Python
lgpl-2.1
jondo2010/OpenSG,jondo2010/OpenSG,jondo2010/OpenSG,jondo2010/OpenSG,jondo2010/OpenSG
Add script to set default svn properties on source files. git-svn-id: 6c7e10cdde2c115f53707ba8ec4efe535f92d362@263 4683daeb-ad0f-0410-a623-93161e962ae5
#
# Run in source directory to change all file eol-styles
#
import os, sys, os.path

pj = os.path.join

def isSourceFile(fname):
    if fname in ["SConstruct","SConscript","build.info"]:
        return True
    ext = os.path.splitext(fname)[1]
    return ext in [".c",".cpp",".h",".inl",".ins",".fcd",".yy",".ll",".py"]

def isWindowsFile(fname):
    ext = os.path.splitext(fname)[1]
    return ext in [".dsp",".dsw"]

source_files = []
msvs_files = []

for root, dirs, files in os.walk('.'):
    source_files.extend([pj(root,f) for f in files if isSourceFile(f)])
    msvs_files.extend([pj(root,f) for f in files if isWindowsFile(f)])

print "Source files: "
for f in source_files:
    print f

print "Windows files: "
for f in msvs_files:
    print f

print "Setting eol-styles"
for f in source_files:
    cmd = "svn propset svn:eol-style native %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"

print "Setting keywords=Id"
for f in source_files:
    cmd = "svn propset svn:keywords Id %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"
<commit_before><commit_msg>Add script to set default svn properties on source files. git-svn-id: 6c7e10cdde2c115f53707ba8ec4efe535f92d362@263 4683daeb-ad0f-0410-a623-93161e962ae5<commit_after>
#
# Run in source directory to change all file eol-styles
#
import os, sys, os.path

pj = os.path.join

def isSourceFile(fname):
    if fname in ["SConstruct","SConscript","build.info"]:
        return True
    ext = os.path.splitext(fname)[1]
    return ext in [".c",".cpp",".h",".inl",".ins",".fcd",".yy",".ll",".py"]

def isWindowsFile(fname):
    ext = os.path.splitext(fname)[1]
    return ext in [".dsp",".dsw"]

source_files = []
msvs_files = []

for root, dirs, files in os.walk('.'):
    source_files.extend([pj(root,f) for f in files if isSourceFile(f)])
    msvs_files.extend([pj(root,f) for f in files if isWindowsFile(f)])

print "Source files: "
for f in source_files:
    print f

print "Windows files: "
for f in msvs_files:
    print f

print "Setting eol-styles"
for f in source_files:
    cmd = "svn propset svn:eol-style native %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"

print "Setting keywords=Id"
for f in source_files:
    cmd = "svn propset svn:keywords Id %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"
Add script to set default svn properties on source files. git-svn-id: 6c7e10cdde2c115f53707ba8ec4efe535f92d362@263 4683daeb-ad0f-0410-a623-93161e962ae5#
# Run in source directory to change all file eol-styles
#
import os, sys, os.path

pj = os.path.join

def isSourceFile(fname):
    if fname in ["SConstruct","SConscript","build.info"]:
        return True
    ext = os.path.splitext(fname)[1]
    return ext in [".c",".cpp",".h",".inl",".ins",".fcd",".yy",".ll",".py"]

def isWindowsFile(fname):
    ext = os.path.splitext(fname)[1]
    return ext in [".dsp",".dsw"]

source_files = []
msvs_files = []

for root, dirs, files in os.walk('.'):
    source_files.extend([pj(root,f) for f in files if isSourceFile(f)])
    msvs_files.extend([pj(root,f) for f in files if isWindowsFile(f)])

print "Source files: "
for f in source_files:
    print f

print "Windows files: "
for f in msvs_files:
    print f

print "Setting eol-styles"
for f in source_files:
    cmd = "svn propset svn:eol-style native %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"

print "Setting keywords=Id"
for f in source_files:
    cmd = "svn propset svn:keywords Id %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"
<commit_before><commit_msg>Add script to set default svn properties on source files. git-svn-id: 6c7e10cdde2c115f53707ba8ec4efe535f92d362@263 4683daeb-ad0f-0410-a623-93161e962ae5<commit_after>#
# Run in source directory to change all file eol-styles
#
import os, sys, os.path

pj = os.path.join

def isSourceFile(fname):
    if fname in ["SConstruct","SConscript","build.info"]:
        return True
    ext = os.path.splitext(fname)[1]
    return ext in [".c",".cpp",".h",".inl",".ins",".fcd",".yy",".ll",".py"]

def isWindowsFile(fname):
    ext = os.path.splitext(fname)[1]
    return ext in [".dsp",".dsw"]

source_files = []
msvs_files = []

for root, dirs, files in os.walk('.'):
    source_files.extend([pj(root,f) for f in files if isSourceFile(f)])
    msvs_files.extend([pj(root,f) for f in files if isWindowsFile(f)])

print "Source files: "
for f in source_files:
    print f

print "Windows files: "
for f in msvs_files:
    print f

print "Setting eol-styles"
for f in source_files:
    cmd = "svn propset svn:eol-style native %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"

print "Setting keywords=Id"
for f in source_files:
    cmd = "svn propset svn:keywords Id %s"%f
    print "cmd: %s ..."%cmd,
    os.system(cmd)
    print "[OK]"
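One design note on the propset loops: interpolating file names into a shell string breaks on paths containing spaces or shell metacharacters. A safer equivalent with the same effect passes an argument list to subprocess (sketch):

# Safer variant of the os.system() calls above: no shell, no quoting issues.
import subprocess

for f in source_files:
    subprocess.check_call(['svn', 'propset', 'svn:eol-style', 'native', f])
    subprocess.check_call(['svn', 'propset', 'svn:keywords', 'Id', f])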
4fb4ce10862ad03bb889b6e8deae4a1e09a9685c
tests/test_depthmap.py
tests/test_depthmap.py
from __future__ import division, absolute_import, print_function from past.builtins import xrange import unittest import numpy.testing as testing import numpy as np import healpy as hp import fitsio from redmapper import DepthMap from redmapper import Configuration class DepthMapTestCase(unittest.TestCase): def runTest(self): """ """ file_path = "data_for_tests" conf_filename = "testconfig.yaml" config = Configuration(file_path + "/" + conf_filename) # Check the regular depth depthstr = DepthMap(config) RAs = np.array([142.10934, 142.04090, 142.09242, 142.11448, 50.0]) Decs = np.array([65.022666, 65.133844, 65.084844, 65.109541, 50.0]) comp_limmag = np.array([20.6847, 20.5915, 20.5966, 20.5966, -1.63750e+30], dtype='f4') comp_exptime = np.array([70.3742, 63.5621, 63.5621, 63.5621, -1.63750e+30], dtype='f4') comp_m50 = np.array([20.8964, 20.8517, 20.8568, 20.8568, -1.63750e+30], dtype='f4') limmag, exptime, m50 = depthstr.get_depth_values(RAs, Decs) testing.assert_almost_equal(limmag, comp_limmag, 4) testing.assert_almost_equal(exptime, comp_exptime, 4) testing.assert_almost_equal(m50, comp_m50, 4) config2 = Configuration(file_path + "/" + conf_filename) config2.hpix = 582972 config2.nside = 1024 config2.border = 0.02 depthstr2 = DepthMap(config2) limmag2, exptime2, m502 = depthstr2.get_depth_values(RAs, Decs) comp_limmag[0] = hp.UNSEEN comp_exptime[0] = hp.UNSEEN comp_m50[0] = hp.UNSEEN testing.assert_almost_equal(limmag2, comp_limmag, 4) testing.assert_almost_equal(exptime2, comp_exptime, 4) testing.assert_almost_equal(m502, comp_m50, 4) if __name__=='__main__': unittest.main()
Test for reading the depth map
Test for reading the depth map
Python
apache-2.0
erykoff/redmapper,erykoff/redmapper
Test for reading the depth map
from __future__ import division, absolute_import, print_function from past.builtins import xrange import unittest import numpy.testing as testing import numpy as np import healpy as hp import fitsio from redmapper import DepthMap from redmapper import Configuration class DepthMapTestCase(unittest.TestCase): def runTest(self): """ """ file_path = "data_for_tests" conf_filename = "testconfig.yaml" config = Configuration(file_path + "/" + conf_filename) # Check the regular depth depthstr = DepthMap(config) RAs = np.array([142.10934, 142.04090, 142.09242, 142.11448, 50.0]) Decs = np.array([65.022666, 65.133844, 65.084844, 65.109541, 50.0]) comp_limmag = np.array([20.6847, 20.5915, 20.5966, 20.5966, -1.63750e+30], dtype='f4') comp_exptime = np.array([70.3742, 63.5621, 63.5621, 63.5621, -1.63750e+30], dtype='f4') comp_m50 = np.array([20.8964, 20.8517, 20.8568, 20.8568, -1.63750e+30], dtype='f4') limmag, exptime, m50 = depthstr.get_depth_values(RAs, Decs) testing.assert_almost_equal(limmag, comp_limmag, 4) testing.assert_almost_equal(exptime, comp_exptime, 4) testing.assert_almost_equal(m50, comp_m50, 4) config2 = Configuration(file_path + "/" + conf_filename) config2.hpix = 582972 config2.nside = 1024 config2.border = 0.02 depthstr2 = DepthMap(config2) limmag2, exptime2, m502 = depthstr2.get_depth_values(RAs, Decs) comp_limmag[0] = hp.UNSEEN comp_exptime[0] = hp.UNSEEN comp_m50[0] = hp.UNSEEN testing.assert_almost_equal(limmag2, comp_limmag, 4) testing.assert_almost_equal(exptime2, comp_exptime, 4) testing.assert_almost_equal(m502, comp_m50, 4) if __name__=='__main__': unittest.main()
<commit_before><commit_msg>Test for reading the depth map<commit_after>
from __future__ import division, absolute_import, print_function from past.builtins import xrange import unittest import numpy.testing as testing import numpy as np import healpy as hp import fitsio from redmapper import DepthMap from redmapper import Configuration class DepthMapTestCase(unittest.TestCase): def runTest(self): """ """ file_path = "data_for_tests" conf_filename = "testconfig.yaml" config = Configuration(file_path + "/" + conf_filename) # Check the regular depth depthstr = DepthMap(config) RAs = np.array([142.10934, 142.04090, 142.09242, 142.11448, 50.0]) Decs = np.array([65.022666, 65.133844, 65.084844, 65.109541, 50.0]) comp_limmag = np.array([20.6847, 20.5915, 20.5966, 20.5966, -1.63750e+30], dtype='f4') comp_exptime = np.array([70.3742, 63.5621, 63.5621, 63.5621, -1.63750e+30], dtype='f4') comp_m50 = np.array([20.8964, 20.8517, 20.8568, 20.8568, -1.63750e+30], dtype='f4') limmag, exptime, m50 = depthstr.get_depth_values(RAs, Decs) testing.assert_almost_equal(limmag, comp_limmag, 4) testing.assert_almost_equal(exptime, comp_exptime, 4) testing.assert_almost_equal(m50, comp_m50, 4) config2 = Configuration(file_path + "/" + conf_filename) config2.hpix = 582972 config2.nside = 1024 config2.border = 0.02 depthstr2 = DepthMap(config2) limmag2, exptime2, m502 = depthstr2.get_depth_values(RAs, Decs) comp_limmag[0] = hp.UNSEEN comp_exptime[0] = hp.UNSEEN comp_m50[0] = hp.UNSEEN testing.assert_almost_equal(limmag2, comp_limmag, 4) testing.assert_almost_equal(exptime2, comp_exptime, 4) testing.assert_almost_equal(m502, comp_m50, 4) if __name__=='__main__': unittest.main()
Test for reading the depth mapfrom __future__ import division, absolute_import, print_function from past.builtins import xrange import unittest import numpy.testing as testing import numpy as np import healpy as hp import fitsio from redmapper import DepthMap from redmapper import Configuration class DepthMapTestCase(unittest.TestCase): def runTest(self): """ """ file_path = "data_for_tests" conf_filename = "testconfig.yaml" config = Configuration(file_path + "/" + conf_filename) # Check the regular depth depthstr = DepthMap(config) RAs = np.array([142.10934, 142.04090, 142.09242, 142.11448, 50.0]) Decs = np.array([65.022666, 65.133844, 65.084844, 65.109541, 50.0]) comp_limmag = np.array([20.6847, 20.5915, 20.5966, 20.5966, -1.63750e+30], dtype='f4') comp_exptime = np.array([70.3742, 63.5621, 63.5621, 63.5621, -1.63750e+30], dtype='f4') comp_m50 = np.array([20.8964, 20.8517, 20.8568, 20.8568, -1.63750e+30], dtype='f4') limmag, exptime, m50 = depthstr.get_depth_values(RAs, Decs) testing.assert_almost_equal(limmag, comp_limmag, 4) testing.assert_almost_equal(exptime, comp_exptime, 4) testing.assert_almost_equal(m50, comp_m50, 4) config2 = Configuration(file_path + "/" + conf_filename) config2.hpix = 582972 config2.nside = 1024 config2.border = 0.02 depthstr2 = DepthMap(config2) limmag2, exptime2, m502 = depthstr2.get_depth_values(RAs, Decs) comp_limmag[0] = hp.UNSEEN comp_exptime[0] = hp.UNSEEN comp_m50[0] = hp.UNSEEN testing.assert_almost_equal(limmag2, comp_limmag, 4) testing.assert_almost_equal(exptime2, comp_exptime, 4) testing.assert_almost_equal(m502, comp_m50, 4) if __name__=='__main__': unittest.main()
<commit_before><commit_msg>Test for reading the depth map<commit_after>from __future__ import division, absolute_import, print_function from past.builtins import xrange import unittest import numpy.testing as testing import numpy as np import healpy as hp import fitsio from redmapper import DepthMap from redmapper import Configuration class DepthMapTestCase(unittest.TestCase): def runTest(self): """ """ file_path = "data_for_tests" conf_filename = "testconfig.yaml" config = Configuration(file_path + "/" + conf_filename) # Check the regular depth depthstr = DepthMap(config) RAs = np.array([142.10934, 142.04090, 142.09242, 142.11448, 50.0]) Decs = np.array([65.022666, 65.133844, 65.084844, 65.109541, 50.0]) comp_limmag = np.array([20.6847, 20.5915, 20.5966, 20.5966, -1.63750e+30], dtype='f4') comp_exptime = np.array([70.3742, 63.5621, 63.5621, 63.5621, -1.63750e+30], dtype='f4') comp_m50 = np.array([20.8964, 20.8517, 20.8568, 20.8568, -1.63750e+30], dtype='f4') limmag, exptime, m50 = depthstr.get_depth_values(RAs, Decs) testing.assert_almost_equal(limmag, comp_limmag, 4) testing.assert_almost_equal(exptime, comp_exptime, 4) testing.assert_almost_equal(m50, comp_m50, 4) config2 = Configuration(file_path + "/" + conf_filename) config2.hpix = 582972 config2.nside = 1024 config2.border = 0.02 depthstr2 = DepthMap(config2) limmag2, exptime2, m502 = depthstr2.get_depth_values(RAs, Decs) comp_limmag[0] = hp.UNSEEN comp_exptime[0] = hp.UNSEEN comp_m50[0] = hp.UNSEEN testing.assert_almost_equal(limmag2, comp_limmag, 4) testing.assert_almost_equal(exptime2, comp_exptime, 4) testing.assert_almost_equal(m502, comp_m50, 4) if __name__=='__main__': unittest.main()
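Both expected fill values above are the same sentinel: -1.6375e+30 is healpy's hp.UNSEEN stored as float32. Downstream code therefore masks before using the depths; a small sketch reusing the names from the test (NumPy and healpy assumed):

# Masking the out-of-footprint sentinels seen in the expected arrays above.
import healpy as hp

limmag, exptime, m50 = depthstr.get_depth_values(RAs, Decs)
good = limmag > 0            # magnitudes are positive; UNSEEN is hugely negative
limmag_valid = limmag[good]  # keep only points inside the depth map footprint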
c881dbef22ac7ca26ad6abceb3c2e02d0b759040
yutu/bot.py
yutu/bot.py
import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception)
import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() self.get_command('help').after_invoke(self.post_help) async def post_help(self, ctx: commands.Context): await ctx.message.add_reaction("✅") async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception)
Make Yutu acknowledge pm'ed help commands
Make Yutu acknowledge pm'ed help commands
Python
mit
HarkonenBade/yutu
import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception) Make Yutu acknowledge pm'ed help commands
import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() self.get_command('help').after_invoke(self.post_help) async def post_help(self, ctx: commands.Context): await ctx.message.add_reaction("✅") async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception)
<commit_before>import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception) <commit_msg>Make Yutu acknowledge pm'ed help commands<commit_after>
import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() self.get_command('help').after_invoke(self.post_help) async def post_help(self, ctx: commands.Context): await ctx.message.add_reaction("✅") async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception)
import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception) Make Yutu acknowledge pm'ed help commandsimport discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() self.get_command('help').after_invoke(self.post_help) async def post_help(self, ctx: commands.Context): await ctx.message.add_reaction("✅") async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception)
<commit_before>import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception) <commit_msg>Make Yutu acknowledge pm'ed help commands<commit_after>import discord from discord.ext import commands from pony import orm DESCRIPTION = """ Hi, I'm Yutu! I'm the bot for the Velvet fan discord. I'm still learning so sorry if I do something wrong. You can ask my programmer @Harkonen if you want to know more about me. """ class Yutu(commands.Bot): def __init__(self): super().__init__(commands.when_mentioned_or("~"), game=discord.Game(name="~help"), description=DESCRIPTION, pm_help=None) self.db = orm.Database() self.get_command('help').after_invoke(self.post_help) async def post_help(self, ctx: commands.Context): await ctx.message.add_reaction("✅") async def on_ready(self): print('We have logged in as {0.user}'.format(self)) self.owner_id = (await self.application_info()).owner.id async def on_command_error(self, ctx: commands.Context, exception): if(isinstance(exception, commands.errors.MissingRequiredArgument) or isinstance(exception, commands.errors.BadArgument)): await ctx.print_help() elif isinstance(exception, commands.CommandOnCooldown): await ctx.send(content=str(exception)) else: await super().on_command_error(ctx, exception)
868c9bf69a04be5aa62d0796daf8dc0e06b2401f
lumisAndFiles.py
lumisAndFiles.py
#!/usr/bin/env python # Before running: # source /cvmfs/cms.cern.ch/crab3/crab.sh import pprint from dbs.apis.dbsClient import DbsApi url="https://cmsweb.cern.ch/dbs/prod/global/DBSReader" api=DbsApi(url=url) f = api.listFiles(run_num='296075', dataset='/ExpressPhysics/Run2017A-Express-v1/FEVT') lumis = [api.listFileLumiArray(logical_file_name=ff['logical_file_name']) for ff in f] lumis.sort(key=lambda x : x[0]['lumi_section_num']) lumi_file = ["%s %s" % (x[0]['lumi_section_num'],x[0]['logical_file_name']) for x in lumis ] pprint.pprint(lumi_file)
Add lumi for file for dataset tool
Add lumi for file for dataset tool
Python
mit
rovere/utilities,rovere/utilities,rovere/utilities,rovere/utilities
Add lumi for file for dataset tool
#!/usr/bin/env python # Before running: # source /cvmfs/cms.cern.ch/crab3/crab.sh import pprint from dbs.apis.dbsClient import DbsApi url="https://cmsweb.cern.ch/dbs/prod/global/DBSReader" api=DbsApi(url=url) f = api.listFiles(run_num='296075', dataset='/ExpressPhysics/Run2017A-Express-v1/FEVT') lumis = [api.listFileLumiArray(logical_file_name=ff['logical_file_name']) for ff in f] lumis.sort(key=lambda x : x[0]['lumi_section_num']) lumi_file = ["%s %s" % (x[0]['lumi_section_num'],x[0]['logical_file_name']) for x in lumis ] pprint.pprint(lumi_file)
<commit_before><commit_msg>Add lumi for file for dataset tool<commit_after>
#!/usr/bin/env python # Before running: # source /cvmfs/cms.cern.ch/crab3/crab.sh import pprint from dbs.apis.dbsClient import DbsApi url="https://cmsweb.cern.ch/dbs/prod/global/DBSReader" api=DbsApi(url=url) f = api.listFiles(run_num='296075', dataset='/ExpressPhysics/Run2017A-Express-v1/FEVT') lumis = [api.listFileLumiArray(logical_file_name=ff['logical_file_name']) for ff in f] lumis.sort(key=lambda x : x[0]['lumi_section_num']) lumi_file = ["%s %s" % (x[0]['lumi_section_num'],x[0]['logical_file_name']) for x in lumis ] pprint.pprint(lumi_file)
Add lumi for file for dataset tool#!/usr/bin/env python # Before running: # source /cvmfs/cms.cern.ch/crab3/crab.sh import pprint from dbs.apis.dbsClient import DbsApi url="https://cmsweb.cern.ch/dbs/prod/global/DBSReader" api=DbsApi(url=url) f = api.listFiles(run_num='296075', dataset='/ExpressPhysics/Run2017A-Express-v1/FEVT') lumis = [api.listFileLumiArray(logical_file_name=ff['logical_file_name']) for ff in f] lumis.sort(key=lambda x : x[0]['lumi_section_num']) lumi_file = ["%s %s" % (x[0]['lumi_section_num'],x[0]['logical_file_name']) for x in lumis ] pprint.pprint(lumi_file)
<commit_before><commit_msg>Add lumi for file for dataset tool<commit_after>#!/usr/bin/env python # Before running: # source /cvmfs/cms.cern.ch/crab3/crab.sh import pprint from dbs.apis.dbsClient import DbsApi url="https://cmsweb.cern.ch/dbs/prod/global/DBSReader" api=DbsApi(url=url) f = api.listFiles(run_num='296075', dataset='/ExpressPhysics/Run2017A-Express-v1/FEVT') lumis = [api.listFileLumiArray(logical_file_name=ff['logical_file_name']) for ff in f] lumis.sort(key=lambda x : x[0]['lumi_section_num']) lumi_file = ["%s %s" % (x[0]['lumi_section_num'],x[0]['logical_file_name']) for x in lumis ] pprint.pprint(lumi_file)
6c1f5364a3a8862b54121ab28ade5d41c101c056
landlab/_info.py
landlab/_info.py
version = '1.0.2' name = 'landlab' cite_as = [ """@article{hobley2017creative, title={Creative computing with Landlab: an open-source toolkit for building, coupling, and exploring two-dimensional numerical models of Earth-surface dynamics}, author={Hobley, Daniel EJ and Adams, Jordan M and Nudurupati, Sai Siddhartha and Hutton, Eric WH and Gasparini, Nicole M and Istanbulluoglu, Erkan and Tucker, Gregory E}, journal={Earth Surface Dynamics}, volume={5}, number={1}, pages={21}, year={2017}, publisher={Copernicus GmbH} }""", ]
Add private module with landlab info.
Add private module with landlab info.
Python
mit
landlab/landlab,landlab/landlab,cmshobe/landlab,amandersillinois/landlab,cmshobe/landlab,amandersillinois/landlab,landlab/landlab,cmshobe/landlab
Add private module with landlab info.
version = '1.0.2' name = 'landlab' cite_as = [ """@article{hobley2017creative, title={Creative computing with Landlab: an open-source toolkit for building, coupling, and exploring two-dimensional numerical models of Earth-surface dynamics}, author={Hobley, Daniel EJ and Adams, Jordan M and Nudurupati, Sai Siddhartha and Hutton, Eric WH and Gasparini, Nicole M and Istanbulluoglu, Erkan and Tucker, Gregory E}, journal={Earth Surface Dynamics}, volume={5}, number={1}, pages={21}, year={2017}, publisher={Copernicus GmbH} }""", ]
<commit_before><commit_msg>Add private module with landlab info.<commit_after>
version = '1.0.2' name = 'landlab' cite_as = [ """@article{hobley2017creative, title={Creative computing with Landlab: an open-source toolkit for building, coupling, and exploring two-dimensional numerical models of Earth-surface dynamics}, author={Hobley, Daniel EJ and Adams, Jordan M and Nudurupati, Sai Siddhartha and Hutton, Eric WH and Gasparini, Nicole M and Istanbulluoglu, Erkan and Tucker, Gregory E}, journal={Earth Surface Dynamics}, volume={5}, number={1}, pages={21}, year={2017}, publisher={Copernicus GmbH} }""", ]
Add private module with landlab info.version = '1.0.2' name = 'landlab' cite_as = [ """@article{hobley2017creative, title={Creative computing with Landlab: an open-source toolkit for building, coupling, and exploring two-dimensional numerical models of Earth-surface dynamics}, author={Hobley, Daniel EJ and Adams, Jordan M and Nudurupati, Sai Siddhartha and Hutton, Eric WH and Gasparini, Nicole M and Istanbulluoglu, Erkan and Tucker, Gregory E}, journal={Earth Surface Dynamics}, volume={5}, number={1}, pages={21}, year={2017}, publisher={Copernicus GmbH} }""", ]
<commit_before><commit_msg>Add private module with landlab info.<commit_after>version = '1.0.2' name = 'landlab' cite_as = [ """@article{hobley2017creative, title={Creative computing with Landlab: an open-source toolkit for building, coupling, and exploring two-dimensional numerical models of Earth-surface dynamics}, author={Hobley, Daniel EJ and Adams, Jordan M and Nudurupati, Sai Siddhartha and Hutton, Eric WH and Gasparini, Nicole M and Istanbulluoglu, Erkan and Tucker, Gregory E}, journal={Earth Surface Dynamics}, volume={5}, number={1}, pages={21}, year={2017}, publisher={Copernicus GmbH} }""", ]
d6050249f44304813d2bba120f9c232c0f158a34
bioagents/tests/test_model_diagnoser.py
bioagents/tests/test_model_diagnoser.py
from indra.statements import * from bioagents.mra.model_diagnoser import ModelDiagnoser drug = Agent('PLX4720') raf = Agent('RAF') mek = Agent('MEK') erk = Agent('ERK') def test_missing_activity1(): stmts = [Activation(raf, mek), Phosphorylation(mek, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].enz.name == 'MEK' assert suggs[0].enz.activity assert suggs[0].enz.activity.activity_type == 'activity' def test_missing_activity2(): stmts = [Inhibition(drug, raf), Activation(raf, mek)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].subj.name == 'RAF' assert suggs[0].subj.activity assert suggs[0].subj.activity.activity_type == 'activity' def test_missing_activity3(): stmts = [Activation(raf, mek), Activation(raf, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 0
Test suggestions for missing activity
Test suggestions for missing activity
Python
bsd-2-clause
bgyori/bioagents,sorgerlab/bioagents
Test suggestions for missing activity
from indra.statements import * from bioagents.mra.model_diagnoser import ModelDiagnoser drug = Agent('PLX4720') raf = Agent('RAF') mek = Agent('MEK') erk = Agent('ERK') def test_missing_activity1(): stmts = [Activation(raf, mek), Phosphorylation(mek, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].enz.name == 'MEK' assert suggs[0].enz.activity assert suggs[0].enz.activity.activity_type == 'activity' def test_missing_activity2(): stmts = [Inhibition(drug, raf), Activation(raf, mek)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].subj.name == 'RAF' assert suggs[0].subj.activity assert suggs[0].subj.activity.activity_type == 'activity' def test_missing_activity3(): stmts = [Activation(raf, mek), Activation(raf, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 0
<commit_before><commit_msg>Test suggestions for missing activity<commit_after>
from indra.statements import * from bioagents.mra.model_diagnoser import ModelDiagnoser drug = Agent('PLX4720') raf = Agent('RAF') mek = Agent('MEK') erk = Agent('ERK') def test_missing_activity1(): stmts = [Activation(raf, mek), Phosphorylation(mek, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].enz.name == 'MEK' assert suggs[0].enz.activity assert suggs[0].enz.activity.activity_type == 'activity' def test_missing_activity2(): stmts = [Inhibition(drug, raf), Activation(raf, mek)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].subj.name == 'RAF' assert suggs[0].subj.activity assert suggs[0].subj.activity.activity_type == 'activity' def test_missing_activity3(): stmts = [Activation(raf, mek), Activation(raf, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 0
Test suggestions for missing activityfrom indra.statements import * from bioagents.mra.model_diagnoser import ModelDiagnoser drug = Agent('PLX4720') raf = Agent('RAF') mek = Agent('MEK') erk = Agent('ERK') def test_missing_activity1(): stmts = [Activation(raf, mek), Phosphorylation(mek, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].enz.name == 'MEK' assert suggs[0].enz.activity assert suggs[0].enz.activity.activity_type == 'activity' def test_missing_activity2(): stmts = [Inhibition(drug, raf), Activation(raf, mek)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].subj.name == 'RAF' assert suggs[0].subj.activity assert suggs[0].subj.activity.activity_type == 'activity' def test_missing_activity3(): stmts = [Activation(raf, mek), Activation(raf, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 0
<commit_before><commit_msg>Test suggestions for missing activity<commit_after>from indra.statements import * from bioagents.mra.model_diagnoser import ModelDiagnoser drug = Agent('PLX4720') raf = Agent('RAF') mek = Agent('MEK') erk = Agent('ERK') def test_missing_activity1(): stmts = [Activation(raf, mek), Phosphorylation(mek, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].enz.name == 'MEK' assert suggs[0].enz.activity assert suggs[0].enz.activity.activity_type == 'activity' def test_missing_activity2(): stmts = [Inhibition(drug, raf), Activation(raf, mek)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 1 assert suggs[0].subj.name == 'RAF' assert suggs[0].subj.activity assert suggs[0].subj.activity.activity_type == 'activity' def test_missing_activity3(): stmts = [Activation(raf, mek), Activation(raf, erk)] md = ModelDiagnoser(stmts) suggs = md.get_missing_activities() assert len(suggs) == 0
c9b844ca7a5693de59cca2e4c3d5e1cdf4b13270
py/detect-capital.py
py/detect-capital.py
class Solution(object): def detectCapitalUse(self, word): """ :type word: str :rtype: bool """ if word.islower(): return True if word.isupper(): return True if word[1:].islower(): return True return False
Add py solution for 520. Detect Capital
Add py solution for 520. Detect Capital 520. Detect Capital: https://leetcode.com/problems/detect-capital/
Python
apache-2.0
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
Add py solution for 520. Detect Capital 520. Detect Capital: https://leetcode.com/problems/detect-capital/
class Solution(object): def detectCapitalUse(self, word): """ :type word: str :rtype: bool """ if word.islower(): return True if word.isupper(): return True if word[1:].islower(): return True return False
<commit_before><commit_msg>Add py solution for 520. Detect Capital 520. Detect Capital: https://leetcode.com/problems/detect-capital/<commit_after>
class Solution(object): def detectCapitalUse(self, word): """ :type word: str :rtype: bool """ if word.islower(): return True if word.isupper(): return True if word[1:].islower(): return True return False
Add py solution for 520. Detect Capital 520. Detect Capital: https://leetcode.com/problems/detect-capital/class Solution(object): def detectCapitalUse(self, word): """ :type word: str :rtype: bool """ if word.islower(): return True if word.isupper(): return True if word[1:].islower(): return True return False
<commit_before><commit_msg>Add py solution for 520. Detect Capital 520. Detect Capital: https://leetcode.com/problems/detect-capital/<commit_after>class Solution(object): def detectCapitalUse(self, word): """ :type word: str :rtype: bool """ if word.islower(): return True if word.isupper(): return True if word[1:].islower(): return True return False
427ee2412af3143b3808e18a26ed2079b510ee43
tests/webcam_read_qr.py
tests/webcam_read_qr.py
#!/usr/bin/env python
"""
This module sets up a video stream from internal or connected webcam using Gstreamer.
You can then take snapshots.

import qrtools
qr = qrtools.QR()
qr.decode("cam.jpg")
print qr.data
"""
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
from gi.repository import Gtk as gtk
from gi.repository import Gdk
from gi.repository import Gst as gst
from gi.repository import GdkPixbuf
from avocado import Test
from os.path import exists, relpath
import qrtools
import time
#import pyqrcode


class WebcamReadQR(Test):

    def setUp(self):
        # if not exists('/dev/video0'):
        #     self.skip("No webcam detected: /dev/video0 cannot be found");
        self.device = '/dev/video0'
        Gdk.threads_init()
        gtk.main()
        self.take_snapshot()

    def test(self):
        self.create_video_pipeline()

    def create_video_pipeline(self):
        gst.init([])
        #v4l2src
        self.video_player = gst.parse_launch("videotestsrc ! jpegenc ! filesink location=cam.jpg")
        self.video_player.set_state(gst.State.PLAYING)
        bus = self.video_player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)
        bus.enable_sync_message_emission()
        bus.connect("sync-message::element", self.on_sync_message)

    def on_message(self, bus, message):
        t = message.type
        if t == gst.MessageType.EOS:
            self.exit()
        elif t == gst.MessageType.ERROR:
            self.exit()
            self.fail("Error {0}".format(message.parse_error()))

    def on_sync_message(self, bus, message):
        if message.structure is None:
            return
        message_name = message.structure.get_name()

    def exit(self):
        self.video_player.set_state(gst.State.NULL)
        gtk.main_quit()

    def take_snapshot(self):
        #TODO:fill this in
        pass
Put gst code into Avocado test format. Needs to be edited to take a snapshot and read the qr code.
Put gst code into Avocado test format. Needs to be edited to take a snapshot and read the qr code.
Python
mit
daveol/Fedora-Test-Laptop,daveol/Fedora-Test-Laptop
Put gst code into Avocado test format. Needs to be edited to take a snapshot and read the qr code.
#!/usr/bin/env python
"""
This module sets up a video stream from internal or connected webcam using Gstreamer.
You can then take snapshots.

import qrtools
qr = qrtools.QR()
qr.decode("cam.jpg")
print qr.data
"""
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
from gi.repository import Gtk as gtk
from gi.repository import Gdk
from gi.repository import Gst as gst
from gi.repository import GdkPixbuf
from avocado import Test
from os.path import exists, relpath
import qrtools
import time
#import pyqrcode


class WebcamReadQR(Test):

    def setUp(self):
        # if not exists('/dev/video0'):
        #     self.skip("No webcam detected: /dev/video0 cannot be found");
        self.device = '/dev/video0'
        Gdk.threads_init()
        gtk.main()
        self.take_snapshot()

    def test(self):
        self.create_video_pipeline()

    def create_video_pipeline(self):
        gst.init([])
        #v4l2src
        self.video_player = gst.parse_launch("videotestsrc ! jpegenc ! filesink location=cam.jpg")
        self.video_player.set_state(gst.State.PLAYING)
        bus = self.video_player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)
        bus.enable_sync_message_emission()
        bus.connect("sync-message::element", self.on_sync_message)

    def on_message(self, bus, message):
        t = message.type
        if t == gst.MessageType.EOS:
            self.exit()
        elif t == gst.MessageType.ERROR:
            self.exit()
            self.fail("Error {0}".format(message.parse_error()))

    def on_sync_message(self, bus, message):
        if message.structure is None:
            return
        message_name = message.structure.get_name()

    def exit(self):
        self.video_player.set_state(gst.State.NULL)
        gtk.main_quit()

    def take_snapshot(self):
        #TODO:fill this in
        pass
<commit_before><commit_msg>Put gst code into Avocado test format. Needs to be edited to take a snapshot and read the qr code.<commit_after>
#!/usr/bin/env python
"""
This module sets up a video stream from internal or connected webcam using Gstreamer.
You can then take snapshots.

import qrtools
qr = qrtools.QR()
qr.decode("cam.jpg")
print qr.data
"""
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
from gi.repository import Gtk as gtk
from gi.repository import Gdk
from gi.repository import Gst as gst
from gi.repository import GdkPixbuf
from avocado import Test
from os.path import exists, relpath
import qrtools
import time
#import pyqrcode


class WebcamReadQR(Test):

    def setUp(self):
        # if not exists('/dev/video0'):
        #     self.skip("No webcam detected: /dev/video0 cannot be found");
        self.device = '/dev/video0'
        Gdk.threads_init()
        gtk.main()
        self.take_snapshot()

    def test(self):
        self.create_video_pipeline()

    def create_video_pipeline(self):
        gst.init([])
        #v4l2src
        self.video_player = gst.parse_launch("videotestsrc ! jpegenc ! filesink location=cam.jpg")
        self.video_player.set_state(gst.State.PLAYING)
        bus = self.video_player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)
        bus.enable_sync_message_emission()
        bus.connect("sync-message::element", self.on_sync_message)

    def on_message(self, bus, message):
        t = message.type
        if t == gst.MessageType.EOS:
            self.exit()
        elif t == gst.MessageType.ERROR:
            self.exit()
            self.fail("Error {0}".format(message.parse_error()))

    def on_sync_message(self, bus, message):
        if message.structure is None:
            return
        message_name = message.structure.get_name()

    def exit(self):
        self.video_player.set_state(gst.State.NULL)
        gtk.main_quit()

    def take_snapshot(self):
        #TODO:fill this in
        pass
Put gst code into Avocado test format. Needs to be edited to take a snapshot and read the qr code.#!/usr/bin/env python
"""
This module sets up a video stream from internal or connected webcam using Gstreamer.
You can then take snapshots.

import qrtools
qr = qrtools.QR()
qr.decode("cam.jpg")
print qr.data
"""
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
from gi.repository import Gtk as gtk
from gi.repository import Gdk
from gi.repository import Gst as gst
from gi.repository import GdkPixbuf
from avocado import Test
from os.path import exists, relpath
import qrtools
import time
#import pyqrcode


class WebcamReadQR(Test):

    def setUp(self):
        # if not exists('/dev/video0'):
        #     self.skip("No webcam detected: /dev/video0 cannot be found");
        self.device = '/dev/video0'
        Gdk.threads_init()
        gtk.main()
        self.take_snapshot()

    def test(self):
        self.create_video_pipeline()

    def create_video_pipeline(self):
        gst.init([])
        #v4l2src
        self.video_player = gst.parse_launch("videotestsrc ! jpegenc ! filesink location=cam.jpg")
        self.video_player.set_state(gst.State.PLAYING)
        bus = self.video_player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)
        bus.enable_sync_message_emission()
        bus.connect("sync-message::element", self.on_sync_message)

    def on_message(self, bus, message):
        t = message.type
        if t == gst.MessageType.EOS:
            self.exit()
        elif t == gst.MessageType.ERROR:
            self.exit()
            self.fail("Error {0}".format(message.parse_error()))

    def on_sync_message(self, bus, message):
        if message.structure is None:
            return
        message_name = message.structure.get_name()

    def exit(self):
        self.video_player.set_state(gst.State.NULL)
        gtk.main_quit()

    def take_snapshot(self):
        #TODO:fill this in
        pass
<commit_before><commit_msg>Put gst code into Avocado test format. Needs to be edited to take a snapshot and read the qr code.<commit_after>#!/usr/bin/env python
"""
This module sets up a video stream from internal or connected webcam using Gstreamer.
You can then take snapshots.

import qrtools
qr = qrtools.QR()
qr.decode("cam.jpg")
print qr.data
"""
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gst', '1.0')
from gi.repository import Gtk as gtk
from gi.repository import Gdk
from gi.repository import Gst as gst
from gi.repository import GdkPixbuf
from avocado import Test
from os.path import exists, relpath
import qrtools
import time
#import pyqrcode


class WebcamReadQR(Test):

    def setUp(self):
        # if not exists('/dev/video0'):
        #     self.skip("No webcam detected: /dev/video0 cannot be found");
        self.device = '/dev/video0'
        Gdk.threads_init()
        gtk.main()
        self.take_snapshot()

    def test(self):
        self.create_video_pipeline()

    def create_video_pipeline(self):
        gst.init([])
        #v4l2src
        self.video_player = gst.parse_launch("videotestsrc ! jpegenc ! filesink location=cam.jpg")
        self.video_player.set_state(gst.State.PLAYING)
        bus = self.video_player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)
        bus.enable_sync_message_emission()
        bus.connect("sync-message::element", self.on_sync_message)

    def on_message(self, bus, message):
        t = message.type
        if t == gst.MessageType.EOS:
            self.exit()
        elif t == gst.MessageType.ERROR:
            self.exit()
            self.fail("Error {0}".format(message.parse_error()))

    def on_sync_message(self, bus, message):
        if message.structure is None:
            return
        message_name = message.structure.get_name()

    def exit(self):
        self.video_player.set_state(gst.State.NULL)
        gtk.main_quit()

    def take_snapshot(self):
        #TODO:fill this in
        pass
26a66b90f3e1a63ae91eb2eac08a580b4be6a3c4
counting/mapper.py
counting/mapper.py
from contextlib import closing import subprocess import logging import do logger = logging.getLogger("Main") def g37_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/Assembled_chromosomes/chr_accessions_GRCh37.p13" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/GFF/ref_GRCh37.p13_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g38_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/Assembled_chromosomes/chr_accessions_GRCh38.p2" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/GFF/ref_GRCh38.p2_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g_map(url_map, url_ann, out_fn): cl = ("wget -q -O - {url_map}").format(**locals()) cl = cl.split(" ") proc = subprocess.Popen(cl, stdout=subprocess.PIPE) d_map = {} with closing(proc.stdout) as stdout: for line in iter(stdout.readline,''): cols = line.split("\t") d_map[cols[1]] = cols[0] cl = ("wget -q -O tmp.gz {url_ann}").format(**locals()).split(" ") do.run(cl) cl = ["zcat" ,"tmp.gz"] proc = subprocess.Popen(cl,stdout=subprocess.PIPE) logger.info("Creating GTF file %s" % out_fn) with closing(proc.stdout) as stdout: with open(out_fn, "w") as out_h: for line in iter(stdout.readline,''): cols = line.strip().split("\t") if line.startswith("#") or cols[2] == "region": continue if cols[0] in d_map: cols[0] = d_map[cols[0]] # cols[8] = cols[8].replace("=", " ") print >>out_h, "\t".join(cols)
Add smart functions to get the correct gene annotation
Add smart functions to get the correct gene annotation
Python
cc0-1.0
NCBI-Hackathons/rnaseqview,NCBI-Hackathons/rnaseqview,NCBI-Hackathons/rnaseqview,NCBI-Hackathons/rnaseqview,NCBI-Hackathons/rnaseqview
Add smart functions to get the correct gene annotation
from contextlib import closing import subprocess import logging import do logger = logging.getLogger("Main") def g37_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/Assembled_chromosomes/chr_accessions_GRCh37.p13" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/GFF/ref_GRCh37.p13_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g38_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/Assembled_chromosomes/chr_accessions_GRCh38.p2" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/GFF/ref_GRCh38.p2_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g_map(url_map, url_ann, out_fn): cl = ("wget -q -O - {url_map}").format(**locals()) cl = cl.split(" ") proc = subprocess.Popen(cl, stdout=subprocess.PIPE) d_map = {} with closing(proc.stdout) as stdout: for line in iter(stdout.readline,''): cols = line.split("\t") d_map[cols[1]] = cols[0] cl = ("wget -q -O tmp.gz {url_ann}").format(**locals()).split(" ") do.run(cl) cl = ["zcat" ,"tmp.gz"] proc = subprocess.Popen(cl,stdout=subprocess.PIPE) logger.info("Creating GTF file %s" % out_fn) with closing(proc.stdout) as stdout: with open(out_fn, "w") as out_h: for line in iter(stdout.readline,''): cols = line.strip().split("\t") if line.startswith("#") or cols[2] == "region": continue if cols[0] in d_map: cols[0] = d_map[cols[0]] # cols[8] = cols[8].replace("=", " ") print >>out_h, "\t".join(cols)
<commit_before><commit_msg>Add smart functions to get the correct gene annotation<commit_after>
from contextlib import closing import subprocess import logging import do logger = logging.getLogger("Main") def g37_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/Assembled_chromosomes/chr_accessions_GRCh37.p13" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/GFF/ref_GRCh37.p13_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g38_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/Assembled_chromosomes/chr_accessions_GRCh38.p2" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/GFF/ref_GRCh38.p2_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g_map(url_map, url_ann, out_fn): cl = ("wget -q -O - {url_map}").format(**locals()) cl = cl.split(" ") proc = subprocess.Popen(cl, stdout=subprocess.PIPE) d_map = {} with closing(proc.stdout) as stdout: for line in iter(stdout.readline,''): cols = line.split("\t") d_map[cols[1]] = cols[0] cl = ("wget -q -O tmp.gz {url_ann}").format(**locals()).split(" ") do.run(cl) cl = ["zcat" ,"tmp.gz"] proc = subprocess.Popen(cl,stdout=subprocess.PIPE) logger.info("Creating GTF file %s" % out_fn) with closing(proc.stdout) as stdout: with open(out_fn, "w") as out_h: for line in iter(stdout.readline,''): cols = line.strip().split("\t") if line.startswith("#") or cols[2] == "region": continue if cols[0] in d_map: cols[0] = d_map[cols[0]] # cols[8] = cols[8].replace("=", " ") print >>out_h, "\t".join(cols)
Add smart functions to get the correct gene annotationfrom contextlib import closing import subprocess import logging import do logger = logging.getLogger("Main") def g37_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/Assembled_chromosomes/chr_accessions_GRCh37.p13" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/GFF/ref_GRCh37.p13_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g38_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/Assembled_chromosomes/chr_accessions_GRCh38.p2" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/GFF/ref_GRCh38.p2_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g_map(url_map, url_ann, out_fn): cl = ("wget -q -O - {url_map}").format(**locals()) cl = cl.split(" ") proc = subprocess.Popen(cl, stdout=subprocess.PIPE) d_map = {} with closing(proc.stdout) as stdout: for line in iter(stdout.readline,''): cols = line.split("\t") d_map[cols[1]] = cols[0] cl = ("wget -q -O tmp.gz {url_ann}").format(**locals()).split(" ") do.run(cl) cl = ["zcat" ,"tmp.gz"] proc = subprocess.Popen(cl,stdout=subprocess.PIPE) logger.info("Creating GTF file %s" % out_fn) with closing(proc.stdout) as stdout: with open(out_fn, "w") as out_h: for line in iter(stdout.readline,''): cols = line.strip().split("\t") if line.startswith("#") or cols[2] == "region": continue if cols[0] in d_map: cols[0] = d_map[cols[0]] # cols[8] = cols[8].replace("=", " ") print >>out_h, "\t".join(cols)
<commit_before><commit_msg>Add smart functions to get the correct gene annotation<commit_after>from contextlib import closing import subprocess import logging import do logger = logging.getLogger("Main") def g37_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/Assembled_chromosomes/chr_accessions_GRCh37.p13" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/ARCHIVE/ANNOTATION_RELEASE.105/GFF/ref_GRCh37.p13_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g38_map(out_fn): url_map = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/Assembled_chromosomes/chr_accessions_GRCh38.p2" url_ann = "http://ftp.ncbi.nlm.nih.gov/genomes/Homo_sapiens/GFF/ref_GRCh38.p2_top_level.gff3.gz" g_map(url_map, url_ann, out_fn) def g_map(url_map, url_ann, out_fn): cl = ("wget -q -O - {url_map}").format(**locals()) cl = cl.split(" ") proc = subprocess.Popen(cl, stdout=subprocess.PIPE) d_map = {} with closing(proc.stdout) as stdout: for line in iter(stdout.readline,''): cols = line.split("\t") d_map[cols[1]] = cols[0] cl = ("wget -q -O tmp.gz {url_ann}").format(**locals()).split(" ") do.run(cl) cl = ["zcat" ,"tmp.gz"] proc = subprocess.Popen(cl,stdout=subprocess.PIPE) logger.info("Creating GTF file %s" % out_fn) with closing(proc.stdout) as stdout: with open(out_fn, "w") as out_h: for line in iter(stdout.readline,''): cols = line.strip().split("\t") if line.startswith("#") or cols[2] == "region": continue if cols[0] in d_map: cols[0] = d_map[cols[0]] # cols[8] = cols[8].replace("=", " ") print >>out_h, "\t".join(cols)
fe4f86b9635fadd6a0c79065e4c9888327e31b80
DeleteDataFromUrlTest.py
DeleteDataFromUrlTest.py
__author__ = 'chuqiao'

import script

# DELETE ALL DATA
script.deleteDataInSolr()

# ADD DATA FROM 2 SOURCES
script.addDataToSolrFromUrl("http://www.elixir-europe.org:8080/events", "http://www.elixir-europe.org:8080/events");
script.addDataToSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All", "http://localhost/ep/events");

# DELETE DATA FROM 1 SOURCE
# script.deleteDataInSolrFromUrl("http://www.elixir-europe.org:8080/events")
script.deleteDataInSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All")

# script.deleteDataInSolrByQuery('source:("http://www.elixir-europe.org:8080/events")')
# script.deleteDataInSolrByQuery('source:("http://localhost/ep/events" AND "state=published" AND "field_type_tid=All")')
Create delete data from url test script
Create delete data from url test script
Python
mit
elixirhub/events-portal-scraping-scripts
Create delete data from url test script
__author__ = 'chuqiao'

import script

# DELETE ALL DATA
script.deleteDataInSolr()

# ADD DATA FROM 2 SOURCES
script.addDataToSolrFromUrl("http://www.elixir-europe.org:8080/events", "http://www.elixir-europe.org:8080/events");
script.addDataToSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All", "http://localhost/ep/events");

# DELETE DATA FROM 1 SOURCE
# script.deleteDataInSolrFromUrl("http://www.elixir-europe.org:8080/events")
script.deleteDataInSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All")

# script.deleteDataInSolrByQuery('source:("http://www.elixir-europe.org:8080/events")')
# script.deleteDataInSolrByQuery('source:("http://localhost/ep/events" AND "state=published" AND "field_type_tid=All")')
<commit_before><commit_msg>Create delete data from url test script<commit_after>
__author__ = 'chuqiao'

import script

# DELETE ALL DATA
script.deleteDataInSolr()

# ADD DATA FROM 2 SOURCES
script.addDataToSolrFromUrl("http://www.elixir-europe.org:8080/events", "http://www.elixir-europe.org:8080/events");
script.addDataToSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All", "http://localhost/ep/events");

# DELETE DATA FROM 1 SOURCE
# script.deleteDataInSolrFromUrl("http://www.elixir-europe.org:8080/events")
script.deleteDataInSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All")

# script.deleteDataInSolrByQuery('source:("http://www.elixir-europe.org:8080/events")')
# script.deleteDataInSolrByQuery('source:("http://localhost/ep/events" AND "state=published" AND "field_type_tid=All")')
Create delete data from url test script__author__ = 'chuqiao'

import script

# DELETE ALL DATA
script.deleteDataInSolr()

# ADD DATA FROM 2 SOURCES
script.addDataToSolrFromUrl("http://www.elixir-europe.org:8080/events", "http://www.elixir-europe.org:8080/events");
script.addDataToSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All", "http://localhost/ep/events");

# DELETE DATA FROM 1 SOURCE
# script.deleteDataInSolrFromUrl("http://www.elixir-europe.org:8080/events")
script.deleteDataInSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All")

# script.deleteDataInSolrByQuery('source:("http://www.elixir-europe.org:8080/events")')
# script.deleteDataInSolrByQuery('source:("http://localhost/ep/events" AND "state=published" AND "field_type_tid=All")')
<commit_before><commit_msg>Create delete data from url test script<commit_after>__author__ = 'chuqiao'

import script

# DELETE ALL DATA
script.deleteDataInSolr()

# ADD DATA FROM 2 SOURCES
script.addDataToSolrFromUrl("http://www.elixir-europe.org:8080/events", "http://www.elixir-europe.org:8080/events");
script.addDataToSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All", "http://localhost/ep/events");

# DELETE DATA FROM 1 SOURCE
# script.deleteDataInSolrFromUrl("http://www.elixir-europe.org:8080/events")
script.deleteDataInSolrFromUrl("http://localhost/ep/events?state=published&field_type_tid=All")

# script.deleteDataInSolrByQuery('source:("http://www.elixir-europe.org:8080/events")')
# script.deleteDataInSolrByQuery('source:("http://localhost/ep/events" AND "state=published" AND "field_type_tid=All")')
b88118fb323625f0571961b7f8ac40294c85b741
examples/volume_by_process_example.py
examples/volume_by_process_example.py
""" Mutes the volume of all processes, but unmutes chrome.exe process. """ from pycaw import AudioUtilities def main(): sessions = AudioUtilities.GetAllSessions() for session in sessions: volume = session.SimpleAudioVolume if session.Process and session.Process.name() == "chrome.exe": volume.SetMute(0, None) else: volume.SetMute(1, None) if __name__ == "__main__": main()
Change volume per process example
Change volume per process example Mutes the volume of all processes, but unmutes chrome.exe process.
Python
mit
AndreMiras/pycaw
Change volume per process example Mutes the volume of all processes, but unmutes chrome.exe process.
""" Mutes the volume of all processes, but unmutes chrome.exe process. """ from pycaw import AudioUtilities def main(): sessions = AudioUtilities.GetAllSessions() for session in sessions: volume = session.SimpleAudioVolume if session.Process and session.Process.name() == "chrome.exe": volume.SetMute(0, None) else: volume.SetMute(1, None) if __name__ == "__main__": main()
<commit_before><commit_msg>Change volume per process example Mutes the volume of all processes, but unmutes chrome.exe process.<commit_after>
""" Mutes the volume of all processes, but unmutes chrome.exe process. """ from pycaw import AudioUtilities def main(): sessions = AudioUtilities.GetAllSessions() for session in sessions: volume = session.SimpleAudioVolume if session.Process and session.Process.name() == "chrome.exe": volume.SetMute(0, None) else: volume.SetMute(1, None) if __name__ == "__main__": main()
Change volume per process example Mutes the volume of all processes, but unmutes chrome.exe process.""" Mutes the volume of all processes, but unmutes chrome.exe process. """ from pycaw import AudioUtilities def main(): sessions = AudioUtilities.GetAllSessions() for session in sessions: volume = session.SimpleAudioVolume if session.Process and session.Process.name() == "chrome.exe": volume.SetMute(0, None) else: volume.SetMute(1, None) if __name__ == "__main__": main()
<commit_before><commit_msg>Change volume per process example Mutes the volume of all processes, but unmutes chrome.exe process.<commit_after>""" Mutes the volume of all processes, but unmutes chrome.exe process. """ from pycaw import AudioUtilities def main(): sessions = AudioUtilities.GetAllSessions() for session in sessions: volume = session.SimpleAudioVolume if session.Process and session.Process.name() == "chrome.exe": volume.SetMute(0, None) else: volume.SetMute(1, None) if __name__ == "__main__": main()
e50d37fb89fc9bd80499b2db282afa310965017d
tests/unit/boundary/measurement_get_test.py
tests/unit/boundary/measurement_get_test.py
#!/usr/bin/env python # # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest import datetime import json from boundary import MeasurementGet class TestMeasurementGet(unittest.TestCase): def setUp(self): self.mg = MeasurementGet() self.now = datetime.datetime.now() self.now_epoch = self.now.strftime("%s") pass # def test_parse_datetime(self): # out = self.mg.parse_time_date('2015-06-10') # print(out) # print(out.strftime("%s")) # out = self.mg.parse_time_date(out.strftime("%s")) # print(out) def test_datetime_to_json(self): j = {} j['start'] =self.now.strftime('%s') out = json.dumps(j) print(out) out = self.mg.parse_time_date(self.now.strftime("%s")) print(out.strftime("%s")) print(self.now_epoch)
Add test class for MeasurementGet
Add test class for MeasurementGet
Python
apache-2.0
boundary/pulse-api-cli,jdgwartney/boundary-api-cli,boundary/pulse-api-cli,boundary/boundary-api-cli,jdgwartney/boundary-api-cli,jdgwartney/pulse-api-cli,boundary/boundary-api-cli,wcainboundary/boundary-api-cli,jdgwartney/pulse-api-cli,wcainboundary/boundary-api-cli
Add test class for MeasurementGet
#!/usr/bin/env python # # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest import datetime import json from boundary import MeasurementGet class TestMeasurementGet(unittest.TestCase): def setUp(self): self.mg = MeasurementGet() self.now = datetime.datetime.now() self.now_epoch = self.now.strftime("%s") pass # def test_parse_datetime(self): # out = self.mg.parse_time_date('2015-06-10') # print(out) # print(out.strftime("%s")) # out = self.mg.parse_time_date(out.strftime("%s")) # print(out) def test_datetime_to_json(self): j = {} j['start'] =self.now.strftime('%s') out = json.dumps(j) print(out) out = self.mg.parse_time_date(self.now.strftime("%s")) print(out.strftime("%s")) print(self.now_epoch)
<commit_before><commit_msg>Add test class for MeasurementGet<commit_after>
#!/usr/bin/env python # # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest import datetime import json from boundary import MeasurementGet class TestMeasurementGet(unittest.TestCase): def setUp(self): self.mg = MeasurementGet() self.now = datetime.datetime.now() self.now_epoch = self.now.strftime("%s") pass # def test_parse_datetime(self): # out = self.mg.parse_time_date('2015-06-10') # print(out) # print(out.strftime("%s")) # out = self.mg.parse_time_date(out.strftime("%s")) # print(out) def test_datetime_to_json(self): j = {} j['start'] =self.now.strftime('%s') out = json.dumps(j) print(out) out = self.mg.parse_time_date(self.now.strftime("%s")) print(out.strftime("%s")) print(self.now_epoch)
Add test class for MeasurementGet#!/usr/bin/env python # # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest import datetime import json from boundary import MeasurementGet class TestMeasurementGet(unittest.TestCase): def setUp(self): self.mg = MeasurementGet() self.now = datetime.datetime.now() self.now_epoch = self.now.strftime("%s") pass # def test_parse_datetime(self): # out = self.mg.parse_time_date('2015-06-10') # print(out) # print(out.strftime("%s")) # out = self.mg.parse_time_date(out.strftime("%s")) # print(out) def test_datetime_to_json(self): j = {} j['start'] =self.now.strftime('%s') out = json.dumps(j) print(out) out = self.mg.parse_time_date(self.now.strftime("%s")) print(out.strftime("%s")) print(self.now_epoch)
<commit_before><commit_msg>Add test class for MeasurementGet<commit_after>#!/usr/bin/env python # # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest import datetime import json from boundary import MeasurementGet class TestMeasurementGet(unittest.TestCase): def setUp(self): self.mg = MeasurementGet() self.now = datetime.datetime.now() self.now_epoch = self.now.strftime("%s") pass # def test_parse_datetime(self): # out = self.mg.parse_time_date('2015-06-10') # print(out) # print(out.strftime("%s")) # out = self.mg.parse_time_date(out.strftime("%s")) # print(out) def test_datetime_to_json(self): j = {} j['start'] =self.now.strftime('%s') out = json.dumps(j) print(out) out = self.mg.parse_time_date(self.now.strftime("%s")) print(out.strftime("%s")) print(self.now_epoch)
a993765b8ebb02baf64c3fdd13406734f8ae9136
print_results.py
print_results.py
from __future__ import division import argparse import h5py def main(): parser = argparse.ArgumentParser() parser.add_argument('results_hdf5_fname', type=str) parser.add_argument('dset_keys', nargs='*', type=str, default=[]) args = parser.parse_args() f = h5py.File(args.results_hdf5_fname, 'r') # import IPython as ipy; ipy.embed() for group_key, group in f.items(): print group_key if args.dset_keys: for dset_key in args.dset_keys: if dset_key in group: print '\t' + dset_key + ':', group[dset_key][()] else: for dset_key, dset in group.items(): print '\t' + dset_key + ':', dset[()] if __name__ == "__main__": main()
Add script to print results.
Add script to print results.
Python
mit
alexlee-gk/visual_dynamics
Add script to print results.
from __future__ import division import argparse import h5py def main(): parser = argparse.ArgumentParser() parser.add_argument('results_hdf5_fname', type=str) parser.add_argument('dset_keys', nargs='*', type=str, default=[]) args = parser.parse_args() f = h5py.File(args.results_hdf5_fname, 'r') # import IPython as ipy; ipy.embed() for group_key, group in f.items(): print group_key if args.dset_keys: for dset_key in args.dset_keys: if dset_key in group: print '\t' + dset_key + ':', group[dset_key][()] else: for dset_key, dset in group.items(): print '\t' + dset_key + ':', dset[()] if __name__ == "__main__": main()
<commit_before><commit_msg>Add script to print results.<commit_after>
from __future__ import division import argparse import h5py def main(): parser = argparse.ArgumentParser() parser.add_argument('results_hdf5_fname', type=str) parser.add_argument('dset_keys', nargs='*', type=str, default=[]) args = parser.parse_args() f = h5py.File(args.results_hdf5_fname, 'r') # import IPython as ipy; ipy.embed() for group_key, group in f.items(): print group_key if args.dset_keys: for dset_key in args.dset_keys: if dset_key in group: print '\t' + dset_key + ':', group[dset_key][()] else: for dset_key, dset in group.items(): print '\t' + dset_key + ':', dset[()] if __name__ == "__main__": main()
Add script to print results.from __future__ import division import argparse import h5py def main(): parser = argparse.ArgumentParser() parser.add_argument('results_hdf5_fname', type=str) parser.add_argument('dset_keys', nargs='*', type=str, default=[]) args = parser.parse_args() f = h5py.File(args.results_hdf5_fname, 'r') # import IPython as ipy; ipy.embed() for group_key, group in f.items(): print group_key if args.dset_keys: for dset_key in args.dset_keys: if dset_key in group: print '\t' + dset_key + ':', group[dset_key][()] else: for dset_key, dset in group.items(): print '\t' + dset_key + ':', dset[()] if __name__ == "__main__": main()
<commit_before><commit_msg>Add script to print results.<commit_after>from __future__ import division import argparse import h5py def main(): parser = argparse.ArgumentParser() parser.add_argument('results_hdf5_fname', type=str) parser.add_argument('dset_keys', nargs='*', type=str, default=[]) args = parser.parse_args() f = h5py.File(args.results_hdf5_fname, 'r') # import IPython as ipy; ipy.embed() for group_key, group in f.items(): print group_key if args.dset_keys: for dset_key in args.dset_keys: if dset_key in group: print '\t' + dset_key + ':', group[dset_key][()] else: for dset_key, dset in group.items(): print '\t' + dset_key + ':', dset[()] if __name__ == "__main__": main()
dbfdeea5f080c444a4d4abf6bdf81632bbab917a
IPython/core/tests/test_shellapp.py
IPython/core/tests/test_shellapp.py
# -*- coding: utf-8 -*- """Tests for shellapp module. Authors ------- * Bradley Froehle """ #----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import unittest import nose.tools as nt from IPython.testing import decorators as dec from IPython.testing import tools as tt class TestFileToRun(unittest.TestCase, tt.TempFileMixin): """Test the behavior of the file_to_run parameter.""" def test_py_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.py`""" src = "print(__file__)\n" self.mktmp(src) if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) def test_ipy_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.ipy`""" src = "print(__file__)\n" self.mktmp(src, ext='.ipy') if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) # Ideally we would also test that `__file__` is not set in the # interactive namespace after running `ipython -i <file>`.
Add test for `__file__` behavior in `ipython <file>`
Add test for `__file__` behavior in `ipython <file>`
Python
bsd-3-clause
ipython/ipython,ipython/ipython
Add test for `__file__` behavior in `ipython <file>`
# -*- coding: utf-8 -*- """Tests for shellapp module. Authors ------- * Bradley Froehle """ #----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import unittest import nose.tools as nt from IPython.testing import decorators as dec from IPython.testing import tools as tt class TestFileToRun(unittest.TestCase, tt.TempFileMixin): """Test the behavior of the file_to_run parameter.""" def test_py_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.py`""" src = "print(__file__)\n" self.mktmp(src) if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) def test_ipy_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.ipy`""" src = "print(__file__)\n" self.mktmp(src, ext='.ipy') if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) # Ideally we would also test that `__file__` is not set in the # interactive namespace after running `ipython -i <file>`.
<commit_before><commit_msg>Add test for `__file__` behavior in `ipython <file>`<commit_after>
# -*- coding: utf-8 -*- """Tests for shellapp module. Authors ------- * Bradley Froehle """ #----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import unittest import nose.tools as nt from IPython.testing import decorators as dec from IPython.testing import tools as tt class TestFileToRun(unittest.TestCase, tt.TempFileMixin): """Test the behavior of the file_to_run parameter.""" def test_py_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.py`""" src = "print(__file__)\n" self.mktmp(src) if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) def test_ipy_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.ipy`""" src = "print(__file__)\n" self.mktmp(src, ext='.ipy') if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) # Ideally we would also test that `__file__` is not set in the # interactive namespace after running `ipython -i <file>`.
Add test for `__file__` behavior in `ipython <file>`# -*- coding: utf-8 -*- """Tests for shellapp module. Authors ------- * Bradley Froehle """ #----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import unittest import nose.tools as nt from IPython.testing import decorators as dec from IPython.testing import tools as tt class TestFileToRun(unittest.TestCase, tt.TempFileMixin): """Test the behavior of the file_to_run parameter.""" def test_py_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.py`""" src = "print(__file__)\n" self.mktmp(src) if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) def test_ipy_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.ipy`""" src = "print(__file__)\n" self.mktmp(src, ext='.ipy') if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) # Ideally we would also test that `__file__` is not set in the # interactive namespace after running `ipython -i <file>`.
<commit_before><commit_msg>Add test for `__file__` behavior in `ipython <file>`<commit_after># -*- coding: utf-8 -*- """Tests for shellapp module. Authors ------- * Bradley Froehle """ #----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import unittest import nose.tools as nt from IPython.testing import decorators as dec from IPython.testing import tools as tt class TestFileToRun(unittest.TestCase, tt.TempFileMixin): """Test the behavior of the file_to_run parameter.""" def test_py_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.py`""" src = "print(__file__)\n" self.mktmp(src) if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) def test_ipy_script_file_attribute(self): """Test that `__file__` is set when running `ipython file.ipy`""" src = "print(__file__)\n" self.mktmp(src, ext='.ipy') if dec.module_not_available('sqlite3'): err = 'WARNING: IPython History requires SQLite, your history will not be saved\n' else: err = None tt.ipexec_validate(self.fname, self.fname, err) # Ideally we would also test that `__file__` is not set in the # interactive namespace after running `ipython -i <file>`.
4486009178284e83fdcae7b7a6e6b755a74f22a7
alg_cartesian_product.py
alg_cartesian_product.py
"""Cartesian product of same numbers with repeated times.""" class CartesianProduct(object): def _product_two(self, nums1, nums2): two_products = [] for i in range(len(nums1)): for j in range(len(nums2)): if isinstance(nums1[0], list): # nums1[i] is a list. two_products.append(nums1[i] + [nums2[j]]) else: # nums1[0] is not a list, i.e. nums1 is a list. two_products.append([nums1[i]] + [nums2[j]]) return two_products def repeated_product(self, nums, repeat): # Created repeated numbers as pool for Cartesian product. repeated_nums = [nums for _ in range(repeat)] result = repeated_nums[0] for r in range(1, repeat): result = self._product_two(result, repeated_nums[r]) return result def main(): nums = [1, 2, 3] repeat = 2 print CartesianProduct().repeated_product(nums, repeat) if __name__ == '__main__': main()
Complete cartesian product for repeated
Complete cartesian product for repeated
Python
bsd-2-clause
bowen0701/algorithms_data_structures
Complete cartesian product for repeated
"""Cartesian product of same numbers with repeated times.""" class CartesianProduct(object): def _product_two(self, nums1, nums2): two_products = [] for i in range(len(nums1)): for j in range(len(nums2)): if isinstance(nums1[0], list): # nums1[i] is a list. two_products.append(nums1[i] + [nums2[j]]) else: # nums1[0] is not a list, i.e. nums1 is a list. two_products.append([nums1[i]] + [nums2[j]]) return two_products def repeated_product(self, nums, repeat): # Created repeated numbers as pool for Cartesian product. repeated_nums = [nums for _ in range(repeat)] result = repeated_nums[0] for r in range(1, repeat): result = self._product_two(result, repeated_nums[r]) return result def main(): nums = [1, 2, 3] repeat = 2 print CartesianProduct().repeated_product(nums, repeat) if __name__ == '__main__': main()
<commit_before><commit_msg>Complete cartesian product for repeated<commit_after>
"""Cartesian product of same numbers with repeated times.""" class CartesianProduct(object): def _product_two(self, nums1, nums2): two_products = [] for i in range(len(nums1)): for j in range(len(nums2)): if isinstance(nums1[0], list): # nums1[i] is a list. two_products.append(nums1[i] + [nums2[j]]) else: # nums1[0] is not a list, i.e. nums1 is a list. two_products.append([nums1[i]] + [nums2[j]]) return two_products def repeated_product(self, nums, repeat): # Created repeated numbers as pool for Cartesian product. repeated_nums = [nums for _ in range(repeat)] result = repeated_nums[0] for r in range(1, repeat): result = self._product_two(result, repeated_nums[r]) return result def main(): nums = [1, 2, 3] repeat = 2 print CartesianProduct().repeated_product(nums, repeat) if __name__ == '__main__': main()
Complete cartesian product for repeated"""Cartesian product of same numbers with repeated times.""" class CartesianProduct(object): def _product_two(self, nums1, nums2): two_products = [] for i in range(len(nums1)): for j in range(len(nums2)): if isinstance(nums1[0], list): # nums1[i] is a list. two_products.append(nums1[i] + [nums2[j]]) else: # nums1[0] is not a list, i.e. nums1 is a list. two_products.append([nums1[i]] + [nums2[j]]) return two_products def repeated_product(self, nums, repeat): # Created repeated numbers as pool for Cartesian product. repeated_nums = [nums for _ in range(repeat)] result = repeated_nums[0] for r in range(1, repeat): result = self._product_two(result, repeated_nums[r]) return result def main(): nums = [1, 2, 3] repeat = 2 print CartesianProduct().repeated_product(nums, repeat) if __name__ == '__main__': main()
<commit_before><commit_msg>Complete cartesian product for repeated<commit_after>"""Cartesian product of same numbers with repeated times.""" class CartesianProduct(object): def _product_two(self, nums1, nums2): two_products = [] for i in range(len(nums1)): for j in range(len(nums2)): if isinstance(nums1[0], list): # nums1[i] is a list. two_products.append(nums1[i] + [nums2[j]]) else: # nums1[0] is not a list, i.e. nums1 is a list. two_products.append([nums1[i]] + [nums2[j]]) return two_products def repeated_product(self, nums, repeat): # Created repeated numbers as pool for Cartesian product. repeated_nums = [nums for _ in range(repeat)] result = repeated_nums[0] for r in range(1, repeat): result = self._product_two(result, repeated_nums[r]) return result def main(): nums = [1, 2, 3] repeat = 2 print CartesianProduct().repeated_product(nums, repeat) if __name__ == '__main__': main()
d18d85f85f91440a886969bdbd20bf4578e6b9e7
tests/devices_test/stratis_test.py
tests/devices_test/stratis_test.py
import test_compat # pylint: disable=unused-import import unittest import blivet from blivet.devices import StorageDevice from blivet.devices import StratisPoolDevice from blivet.devices import StratisFilesystemDevice from blivet.size import Size DEVICE_CLASSES = [ StratisPoolDevice, StratisFilesystemDevice, StorageDevice ] @unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test") class BlivetNewStratisDeviceTest(unittest.TestCase): def test_new_stratis(self): b = blivet.Blivet() bd = StorageDevice("bd1", fmt=blivet.formats.get_format("stratis"), size=Size("1 GiB"), exists=True) pool = b.new_stratis_pool(name="testpool", parents=[bd]) self.assertEqual(pool.name, "testpool") self.assertEqual(pool.size, bd.size) fs = b.new_stratis_filesystem(name="testfs", parents=[pool]) self.assertEqual(fs.name, "testpool/testfs") self.assertEqual(fs.path, "/dev/stratis/%s" % fs.name) self.assertEqual(fs.size, Size("1 TiB")) self.assertEqual(fs.pool, pool) self.assertEqual(fs.format.type, "stratis xfs")
Add simple test case for Stratis
Add simple test case for Stratis
Python
lgpl-2.1
vojtechtrefny/blivet,rvykydal/blivet,vojtechtrefny/blivet,rvykydal/blivet
Add simple test case for Stratis
import test_compat # pylint: disable=unused-import import unittest import blivet from blivet.devices import StorageDevice from blivet.devices import StratisPoolDevice from blivet.devices import StratisFilesystemDevice from blivet.size import Size DEVICE_CLASSES = [ StratisPoolDevice, StratisFilesystemDevice, StorageDevice ] @unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test") class BlivetNewStratisDeviceTest(unittest.TestCase): def test_new_stratis(self): b = blivet.Blivet() bd = StorageDevice("bd1", fmt=blivet.formats.get_format("stratis"), size=Size("1 GiB"), exists=True) pool = b.new_stratis_pool(name="testpool", parents=[bd]) self.assertEqual(pool.name, "testpool") self.assertEqual(pool.size, bd.size) fs = b.new_stratis_filesystem(name="testfs", parents=[pool]) self.assertEqual(fs.name, "testpool/testfs") self.assertEqual(fs.path, "/dev/stratis/%s" % fs.name) self.assertEqual(fs.size, Size("1 TiB")) self.assertEqual(fs.pool, pool) self.assertEqual(fs.format.type, "stratis xfs")
<commit_before><commit_msg>Add simple test case for Stratis<commit_after>
import test_compat # pylint: disable=unused-import import unittest import blivet from blivet.devices import StorageDevice from blivet.devices import StratisPoolDevice from blivet.devices import StratisFilesystemDevice from blivet.size import Size DEVICE_CLASSES = [ StratisPoolDevice, StratisFilesystemDevice, StorageDevice ] @unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test") class BlivetNewStratisDeviceTest(unittest.TestCase): def test_new_stratis(self): b = blivet.Blivet() bd = StorageDevice("bd1", fmt=blivet.formats.get_format("stratis"), size=Size("1 GiB"), exists=True) pool = b.new_stratis_pool(name="testpool", parents=[bd]) self.assertEqual(pool.name, "testpool") self.assertEqual(pool.size, bd.size) fs = b.new_stratis_filesystem(name="testfs", parents=[pool]) self.assertEqual(fs.name, "testpool/testfs") self.assertEqual(fs.path, "/dev/stratis/%s" % fs.name) self.assertEqual(fs.size, Size("1 TiB")) self.assertEqual(fs.pool, pool) self.assertEqual(fs.format.type, "stratis xfs")
Add simple test case for Stratisimport test_compat # pylint: disable=unused-import import unittest import blivet from blivet.devices import StorageDevice from blivet.devices import StratisPoolDevice from blivet.devices import StratisFilesystemDevice from blivet.size import Size DEVICE_CLASSES = [ StratisPoolDevice, StratisFilesystemDevice, StorageDevice ] @unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test") class BlivetNewStratisDeviceTest(unittest.TestCase): def test_new_stratis(self): b = blivet.Blivet() bd = StorageDevice("bd1", fmt=blivet.formats.get_format("stratis"), size=Size("1 GiB"), exists=True) pool = b.new_stratis_pool(name="testpool", parents=[bd]) self.assertEqual(pool.name, "testpool") self.assertEqual(pool.size, bd.size) fs = b.new_stratis_filesystem(name="testfs", parents=[pool]) self.assertEqual(fs.name, "testpool/testfs") self.assertEqual(fs.path, "/dev/stratis/%s" % fs.name) self.assertEqual(fs.size, Size("1 TiB")) self.assertEqual(fs.pool, pool) self.assertEqual(fs.format.type, "stratis xfs")
<commit_before><commit_msg>Add simple test case for Stratis<commit_after>import test_compat # pylint: disable=unused-import import unittest import blivet from blivet.devices import StorageDevice from blivet.devices import StratisPoolDevice from blivet.devices import StratisFilesystemDevice from blivet.size import Size DEVICE_CLASSES = [ StratisPoolDevice, StratisFilesystemDevice, StorageDevice ] @unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test") class BlivetNewStratisDeviceTest(unittest.TestCase): def test_new_stratis(self): b = blivet.Blivet() bd = StorageDevice("bd1", fmt=blivet.formats.get_format("stratis"), size=Size("1 GiB"), exists=True) pool = b.new_stratis_pool(name="testpool", parents=[bd]) self.assertEqual(pool.name, "testpool") self.assertEqual(pool.size, bd.size) fs = b.new_stratis_filesystem(name="testfs", parents=[pool]) self.assertEqual(fs.name, "testpool/testfs") self.assertEqual(fs.path, "/dev/stratis/%s" % fs.name) self.assertEqual(fs.size, Size("1 TiB")) self.assertEqual(fs.pool, pool) self.assertEqual(fs.format.type, "stratis xfs")
e412a68afe691913525245d2a8a3a8e9e3ba532d
python/xicore.py
python/xicore.py
#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop()
#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop()
Replace tab indentation with 4 spaces
Replace tab indentation with 4 spaces
Python
apache-2.0
google/xi-editor,google/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor,modelorganism/xi-editor,fuchsia-mirror/third_party-xi-editor,fuchsia-mirror/third_party-xi-editor,google/xi-editor,google/xi-editor,fuchsia-mirror/third_party-xi-editor,modelorganism/xi-editor
#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop() Replace tab indentation with 4 spaces
#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop()
<commit_before>#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop() <commit_msg>Replace tab indentation with 4 spaces<commit_after>
#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop()
#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop() Replace tab indentation with 4 spaces#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop()
<commit_before>#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop() <commit_msg>Replace tab indentation with 4 spaces<commit_after>#!/usr/bin/env python # Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http:#www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import struct import json def sendraw(buf): sys.stdout.write(struct.pack("<q", len(buf))) sys.stdout.write(buf) sys.stdout.flush() def send(obj): sendraw(json.dumps(obj)) def mainloop(): text = '' while True: sizebuf = sys.stdin.read(8) if len(sizebuf) == 0: return (size,) = struct.unpack("<q", sizebuf) cmd, arg = json.loads(sys.stdin.read(size)) print >> sys.stderr, cmd, arg if cmd == 'key': chars = arg['chars'] if chars == u'\x7f': if len(text): text = text[:-1] else: text += chars send(['settext', text]) mainloop()
c5433454a14f4dd6c3d84735e920144914ca6f2d
salt/beacons/twilio_txt_msg.py
salt/beacons/twilio_txt_msg.py
# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''

# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging

# Import 3rd Party libs
try:
    from twilio.rest import TwilioRestClient
    HAS_TWILIO = True
except ImportError:
    HAS_TWILIO = False

log = logging.getLogger(__name__)

__virtualname__ = 'twilio_txt_msg'


def __virtual__():
    if HAS_TWILIO:
        return __virtualname__
    else:
        return False


def beacon(config):
    '''
    Emit a dict named "texts" whose value is a list of texts.

    .. code-block:: yaml

        beacons:
          twilio_txt_msg:
            account_sid: "<account sid>"
            auth_token: "<auth token>"
            twilio_number: "+15555555555"
            poll_interval: 10

    poll_interval defaults to 10 seconds
    '''
    log.trace('twilio_txt_msg beacon starting')
    ret = []
    if not all([config['account_sid'],
                config['auth_token'],
                config['twilio_number']]):
        return ret
    output = {}
    poll_interval = config.get('poll_interval')
    if not poll_interval:
        # Let's default to polling every 10 seconds
        poll_interval = 10

    now = datetime.now()
    if 'twilio_txt_msg' in __context__:
        timedelta = now - __context__['twilio_txt_msg']
        if timedelta.seconds < poll_interval:
            log.trace('Twilio beacon poll interval not met.')
            log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
            return ret

    output['texts'] = []
    client = TwilioRestClient(config['account_sid'], config['auth_token'])
    messages = client.messages.list(to=config['twilio_number'])
    log.trace('Num messages: {0}'.format(len(messages)))
    if len(messages) < 1:
        log.trace('Twilio beacon has no texts')
        __context__['twilio_txt_msg'] = now
        return ret

    for message in messages:
        item = {}
        item['id'] = str(message.sid)
        item['body'] = str(message.body)
        item['from'] = str(message.from_)
        item['sent'] = str(message.date_sent)
        item['images'] = []

        if int(message.num_media):
            media = client.media(message.sid).list()
            if len(media):
                for pic in media:
                    item['images'].append(str(pic.uri))
        output['texts'].append(item)
        message.delete()
    __context__['twilio_txt_msg'] = now
    ret.append(output)
    return ret
Add Twilio text message beacon
Add Twilio text message beacon This beacon will poll a Twilio account for text messages and emit an event on Salt's event bus as texts are received.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
Add Twilio text message beacon This beacon will poll a Twilio account for text messages and emit an event on Salt's event bus as texts are received.
# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''

# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging

# Import 3rd Party libs
try:
    from twilio.rest import TwilioRestClient
    HAS_TWILIO = True
except ImportError:
    HAS_TWILIO = False

log = logging.getLogger(__name__)

__virtualname__ = 'twilio_txt_msg'


def __virtual__():
    if HAS_TWILIO:
        return __virtualname__
    else:
        return False


def beacon(config):
    '''
    Emit a dict named "texts" whose value is a list of texts.

    .. code-block:: yaml

        beacons:
          twilio_txt_msg:
            account_sid: "<account sid>"
            auth_token: "<auth token>"
            twilio_number: "+15555555555"
            poll_interval: 10

    poll_interval defaults to 10 seconds
    '''
    log.trace('twilio_txt_msg beacon starting')
    ret = []
    if not all([config['account_sid'],
                config['auth_token'],
                config['twilio_number']]):
        return ret
    output = {}
    poll_interval = config.get('poll_interval')
    if not poll_interval:
        # Let's default to polling every 10 seconds
        poll_interval = 10

    now = datetime.now()
    if 'twilio_txt_msg' in __context__:
        timedelta = now - __context__['twilio_txt_msg']
        if timedelta.seconds < poll_interval:
            log.trace('Twilio beacon poll interval not met.')
            log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
            return ret

    output['texts'] = []
    client = TwilioRestClient(config['account_sid'], config['auth_token'])
    messages = client.messages.list(to=config['twilio_number'])
    log.trace('Num messages: {0}'.format(len(messages)))
    if len(messages) < 1:
        log.trace('Twilio beacon has no texts')
        __context__['twilio_txt_msg'] = now
        return ret

    for message in messages:
        item = {}
        item['id'] = str(message.sid)
        item['body'] = str(message.body)
        item['from'] = str(message.from_)
        item['sent'] = str(message.date_sent)
        item['images'] = []

        if int(message.num_media):
            media = client.media(message.sid).list()
            if len(media):
                for pic in media:
                    item['images'].append(str(pic.uri))
        output['texts'].append(item)
        message.delete()
    __context__['twilio_txt_msg'] = now
    ret.append(output)
    return ret
<commit_before><commit_msg>Add Twilio text message beacon This beacon will poll a Twilio account for text messages and emit an event on Salt's event bus as texts are received.<commit_after>
# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''

# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging

# Import 3rd Party libs
try:
    from twilio.rest import TwilioRestClient
    HAS_TWILIO = True
except ImportError:
    HAS_TWILIO = False

log = logging.getLogger(__name__)

__virtualname__ = 'twilio_txt_msg'


def __virtual__():
    if HAS_TWILIO:
        return __virtualname__
    else:
        return False


def beacon(config):
    '''
    Emit a dict named "texts" whose value is a list of texts.

    .. code-block:: yaml

        beacons:
          twilio_txt_msg:
            account_sid: "<account sid>"
            auth_token: "<auth token>"
            twilio_number: "+15555555555"
            poll_interval: 10

    poll_interval defaults to 10 seconds
    '''
    log.trace('twilio_txt_msg beacon starting')
    ret = []
    if not all([config['account_sid'],
                config['auth_token'],
                config['twilio_number']]):
        return ret
    output = {}
    poll_interval = config.get('poll_interval')
    if not poll_interval:
        # Let's default to polling every 10 seconds
        poll_interval = 10

    now = datetime.now()
    if 'twilio_txt_msg' in __context__:
        timedelta = now - __context__['twilio_txt_msg']
        if timedelta.seconds < poll_interval:
            log.trace('Twilio beacon poll interval not met.')
            log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
            return ret

    output['texts'] = []
    client = TwilioRestClient(config['account_sid'], config['auth_token'])
    messages = client.messages.list(to=config['twilio_number'])
    log.trace('Num messages: {0}'.format(len(messages)))
    if len(messages) < 1:
        log.trace('Twilio beacon has no texts')
        __context__['twilio_txt_msg'] = now
        return ret

    for message in messages:
        item = {}
        item['id'] = str(message.sid)
        item['body'] = str(message.body)
        item['from'] = str(message.from_)
        item['sent'] = str(message.date_sent)
        item['images'] = []

        if int(message.num_media):
            media = client.media(message.sid).list()
            if len(media):
                for pic in media:
                    item['images'].append(str(pic.uri))
        output['texts'].append(item)
        message.delete()
    __context__['twilio_txt_msg'] = now
    ret.append(output)
    return ret
Add Twilio text message beacon This beacon will poll a Twilio account for text messages and emit an event on Salt's event bus as texts are received.# -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''

# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging

# Import 3rd Party libs
try:
    from twilio.rest import TwilioRestClient
    HAS_TWILIO = True
except ImportError:
    HAS_TWILIO = False

log = logging.getLogger(__name__)

__virtualname__ = 'twilio_txt_msg'


def __virtual__():
    if HAS_TWILIO:
        return __virtualname__
    else:
        return False


def beacon(config):
    '''
    Emit a dict named "texts" whose value is a list of texts.

    .. code-block:: yaml

        beacons:
          twilio_txt_msg:
            account_sid: "<account sid>"
            auth_token: "<auth token>"
            twilio_number: "+15555555555"
            poll_interval: 10

    poll_interval defaults to 10 seconds
    '''
    log.trace('twilio_txt_msg beacon starting')
    ret = []
    if not all([config['account_sid'],
                config['auth_token'],
                config['twilio_number']]):
        return ret
    output = {}
    poll_interval = config.get('poll_interval')
    if not poll_interval:
        # Let's default to polling every 10 seconds
        poll_interval = 10

    now = datetime.now()
    if 'twilio_txt_msg' in __context__:
        timedelta = now - __context__['twilio_txt_msg']
        if timedelta.seconds < poll_interval:
            log.trace('Twilio beacon poll interval not met.')
            log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
            return ret

    output['texts'] = []
    client = TwilioRestClient(config['account_sid'], config['auth_token'])
    messages = client.messages.list(to=config['twilio_number'])
    log.trace('Num messages: {0}'.format(len(messages)))
    if len(messages) < 1:
        log.trace('Twilio beacon has no texts')
        __context__['twilio_txt_msg'] = now
        return ret

    for message in messages:
        item = {}
        item['id'] = str(message.sid)
        item['body'] = str(message.body)
        item['from'] = str(message.from_)
        item['sent'] = str(message.date_sent)
        item['images'] = []

        if int(message.num_media):
            media = client.media(message.sid).list()
            if len(media):
                for pic in media:
                    item['images'].append(str(pic.uri))
        output['texts'].append(item)
        message.delete()
    __context__['twilio_txt_msg'] = now
    ret.append(output)
    return ret
<commit_before><commit_msg>Add Twilio text message beacon This beacon will poll a Twilio account for text messages and emit an event on Salt's event bus as texts are received.<commit_after># -*- coding: utf-8 -*-
'''
Beacon to emit Twilio text messages
'''

# Import Python libs
from __future__ import absolute_import
from datetime import datetime
import logging

# Import 3rd Party libs
try:
    from twilio.rest import TwilioRestClient
    HAS_TWILIO = True
except ImportError:
    HAS_TWILIO = False

log = logging.getLogger(__name__)

__virtualname__ = 'twilio_txt_msg'


def __virtual__():
    if HAS_TWILIO:
        return __virtualname__
    else:
        return False


def beacon(config):
    '''
    Emit a dict named "texts" whose value is a list of texts.

    .. code-block:: yaml

        beacons:
          twilio_txt_msg:
            account_sid: "<account sid>"
            auth_token: "<auth token>"
            twilio_number: "+15555555555"
            poll_interval: 10

    poll_interval defaults to 10 seconds
    '''
    log.trace('twilio_txt_msg beacon starting')
    ret = []
    if not all([config['account_sid'],
                config['auth_token'],
                config['twilio_number']]):
        return ret
    output = {}
    poll_interval = config.get('poll_interval')
    if not poll_interval:
        # Let's default to polling every 10 seconds
        poll_interval = 10

    now = datetime.now()
    if 'twilio_txt_msg' in __context__:
        timedelta = now - __context__['twilio_txt_msg']
        if timedelta.seconds < poll_interval:
            log.trace('Twilio beacon poll interval not met.')
            log.trace('Twilio polling in {0}'.format(poll_interval - timedelta.seconds))
            return ret

    output['texts'] = []
    client = TwilioRestClient(config['account_sid'], config['auth_token'])
    messages = client.messages.list(to=config['twilio_number'])
    log.trace('Num messages: {0}'.format(len(messages)))
    if len(messages) < 1:
        log.trace('Twilio beacon has no texts')
        __context__['twilio_txt_msg'] = now
        return ret

    for message in messages:
        item = {}
        item['id'] = str(message.sid)
        item['body'] = str(message.body)
        item['from'] = str(message.from_)
        item['sent'] = str(message.date_sent)
        item['images'] = []

        if int(message.num_media):
            media = client.media(message.sid).list()
            if len(media):
                for pic in media:
                    item['images'].append(str(pic.uri))
        output['texts'].append(item)
        message.delete()
    __context__['twilio_txt_msg'] = now
    ret.append(output)
    return ret
70705f896aa5f45c6365f9c584982289b0711030
tools/extract_from_multialign.py
tools/extract_from_multialign.py
# # Copyright (c) 2017 Tuukka Norri # This code is licensed under MIT license (see LICENSE for details). # import argparse import os import sys # Read from file into a buffer. # Used the idea from https://stackoverflow.com/a/26209275/856976 def chunks(fp, bufsize): while True: chunk = fp.read(bufsize) if not chunk: break yield chunk def chars(fp, bufsize = 4096): for chunk in chunks(fp, bufsize): for char in chunk: yield char def handle_file(input, input_is_reference, offset, length, output, found_offset = None): k = 0 if input_is_reference: i = 0 j = 0 outputting = False for char in chars(input): if char != '-': if i == offset: if found_offset is not None: found_offset(j) outputting = True i += 1 if outputting: output.write(char) k += 1 if k == length: break j += 1 else: input.seek(offset, 0) for char in chars(input): output.write(char) k += 1 if k == length: break if __name__ == "__main__": parser = argparse.ArgumentParser("Extract subsequences from vcf2multialign output.") parser.add_argument('--input', type = argparse.FileType('rU'), required = True) parser.add_argument("--input-is-reference", action = 'store_true', default = False) parser.add_argument('--offset', type = int, required = True) parser.add_argument('--length', type = int, required = True) args = parser.parse_args() if args.offset < 0: parser.error("Offset has to be non-negative.") if args.length <= 0: parser.error("Length must be positive.") handle_file( args.input, args.input_is_reference, args.offset, args.length, sys.stdout, lambda n: print("Found the requested substring at file offset %d" % n, file = sys.stderr) )
Add a tool for extracting subsequences from vcf2multialign output
Add a tool for extracting subsequences from vcf2multialign output
Python
mit
tsnorri/vcf2multialign,tsnorri/vcf2multialign,tsnorri/vcf2multialign,tsnorri/vcf2multialign
Add a tool for extracting subsequences from vcf2multialign output
# # Copyright (c) 2017 Tuukka Norri # This code is licensed under MIT license (see LICENSE for details). # import argparse import os import sys # Read from file into a buffer. # Used the idea from https://stackoverflow.com/a/26209275/856976 def chunks(fp, bufsize): while True: chunk = fp.read(bufsize) if not chunk: break yield chunk def chars(fp, bufsize = 4096): for chunk in chunks(fp, bufsize): for char in chunk: yield char def handle_file(input, input_is_reference, offset, length, output, found_offset = None): k = 0 if input_is_reference: i = 0 j = 0 outputting = False for char in chars(input): if char != '-': if i == offset: if found_offset is not None: found_offset(j) outputting = True i += 1 if outputting: output.write(char) k += 1 if k == length: break j += 1 else: input.seek(offset, 0) for char in chars(input): output.write(char) k += 1 if k == length: break if __name__ == "__main__": parser = argparse.ArgumentParser("Extract subsequences from vcf2multialign output.") parser.add_argument('--input', type = argparse.FileType('rU'), required = True) parser.add_argument("--input-is-reference", action = 'store_true', default = False) parser.add_argument('--offset', type = int, required = True) parser.add_argument('--length', type = int, required = True) args = parser.parse_args() if args.offset < 0: parser.error("Offset has to be non-negative.") if args.length <= 0: parser.error("Length must be positive.") handle_file( args.input, args.input_is_reference, args.offset, args.length, sys.stdout, lambda n: print("Found the requested substring at file offset %d" % n, file = sys.stderr) )
<commit_before><commit_msg>Add a tool for extracting subsequences from vcf2multialign output<commit_after>
# # Copyright (c) 2017 Tuukka Norri # This code is licensed under MIT license (see LICENSE for details). # import argparse import os import sys # Read from file into a buffer. # Used the idea from https://stackoverflow.com/a/26209275/856976 def chunks(fp, bufsize): while True: chunk = fp.read(bufsize) if not chunk: break yield chunk def chars(fp, bufsize = 4096): for chunk in chunks(fp, bufsize): for char in chunk: yield char def handle_file(input, input_is_reference, offset, length, output, found_offset = None): k = 0 if input_is_reference: i = 0 j = 0 outputting = False for char in chars(input): if char != '-': if i == offset: if found_offset is not None: found_offset(j) outputting = True i += 1 if outputting: output.write(char) k += 1 if k == length: break j += 1 else: input.seek(offset, 0) for char in chars(input): output.write(char) k += 1 if k == length: break if __name__ == "__main__": parser = argparse.ArgumentParser("Extract subsequences from vcf2multialign output.") parser.add_argument('--input', type = argparse.FileType('rU'), required = True) parser.add_argument("--input-is-reference", action = 'store_true', default = False) parser.add_argument('--offset', type = int, required = True) parser.add_argument('--length', type = int, required = True) args = parser.parse_args() if args.offset < 0: parser.error("Offset has to be non-negative.") if args.length <= 0: parser.error("Length must be positive.") handle_file( args.input, args.input_is_reference, args.offset, args.length, sys.stdout, lambda n: print("Found the requested substring at file offset %d" % n, file = sys.stderr) )
Add a tool for extracting subsequences from vcf2multialign output# # Copyright (c) 2017 Tuukka Norri # This code is licensed under MIT license (see LICENSE for details). # import argparse import os import sys # Read from file into a buffer. # Used the idea from https://stackoverflow.com/a/26209275/856976 def chunks(fp, bufsize): while True: chunk = fp.read(bufsize) if not chunk: break yield chunk def chars(fp, bufsize = 4096): for chunk in chunks(fp, bufsize): for char in chunk: yield char def handle_file(input, input_is_reference, offset, length, output, found_offset = None): k = 0 if input_is_reference: i = 0 j = 0 outputting = False for char in chars(input): if char != '-': if i == offset: if found_offset is not None: found_offset(j) outputting = True i += 1 if outputting: output.write(char) k += 1 if k == length: break j += 1 else: input.seek(offset, 0) for char in chars(input): output.write(char) k += 1 if k == length: break if __name__ == "__main__": parser = argparse.ArgumentParser("Extract subsequences from vcf2multialign output.") parser.add_argument('--input', type = argparse.FileType('rU'), required = True) parser.add_argument("--input-is-reference", action = 'store_true', default = False) parser.add_argument('--offset', type = int, required = True) parser.add_argument('--length', type = int, required = True) args = parser.parse_args() if args.offset < 0: parser.error("Offset has to be non-negative.") if args.length <= 0: parser.error("Length must be positive.") handle_file( args.input, args.input_is_reference, args.offset, args.length, sys.stdout, lambda n: print("Found the requested substring at file offset %d" % n, file = sys.stderr) )
<commit_before><commit_msg>Add a tool for extracting subsequences from vcf2multialign output<commit_after># # Copyright (c) 2017 Tuukka Norri # This code is licensed under MIT license (see LICENSE for details). # import argparse import os import sys # Read from file into a buffer. # Used the idea from https://stackoverflow.com/a/26209275/856976 def chunks(fp, bufsize): while True: chunk = fp.read(bufsize) if not chunk: break yield chunk def chars(fp, bufsize = 4096): for chunk in chunks(fp, bufsize): for char in chunk: yield char def handle_file(input, input_is_reference, offset, length, output, found_offset = None): k = 0 if input_is_reference: i = 0 j = 0 outputting = False for char in chars(input): if char != '-': if i == offset: if found_offset is not None: found_offset(j) outputting = True i += 1 if outputting: output.write(char) k += 1 if k == length: break j += 1 else: input.seek(offset, 0) for char in chars(input): output.write(char) k += 1 if k == length: break if __name__ == "__main__": parser = argparse.ArgumentParser("Extract subsequences from vcf2multialign output.") parser.add_argument('--input', type = argparse.FileType('rU'), required = True) parser.add_argument("--input-is-reference", action = 'store_true', default = False) parser.add_argument('--offset', type = int, required = True) parser.add_argument('--length', type = int, required = True) args = parser.parse_args() if args.offset < 0: parser.error("Offset has to be non-negative.") if args.length <= 0: parser.error("Length must be positive.") handle_file( args.input, args.input_is_reference, args.offset, args.length, sys.stdout, lambda n: print("Found the requested substring at file offset %d" % n, file = sys.stderr) )
a4e77ce700a1d54d97dd9ddd4de73e5bfb6edf61
Python/189_Rotate_Array.py
Python/189_Rotate_Array.py
class Solution(object): def rotate(self, nums, k): """ :type nums: List[int] :type k: int :rtype: void Do not return anything, modify nums in-place instead. """ for i in xrange(k): nums.insert(0,nums.pop()) if __name__ == '__main__': nums = [-1,2,3,4,5] k = 3 Solution().rotate(nums,k) print nums
Add two methods for rotating an array.
Add two methods for rotating an array.
Python
mit
comicxmz001/LeetCode,comicxmz001/LeetCode
Add two methods for rotating an array.
class Solution(object): def rotate(self, nums, k): """ :type nums: List[int] :type k: int :rtype: void Do not return anything, modify nums in-place instead. """ for i in xrange(k): nums.insert(0,nums.pop()) if __name__ == '__main__': nums = [-1,2,3,4,5] k = 3 Solution().rotate(nums,k) print nums
<commit_before><commit_msg>Add two methods for rotating an array.<commit_after>
class Solution(object): def rotate(self, nums, k): """ :type nums: List[int] :type k: int :rtype: void Do not return anything, modify nums in-place instead. """ for i in xrange(k): nums.insert(0,nums.pop()) if __name__ == '__main__': nums = [-1,2,3,4,5] k = 3 Solution().rotate(nums,k) print nums
Add two methods for rotating an array.class Solution(object): def rotate(self, nums, k): """ :type nums: List[int] :type k: int :rtype: void Do not return anything, modify nums in-place instead. """ for i in xrange(k): nums.insert(0,nums.pop()) if __name__ == '__main__': nums = [-1,2,3,4,5] k = 3 Solution().rotate(nums,k) print nums
<commit_before><commit_msg>Add two methods for rotating an array.<commit_after>class Solution(object): def rotate(self, nums, k): """ :type nums: List[int] :type k: int :rtype: void Do not return anything, modify nums in-place instead. """ for i in xrange(k): nums.insert(0,nums.pop()) if __name__ == '__main__': nums = [-1,2,3,4,5] k = 3 Solution().rotate(nums,k) print nums
9c9cdd1f1979aa880726fc0039255ed7f7ea2f6d
tests/test_simpleflow/swf/test_task.py
tests/test_simpleflow/swf/test_task.py
from sure import expect from simpleflow import activity from simpleflow.swf.task import ActivityTask @activity.with_attributes() def show_context_func(): return show_context_func.context @activity.with_attributes() class ShowContextCls(object): def execute(self): return self.context def test_task_has_an_empty_context_by_default(): expect(ActivityTask(show_context_func).execute()).to.be.none expect(ActivityTask(ShowContextCls).execute()).to.be.none def test_task_attaches_context_to_functions(): ctx = {'foo': 'bar'} expect(ActivityTask(show_context_func, context=ctx).execute()).to.equal(ctx) expect(show_context_func.context).to.equal(ctx) def test_task_attaches_context_to_object_instances(): ctx = {'foo': 'bar'} expect(ActivityTask(ShowContextCls, context=ctx).execute()).to.equal(ctx) expect(ShowContextCls.context).to.be.none
Add tests to ensure ActivityTask attaches the context
Add tests to ensure ActivityTask attaches the context
Python
mit
botify-labs/simpleflow,botify-labs/simpleflow
Add tests to ensure ActivityTask attaches the context
from sure import expect

from simpleflow import activity
from simpleflow.swf.task import ActivityTask

@activity.with_attributes()
def show_context_func():
    return show_context_func.context

@activity.with_attributes()
class ShowContextCls(object):
    def execute(self):
        return self.context

def test_task_has_an_empty_context_by_default():
    expect(ActivityTask(show_context_func).execute()).to.be.none
    expect(ActivityTask(ShowContextCls).execute()).to.be.none

def test_task_attaches_context_to_functions():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(show_context_func, context=ctx).execute()).to.equal(ctx)
    expect(show_context_func.context).to.equal(ctx)

def test_task_attaches_context_to_object_instances():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(ShowContextCls, context=ctx).execute()).to.equal(ctx)
    expect(ShowContextCls.context).to.be.none
<commit_before><commit_msg>Add tests to ensure ActivityTask attaches the context<commit_after>
from sure import expect

from simpleflow import activity
from simpleflow.swf.task import ActivityTask

@activity.with_attributes()
def show_context_func():
    return show_context_func.context

@activity.with_attributes()
class ShowContextCls(object):
    def execute(self):
        return self.context

def test_task_has_an_empty_context_by_default():
    expect(ActivityTask(show_context_func).execute()).to.be.none
    expect(ActivityTask(ShowContextCls).execute()).to.be.none

def test_task_attaches_context_to_functions():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(show_context_func, context=ctx).execute()).to.equal(ctx)
    expect(show_context_func.context).to.equal(ctx)

def test_task_attaches_context_to_object_instances():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(ShowContextCls, context=ctx).execute()).to.equal(ctx)
    expect(ShowContextCls.context).to.be.none
Add tests to ensure ActivityTask attaches the context
from sure import expect

from simpleflow import activity
from simpleflow.swf.task import ActivityTask

@activity.with_attributes()
def show_context_func():
    return show_context_func.context

@activity.with_attributes()
class ShowContextCls(object):
    def execute(self):
        return self.context

def test_task_has_an_empty_context_by_default():
    expect(ActivityTask(show_context_func).execute()).to.be.none
    expect(ActivityTask(ShowContextCls).execute()).to.be.none

def test_task_attaches_context_to_functions():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(show_context_func, context=ctx).execute()).to.equal(ctx)
    expect(show_context_func.context).to.equal(ctx)

def test_task_attaches_context_to_object_instances():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(ShowContextCls, context=ctx).execute()).to.equal(ctx)
    expect(ShowContextCls.context).to.be.none
<commit_before><commit_msg>Add tests to ensure ActivityTask attaches the context<commit_after>
from sure import expect

from simpleflow import activity
from simpleflow.swf.task import ActivityTask

@activity.with_attributes()
def show_context_func():
    return show_context_func.context

@activity.with_attributes()
class ShowContextCls(object):
    def execute(self):
        return self.context

def test_task_has_an_empty_context_by_default():
    expect(ActivityTask(show_context_func).execute()).to.be.none
    expect(ActivityTask(ShowContextCls).execute()).to.be.none

def test_task_attaches_context_to_functions():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(show_context_func, context=ctx).execute()).to.equal(ctx)
    expect(show_context_func.context).to.equal(ctx)

def test_task_attaches_context_to_object_instances():
    ctx = {'foo': 'bar'}
    expect(ActivityTask(ShowContextCls, context=ctx).execute()).to.equal(ctx)
    expect(ShowContextCls.context).to.be.none
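The behaviour these tests pin down reduces to attaching an attribute onto the wrapped callable before executing it. A minimal toy version of that contract, using only hypothetical names and assuming nothing about simpleflow internals beyond what the tests assert:

class ToyActivityTask(object):
    def __init__(self, func, context=None):
        func.context = context  # attached even when None, matching the default-context test
        self._func = func

    def execute(self):
        return self._func()

def show_context():
    return show_context.context

assert ToyActivityTask(show_context).execute() is None
ctx = {'foo': 'bar'}
assert ToyActivityTask(show_context, context=ctx).execute() == ctx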
6594eee668a563d73237669b0dd05a84fbe1e5fb
zooq_main.py
zooq_main.py
#!/usr/bin/env python3
#
#
from zooq import ZooQ
from zooqdb_sqlite import ZooQDB_SQLite
import time
from argparse import ArgumentParser

ap = ArgumentParser(description="Example main service for a ZooQ")
ap.add_argument('-d', '--dir', required=False, default='.', action='store',
                help='Root folder for ZooQ')
ap.add_argument('-n', '--numprocs', required=False, default=4, action='store',
                type=int, help='Number of parallel processes')
ap.add_argument('-f', '--foreground', required=False, default=False,
                action='store_true', help='Remain in foreground')
args = ap.parse_args()

db = ZooQDB_SQLite('{dir}/zooqdb.sqlite'.format(dir=args.dir))
z = ZooQ(max_procs=args.numprocs, db=db)
z.Run()

while args.foreground and True:
    time.sleep(1)
Add args to main code, and make executable
Add args to main code, and make executable
Python
mit
ckane/zooq,ckane/zooq
Add args to main code, and make executable
#!/usr/bin/env python3
#
#
from zooq import ZooQ
from zooqdb_sqlite import ZooQDB_SQLite
import time
from argparse import ArgumentParser

ap = ArgumentParser(description="Example main service for a ZooQ")
ap.add_argument('-d', '--dir', required=False, default='.', action='store',
                help='Root folder for ZooQ')
ap.add_argument('-n', '--numprocs', required=False, default=4, action='store',
                type=int, help='Number of parallel processes')
ap.add_argument('-f', '--foreground', required=False, default=False,
                action='store_true', help='Remain in foreground')
args = ap.parse_args()

db = ZooQDB_SQLite('{dir}/zooqdb.sqlite'.format(dir=args.dir))
z = ZooQ(max_procs=args.numprocs, db=db)
z.Run()

while args.foreground and True:
    time.sleep(1)
<commit_before><commit_msg>Add args to main code, and make executable<commit_after>
#!/usr/bin/env python3
#
#
from zooq import ZooQ
from zooqdb_sqlite import ZooQDB_SQLite
import time
from argparse import ArgumentParser

ap = ArgumentParser(description="Example main service for a ZooQ")
ap.add_argument('-d', '--dir', required=False, default='.', action='store',
                help='Root folder for ZooQ')
ap.add_argument('-n', '--numprocs', required=False, default=4, action='store',
                type=int, help='Number of parallel processes')
ap.add_argument('-f', '--foreground', required=False, default=False,
                action='store_true', help='Remain in foreground')
args = ap.parse_args()

db = ZooQDB_SQLite('{dir}/zooqdb.sqlite'.format(dir=args.dir))
z = ZooQ(max_procs=args.numprocs, db=db)
z.Run()

while args.foreground and True:
    time.sleep(1)
Add args to main code, and make executable
#!/usr/bin/env python3
#
#
from zooq import ZooQ
from zooqdb_sqlite import ZooQDB_SQLite
import time
from argparse import ArgumentParser

ap = ArgumentParser(description="Example main service for a ZooQ")
ap.add_argument('-d', '--dir', required=False, default='.', action='store',
                help='Root folder for ZooQ')
ap.add_argument('-n', '--numprocs', required=False, default=4, action='store',
                type=int, help='Number of parallel processes')
ap.add_argument('-f', '--foreground', required=False, default=False,
                action='store_true', help='Remain in foreground')
args = ap.parse_args()

db = ZooQDB_SQLite('{dir}/zooqdb.sqlite'.format(dir=args.dir))
z = ZooQ(max_procs=args.numprocs, db=db)
z.Run()

while args.foreground and True:
    time.sleep(1)
<commit_before><commit_msg>Add args to main code, and make executable<commit_after>
#!/usr/bin/env python3
#
#
from zooq import ZooQ
from zooqdb_sqlite import ZooQDB_SQLite
import time
from argparse import ArgumentParser

ap = ArgumentParser(description="Example main service for a ZooQ")
ap.add_argument('-d', '--dir', required=False, default='.', action='store',
                help='Root folder for ZooQ')
ap.add_argument('-n', '--numprocs', required=False, default=4, action='store',
                type=int, help='Number of parallel processes')
ap.add_argument('-f', '--foreground', required=False, default=False,
                action='store_true', help='Remain in foreground')
args = ap.parse_args()

db = ZooQDB_SQLite('{dir}/zooqdb.sqlite'.format(dir=args.dir))
z = ZooQ(max_procs=args.numprocs, db=db)
z.Run()

while args.foreground and True:
    time.sleep(1)
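One detail in this record's code is worth spelling out: argparse's store_true action manufactures the boolean flag by itself and rejects a type= keyword, so the --foreground option is declared without one (passing type=bool alongside action='store_true', as the raw commit did, raises TypeError when the argument is registered). A quick demonstration:

from argparse import ArgumentParser

ap = ArgumentParser()
ap.add_argument('-f', '--foreground', action='store_true')  # flag: False unless given
try:
    ap.add_argument('-x', action='store_true', type=bool)   # rejected by argparse
except TypeError as err:
    print('argparse raised:', err)
print(ap.parse_args(['-f']).foreground)  # True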
9e1b9086fb699fa79c38d3f57f2ea76d3ea9d09c
Python/232_ImplementStackUsingQueue.py
Python/232_ImplementStackUsingQueue.py
class Stack(object):
    def __init__(self):
        """
        initialize your data structure here.
        """
        self.__queue = []

    def push(self, x):
        """
        :type x: int
        :rtype: nothing
        """
        self.__queue.append(x)

    def pop(self):
        """
        :rtype: nothing
        """
        p = self.__queue[-1]
        tmpQ = self.__queue[:-1]
        self.__queue = tmpQ
        return p

    def top(self):
        """
        :rtype: int
        """
        return self.__queue[-1]

    def empty(self):
        """
        :rtype: bool
        """
        return not self.__queue

    # This method is not required, just for helping test the code
    def printstack(self):
        print self.__queue

if __name__ == '__main__':
    stack = Stack()
    stack.push(1)
    stack.push(9)
    stack.pop()
    stack.pop()
    stack.printstack()
    print stack.empty()
Add solution for 232, implement stack using queue.?
Add solution for 232, implement stack using queue.?
Python
mit
comicxmz001/LeetCode,comicxmz001/LeetCode
Add solution for 232, implement stack using queue.?
class Stack(object):
    def __init__(self):
        """
        initialize your data structure here.
        """
        self.__queue = []

    def push(self, x):
        """
        :type x: int
        :rtype: nothing
        """
        self.__queue.append(x)

    def pop(self):
        """
        :rtype: nothing
        """
        p = self.__queue[-1]
        tmpQ = self.__queue[:-1]
        self.__queue = tmpQ
        return p

    def top(self):
        """
        :rtype: int
        """
        return self.__queue[-1]

    def empty(self):
        """
        :rtype: bool
        """
        return not self.__queue

    # This method is not required, just for helping test the code
    def printstack(self):
        print self.__queue

if __name__ == '__main__':
    stack = Stack()
    stack.push(1)
    stack.push(9)
    stack.pop()
    stack.pop()
    stack.printstack()
    print stack.empty()
<commit_before><commit_msg>Add solution for 232, implement stack using queue.?<commit_after>
class Stack(object):
    def __init__(self):
        """
        initialize your data structure here.
        """
        self.__queue = []

    def push(self, x):
        """
        :type x: int
        :rtype: nothing
        """
        self.__queue.append(x)

    def pop(self):
        """
        :rtype: nothing
        """
        p = self.__queue[-1]
        tmpQ = self.__queue[:-1]
        self.__queue = tmpQ
        return p

    def top(self):
        """
        :rtype: int
        """
        return self.__queue[-1]

    def empty(self):
        """
        :rtype: bool
        """
        return not self.__queue

    # This method is not required, just for helping test the code
    def printstack(self):
        print self.__queue

if __name__ == '__main__':
    stack = Stack()
    stack.push(1)
    stack.push(9)
    stack.pop()
    stack.pop()
    stack.printstack()
    print stack.empty()
Add solution for 232, implement stack using queue.?
class Stack(object):
    def __init__(self):
        """
        initialize your data structure here.
        """
        self.__queue = []

    def push(self, x):
        """
        :type x: int
        :rtype: nothing
        """
        self.__queue.append(x)

    def pop(self):
        """
        :rtype: nothing
        """
        p = self.__queue[-1]
        tmpQ = self.__queue[:-1]
        self.__queue = tmpQ
        return p

    def top(self):
        """
        :rtype: int
        """
        return self.__queue[-1]

    def empty(self):
        """
        :rtype: bool
        """
        return not self.__queue

    # This method is not required, just for helping test the code
    def printstack(self):
        print self.__queue

if __name__ == '__main__':
    stack = Stack()
    stack.push(1)
    stack.push(9)
    stack.pop()
    stack.pop()
    stack.printstack()
    print stack.empty()
<commit_before><commit_msg>Add solution for 232, implement stack using queue.?<commit_after>
class Stack(object):
    def __init__(self):
        """
        initialize your data structure here.
        """
        self.__queue = []

    def push(self, x):
        """
        :type x: int
        :rtype: nothing
        """
        self.__queue.append(x)

    def pop(self):
        """
        :rtype: nothing
        """
        p = self.__queue[-1]
        tmpQ = self.__queue[:-1]
        self.__queue = tmpQ
        return p

    def top(self):
        """
        :rtype: int
        """
        return self.__queue[-1]

    def empty(self):
        """
        :rtype: bool
        """
        return not self.__queue

    # This method is not required, just for helping test the code
    def printstack(self):
        print self.__queue

if __name__ == '__main__':
    stack = Stack()
    stack.push(1)
    stack.push(9)
    stack.pop()
    stack.pop()
    stack.printstack()
    print stack.empty()
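Two side notes on this record. LeetCode numbers "Implement Stack using Queues" as problem 225 (232 is the queue-built-from-stacks twin), so the filename's number may be swapped. Also, the class above manipulates a plain list by index; a version restricted to queue-style append/popleft operations rotates after each push so the newest element sits at the front. Sketch only, not the committed solution:

from collections import deque

class QueueBackedStack(object):
    def __init__(self):
        self._q = deque()

    def push(self, x):
        self._q.append(x)
        for _ in range(len(self._q) - 1):
            self._q.append(self._q.popleft())  # cycle older items behind x

    def pop(self):
        return self._q.popleft()  # front of the queue is the stack top

    def top(self):
        return self._q[0]

    def empty(self):
        return not self._q

s = QueueBackedStack()
s.push(1)
s.push(9)
print(s.pop(), s.pop(), s.empty())  # 9 1 True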
f2cbc1a1b23f182730ccc5f4fc9f71ad15f3e188
conda_env/specs/yaml_file.py
conda_env/specs/yaml_file.py
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = e.message
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = str(e)
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
Fix exception to work with py3
Fix exception to work with py3
Python
bsd-3-clause
mikecroucher/conda-env,mikecroucher/conda-env,dan-blanchard/conda-env,conda/conda-env,phobson/conda-env,conda/conda-env,isaac-kit/conda-env,nicoddemus/conda-env,nicoddemus/conda-env,asmeurer/conda-env,ESSS/conda-env,asmeurer/conda-env,dan-blanchard/conda-env,isaac-kit/conda-env,phobson/conda-env,ESSS/conda-env
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = e.message
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
Fix exception to work with py3
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = str(e)
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
<commit_before>
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = e.message
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
<commit_msg>Fix exception to work with py3<commit_after>
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = str(e)
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = e.message
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
Fix exception to work with py3
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = str(e)
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
<commit_before>
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = e.message
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
<commit_msg>Fix exception to work with py3<commit_after>
from .. import env
from ..exceptions import EnvironmentFileNotFound

class YamlFileSpec(object):
    _environment = None

    def __init__(self, filename=None, **kwargs):
        self.filename = filename
        self.msg = None

    def can_handle(self):
        try:
            self._environment = env.from_file(self.filename)
            return True
        except EnvironmentFileNotFound as e:
            self.msg = str(e)
            return False

    @property
    def environment(self):
        if not self._environment:
            self.can_handle()
        return self._environment
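The one-line diff in this record exists because BaseException.message was deprecated in Python 2.6 and removed in Python 3, while str(e) yields the message text on both lines. A quick check:

try:
    raise ValueError('no such environment file')
except ValueError as e:
    print(str(e))                 # works on Python 2 and 3
    print(hasattr(e, 'message'))  # False on Python 3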
65a156201dc91b54d8edd7d45689eb920ec0dbe3
UnitaryTests/SCardGetStatusChange2.py
UnitaryTests/SCardGetStatusChange2.py
#! /usr/bin/env python

# SCardGetStatusChange2.py : Unitary test for SCardGetStatusChange()
# Copyright (C) 2011 Ludovic Rousseau
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Check the return value of SCardGetStatusChange() for unknown readers
# Before revision 5881 SCardGetStatusChange() returned SCARD_S_SUCCESS

from smartcard.scard import *
from smartcard.pcsc.PCSCExceptions import *

hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
print "SCardEstablishContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise EstablishContextException(hresult)

hresult, readers = SCardListReaders(hcontext, [])
print "SCardListReaders()", SCardGetErrorMessage(hresult)
print 'PC/SC Readers:', readers

readers = ["a", "b"]
print readers

readerstates = {}
for reader in readers:
    readerstates[reader] = (reader, SCARD_STATE_UNAWARE)

hresult, newstates = SCardGetStatusChange(hcontext, 10, readerstates.values())
print "SCardGetStatusChange()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise BaseSCardException(hresult)
print newstates

hresult = SCardReleaseContext(hcontext)
print "SCardReleaseContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise ReleaseContextException(hresult)
Check the return value of SCardGetStatusChange() for unknown readers
Check the return value of SCardGetStatusChange() for unknown readers

git-svn-id: f2d781e409b7e36a714fc884bb9b2fc5091ddd28@5882 0ce88b0d-b2fd-0310-8134-9614164e65ea
Python
bsd-3-clause
vicamo/pcsc-lite-android,vicamo/pcsc-lite-android,vicamo/pcsc-lite-android,vicamo/pcsc-lite-android,vicamo/pcsc-lite-android
Check the return value of SCardGetStatusChange() for unknown readers

git-svn-id: f2d781e409b7e36a714fc884bb9b2fc5091ddd28@5882 0ce88b0d-b2fd-0310-8134-9614164e65ea
#! /usr/bin/env python

# SCardGetStatusChange2.py : Unitary test for SCardGetStatusChange()
# Copyright (C) 2011 Ludovic Rousseau
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Check the return value of SCardGetStatusChange() for unknown readers
# Before revision 5881 SCardGetStatusChange() returned SCARD_S_SUCCESS

from smartcard.scard import *
from smartcard.pcsc.PCSCExceptions import *

hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
print "SCardEstablishContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise EstablishContextException(hresult)

hresult, readers = SCardListReaders(hcontext, [])
print "SCardListReaders()", SCardGetErrorMessage(hresult)
print 'PC/SC Readers:', readers

readers = ["a", "b"]
print readers

readerstates = {}
for reader in readers:
    readerstates[reader] = (reader, SCARD_STATE_UNAWARE)

hresult, newstates = SCardGetStatusChange(hcontext, 10, readerstates.values())
print "SCardGetStatusChange()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise BaseSCardException(hresult)
print newstates

hresult = SCardReleaseContext(hcontext)
print "SCardReleaseContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise ReleaseContextException(hresult)
<commit_before><commit_msg>Check the return value of SCardGetStatusChange() for unknown readers git-svn-id: f2d781e409b7e36a714fc884bb9b2fc5091ddd28@5882 0ce88b0d-b2fd-0310-8134-9614164e65ea<commit_after>
#! /usr/bin/env python

# SCardGetStatusChange2.py : Unitary test for SCardGetStatusChange()
# Copyright (C) 2011 Ludovic Rousseau
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Check the return value of SCardGetStatusChange() for unknown readers
# Before revision 5881 SCardGetStatusChange() returned SCARD_S_SUCCESS

from smartcard.scard import *
from smartcard.pcsc.PCSCExceptions import *

hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
print "SCardEstablishContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise EstablishContextException(hresult)

hresult, readers = SCardListReaders(hcontext, [])
print "SCardListReaders()", SCardGetErrorMessage(hresult)
print 'PC/SC Readers:', readers

readers = ["a", "b"]
print readers

readerstates = {}
for reader in readers:
    readerstates[reader] = (reader, SCARD_STATE_UNAWARE)

hresult, newstates = SCardGetStatusChange(hcontext, 10, readerstates.values())
print "SCardGetStatusChange()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise BaseSCardException(hresult)
print newstates

hresult = SCardReleaseContext(hcontext)
print "SCardReleaseContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise ReleaseContextException(hresult)
Check the return value of SCardGetStatusChange() for unknown readers

git-svn-id: f2d781e409b7e36a714fc884bb9b2fc5091ddd28@5882 0ce88b0d-b2fd-0310-8134-9614164e65ea
#! /usr/bin/env python

# SCardGetStatusChange2.py : Unitary test for SCardGetStatusChange()
# Copyright (C) 2011 Ludovic Rousseau
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Check the return value of SCardGetStatusChange() for unknown readers
# Before revision 5881 SCardGetStatusChange() returned SCARD_S_SUCCESS

from smartcard.scard import *
from smartcard.pcsc.PCSCExceptions import *

hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
print "SCardEstablishContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise EstablishContextException(hresult)

hresult, readers = SCardListReaders(hcontext, [])
print "SCardListReaders()", SCardGetErrorMessage(hresult)
print 'PC/SC Readers:', readers

readers = ["a", "b"]
print readers

readerstates = {}
for reader in readers:
    readerstates[reader] = (reader, SCARD_STATE_UNAWARE)

hresult, newstates = SCardGetStatusChange(hcontext, 10, readerstates.values())
print "SCardGetStatusChange()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise BaseSCardException(hresult)
print newstates

hresult = SCardReleaseContext(hcontext)
print "SCardReleaseContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise ReleaseContextException(hresult)
<commit_before><commit_msg>Check the return value of SCardGetStatusChange() for unknown readers git-svn-id: f2d781e409b7e36a714fc884bb9b2fc5091ddd28@5882 0ce88b0d-b2fd-0310-8134-9614164e65ea<commit_after>
#! /usr/bin/env python

# SCardGetStatusChange2.py : Unitary test for SCardGetStatusChange()
# Copyright (C) 2011 Ludovic Rousseau
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

# Check the return value of SCardGetStatusChange() for unknown readers
# Before revision 5881 SCardGetStatusChange() returned SCARD_S_SUCCESS

from smartcard.scard import *
from smartcard.pcsc.PCSCExceptions import *

hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
print "SCardEstablishContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise EstablishContextException(hresult)

hresult, readers = SCardListReaders(hcontext, [])
print "SCardListReaders()", SCardGetErrorMessage(hresult)
print 'PC/SC Readers:', readers

readers = ["a", "b"]
print readers

readerstates = {}
for reader in readers:
    readerstates[reader] = (reader, SCARD_STATE_UNAWARE)

hresult, newstates = SCardGetStatusChange(hcontext, 10, readerstates.values())
print "SCardGetStatusChange()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise BaseSCardException(hresult)
print newstates

hresult = SCardReleaseContext(hcontext)
print "SCardReleaseContext()", SCardGetErrorMessage(hresult)
if hresult != SCARD_S_SUCCESS:
    raise ReleaseContextException(hresult)
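With the pcsc-lite revision this test was written against (r5881 and later), asking for status on a nonexistent reader should surface as an error rather than SCARD_S_SUCCESS, which is what makes the raise above fire. A caller that wants to branch on that case explicitly might look like the sketch below; SCARD_E_UNKNOWN_READER comes from the same smartcard.scard module, and a running pcscd is assumed:

from smartcard.scard import *

hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
readerstates = [("no_such_reader", SCARD_STATE_UNAWARE)]
hresult, newstates = SCardGetStatusChange(hcontext, 10, readerstates)
if hresult == SCARD_E_UNKNOWN_READER:
    print("bogus readers are reported as unknown, as expected post-r5881")
SCardReleaseContext(hcontext)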
779f9868cd348cdbe24255d52bc2ce21c4ca85bd
Tests/test_process2.py
Tests/test_process2.py
""" Test 2 ====== - Separation between Mscthesis and pySpatialTools """ ## Mscthesis application ########################## from Mscthesis.IO.clean_module import clean from Mscthesis.IO import Firms_Parser from Mscthesis.Preprocess import Firms_Preprocessor ## Define parameters and info needed logfile = 'Data/Outputs/Logs/logfile_2015_07_04.log' parentpath = '/home/tono/mscthesis/code/Data/pruebas_clean' inpath = '/home/tono/mscthesis/code/Data/pruebas_raw/raw1' typevars = {'loc_vars': ['ES-X', 'ES-Y'], 'feat_vars': ['cnae'], 'agg_var': 'cp'} ## Cleaning (TODO: Prepare as a process class) #clean(inpath, parentpath, extension='csv') ## Parse empresas parser = Firms_Parser(cleaned=True, logfile=logfile) empresas = parser.parse(parentpath=parentpath, year=2006) ## Preprocess preprocess = Firms_Preprocessor(typevars) empresas = preprocess.preprocess(empresas) ## Spatial module application ############################### #from pySpatialTools.Retrieve.spatialdiscretizer import GridSpatialDisc from pySpatialTools.Preprocess import Aggregator from pySpatialTools.Retrieve import Neighbourhood, CircRetriever from pySpatialTools.Models.pjensen import Pjensen from Mscthesis.Models import ModelProcess import numpy as np ## Parameters and info needed n_permuts = 10 ## Define aggregator agg = Aggregator(typevars=typevars) ## Define permuts reindices = create_permtutation(n_permuts) #reindices = np.zeros((empresas.shape[0], 11)) #reindices[:, 0] = np.array(range(empresas.shape[0])) #for i in range(1, 11): # reindices[:, i] = np.random.permutation(np.arange(empresas.shape[0])) ## Define retriever (Neigh has to know typevars) locs = empresas[typevars['loc_vars']].as_matrix() retriever = CircRetriever(locs) Neigh = Neighbourhood(retriever) Neigh.define_mainretriever(retriever) Neigh.define_aggretrievers(agg, empresas, reindices) del locs, retriever ## Define descriptormodel descriptormodel = Pjensen(empresas, typevars) ## Define process modelprocess = ModelProcess(logfile, Neigh, descriptormodel, typevars=typevars, lim_rows=100000, proc_name='Test') modelprocess.compute_net(empresas, 2., True, reindices)
Test for the whole process with separated modules.
Test for the whole process with separated modules.
Python
mit
tgquintela/Mscthesis
Test for the whole process with separated modules.
""" Test 2 ====== - Separation between Mscthesis and pySpatialTools """ ## Mscthesis application ########################## from Mscthesis.IO.clean_module import clean from Mscthesis.IO import Firms_Parser from Mscthesis.Preprocess import Firms_Preprocessor ## Define parameters and info needed logfile = 'Data/Outputs/Logs/logfile_2015_07_04.log' parentpath = '/home/tono/mscthesis/code/Data/pruebas_clean' inpath = '/home/tono/mscthesis/code/Data/pruebas_raw/raw1' typevars = {'loc_vars': ['ES-X', 'ES-Y'], 'feat_vars': ['cnae'], 'agg_var': 'cp'} ## Cleaning (TODO: Prepare as a process class) #clean(inpath, parentpath, extension='csv') ## Parse empresas parser = Firms_Parser(cleaned=True, logfile=logfile) empresas = parser.parse(parentpath=parentpath, year=2006) ## Preprocess preprocess = Firms_Preprocessor(typevars) empresas = preprocess.preprocess(empresas) ## Spatial module application ############################### #from pySpatialTools.Retrieve.spatialdiscretizer import GridSpatialDisc from pySpatialTools.Preprocess import Aggregator from pySpatialTools.Retrieve import Neighbourhood, CircRetriever from pySpatialTools.Models.pjensen import Pjensen from Mscthesis.Models import ModelProcess import numpy as np ## Parameters and info needed n_permuts = 10 ## Define aggregator agg = Aggregator(typevars=typevars) ## Define permuts reindices = create_permtutation(n_permuts) #reindices = np.zeros((empresas.shape[0], 11)) #reindices[:, 0] = np.array(range(empresas.shape[0])) #for i in range(1, 11): # reindices[:, i] = np.random.permutation(np.arange(empresas.shape[0])) ## Define retriever (Neigh has to know typevars) locs = empresas[typevars['loc_vars']].as_matrix() retriever = CircRetriever(locs) Neigh = Neighbourhood(retriever) Neigh.define_mainretriever(retriever) Neigh.define_aggretrievers(agg, empresas, reindices) del locs, retriever ## Define descriptormodel descriptormodel = Pjensen(empresas, typevars) ## Define process modelprocess = ModelProcess(logfile, Neigh, descriptormodel, typevars=typevars, lim_rows=100000, proc_name='Test') modelprocess.compute_net(empresas, 2., True, reindices)
<commit_before><commit_msg>Test for the whole process with separated modules.<commit_after>
""" Test 2 ====== - Separation between Mscthesis and pySpatialTools """ ## Mscthesis application ########################## from Mscthesis.IO.clean_module import clean from Mscthesis.IO import Firms_Parser from Mscthesis.Preprocess import Firms_Preprocessor ## Define parameters and info needed logfile = 'Data/Outputs/Logs/logfile_2015_07_04.log' parentpath = '/home/tono/mscthesis/code/Data/pruebas_clean' inpath = '/home/tono/mscthesis/code/Data/pruebas_raw/raw1' typevars = {'loc_vars': ['ES-X', 'ES-Y'], 'feat_vars': ['cnae'], 'agg_var': 'cp'} ## Cleaning (TODO: Prepare as a process class) #clean(inpath, parentpath, extension='csv') ## Parse empresas parser = Firms_Parser(cleaned=True, logfile=logfile) empresas = parser.parse(parentpath=parentpath, year=2006) ## Preprocess preprocess = Firms_Preprocessor(typevars) empresas = preprocess.preprocess(empresas) ## Spatial module application ############################### #from pySpatialTools.Retrieve.spatialdiscretizer import GridSpatialDisc from pySpatialTools.Preprocess import Aggregator from pySpatialTools.Retrieve import Neighbourhood, CircRetriever from pySpatialTools.Models.pjensen import Pjensen from Mscthesis.Models import ModelProcess import numpy as np ## Parameters and info needed n_permuts = 10 ## Define aggregator agg = Aggregator(typevars=typevars) ## Define permuts reindices = create_permtutation(n_permuts) #reindices = np.zeros((empresas.shape[0], 11)) #reindices[:, 0] = np.array(range(empresas.shape[0])) #for i in range(1, 11): # reindices[:, i] = np.random.permutation(np.arange(empresas.shape[0])) ## Define retriever (Neigh has to know typevars) locs = empresas[typevars['loc_vars']].as_matrix() retriever = CircRetriever(locs) Neigh = Neighbourhood(retriever) Neigh.define_mainretriever(retriever) Neigh.define_aggretrievers(agg, empresas, reindices) del locs, retriever ## Define descriptormodel descriptormodel = Pjensen(empresas, typevars) ## Define process modelprocess = ModelProcess(logfile, Neigh, descriptormodel, typevars=typevars, lim_rows=100000, proc_name='Test') modelprocess.compute_net(empresas, 2., True, reindices)
Test for the whole process with separated modules. """ Test 2 ====== - Separation between Mscthesis and pySpatialTools """ ## Mscthesis application ########################## from Mscthesis.IO.clean_module import clean from Mscthesis.IO import Firms_Parser from Mscthesis.Preprocess import Firms_Preprocessor ## Define parameters and info needed logfile = 'Data/Outputs/Logs/logfile_2015_07_04.log' parentpath = '/home/tono/mscthesis/code/Data/pruebas_clean' inpath = '/home/tono/mscthesis/code/Data/pruebas_raw/raw1' typevars = {'loc_vars': ['ES-X', 'ES-Y'], 'feat_vars': ['cnae'], 'agg_var': 'cp'} ## Cleaning (TODO: Prepare as a process class) #clean(inpath, parentpath, extension='csv') ## Parse empresas parser = Firms_Parser(cleaned=True, logfile=logfile) empresas = parser.parse(parentpath=parentpath, year=2006) ## Preprocess preprocess = Firms_Preprocessor(typevars) empresas = preprocess.preprocess(empresas) ## Spatial module application ############################### #from pySpatialTools.Retrieve.spatialdiscretizer import GridSpatialDisc from pySpatialTools.Preprocess import Aggregator from pySpatialTools.Retrieve import Neighbourhood, CircRetriever from pySpatialTools.Models.pjensen import Pjensen from Mscthesis.Models import ModelProcess import numpy as np ## Parameters and info needed n_permuts = 10 ## Define aggregator agg = Aggregator(typevars=typevars) ## Define permuts reindices = create_permtutation(n_permuts) #reindices = np.zeros((empresas.shape[0], 11)) #reindices[:, 0] = np.array(range(empresas.shape[0])) #for i in range(1, 11): # reindices[:, i] = np.random.permutation(np.arange(empresas.shape[0])) ## Define retriever (Neigh has to know typevars) locs = empresas[typevars['loc_vars']].as_matrix() retriever = CircRetriever(locs) Neigh = Neighbourhood(retriever) Neigh.define_mainretriever(retriever) Neigh.define_aggretrievers(agg, empresas, reindices) del locs, retriever ## Define descriptormodel descriptormodel = Pjensen(empresas, typevars) ## Define process modelprocess = ModelProcess(logfile, Neigh, descriptormodel, typevars=typevars, lim_rows=100000, proc_name='Test') modelprocess.compute_net(empresas, 2., True, reindices)
<commit_before><commit_msg>Test for the whole process with separated modules.<commit_after> """ Test 2 ====== - Separation between Mscthesis and pySpatialTools """ ## Mscthesis application ########################## from Mscthesis.IO.clean_module import clean from Mscthesis.IO import Firms_Parser from Mscthesis.Preprocess import Firms_Preprocessor ## Define parameters and info needed logfile = 'Data/Outputs/Logs/logfile_2015_07_04.log' parentpath = '/home/tono/mscthesis/code/Data/pruebas_clean' inpath = '/home/tono/mscthesis/code/Data/pruebas_raw/raw1' typevars = {'loc_vars': ['ES-X', 'ES-Y'], 'feat_vars': ['cnae'], 'agg_var': 'cp'} ## Cleaning (TODO: Prepare as a process class) #clean(inpath, parentpath, extension='csv') ## Parse empresas parser = Firms_Parser(cleaned=True, logfile=logfile) empresas = parser.parse(parentpath=parentpath, year=2006) ## Preprocess preprocess = Firms_Preprocessor(typevars) empresas = preprocess.preprocess(empresas) ## Spatial module application ############################### #from pySpatialTools.Retrieve.spatialdiscretizer import GridSpatialDisc from pySpatialTools.Preprocess import Aggregator from pySpatialTools.Retrieve import Neighbourhood, CircRetriever from pySpatialTools.Models.pjensen import Pjensen from Mscthesis.Models import ModelProcess import numpy as np ## Parameters and info needed n_permuts = 10 ## Define aggregator agg = Aggregator(typevars=typevars) ## Define permuts reindices = create_permtutation(n_permuts) #reindices = np.zeros((empresas.shape[0], 11)) #reindices[:, 0] = np.array(range(empresas.shape[0])) #for i in range(1, 11): # reindices[:, i] = np.random.permutation(np.arange(empresas.shape[0])) ## Define retriever (Neigh has to know typevars) locs = empresas[typevars['loc_vars']].as_matrix() retriever = CircRetriever(locs) Neigh = Neighbourhood(retriever) Neigh.define_mainretriever(retriever) Neigh.define_aggretrievers(agg, empresas, reindices) del locs, retriever ## Define descriptormodel descriptormodel = Pjensen(empresas, typevars) ## Define process modelprocess = ModelProcess(logfile, Neigh, descriptormodel, typevars=typevars, lim_rows=100000, proc_name='Test') modelprocess.compute_net(empresas, 2., True, reindices)
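The reindices matrix this script builds (identity ordering in column 0, independent shuffles in the remaining columns) is the usual setup for permutation tests. Factored into a helper it reads as below; create_permutations is a hypothetical name, not a pySpatialTools function:

import numpy as np

def create_permutations(n_rows, n_permuts):
    reindices = np.zeros((n_rows, n_permuts + 1), dtype=int)
    reindices[:, 0] = np.arange(n_rows)          # column 0: observed ordering
    for i in range(1, n_permuts + 1):
        reindices[:, i] = np.random.permutation(n_rows)
    return reindices

reindices = create_permutations(1000, 10)
assert (reindices[:, 0] == np.arange(1000)).all()
assert sorted(reindices[:, 1]) == list(range(1000))  # each column is a true permutation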
59d92d862d5e744a11a59b41ca8e01acd6c2b105
tests/test_command_cluster.py
tests/test_command_cluster.py
import os
import tempfile

from buddy.command.cluster import cli

import pytest
import vcr
import yaml
import boto3

def teardown():
    ecs_client = boto3.client('ecs')
    ecs_client.delete_service(cluster='CLUSTERNAME', service='SERVICENAME')
    ecs_client.delete_cluster(cluster='CLUSTERNAME')

def setup():
    ecs_client = boto3.client('ecs')
    containers = [
        {
            'name': 'NAME',
            'image': 'nginx',
            'memory': 10,
        }
    ]
    response = ecs_client.register_task_definition(
        family='TASKNAME',
        containerDefinitions=containers,
    )
    task_definition_arn = response['taskDefinition']['taskDefinitionArn']
    response = ecs_client.create_cluster(clusterName='CLUSTERNAME')
    ecs_cluster = response['cluster']['clusterName']
    response = ecs_client.create_service(
        cluster=ecs_cluster,
        serviceName='SERVICENAME',
        taskDefinition=task_definition_arn,
        desiredCount=0,
    )
    ecs_service = response['service']['serviceName']
    return ecs_cluster, ecs_service

def make_deploy_config_data(cluster, service):
    return {
        'targets': {
            'production': {
                'cluster': cluster,
                'service': service,
                'task': 'TASKNAME',
                'environment': 'ENVNAME',
            },
        },
        'tasks': {
            'TASKNAME': {
                'containers': ['CONTAINERNAME'],
            },
        },
        'environments': {
            'ENVNAME': {
                'VARIABLE_NAME': 'VARIABLE_VALUE',
            },
        },
        'containers': {
            'CONTAINERNAME': {
                'properties': {
                    'cpu': 10,
                    'memory': 20,
                    'command': ['prog', 'arg1', 'arg2'],
                    'logConfiguration': {
                        'logDriver': 'awslogs',
                    },
                },
                'environment': ['VARIABLE_NAME'],
            },
        },
    }

def deploy_config(ecs_service, ecs_cluster):
    data = make_deploy_config_data(cluster=ecs_cluster, service=ecs_service)
    # tempfile.mkstemp() returns a raw OS file descriptor, not a file object
    fd, name = tempfile.mkstemp()
    os.write(fd, yaml.safe_dump(data).encode())
    os.close(fd)
    return name

@vcr.use_cassette('tests/vcr/deploy.yaml')
def test_deploy():
    cluster, service = setup()
    config = deploy_config(cluster, service)
    try:
        args = ['deploy', config, 'production', 'image:tag', 'rev']
        # result = runner.invoke(cli, args, catch_exceptions=True)
        # print(result.output)
        # assert result.exit_code == 0
        # assert 'CREATE_COMPLETE' in result.output
    except:
        try:
            teardown()
        except:
            pass
        raise
Add a test for cluster command using VCR
Add a test for cluster command using VCR
Python
mit
pior/buddy
Add a test for cluster command using VCR
import os
import tempfile

from buddy.command.cluster import cli

import pytest
import vcr
import yaml
import boto3

def teardown():
    ecs_client = boto3.client('ecs')
    ecs_client.delete_service(cluster='CLUSTERNAME', service='SERVICENAME')
    ecs_client.delete_cluster(cluster='CLUSTERNAME')

def setup():
    ecs_client = boto3.client('ecs')
    containers = [
        {
            'name': 'NAME',
            'image': 'nginx',
            'memory': 10,
        }
    ]
    response = ecs_client.register_task_definition(
        family='TASKNAME',
        containerDefinitions=containers,
    )
    task_definition_arn = response['taskDefinition']['taskDefinitionArn']
    response = ecs_client.create_cluster(clusterName='CLUSTERNAME')
    ecs_cluster = response['cluster']['clusterName']
    response = ecs_client.create_service(
        cluster=ecs_cluster,
        serviceName='SERVICENAME',
        taskDefinition=task_definition_arn,
        desiredCount=0,
    )
    ecs_service = response['service']['serviceName']
    return ecs_cluster, ecs_service

def make_deploy_config_data(cluster, service):
    return {
        'targets': {
            'production': {
                'cluster': cluster,
                'service': service,
                'task': 'TASKNAME',
                'environment': 'ENVNAME',
            },
        },
        'tasks': {
            'TASKNAME': {
                'containers': ['CONTAINERNAME'],
            },
        },
        'environments': {
            'ENVNAME': {
                'VARIABLE_NAME': 'VARIABLE_VALUE',
            },
        },
        'containers': {
            'CONTAINERNAME': {
                'properties': {
                    'cpu': 10,
                    'memory': 20,
                    'command': ['prog', 'arg1', 'arg2'],
                    'logConfiguration': {
                        'logDriver': 'awslogs',
                    },
                },
                'environment': ['VARIABLE_NAME'],
            },
        },
    }

def deploy_config(ecs_service, ecs_cluster):
    data = make_deploy_config_data(cluster=ecs_cluster, service=ecs_service)
    # tempfile.mkstemp() returns a raw OS file descriptor, not a file object
    fd, name = tempfile.mkstemp()
    os.write(fd, yaml.safe_dump(data).encode())
    os.close(fd)
    return name

@vcr.use_cassette('tests/vcr/deploy.yaml')
def test_deploy():
    cluster, service = setup()
    config = deploy_config(cluster, service)
    try:
        args = ['deploy', config, 'production', 'image:tag', 'rev']
        # result = runner.invoke(cli, args, catch_exceptions=True)
        # print(result.output)
        # assert result.exit_code == 0
        # assert 'CREATE_COMPLETE' in result.output
    except:
        try:
            teardown()
        except:
            pass
        raise
<commit_before><commit_msg>Add a test for cluster command using VCR<commit_after>
import os
import tempfile

from buddy.command.cluster import cli

import pytest
import vcr
import yaml
import boto3

def teardown():
    ecs_client = boto3.client('ecs')
    ecs_client.delete_service(cluster='CLUSTERNAME', service='SERVICENAME')
    ecs_client.delete_cluster(cluster='CLUSTERNAME')

def setup():
    ecs_client = boto3.client('ecs')
    containers = [
        {
            'name': 'NAME',
            'image': 'nginx',
            'memory': 10,
        }
    ]
    response = ecs_client.register_task_definition(
        family='TASKNAME',
        containerDefinitions=containers,
    )
    task_definition_arn = response['taskDefinition']['taskDefinitionArn']
    response = ecs_client.create_cluster(clusterName='CLUSTERNAME')
    ecs_cluster = response['cluster']['clusterName']
    response = ecs_client.create_service(
        cluster=ecs_cluster,
        serviceName='SERVICENAME',
        taskDefinition=task_definition_arn,
        desiredCount=0,
    )
    ecs_service = response['service']['serviceName']
    return ecs_cluster, ecs_service

def make_deploy_config_data(cluster, service):
    return {
        'targets': {
            'production': {
                'cluster': cluster,
                'service': service,
                'task': 'TASKNAME',
                'environment': 'ENVNAME',
            },
        },
        'tasks': {
            'TASKNAME': {
                'containers': ['CONTAINERNAME'],
            },
        },
        'environments': {
            'ENVNAME': {
                'VARIABLE_NAME': 'VARIABLE_VALUE',
            },
        },
        'containers': {
            'CONTAINERNAME': {
                'properties': {
                    'cpu': 10,
                    'memory': 20,
                    'command': ['prog', 'arg1', 'arg2'],
                    'logConfiguration': {
                        'logDriver': 'awslogs',
                    },
                },
                'environment': ['VARIABLE_NAME'],
            },
        },
    }

def deploy_config(ecs_service, ecs_cluster):
    data = make_deploy_config_data(cluster=ecs_cluster, service=ecs_service)
    # tempfile.mkstemp() returns a raw OS file descriptor, not a file object
    fd, name = tempfile.mkstemp()
    os.write(fd, yaml.safe_dump(data).encode())
    os.close(fd)
    return name

@vcr.use_cassette('tests/vcr/deploy.yaml')
def test_deploy():
    cluster, service = setup()
    config = deploy_config(cluster, service)
    try:
        args = ['deploy', config, 'production', 'image:tag', 'rev']
        # result = runner.invoke(cli, args, catch_exceptions=True)
        # print(result.output)
        # assert result.exit_code == 0
        # assert 'CREATE_COMPLETE' in result.output
    except:
        try:
            teardown()
        except:
            pass
        raise
Add a test for cluster command using VCR
import os
import tempfile

from buddy.command.cluster import cli

import pytest
import vcr
import yaml
import boto3

def teardown():
    ecs_client = boto3.client('ecs')
    ecs_client.delete_service(cluster='CLUSTERNAME', service='SERVICENAME')
    ecs_client.delete_cluster(cluster='CLUSTERNAME')

def setup():
    ecs_client = boto3.client('ecs')
    containers = [
        {
            'name': 'NAME',
            'image': 'nginx',
            'memory': 10,
        }
    ]
    response = ecs_client.register_task_definition(
        family='TASKNAME',
        containerDefinitions=containers,
    )
    task_definition_arn = response['taskDefinition']['taskDefinitionArn']
    response = ecs_client.create_cluster(clusterName='CLUSTERNAME')
    ecs_cluster = response['cluster']['clusterName']
    response = ecs_client.create_service(
        cluster=ecs_cluster,
        serviceName='SERVICENAME',
        taskDefinition=task_definition_arn,
        desiredCount=0,
    )
    ecs_service = response['service']['serviceName']
    return ecs_cluster, ecs_service

def make_deploy_config_data(cluster, service):
    return {
        'targets': {
            'production': {
                'cluster': cluster,
                'service': service,
                'task': 'TASKNAME',
                'environment': 'ENVNAME',
            },
        },
        'tasks': {
            'TASKNAME': {
                'containers': ['CONTAINERNAME'],
            },
        },
        'environments': {
            'ENVNAME': {
                'VARIABLE_NAME': 'VARIABLE_VALUE',
            },
        },
        'containers': {
            'CONTAINERNAME': {
                'properties': {
                    'cpu': 10,
                    'memory': 20,
                    'command': ['prog', 'arg1', 'arg2'],
                    'logConfiguration': {
                        'logDriver': 'awslogs',
                    },
                },
                'environment': ['VARIABLE_NAME'],
            },
        },
    }

def deploy_config(ecs_service, ecs_cluster):
    data = make_deploy_config_data(cluster=ecs_cluster, service=ecs_service)
    # tempfile.mkstemp() returns a raw OS file descriptor, not a file object
    fd, name = tempfile.mkstemp()
    os.write(fd, yaml.safe_dump(data).encode())
    os.close(fd)
    return name

@vcr.use_cassette('tests/vcr/deploy.yaml')
def test_deploy():
    cluster, service = setup()
    config = deploy_config(cluster, service)
    try:
        args = ['deploy', config, 'production', 'image:tag', 'rev']
        # result = runner.invoke(cli, args, catch_exceptions=True)
        # print(result.output)
        # assert result.exit_code == 0
        # assert 'CREATE_COMPLETE' in result.output
    except:
        try:
            teardown()
        except:
            pass
        raise
<commit_before><commit_msg>Add a test for cluster command using VCR<commit_after>
import os
import tempfile

from buddy.command.cluster import cli

import pytest
import vcr
import yaml
import boto3

def teardown():
    ecs_client = boto3.client('ecs')
    ecs_client.delete_service(cluster='CLUSTERNAME', service='SERVICENAME')
    ecs_client.delete_cluster(cluster='CLUSTERNAME')

def setup():
    ecs_client = boto3.client('ecs')
    containers = [
        {
            'name': 'NAME',
            'image': 'nginx',
            'memory': 10,
        }
    ]
    response = ecs_client.register_task_definition(
        family='TASKNAME',
        containerDefinitions=containers,
    )
    task_definition_arn = response['taskDefinition']['taskDefinitionArn']
    response = ecs_client.create_cluster(clusterName='CLUSTERNAME')
    ecs_cluster = response['cluster']['clusterName']
    response = ecs_client.create_service(
        cluster=ecs_cluster,
        serviceName='SERVICENAME',
        taskDefinition=task_definition_arn,
        desiredCount=0,
    )
    ecs_service = response['service']['serviceName']
    return ecs_cluster, ecs_service

def make_deploy_config_data(cluster, service):
    return {
        'targets': {
            'production': {
                'cluster': cluster,
                'service': service,
                'task': 'TASKNAME',
                'environment': 'ENVNAME',
            },
        },
        'tasks': {
            'TASKNAME': {
                'containers': ['CONTAINERNAME'],
            },
        },
        'environments': {
            'ENVNAME': {
                'VARIABLE_NAME': 'VARIABLE_VALUE',
            },
        },
        'containers': {
            'CONTAINERNAME': {
                'properties': {
                    'cpu': 10,
                    'memory': 20,
                    'command': ['prog', 'arg1', 'arg2'],
                    'logConfiguration': {
                        'logDriver': 'awslogs',
                    },
                },
                'environment': ['VARIABLE_NAME'],
            },
        },
    }

def deploy_config(ecs_service, ecs_cluster):
    data = make_deploy_config_data(cluster=ecs_cluster, service=ecs_service)
    # tempfile.mkstemp() returns a raw OS file descriptor, not a file object
    fd, name = tempfile.mkstemp()
    os.write(fd, yaml.safe_dump(data).encode())
    os.close(fd)
    return name

@vcr.use_cassette('tests/vcr/deploy.yaml')
def test_deploy():
    cluster, service = setup()
    config = deploy_config(cluster, service)
    try:
        args = ['deploy', config, 'production', 'image:tag', 'rev']
        # result = runner.invoke(cli, args, catch_exceptions=True)
        # print(result.output)
        # assert result.exit_code == 0
        # assert 'CREATE_COMPLETE' in result.output
    except:
        try:
            teardown()
        except:
            pass
        raise
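The vcr decorator is what makes this test reproducible: on the first run it records the boto3 HTTP exchanges into the named cassette file, and on later runs it replays them, so no live ECS cluster or AWS credentials are needed. A minimal standalone illustration of the same pattern (the cassette path is arbitrary):

import vcr
import requests

@vcr.use_cassette('tests/vcr/example.yaml')
def fetch_status():
    return requests.get('https://httpbin.org/get').status_code

print(fetch_status())  # recorded once, replayed afterwards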
804a91299c97a8008633d6e61b68b7923c804152
lesion/stats.py
lesion/stats.py
""" Compute statistics from linear traces of embryos. The most basic statistic is min/max, but others will be compiled here. """ import numpy as np from scipy import ndimage as nd def min_max(tr): """Return the ratio of minimum to maximum of a trace. Parameters ---------- tr : 1D array of float The input profile. Returns ------- mm : float The ratio of the minimum value in `tr` over the maximum. Examples -------- >>> tr = np.array([0.8, 0.9, 1.4, 2.0, 1.1]) >>> min_max(tr) # 0.8 / 2.0 0.4 """ tr = tr.astype(float) mm = tr.min() / tr.max() return mm def slope(tr, sigma=None): """Compute the absolute slope between the max and min positions. Parameters ---------- tr : 1D array of float The input profile. sigma : float, optional Smooth `tr` by a Gaussian filter with this sigma. Returns ------- a : float The slope from max to min, in absolute value. """ tr = tr.astype(float) if sigma is not None: tr = nd.gaussian_filter1d(tr, sigma=sigma) m, M = np.argmin(tr), np.argmax(tr) a = np.abs((tr[m] - tr[M]) / (m - M)) return a
Add statistics computation from traces
Add statistics computation from traces
Python
bsd-3-clause
jni/lesion
Add statistics computation from traces
""" Compute statistics from linear traces of embryos. The most basic statistic is min/max, but others will be compiled here. """ import numpy as np from scipy import ndimage as nd def min_max(tr): """Return the ratio of minimum to maximum of a trace. Parameters ---------- tr : 1D array of float The input profile. Returns ------- mm : float The ratio of the minimum value in `tr` over the maximum. Examples -------- >>> tr = np.array([0.8, 0.9, 1.4, 2.0, 1.1]) >>> min_max(tr) # 0.8 / 2.0 0.4 """ tr = tr.astype(float) mm = tr.min() / tr.max() return mm def slope(tr, sigma=None): """Compute the absolute slope between the max and min positions. Parameters ---------- tr : 1D array of float The input profile. sigma : float, optional Smooth `tr` by a Gaussian filter with this sigma. Returns ------- a : float The slope from max to min, in absolute value. """ tr = tr.astype(float) if sigma is not None: tr = nd.gaussian_filter1d(tr, sigma=sigma) m, M = np.argmin(tr), np.argmax(tr) a = np.abs((tr[m] - tr[M]) / (m - M)) return a
<commit_before><commit_msg>Add statistics computation from traces<commit_after>
""" Compute statistics from linear traces of embryos. The most basic statistic is min/max, but others will be compiled here. """ import numpy as np from scipy import ndimage as nd def min_max(tr): """Return the ratio of minimum to maximum of a trace. Parameters ---------- tr : 1D array of float The input profile. Returns ------- mm : float The ratio of the minimum value in `tr` over the maximum. Examples -------- >>> tr = np.array([0.8, 0.9, 1.4, 2.0, 1.1]) >>> min_max(tr) # 0.8 / 2.0 0.4 """ tr = tr.astype(float) mm = tr.min() / tr.max() return mm def slope(tr, sigma=None): """Compute the absolute slope between the max and min positions. Parameters ---------- tr : 1D array of float The input profile. sigma : float, optional Smooth `tr` by a Gaussian filter with this sigma. Returns ------- a : float The slope from max to min, in absolute value. """ tr = tr.astype(float) if sigma is not None: tr = nd.gaussian_filter1d(tr, sigma=sigma) m, M = np.argmin(tr), np.argmax(tr) a = np.abs((tr[m] - tr[M]) / (m - M)) return a
Add statistics computation from traces""" Compute statistics from linear traces of embryos. The most basic statistic is min/max, but others will be compiled here. """ import numpy as np from scipy import ndimage as nd def min_max(tr): """Return the ratio of minimum to maximum of a trace. Parameters ---------- tr : 1D array of float The input profile. Returns ------- mm : float The ratio of the minimum value in `tr` over the maximum. Examples -------- >>> tr = np.array([0.8, 0.9, 1.4, 2.0, 1.1]) >>> min_max(tr) # 0.8 / 2.0 0.4 """ tr = tr.astype(float) mm = tr.min() / tr.max() return mm def slope(tr, sigma=None): """Compute the absolute slope between the max and min positions. Parameters ---------- tr : 1D array of float The input profile. sigma : float, optional Smooth `tr` by a Gaussian filter with this sigma. Returns ------- a : float The slope from max to min, in absolute value. """ tr = tr.astype(float) if sigma is not None: tr = nd.gaussian_filter1d(tr, sigma=sigma) m, M = np.argmin(tr), np.argmax(tr) a = np.abs((tr[m] - tr[M]) / (m - M)) return a
<commit_before><commit_msg>Add statistics computation from traces<commit_after>""" Compute statistics from linear traces of embryos. The most basic statistic is min/max, but others will be compiled here. """ import numpy as np from scipy import ndimage as nd def min_max(tr): """Return the ratio of minimum to maximum of a trace. Parameters ---------- tr : 1D array of float The input profile. Returns ------- mm : float The ratio of the minimum value in `tr` over the maximum. Examples -------- >>> tr = np.array([0.8, 0.9, 1.4, 2.0, 1.1]) >>> min_max(tr) # 0.8 / 2.0 0.4 """ tr = tr.astype(float) mm = tr.min() / tr.max() return mm def slope(tr, sigma=None): """Compute the absolute slope between the max and min positions. Parameters ---------- tr : 1D array of float The input profile. sigma : float, optional Smooth `tr` by a Gaussian filter with this sigma. Returns ------- a : float The slope from max to min, in absolute value. """ tr = tr.astype(float) if sigma is not None: tr = nd.gaussian_filter1d(tr, sigma=sigma) m, M = np.argmin(tr), np.argmax(tr) a = np.abs((tr[m] - tr[M]) / (m - M)) return a
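A quick numeric sanity check of slope(): for a monotone ramp the extrema sit at the two ends, so the max-to-min slope is just the step size. The same arithmetic the function performs, reproduced inline:

import numpy as np

tr = np.array([0.0, 0.5, 1.0, 1.5, 2.0])
m, M = np.argmin(tr), np.argmax(tr)            # indices 0 and 4
assert abs((tr[m] - tr[M]) / (m - M)) == 0.5   # matches slope(tr) with sigma=None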
268f9c7d7354fef5a87b8a56282aad617c5953f4
ci/copy-release.py
ci/copy-release.py
#!/usr/bin/env python3

import os
import pathlib
import sys

import boto3
import requests

tag_name = os.environ.get("TAG_NAME")
dcos_version = os.environ.get("DCOS_VERSION")

if not tag_name:
    print("Missing TAG_NAME.", file=sys.stderr)
    sys.exit(1)

if not dcos_version:
    print("Missing DCOS_VERSION.", file=sys.stderr)
    sys.exit(1)

s3_client = boto3.resource('s3', region_name='us-west-2').meta.client

bucket = "downloads.dcos.io"
artifacts = [
    "binaries/cli/linux/x86-64/{}/dcos",
    "binaries/cli/darwin/x86-64/{}/dcos",
    "binaries/cli/windows/x86-64/{}/dcos.exe"
]

for artifact in artifacts:
    src = {'Bucket': bucket, 'Key': artifact.format(tag_name)}
    dst = artifact.format("dcos-" + dcos_version)
    s3_client.copy(src, bucket, dst)

slack_token = os.environ.get("SLACK_API_TOKEN")
if not slack_token:
    sys.exit(0)

attachment_text = tag_name + " has been released!"
s3_urls = ["https://{}/{}".format(bucket, a.format("dcos-" + dcos_version))
           for a in artifacts]

try:
    resp = requests.post(
        "https://mesosphere.slack.com/services/hooks/jenkins-ci?token=" + slack_token,
        json={
            "channel": "#dcos-cli-ci",
            "color": "good",
            "attachments": [
                {
                    "color": "good",
                    "title": "dcos-core-cli",
                    "text": "\n".join([attachment_text + " :tada:"] + s3_urls),
                    "fallback": "[dcos-core-cli] " + attachment_text
                }
            ]
        },
        timeout=30)
    if resp.status_code != 200:
        raise Exception("received {} status response: {}".format(
            resp.status_code, resp.text))
except Exception as e:
    print("Couldn't post Slack notification:\n {}".format(e))
Add a CI script to copy the CLI release
Add a CI script to copy the CLI release

This moves the script out of the core CLI.

https://github.com/dcos/dcos-core-cli/pull/66
https://jira.mesosphere.com/browse/DCOS_OSS-4278
Python
apache-2.0
kensipe/dcos-cli,dcos/dcos-cli,dcos/dcos-cli,kensipe/dcos-cli,dcos/dcos-cli,dcos/dcos-cli,kensipe/dcos-cli,dcos/dcos-cli,kensipe/dcos-cli,kensipe/dcos-cli
Add a CI script to copy the CLI release This moves the script out of the core CLI. https://github.com/dcos/dcos-core-cli/pull/66 https://jira.mesosphere.com/browse/DCOS_OSS-4278
#!/usr/bin/env python3 import os import pathlib import sys import boto3 import requests tag_name = os.environ.get("TAG_NAME") dcos_version = os.environ.get("DCOS_VERSION") if not tag_name: print("Missing TAG_NAME.", file=sys.stderr) sys.exit(1) if not dcos_version: print("Missing DCOS_VERSION.", file=sys.stderr) sys.exit(1) s3_client = boto3.resource('s3', region_name='us-west-2').meta.client bucket = "downloads.dcos.io" artifacts = [ "binaries/cli/linux/x86-64/{}/dcos", "binaries/cli/darwin/x86-64/{}/dcos", "binaries/cli/windows/x86-64/{}/dcos.exe" ] for artifact in artifacts: src = {'Bucket': bucket, 'Key': artifact.format(tag_name)} dst = artifact.format("dcos-" + dcos_version) s3_client.copy(src, bucket, dst) slack_token = os.environ.get("SLACK_API_TOKEN") if not slack_token: sys.exit(0) attachment_text = tag_name + " has been released!" s3_urls = ["https://{}/{}".format(bucket, a.format("dcos-" + dcos_version)) for a in artifacts] try: resp = requests.post( "https://mesosphere.slack.com/services/hooks/jenkins-ci?token=" + slack_token, json={ "channel": "#dcos-cli-ci", "color": "good", "attachments": [ { "color": "good", "title": "dcos-core-cli", "text": "\n".join([attachment_text + " :tada:"] + s3_urls), "fallback": "[dcos-core-cli] " + attachment_text } ] }, timeout=30) if resp.status_code != 200: raise Exception("received {} status response: {}".format(resp.status_code, resp.text)) except Exception as e: print("Couldn't post Slack notification:\n {}".format(e))
<commit_before><commit_msg>Add a CI script to copy the CLI release This moves the script out of the core CLI. https://github.com/dcos/dcos-core-cli/pull/66 https://jira.mesosphere.com/browse/DCOS_OSS-4278<commit_after>
#!/usr/bin/env python3 import os import pathlib import sys import boto3 import requests tag_name = os.environ.get("TAG_NAME") dcos_version = os.environ.get("DCOS_VERSION") if not tag_name: print("Missing TAG_NAME.", file=sys.stderr) sys.exit(1) if not dcos_version: print("Missing DCOS_VERSION.", file=sys.stderr) sys.exit(1) s3_client = boto3.resource('s3', region_name='us-west-2').meta.client bucket = "downloads.dcos.io" artifacts = [ "binaries/cli/linux/x86-64/{}/dcos", "binaries/cli/darwin/x86-64/{}/dcos", "binaries/cli/windows/x86-64/{}/dcos.exe" ] for artifact in artifacts: src = {'Bucket': bucket, 'Key': artifact.format(tag_name)} dst = artifact.format("dcos-" + dcos_version) s3_client.copy(src, bucket, dst) slack_token = os.environ.get("SLACK_API_TOKEN") if not slack_token: sys.exit(0) attachment_text = tag_name + " has been released!" s3_urls = ["https://{}/{}".format(bucket, a.format("dcos-" + dcos_version)) for a in artifacts] try: resp = requests.post( "https://mesosphere.slack.com/services/hooks/jenkins-ci?token=" + slack_token, json={ "channel": "#dcos-cli-ci", "color": "good", "attachments": [ { "color": "good", "title": "dcos-core-cli", "text": "\n".join([attachment_text + " :tada:"] + s3_urls), "fallback": "[dcos-core-cli] " + attachment_text } ] }, timeout=30) if resp.status_code != 200: raise Exception("received {} status response: {}".format(resp.status_code, resp.text)) except Exception as e: print("Couldn't post Slack notification:\n {}".format(e))
Add a CI script to copy the CLI release This moves the script out of the core CLI. https://github.com/dcos/dcos-core-cli/pull/66 https://jira.mesosphere.com/browse/DCOS_OSS-4278#!/usr/bin/env python3 import os import pathlib import sys import boto3 import requests tag_name = os.environ.get("TAG_NAME") dcos_version = os.environ.get("DCOS_VERSION") if not tag_name: print("Missing TAG_NAME.", file=sys.stderr) sys.exit(1) if not dcos_version: print("Missing DCOS_VERSION.", file=sys.stderr) sys.exit(1) s3_client = boto3.resource('s3', region_name='us-west-2').meta.client bucket = "downloads.dcos.io" artifacts = [ "binaries/cli/linux/x86-64/{}/dcos", "binaries/cli/darwin/x86-64/{}/dcos", "binaries/cli/windows/x86-64/{}/dcos.exe" ] for artifact in artifacts: src = {'Bucket': bucket, 'Key': artifact.format(tag_name)} dst = artifact.format("dcos-" + dcos_version) s3_client.copy(src, bucket, dst) slack_token = os.environ.get("SLACK_API_TOKEN") if not slack_token: sys.exit(0) attachment_text = tag_name + " has been released!" s3_urls = ["https://{}/{}".format(bucket, a.format("dcos-" + dcos_version)) for a in artifacts] try: resp = requests.post( "https://mesosphere.slack.com/services/hooks/jenkins-ci?token=" + slack_token, json={ "channel": "#dcos-cli-ci", "color": "good", "attachments": [ { "color": "good", "title": "dcos-core-cli", "text": "\n".join([attachment_text + " :tada:"] + s3_urls), "fallback": "[dcos-core-cli] " + attachment_text } ] }, timeout=30) if resp.status_code != 200: raise Exception("received {} status response: {}".format(resp.status_code, resp.text)) except Exception as e: print("Couldn't post Slack notification:\n {}".format(e))
<commit_before><commit_msg>Add a CI script to copy the CLI release This moves the script out of the core CLI. https://github.com/dcos/dcos-core-cli/pull/66 https://jira.mesosphere.com/browse/DCOS_OSS-4278<commit_after>#!/usr/bin/env python3 import os import pathlib import sys import boto3 import requests tag_name = os.environ.get("TAG_NAME") dcos_version = os.environ.get("DCOS_VERSION") if not tag_name: print("Missing TAG_NAME.", file=sys.stderr) sys.exit(1) if not dcos_version: print("Missing DCOS_VERSION.", file=sys.stderr) sys.exit(1) s3_client = boto3.resource('s3', region_name='us-west-2').meta.client bucket = "downloads.dcos.io" artifacts = [ "binaries/cli/linux/x86-64/{}/dcos", "binaries/cli/darwin/x86-64/{}/dcos", "binaries/cli/windows/x86-64/{}/dcos.exe" ] for artifact in artifacts: src = {'Bucket': bucket, 'Key': artifact.format(tag_name)} dst = artifact.format("dcos-" + dcos_version) s3_client.copy(src, bucket, dst) slack_token = os.environ.get("SLACK_API_TOKEN") if not slack_token: sys.exit(0) attachment_text = tag_name + " has been released!" s3_urls = ["https://{}/{}".format(bucket, a.format("dcos-" + dcos_version)) for a in artifacts] try: resp = requests.post( "https://mesosphere.slack.com/services/hooks/jenkins-ci?token=" + slack_token, json={ "channel": "#dcos-cli-ci", "color": "good", "attachments": [ { "color": "good", "title": "dcos-core-cli", "text": "\n".join([attachment_text + " :tada:"] + s3_urls), "fallback": "[dcos-core-cli] " + attachment_text } ] }, timeout=30) if resp.status_code != 200: raise Exception("received {} status response: {}".format(resp.status_code, resp.text)) except Exception as e: print("Couldn't post Slack notification:\n {}".format(e))
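The release script above derives both sides of every S3 copy from a single templated key per artifact, filling the {} placeholder with the Git tag on the source side and with "dcos-<version>" on the destination side. A minimal standalone sketch of that mapping step, with invented tag and version values (only the key templates themselves come from the script):

ARTIFACT_TEMPLATES = [
    "binaries/cli/linux/x86-64/{}/dcos",
    "binaries/cli/darwin/x86-64/{}/dcos",
    "binaries/cli/windows/x86-64/{}/dcos.exe",
]

def copy_pairs(tag_name, dcos_version):
    # Build (source_key, destination_key) pairs the same way the script
    # does before handing them to s3_client.copy().
    for template in ARTIFACT_TEMPLATES:
        yield template.format(tag_name), template.format("dcos-" + dcos_version)

for src, dst in copy_pairs("0.7.0", "1.12"):  # illustrative values only
    print(src, "->", dst)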
fb3fc2bd34fd9ad8f5e8ad088fb1db35a1a1ceb6
chrome/browser/extensions/PRESUBMIT.py
chrome/browser/extensions/PRESUBMIT.py
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/chrome/browser/extensions. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos_aura:compile']
Add aura compile testing by default to likely areas (4).
Add aura compile testing by default to likely areas (4). BUG=chromium:107599 TEST=None Review URL: http://codereview.chromium.org/8907044 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@114847 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
adobe/chromium,ropik/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,ropik/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,gavinp/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,yitian134/chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,yitian134/chromium,adobe/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,ropik/chromium,adobe/chromium
Add aura compile testing by default to likely areas (4). BUG=chromium:107599 TEST=None Review URL: http://codereview.chromium.org/8907044 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@114847 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/chrome/browser/extensions. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos_aura:compile']
<commit_before><commit_msg>Add aura compile testing by default to likely areas (4). BUG=chromium:107599 TEST=None Review URL: http://codereview.chromium.org/8907044 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@114847 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/chrome/browser/extensions. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos_aura:compile']
Add aura compile testing by default to likely areas (4). BUG=chromium:107599 TEST=None Review URL: http://codereview.chromium.org/8907044 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@114847 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/chrome/browser/extensions. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos_aura:compile']
<commit_before><commit_msg>Add aura compile testing by default to likely areas (4). BUG=chromium:107599 TEST=None Review URL: http://codereview.chromium.org/8907044 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@114847 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/chrome/browser/extensions. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos_aura:compile']
5da5b3426ab6134d1a2e08ce5b57a63a32493178
prototypes/nrel_polymer_converter.py
prototypes/nrel_polymer_converter.py
#!/usr/bin/env python import sys import os import json def main(argv): path = len(argv) == 2 and os.path.dirname(argv[0]) or "/Users/mo/Documents/MDF" filename = len(argv) == 2 and os.path.basename(argv[0]) or "NREL_polymer_export.txt" output = len(argv) == 2 and argv[1] or "NREL_polymer_output.txt" # print filename records = process_file(path, filename) # data output gw = open(output, "w") try: for record in records: print(json.dumps(record)) gw.write(json.dumps(record) + "\n") except IOError: print("A list Records is empty!") gw.close() print("FINISH") def run_extraction(filename): ''' Parse the NREL file. Get the data defined by the header 'gap_extrapolated' -> Extrapolated Gap 'delta_homo' -> Highest Occupied Molecular Orbital difference 'sum_f_osc' -> Sum of OSC 'delta_optical_lumo' -> Optical Lowest Unoccupied Molecular Orbital difference 'delta_lumo' -> Lowest Unoccupied Molecular Orbital difference 'spectral_overlap' -> Spectral overlap 'functional/basis' -> Functional basis 'homo_extrapolated' -> Extrapolated Highest Occupied Molecular Orbital 'common_tag' -> Common tag 'optical_lumo_extrapolated' -> Extrapolated Optical Lowest Unoccupied Molecular Orbital 'lumo_extrapolated' -> Extrapolated Lowest Unoccupied Molecular Orbital :param filename: Filename of NREL data file :return: List of Dictionaries containing data ''' # parse line by line and create list of dictionaries with key headers try: f = open(filename) heads = list() records = list() line_index = 0 for line in f: line = line.rstrip('\n') # print line if line_index == 0: heads = line.split() else: elements = line.split() if len(heads) == len(elements): records.append(dict(zip(heads, elements))) else: print("Wrong number of records: " + str(len(elements)) + " instead of " + str(len(heads))) line_index += 1 f.close() return records except IOError: print("File " + filename + " does not exist!") def process_file(path, file): # file exists if path[-1] == "/": filename = path + file else: filename = path + "/" + file if os.path.isfile(filename): return run_extraction(filename) else: print("Exiting! File " + file + " does not exist.") exit(0) if __name__ == "__main__": main(sys.argv[1:])
Read line by line from the NREL polymer text file and create list of dictionaries using the header keys.
Read line by line from the NREL polymer text file and create list of dictionaries using the header keys.
Python
apache-2.0
materials-data-facility/forge
Read line by line from the NREL polymer text file and create list of dictionaries using the header keys.
#!/usr/bin/env python import sys import os import json def main(argv): path = len(argv) == 2 and os.path.dirname(argv[0]) or "/Users/mo/Documents/MDF" filename = len(argv) == 2 and os.path.basename(argv[0]) or "NREL_polymer_export.txt" output = len(argv) == 2 and argv[1] or "NREL_polymer_output.txt" # print filename records = process_file(path, filename) # data output gw = open(output, "w") try: for record in records: print(json.dumps(record)) gw.write(json.dumps(record) + "\n") except IOError: print("A list Records is empty!") gw.close() print("FINISH") def run_extraction(filename): ''' Parse the NREL file. Get the data defined by the header 'gap_extrapolated' -> Extrapolated Gap 'delta_homo' -> Highest Occupied Molecular Orbital difference 'sum_f_osc' -> Sum of OSC 'delta_optical_lumo' -> Optical Lowest Unoccupied Molecular Orbital difference 'delta_lumo' -> Lowest Unoccupied Molecular Orbital difference 'spectral_overlap' -> Spectral overlap 'functional/basis' -> Functional basis 'homo_extrapolated' -> Extrapolated Highest Occupied Molecular Orbital 'common_tag' -> Common tag 'optical_lumo_extrapolated' -> Extrapolated Optical Lowest Unoccupied Molecular Orbital 'lumo_extrapolated' -> Extrapolated Lowest Unoccupied Molecular Orbital :param filename: Filename of NREL data file :return: List of Dictionaries containing data ''' # parse line by line and create list of dictionaries with key headers try: f = open(filename) heads = list() records = list() line_index = 0 for line in f: line = line.rstrip('\n') # print line if line_index == 0: heads = line.split() else: elements = line.split() if len(heads) == len(elements): records.append(dict(zip(heads, elements))) else: print("Wrong number of records: " + str(len(elements)) + " instead of " + str(len(heads))) line_index += 1 f.close() return records except IOError: print("File " + filename + " does not exist!") def process_file(path, file): # file exists if path[-1] == "/": filename = path + file else: filename = path + "/" + file if os.path.isfile(filename): return run_extraction(filename) else: print("Exiting! File " + file + " does not exist.") exit(0) if __name__ == "__main__": main(sys.argv[1:])
<commit_before><commit_msg>Read line by line from the NREL polymer text file and create list of dictionaries using the header keys.<commit_after>
#!/usr/bin/env python import sys import os import json def main(argv): path = len(argv) == 2 and os.path.dirname(argv[0]) or "/Users/mo/Documents/MDF" filename = len(argv) == 2 and os.path.basename(argv[0]) or "NREL_polymer_export.txt" output = len(argv) == 2 and argv[1] or "NREL_polymer_output.txt" # print filename records = process_file(path, filename) # data output gw = open(output, "w") try: for record in records: print(json.dumps(record)) gw.write(json.dumps(record) + "\n") except IOError: print("A list Records is empty!") gw.close() print("FINISH") def run_extraction(filename): ''' Parse the NREL file. Get the data defined by the header 'gap_extrapolated' -> Extrapolated Gap 'delta_homo' -> Highest Occupied Molecular Orbital difference 'sum_f_osc' -> Sum of OSC 'delta_optical_lumo' -> Optical Lowest Unoccupied Molecular Orbital difference 'delta_lumo' -> Lowest Unoccupied Molecular Orbital difference 'spectral_overlap' -> Spectral overlap 'functional/basis' -> Functional basis 'homo_extrapolated' -> Extrapolated Highest Occupied Molecular Orbital 'common_tag' -> Common tag 'optical_lumo_extrapolated' -> Extrapolated Optical Lowest Unoccupied Molecular Orbital 'lumo_extrapolated' -> Extrapolated Lowest Unoccupied Molecular Orbital :param filename: Filename of NREL data file :return: List of Dictionaries containing data ''' # parse line by line and create list of dictionaries with key headers try: f = open(filename) heads = list() records = list() line_index = 0 for line in f: line = line.rstrip('\n') # print line if line_index == 0: heads = line.split() else: elements = line.split() if len(heads) == len(elements): records.append(dict(zip(heads, elements))) else: print("Wrong number of records: " + str(len(elements)) + " instead of " + str(len(heads))) line_index += 1 f.close() return records except IOError: print("File " + filename + " does not exist!") def process_file(path, file): # file exists if path[-1] == "/": filename = path + file else: filename = path + "/" + file if os.path.isfile(filename): return run_extraction(filename) else: print("Exiting! File " + file + " does not exist.") exit(0) if __name__ == "__main__": main(sys.argv[1:])
Read line by line from the NREL polymer text file and create list of dictionaries using the header keys.#!/usr/bin/env python import sys import os import json def main(argv): path = len(argv) == 2 and os.path.dirname(argv[0]) or "/Users/mo/Documents/MDF" filename = len(argv) == 2 and os.path.basename(argv[0]) or "NREL_polymer_export.txt" output = len(argv) == 2 and argv[1] or "NREL_polymer_output.txt" # print filename records = process_file(path, filename) # data output gw = open(output, "w") try: for record in records: print(json.dumps(record)) gw.write(json.dumps(record) + "\n") except IOError: print("A list Records is empty!") gw.close() print("FINISH") def run_extraction(filename): ''' Parse the NREL file. Get the data defined by the header 'gap_extrapolated' -> Extrapolated Gap 'delta_homo' -> Highest Occupied Molecular Orbital difference 'sum_f_osc' -> Sum of OSC 'delta_optical_lumo' -> Optical Lowest Unoccupied Molecular Orbital difference 'delta_lumo' -> Lowest Unoccupied Molecular Orbital difference 'spectral_overlap' -> Spectral overlap 'functional/basis' -> Functional basis 'homo_extrapolated' -> Extrapolated Highest Occupied Molecular Orbital 'common_tag' -> Common tag 'optical_lumo_extrapolated' -> Extrapolated Optical Lowest Unoccupied Molecular Orbital 'lumo_extrapolated' -> Extrapolated Lowest Unoccupied Molecular Orbital :param filename: Filename of NREL data file :return: List of Dictionaries containing data ''' # parse line by line and create list of dictionaries with key headers try: f = open(filename) heads = list() records = list() line_index = 0 for line in f: line = line.rstrip('\n') # print line if line_index == 0: heads = line.split() else: elements = line.split() if len(heads) == len(elements): records.append(dict(zip(heads, elements))) else: print("Wrong number of records: " + str(len(elements)) + " instead of " + str(len(heads))) line_index += 1 f.close() return records except IOError: print("File " + filename + " does not exist!") def process_file(path, file): # file exists if path[-1] == "/": filename = path + file else: filename = path + "/" + file if os.path.isfile(filename): return run_extraction(filename) else: print("Exiting! File " + file + " does not exist.") exit(0) if __name__ == "__main__": main(sys.argv[1:])
<commit_before><commit_msg>Read line by line from the NREL polymer text file and create list of dictionaries using the header keys.<commit_after>#!/usr/bin/env python import sys import os import json def main(argv): path = len(argv) == 2 and os.path.dirname(argv[0]) or "/Users/mo/Documents/MDF" filename = len(argv) == 2 and os.path.basename(argv[0]) or "NREL_polymer_export.txt" output = len(argv) == 2 and argv[1] or "NREL_polymer_output.txt" # print filename records = process_file(path, filename) # data output gw = open(output, "w") try: for record in records: print(json.dumps(record)) gw.write(json.dumps(record) + "\n") except IOError: print("A list Records is empty!") gw.close() print("FINISH") def run_extraction(filename): ''' Parse the NREL file. Get the data defined by the header 'gap_extrapolated' -> Extrapolated Gap 'delta_homo' -> Highest Occupied Molecular Orbital difference 'sum_f_osc' -> Sum of OSC 'delta_optical_lumo' -> Optical Lowest Unoccupied Molecular Orbital difference 'delta_lumo' -> Lowest Unoccupied Molecular Orbital difference 'spectral_overlap' -> Spectral overlap 'functional/basis' -> Functional basis 'homo_extrapolated' -> Extrapolated Highest Occupied Molecular Orbital 'common_tag' -> Common tag 'optical_lumo_extrapolated' -> Extrapolated Optical Lowest Unoccupied Molecular Orbital 'lumo_extrapolated' -> Extrapolated Lowest Unoccupied Molecular Orbital :param filename: Filename of NREL data file :return: List of Dictionaries containing data ''' # parse line by line and create list of dictionaries with key headers try: f = open(filename) heads = list() records = list() line_index = 0 for line in f: line = line.rstrip('\n') # print line if line_index == 0: heads = line.split() else: elements = line.split() if len(heads) == len(elements): records.append(dict(zip(heads, elements))) else: print("Wrong number of records: " + str(len(elements)) + " instead of " + str(len(heads))) line_index += 1 f.close() return records except IOError: print("File " + filename + " does not exist!") def process_file(path, file): # file exists if path[-1] == "/": filename = path + file else: filename = path + "/" + file if os.path.isfile(filename): return run_extraction(filename) else: print("Exiting! File " + file + " does not exist.") exit(0) if __name__ == "__main__": main(sys.argv[1:])
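The converter above hinges on one idiom: the first whitespace-delimited line of the input supplies the column names, and every later row is zipped against those names into a dict, with rows of the wrong width reported and skipped. The same pairing on an in-memory sample (the column names are taken from the script's docstring; the polymer names and numbers are made up):

SAMPLE = """common_tag homo_extrapolated lumo_extrapolated gap_extrapolated
poly_a -5.1 -2.3 2.8
poly_b -5.4 -2.0 3.4
poly_c -5.2 truncated-row
"""

def parse_rows(text):
    lines = text.splitlines()
    heads = lines[0].split()
    records = []
    for line in lines[1:]:
        elements = line.split()
        if len(heads) == len(elements):  # same width check as run_extraction
            records.append(dict(zip(heads, elements)))
        else:
            print("Wrong number of records: %d instead of %d"
                  % (len(elements), len(heads)))
    return records

for record in parse_rows(SAMPLE):
    print(record["common_tag"], record["gap_extrapolated"])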
7946b8fe8a8cddeef675cc60b5ebb64a250ea2c4
smugmug_test.py
smugmug_test.py
import smugmug import unittest class MockNode(object): def __init__(self): self._reset_times = 0 def reset_cache(self): self._reset_times += 1 class TestChildCacheGarbageCollector(unittest.TestCase): def test_clears_child_cache(self): gc = smugmug.ChildCacheGarbageCollector(3) nodes = [MockNode(), MockNode(), MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[3]) gc.visited(nodes[4]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 1) self.assertEqual(nodes[2]._reset_times, 0) self.assertEqual(nodes[3]._reset_times, 0) self.assertEqual(nodes[4]._reset_times, 0) def test_repeated_visit_are_ignored(self): gc = smugmug.ChildCacheGarbageCollector(2) nodes = [MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[2]) gc.visited(nodes[2]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 0) self.assertEqual(nodes[2]._reset_times, 0)
Add a unit test for the ChildCacheGarbageCollector class.
Add a unit test for the ChildCacheGarbageCollector class.
Python
mit
graveljp/smugcli
Add a unit test for the ChildCacheGarbageCollector class.
import smugmug import unittest class MockNode(object): def __init__(self): self._reset_times = 0 def reset_cache(self): self._reset_times += 1 class TestChildCacheGarbageCollector(unittest.TestCase): def test_clears_child_cache(self): gc = smugmug.ChildCacheGarbageCollector(3) nodes = [MockNode(), MockNode(), MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[3]) gc.visited(nodes[4]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 1) self.assertEqual(nodes[2]._reset_times, 0) self.assertEqual(nodes[3]._reset_times, 0) self.assertEqual(nodes[4]._reset_times, 0) def test_repeated_visit_are_ignored(self): gc = smugmug.ChildCacheGarbageCollector(2) nodes = [MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[2]) gc.visited(nodes[2]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 0) self.assertEqual(nodes[2]._reset_times, 0)
<commit_before><commit_msg>Add a unit test for the ChildCacheGarbageCollector class.<commit_after>
import smugmug import unittest class MockNode(object): def __init__(self): self._reset_times = 0 def reset_cache(self): self._reset_times += 1 class TestChildCacheGarbageCollector(unittest.TestCase): def test_clears_child_cache(self): gc = smugmug.ChildCacheGarbageCollector(3) nodes = [MockNode(), MockNode(), MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[3]) gc.visited(nodes[4]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 1) self.assertEqual(nodes[2]._reset_times, 0) self.assertEqual(nodes[3]._reset_times, 0) self.assertEqual(nodes[4]._reset_times, 0) def test_repeated_visit_are_ignored(self): gc = smugmug.ChildCacheGarbageCollector(2) nodes = [MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[2]) gc.visited(nodes[2]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 0) self.assertEqual(nodes[2]._reset_times, 0)
Add a unit test for the ChildCacheGarbageCollector class.import smugmug import unittest class MockNode(object): def __init__(self): self._reset_times = 0 def reset_cache(self): self._reset_times += 1 class TestChildCacheGarbageCollector(unittest.TestCase): def test_clears_child_cache(self): gc = smugmug.ChildCacheGarbageCollector(3) nodes = [MockNode(), MockNode(), MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[3]) gc.visited(nodes[4]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 1) self.assertEqual(nodes[2]._reset_times, 0) self.assertEqual(nodes[3]._reset_times, 0) self.assertEqual(nodes[4]._reset_times, 0) def test_repeated_visit_are_ignored(self): gc = smugmug.ChildCacheGarbageCollector(2) nodes = [MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[2]) gc.visited(nodes[2]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 0) self.assertEqual(nodes[2]._reset_times, 0)
<commit_before><commit_msg>Add a unit test for the ChildCacheGarbageCollector class.<commit_after>import smugmug import unittest class MockNode(object): def __init__(self): self._reset_times = 0 def reset_cache(self): self._reset_times += 1 class TestChildCacheGarbageCollector(unittest.TestCase): def test_clears_child_cache(self): gc = smugmug.ChildCacheGarbageCollector(3) nodes = [MockNode(), MockNode(), MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[3]) gc.visited(nodes[4]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 1) self.assertEqual(nodes[2]._reset_times, 0) self.assertEqual(nodes[3]._reset_times, 0) self.assertEqual(nodes[4]._reset_times, 0) def test_repeated_visit_are_ignored(self): gc = smugmug.ChildCacheGarbageCollector(2) nodes = [MockNode(), MockNode(), MockNode()] gc.visited(nodes[0]) gc.visited(nodes[1]) gc.visited(nodes[2]) gc.visited(nodes[2]) gc.visited(nodes[2]) self.assertEqual(nodes[0]._reset_times, 1) self.assertEqual(nodes[1]._reset_times, 0) self.assertEqual(nodes[2]._reset_times, 0)
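The two tests above pin down ChildCacheGarbageCollector purely by behavior: with a budget of N nodes, visiting an (N+1)-th distinct node must call reset_cache() exactly once on the oldest one, and revisiting a node already in the window must evict nothing. smugcli's actual implementation is not part of this record; the following is one minimal sketch that satisfies both test cases, included only to make that contract concrete:

import collections

class ChildCacheGarbageCollector(object):
    """Keep at most max_nodes recently visited nodes, calling
    reset_cache() once on every node evicted from that window."""

    def __init__(self, max_nodes):
        self._max_nodes = max_nodes
        self._recent = collections.deque()

    def visited(self, node):
        if node in self._recent:
            # A repeated visit only refreshes the node's position;
            # nothing is evicted (second test case above).
            self._recent.remove(node)
            self._recent.append(node)
            return
        self._recent.append(node)
        while len(self._recent) > self._max_nodes:
            self._recent.popleft().reset_cache()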
27d9b013f05d3fb2ef3cfdfe23af506743fb12ed
tests/test_ast.py
tests/test_ast.py
import unittest import sure from compiler import parse, lex, error, ast, type class TestAST(unittest.TestCase): def test_eq(self): ast.Constructor("foo", []).should.be.equal(ast.Constructor("foo", [])) ast.Constructor("foo", []).shouldnt.be.equal(ast.Constructor("bar", []))
Add an example test checking that ast equality works
Add an example test checking that ast equality works
Python
mit
dionyziz/llama,Renelvon/llama,Renelvon/llama,dionyziz/llama
Add an example test checking that ast equality works
import unittest import sure from compiler import parse, lex, error, ast, type class TestAST(unittest.TestCase): def test_eq(self): ast.Constructor("foo", []).should.be.equal(ast.Constructor("foo", [])) ast.Constructor("foo", []).shouldnt.be.equal(ast.Constructor("bar", []))
<commit_before><commit_msg>Add an example test checking that ast equality works<commit_after>
import unittest import sure from compiler import parse, lex, error, ast, type class TestAST(unittest.TestCase): def test_eq(self): ast.Constructor("foo", []).should.be.equal(ast.Constructor("foo", [])) ast.Constructor("foo", []).shouldnt.be.equal(ast.Constructor("bar", []))
Add an example test checking that ast equality worksimport unittest import sure from compiler import parse, lex, error, ast, type class TestAST(unittest.TestCase): def test_eq(self): ast.Constructor("foo", []).should.be.equal(ast.Constructor("foo", [])) ast.Constructor("foo", []).shouldnt.be.equal(ast.Constructor("bar", []))
<commit_before><commit_msg>Add an example test checking that ast equality works<commit_after>import unittest import sure from compiler import parse, lex, error, ast, type class TestAST(unittest.TestCase): def test_eq(self): ast.Constructor("foo", []).should.be.equal(ast.Constructor("foo", [])) ast.Constructor("foo", []).shouldnt.be.equal(ast.Constructor("bar", []))
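For the assertions above to pass, ast.Constructor has to define value equality rather than inherit the default identity-based comparison. The llama compiler's ast module itself is not included in this record, so the node below is hypothetical; it shows the minimal shape of definition the test exercises:

class Constructor(object):
    # Hypothetical AST node: equal when name and argument list match.
    def __init__(self, name, args):
        self.name = name
        self.args = args

    def __eq__(self, other):
        return (isinstance(other, Constructor)
                and self.name == other.name
                and self.args == other.args)

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__, and the test's
        # shouldnt.be.equal assertion depends on consistent inequality.
        return not self.__eq__(other)

assert Constructor("foo", []) == Constructor("foo", [])
assert Constructor("foo", []) != Constructor("bar", [])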
b2bcbcedccffba4dce7b5c2d391de6444520f177
devil/devil/utils/signal_handler.py
devil/devil/utils/signal_handler.py
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import contextlib import signal @contextlib.contextmanager def AddSignalHandler(signalnum, additional_handler): """Adds a signal handler for the given signal in the wrapped context. This runs the new handler after any existing handler rather than replacing the existing handler. Args: signum: The signal for which a handler should be added. additional_handler: The handler to add. """ existing_handler = signal.getsignal(signalnum) def handler(signum, frame): if callable(existing_handler): existing_handler(signum, frame) additional_handler(signum, frame) signal.signal(signalnum, handler) yield signal.signal(signalnum, existing_handler)
Add signal handler context manager.
[devil] Add signal handler context manager. BUG=chromium:620723 Review-Url: https://codereview.chromium.org/2069133005
Python
bsd-3-clause
sahiljain/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,benschmaus/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm
[devil] Add signal handler context manager. BUG=chromium:620723 Review-Url: https://codereview.chromium.org/2069133005
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import contextlib import signal @contextlib.contextmanager def AddSignalHandler(signalnum, additional_handler): """Adds a signal handler for the given signal in the wrapped context. This runs the new handler after any existing handler rather than replacing the existing handler. Args: signum: The signal for which a handler should be added. additional_handler: The handler to add. """ existing_handler = signal.getsignal(signalnum) def handler(signum, frame): if callable(existing_handler): existing_handler(signum, frame) additional_handler(signum, frame) signal.signal(signalnum, handler) yield signal.signal(signalnum, existing_handler)
<commit_before><commit_msg>[devil] Add signal handler context manager. BUG=chromium:620723 Review-Url: https://codereview.chromium.org/2069133005<commit_after>
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import contextlib import signal @contextlib.contextmanager def AddSignalHandler(signalnum, additional_handler): """Adds a signal handler for the given signal in the wrapped context. This runs the new handler after any existing handler rather than replacing the existing handler. Args: signum: The signal for which a handler should be added. additional_handler: The handler to add. """ existing_handler = signal.getsignal(signalnum) def handler(signum, frame): if callable(existing_handler): existing_handler(signum, frame) additional_handler(signum, frame) signal.signal(signalnum, handler) yield signal.signal(signalnum, existing_handler)
[devil] Add signal handler context manager. BUG=chromium:620723 Review-Url: https://codereview.chromium.org/2069133005# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import contextlib import signal @contextlib.contextmanager def AddSignalHandler(signalnum, additional_handler): """Adds a signal handler for the given signal in the wrapped context. This runs the new handler after any existing handler rather than replacing the existing handler. Args: signum: The signal for which a handler should be added. additional_handler: The handler to add. """ existing_handler = signal.getsignal(signalnum) def handler(signum, frame): if callable(existing_handler): existing_handler(signum, frame) additional_handler(signum, frame) signal.signal(signalnum, handler) yield signal.signal(signalnum, existing_handler)
<commit_before><commit_msg>[devil] Add signal handler context manager. BUG=chromium:620723 Review-Url: https://codereview.chromium.org/2069133005<commit_after># Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import contextlib import signal @contextlib.contextmanager def AddSignalHandler(signalnum, additional_handler): """Adds a signal handler for the given signal in the wrapped context. This runs the new handler after any existing handler rather than replacing the existing handler. Args: signum: The signal for which a handler should be added. additional_handler: The handler to add. """ existing_handler = signal.getsignal(signalnum) def handler(signum, frame): if callable(existing_handler): existing_handler(signum, frame) additional_handler(signum, frame) signal.signal(signalnum, handler) yield signal.signal(signalnum, existing_handler)
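The context manager above chains rather than replaces: its wrapped handler first delegates to whatever handler was installed, then runs the additional one. Worth noting is that the yield is not wrapped in try/finally, so an exception raised inside the with block would skip restoring the original handler. Below is a standalone restatement of the same chaining pattern with that restore hardened, followed by a small POSIX-only demonstration; the handler bodies and the choice of SIGUSR1 are illustrative, not taken from devil:

import contextlib
import os
import signal

@contextlib.contextmanager
def add_signal_handler(signalnum, additional_handler):
    existing = signal.getsignal(signalnum)

    def chained(signum, frame):
        if callable(existing):
            existing(signum, frame)
        additional_handler(signum, frame)

    signal.signal(signalnum, chained)
    try:
        yield
    finally:
        signal.signal(signalnum, existing)  # restored even on exceptions

calls = []
signal.signal(signal.SIGUSR1, lambda signum, frame: calls.append("original"))
with add_signal_handler(signal.SIGUSR1,
                        lambda signum, frame: calls.append("additional")):
    os.kill(os.getpid(), signal.SIGUSR1)
print(calls)  # ['original', 'additional']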
b2b75a41f661b7945a3e1bca632aef13e8a9a70d
src/reviews/migrations/0010_auto_20160319_0758.py
src/reviews/migrations/0010_auto_20160319_0758.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.3 on 2016-03-19 07:58 from __future__ import unicode_literals from django.db import migrations def migrate_score(apps, schema_editor): Review = apps.get_model('reviews', 'Review') Review.objects.filter(score=2).update(vote='+1') Review.objects.filter(score=1).update(vote='+0') Review.objects.filter(score=0).update(vote='') Review.objects.filter(score=-1).update(vote='-0') Review.objects.filter(score=-2).update(vote='-1') class Migration(migrations.Migration): dependencies = [ ('reviews', '0009_auto_20160319_0753'), ] operations = [ migrations.RunPython(migrate_score), ]
Add data migrations for vote
Add data migrations for vote
Python
mit
pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016
Add data migrations for vote
# -*- coding: utf-8 -*- # Generated by Django 1.9.3 on 2016-03-19 07:58 from __future__ import unicode_literals from django.db import migrations def migrate_score(apps, schema_editor): Review = apps.get_model('reviews', 'Review') Review.objects.filter(score=2).update(vote='+1') Review.objects.filter(score=1).update(vote='+0') Review.objects.filter(score=0).update(vote='') Review.objects.filter(score=-1).update(vote='-0') Review.objects.filter(score=-2).update(vote='-1') class Migration(migrations.Migration): dependencies = [ ('reviews', '0009_auto_20160319_0753'), ] operations = [ migrations.RunPython(migrate_score), ]
<commit_before><commit_msg>Add data migrations for vote<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.9.3 on 2016-03-19 07:58 from __future__ import unicode_literals from django.db import migrations def migrate_score(apps, schema_editor): Review = apps.get_model('reviews', 'Review') Review.objects.filter(score=2).update(vote='+1') Review.objects.filter(score=1).update(vote='+0') Review.objects.filter(score=0).update(vote='') Review.objects.filter(score=-1).update(vote='-0') Review.objects.filter(score=-2).update(vote='-1') class Migration(migrations.Migration): dependencies = [ ('reviews', '0009_auto_20160319_0753'), ] operations = [ migrations.RunPython(migrate_score), ]
Add data migrations for vote# -*- coding: utf-8 -*- # Generated by Django 1.9.3 on 2016-03-19 07:58 from __future__ import unicode_literals from django.db import migrations def migrate_score(apps, schema_editor): Review = apps.get_model('reviews', 'Review') Review.objects.filter(score=2).update(vote='+1') Review.objects.filter(score=1).update(vote='+0') Review.objects.filter(score=0).update(vote='') Review.objects.filter(score=-1).update(vote='-0') Review.objects.filter(score=-2).update(vote='-1') class Migration(migrations.Migration): dependencies = [ ('reviews', '0009_auto_20160319_0753'), ] operations = [ migrations.RunPython(migrate_score), ]
<commit_before><commit_msg>Add data migrations for vote<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.9.3 on 2016-03-19 07:58 from __future__ import unicode_literals from django.db import migrations def migrate_score(apps, schema_editor): Review = apps.get_model('reviews', 'Review') Review.objects.filter(score=2).update(vote='+1') Review.objects.filter(score=1).update(vote='+0') Review.objects.filter(score=0).update(vote='') Review.objects.filter(score=-1).update(vote='-0') Review.objects.filter(score=-2).update(vote='-1') class Migration(migrations.Migration): dependencies = [ ('reviews', '0009_auto_20160319_0753'), ] operations = [ migrations.RunPython(migrate_score), ]
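The five update() calls in the migration above encode a fixed score-to-vote table: scores of 2 and 1 become '+1' and '+0', zero becomes the empty string, and -1 and -2 become '-0' and '-1'. An equivalent table-driven way to write the same forward function, shown only as an alternative formulation (the mapping itself is verbatim from the record):

SCORE_TO_VOTE = {2: '+1', 1: '+0', 0: '', -1: '-0', -2: '-1'}

def migrate_score(apps, schema_editor):
    Review = apps.get_model('reviews', 'Review')
    for score, vote in SCORE_TO_VOTE.items():
        Review.objects.filter(score=score).update(vote=vote)

Keeping the table in one dict also makes it straightforward to derive and register a reverse function through migrations.RunPython(migrate_score, reverse_code=...) if the migration ever needs to be unapplied.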
5df378c356d22cd1e668ba8e591ef61bb2da324b
plugins/CoD_MW3.py
plugins/CoD_MW3.py
import os from lib.base_plugin import BasePlugin from lib.paths import SteamGamesPath class CoDMW3Plugin(BasePlugin): Name = "Call of Duty: Modern Warfare 3" support_os = ["Windows"] def backup(self, _): _.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def restore(self, _): _.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def detect(self): if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty modern warfare 3')): return True return False
Call of Duty: Modern Warfare 3 plugin
Call of Duty: Modern Warfare 3 plugin
Python
mit
Pr0Ger/SGSB
Call of Duty: Modern Warfare 3 plugin
import os from lib.base_plugin import BasePlugin from lib.paths import SteamGamesPath class CoDMW3Plugin(BasePlugin): Name = "Call of Duty: Modern Warfare 3" support_os = ["Windows"] def backup(self, _): _.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def restore(self, _): _.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def detect(self): if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty modern warfare 3')): return True return False
<commit_before><commit_msg>Call of Duty: Modern Warfare 3 plugin<commit_after>
import os from lib.base_plugin import BasePlugin from lib.paths import SteamGamesPath class CoDMW3Plugin(BasePlugin): Name = "Call of Duty: Modern Warfare 3" support_os = ["Windows"] def backup(self, _): _.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def restore(self, _): _.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def detect(self): if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty modern warfare 3')): return True return False
Call of Duty: Modern Warfare 3 pluginimport os from lib.base_plugin import BasePlugin from lib.paths import SteamGamesPath class CoDMW3Plugin(BasePlugin): Name = "Call of Duty: Modern Warfare 3" support_os = ["Windows"] def backup(self, _): _.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def restore(self, _): _.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def detect(self): if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty modern warfare 3')): return True return False
<commit_before><commit_msg>Call of Duty: Modern Warfare 3 plugin<commit_after>import os from lib.base_plugin import BasePlugin from lib.paths import SteamGamesPath class CoDMW3Plugin(BasePlugin): Name = "Call of Duty: Modern Warfare 3" support_os = ["Windows"] def backup(self, _): _.add_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def restore(self, _): _.restore_folder('Profiles', os.path.join(SteamGamesPath, 'call of duty modern warfare 3'), 'players2') def detect(self): if os.path.isdir(os.path.join(SteamGamesPath, 'call of duty modern warfare 3')): return True return False
c365bbd6cb1dad50a2084356e6f1ba3fa4575d00
fpsd/test/test_evaluation.py
fpsd/test/test_evaluation.py
import unittest from evaluation import precision_recall_at_x_proportion class EvaluationTest(unittest.TestCase): """Most evaluation methods are from scikit-learn and are thus tested however some of our preprocessing is custom and should be tested""" def test_precision_recall_f1_perfect(self): test_labels = [1, 1, 0, 0] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 1) self.assertEqual(precision, 1) self.assertEqual(f1, 1) def test_precision_recall_f1_horrible(self): test_labels = [0, 0, 1, 1] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0) self.assertEqual(precision, 0) self.assertEqual(f1, 0) def test_precision_recall_f1_realistic(self): test_labels = [1, 0, 1, 0] test_predictions = [0.80, 0.20, 0.20, 0.80] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0.5) self.assertEqual(precision, 0.5) self.assertEqual(f1, 0.5)
Add tests for custom evaluation code
Add tests for custom evaluation code
Python
agpl-3.0
freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop,freedomofpress/fingerprint-securedrop,freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop
Add tests for custom evaluation code
import unittest from evaluation import precision_recall_at_x_proportion class EvaluationTest(unittest.TestCase): """Most evaluation methods are from scikit-learn and are thus tested however some of our preprocessing is custom and should be tested""" def test_precision_recall_f1_perfect(self): test_labels = [1, 1, 0, 0] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 1) self.assertEqual(precision, 1) self.assertEqual(f1, 1) def test_precision_recall_f1_horrible(self): test_labels = [0, 0, 1, 1] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0) self.assertEqual(precision, 0) self.assertEqual(f1, 0) def test_precision_recall_f1_realistic(self): test_labels = [1, 0, 1, 0] test_predictions = [0.80, 0.20, 0.20, 0.80] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0.5) self.assertEqual(precision, 0.5) self.assertEqual(f1, 0.5)
<commit_before><commit_msg>Add tests for custom evaluation code<commit_after>
import unittest from evaluation import precision_recall_at_x_proportion class EvaluationTest(unittest.TestCase): """Most evaluation methods are from scikit-learn and are thus tested however some of our preprocessing is custom and should be tested""" def test_precision_recall_f1_perfect(self): test_labels = [1, 1, 0, 0] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 1) self.assertEqual(precision, 1) self.assertEqual(f1, 1) def test_precision_recall_f1_horrible(self): test_labels = [0, 0, 1, 1] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0) self.assertEqual(precision, 0) self.assertEqual(f1, 0) def test_precision_recall_f1_realistic(self): test_labels = [1, 0, 1, 0] test_predictions = [0.80, 0.20, 0.20, 0.80] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0.5) self.assertEqual(precision, 0.5) self.assertEqual(f1, 0.5)
Add tests for custom evaluation codeimport unittest from evaluation import precision_recall_at_x_proportion class EvaluationTest(unittest.TestCase): """Most evaluation methods are from scikit-learn and are thus tested however some of our preprocessing is custom and should be tested""" def test_precision_recall_f1_perfect(self): test_labels = [1, 1, 0, 0] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 1) self.assertEqual(precision, 1) self.assertEqual(f1, 1) def test_precision_recall_f1_horrible(self): test_labels = [0, 0, 1, 1] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0) self.assertEqual(precision, 0) self.assertEqual(f1, 0) def test_precision_recall_f1_realistic(self): test_labels = [1, 0, 1, 0] test_predictions = [0.80, 0.20, 0.20, 0.80] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0.5) self.assertEqual(precision, 0.5) self.assertEqual(f1, 0.5)
<commit_before><commit_msg>Add tests for custom evaluation code<commit_after>import unittest from evaluation import precision_recall_at_x_proportion class EvaluationTest(unittest.TestCase): """Most evaluation methods are from scikit-learn and are thus tested however some of our preprocessing is custom and should be tested""" def test_precision_recall_f1_perfect(self): test_labels = [1, 1, 0, 0] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 1) self.assertEqual(precision, 1) self.assertEqual(f1, 1) def test_precision_recall_f1_horrible(self): test_labels = [0, 0, 1, 1] test_predictions = [0.99, 0.99, 0.01, 0.01] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0) self.assertEqual(precision, 0) self.assertEqual(f1, 0) def test_precision_recall_f1_realistic(self): test_labels = [1, 0, 1, 0] test_predictions = [0.80, 0.20, 0.20, 0.80] precision, recall, f1 = precision_recall_at_x_proportion(test_labels, test_predictions, x_proportion=0.50) self.assertEqual(recall, 0.5) self.assertEqual(precision, 0.5) self.assertEqual(f1, 0.5)
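The three test cases above fix the semantics of precision_recall_at_x_proportion without showing its body: rank the examples by predicted score, treat the top x_proportion of them as positive predictions, and score that cut. The repository's real implementation is not part of this record, so the sketch below is an assumption; it is one implementation consistent with all three cases:

def precision_recall_at_x_proportion(labels, predictions, x_proportion):
    # Flag the top x_proportion of examples, ranked by predicted score.
    ranked = sorted(zip(predictions, labels), reverse=True)
    cutoff = int(round(len(ranked) * x_proportion))
    flagged = [label for _, label in ranked[:cutoff]]
    true_positives = sum(flagged)
    precision = true_positives / float(cutoff) if cutoff else 0.0
    positives = sum(labels)
    recall = true_positives / float(positives) if positives else 0.0
    denominator = precision + recall
    f1 = 2 * precision * recall / denominator if denominator else 0.0
    return precision, recall, f1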
4a8bba2e2354ec3a73c01c8183d100254e1314df
utils/resolve-crashes.py
utils/resolve-crashes.py
#!/usr/bin/python # A small utility to take the output of a Swift validation test run # where some compiler crashers have been fixed, and move them into the # "fixed" testsuite, removing the "--crash" in the process. import re import sys import os def execute_cmd(cmd): print(cmd) os.system(cmd) # The regular expression we use to match compiler-crasher lines. regex = re.compile('.*Swift :: compiler_crashers/(.*\.swift).*') # Take the output of lit as standard input. for line in sys.stdin: match = regex.match(line) if match: filename=match.group(1) # Move the test over to the fixed suite. from_filename = 'validation-test/compiler_crashers/%s' % (filename) to_filename = 'validation-test/compiler_crashers_fixed/%s' % (filename) git_mv_cmd = 'git mv %s %s' % (from_filename, to_filename) execute_cmd(git_mv_cmd) # Replace "not --crash" with "not", and remove XFAIL lines. sed_replace_not_cmd = 'sed -e "s/not --crash/not/" -i "" %s' % (to_filename) execute_cmd(sed_replace_not_cmd) # Remove "// XFAIL: whatever" lines. sed_remove_xfail_cmd = 'sed -e "s/^\\/\\/.*XFAIL.*$//g" -i "" %s' % (to_filename) execute_cmd(sed_remove_xfail_cmd) # "git add" the result. git_add_cmd = 'git add %s' % (to_filename) execute_cmd(git_add_cmd)
Add a little utility to help update resolved crashers.
Add a little utility to help update resolved crashers. Swift SVN r28923
Python
apache-2.0
shahmishal/swift,karwa/swift,devincoughlin/swift,xwu/swift,jopamer/swift,benlangmuir/swift,mightydeveloper/swift,stephentyrone/swift,dduan/swift,emilstahl/swift,dduan/swift,tkremenek/swift,shajrawi/swift,amraboelela/swift,OscarSwanros/swift,gottesmm/swift,Jnosh/swift,slavapestov/swift,slavapestov/swift,natecook1000/swift,amraboelela/swift,adrfer/swift,kstaring/swift,parkera/swift,harlanhaskins/swift,manavgabhawala/swift,xedin/swift,SwiftAndroid/swift,return/swift,danielmartin/swift,benlangmuir/swift,hughbe/swift,uasys/swift,rudkx/swift,cbrentharris/swift,danielmartin/swift,mightydeveloper/swift,johnno1962d/swift,benlangmuir/swift,kentya6/swift,bitjammer/swift,lorentey/swift,milseman/swift,zisko/swift,devincoughlin/swift,Ivacker/swift,return/swift,shahmishal/swift,natecook1000/swift,gottesmm/swift,aschwaighofer/swift,airspeedswift/swift,hughbe/swift,KrishMunot/swift,ben-ng/swift,Jnosh/swift,LeoShimonaka/swift,adrfer/swift,jmgc/swift,xwu/swift,nathawes/swift,swiftix/swift.old,JGiola/swift,gregomni/swift,lorentey/swift,milseman/swift,cbrentharris/swift,gottesmm/swift,natecook1000/swift,therealbnut/swift,gregomni/swift,JaSpa/swift,kusl/swift,codestergit/swift,aschwaighofer/swift,kperryua/swift,gribozavr/swift,sschiau/swift,modocache/swift,austinzheng/swift,felix91gr/swift,mightydeveloper/swift,hughbe/swift,KrishMunot/swift,ben-ng/swift,Jnosh/swift,LeoShimonaka/swift,allevato/swift,tkremenek/swift,johnno1962d/swift,parkera/swift,manavgabhawala/swift,shahmishal/swift,JaSpa/swift,kusl/swift,sschiau/swift,manavgabhawala/swift,djwbrown/swift,kperryua/swift,gribozavr/swift,KrishMunot/swift,jmgc/swift,hooman/swift,parkera/swift,arvedviehweger/swift,JGiola/swift,manavgabhawala/swift,return/swift,emilstahl/swift,jckarter/swift,therealbnut/swift,sschiau/swift,brentdax/swift,slavapestov/swift,IngmarStein/swift,swiftix/swift.old,jmgc/swift,practicalswift/swift,slavapestov/swift,kstaring/swift,cbrentharris/swift,gmilos/swift,frootloops/swift,LeoShimonaka/swift,bitjammer/swift,OscarSwanros/swift,alblue/swift,swiftix/swift.old,jckarter/swift,aschwaighofer/swift,xedin/swift,atrick/swift,parkera/swift,jopamer/swift,calebd/swift,calebd/swift,deyton/swift,glessard/swift,brentdax/swift,kstaring/swift,frootloops/swift,gmilos/swift,kusl/swift,sschiau/swift,manavgabhawala/swift,djwbrown/swift,kperryua/swift,apple/swift,nathawes/swift,kusl/swift,shajrawi/swift,allevato/swift,benlangmuir/swift,IngmarStein/swift,bitjammer/swift,airspeedswift/swift,MukeshKumarS/Swift,milseman/swift,bitjammer/swift,JGiola/swift,dduan/swift,airspeedswift/swift,allevato/swift,IngmarStein/swift,russbishop/swift,calebd/swift,jckarter/swift,alblue/swift,tardieu/swift,swiftix/swift,manavgabhawala/swift,ahoppen/swift,codestergit/swift,calebd/swift,kentya6/swift,arvedviehweger/swift,Ivacker/swift,Ivacker/swift,danielmartin/swift,JGiola/swift,tardieu/swift,uasys/swift,austinzheng/swift,jmgc/swift,kusl/swift,atrick/swift,sdulal/swift,hughbe/swift,kentya6/swift,danielmartin/swift,ben-ng/swift,kperryua/swift,sdulal/swift,gregomni/swift,brentdax/swift,therealbnut/swift,parkera/swift,airspeedswift/swift,shajrawi/swift,huonw/swift,tjw/swift,airspeedswift/swift,mightydeveloper/swift,bitjammer/swift,practicalswift/swift,devincoughlin/swift,kentya6/swift,SwiftAndroid/swift,djwbrown/swift,parkera/swift,CodaFi/swift,xedin/swift,jopamer/swift,shajrawi/swift,kstaring/swift,jmgc/swift,MukeshKumarS/Swift,austinzheng/swift,jopamer/swift,jtbandes/swift,cbrentharris/swift,glessard/swift,cbrentharris/swift,swiftix/swift,IngmarStein/swift,alblue/swift,CodaFi/swift,gmilos/swift,sschiau/swift,tjw/swift,sdulal/swift,harlanhaskins/swift,austinzheng/swift,roambotics/swift,milseman/swift,shahmishal/swift,glessard/swift,nathawes/swift,lorentey/swift,apple/swift,kusl/swift,gmilos/swift,OscarSwanros/swift,MukeshKumarS/Swift,practicalswift/swift,amraboelela/swift,alblue/swift,emilstahl/swift,karwa/swift,devincoughlin/swift,brentdax/swift,adrfer/swift,tinysun212/swift-windows,gregomni/swift,roambotics/swift,apple/swift,frootloops/swift,zisko/swift,karwa/swift,tardieu/swift,Ivacker/swift,deyton/swift,khizkhiz/swift,tkremenek/swift,felix91gr/swift,harlanhaskins/swift,Jnosh/swift,jckarter/swift,glessard/swift,djwbrown/swift,devincoughlin/swift,harlanhaskins/swift,cbrentharris/swift,aschwaighofer/swift,kentya6/swift,khizkhiz/swift,apple/swift,adrfer/swift,modocache/swift,tinysun212/swift-windows,kentya6/swift,shahmishal/swift,swiftix/swift,johnno1962d/swift,felix91gr/swift,kusl/swift,zisko/swift,russbishop/swift,practicalswift/swift,lorentey/swift,return/swift,sdulal/swift,swiftix/swift,hooman/swift,stephentyrone/swift,airspeedswift/swift,tardieu/swift,arvedviehweger/swift,karwa/swift,Jnosh/swift,rudkx/swift,khizkhiz/swift,felix91gr/swift,austinzheng/swift,jtbandes/swift,tinysun212/swift-windows,rudkx/swift,KrishMunot/swift,lorentey/swift,JaSpa/swift,jmgc/swift,natecook1000/swift,jopamer/swift,gmilos/swift,arvedviehweger/swift,LeoShimonaka/swift,SwiftAndroid/swift,OscarSwanros/swift,dreamsxin/swift,karwa/swift,ben-ng/swift,sdulal/swift,swiftix/swift.old,sdulal/swift,cbrentharris/swift,roambotics/swift,amraboelela/swift,sschiau/swift,JaSpa/swift,lorentey/swift,ken0nek/swift,huonw/swift,IngmarStein/swift,natecook1000/swift,ken0nek/swift,huonw/swift,tjw/swift,JaSpa/swift,ahoppen/swift,brentdax/swift,deyton/swift,Jnosh/swift,MukeshKumarS/Swift,dduan/swift,gribozavr/swift,kstaring/swift,brentdax/swift,jckarter/swift,devincoughlin/swift,ken0nek/swift,zisko/swift,sdulal/swift,djwbrown/swift,frootloops/swift,tkremenek/swift,gribozavr/swift,tinysun212/swift-windows,codestergit/swift,danielmartin/swift,hooman/swift,therealbnut/swift,johnno1962d/swift,IngmarStein/swift,khizkhiz/swift,tardieu/swift,natecook1000/swift,codestergit/swift,rudkx/swift,kperryua/swift,russbishop/swift,Jnosh/swift,Ivacker/swift,swiftix/swift,SwiftAndroid/swift,kstaring/swift,swiftix/swift.old,nathawes/swift,tkremenek/swift,ben-ng/swift,parkera/swift,harlanhaskins/swift,jtbandes/swift,danielmartin/swift,lorentey/swift,frootloops/swift,ahoppen/swift,zisko/swift,alblue/swift,nathawes/swift,sschiau/swift,deyton/swift,stephentyrone/swift,ahoppen/swift,return/swift,swiftix/swift,airspeedswift/swift,jtbandes/swift,adrfer/swift,kperryua/swift,OscarSwanros/swift,stephentyrone/swift,huonw/swift,johnno1962d/swift,ben-ng/swift,hughbe/swift,emilstahl/swift,hooman/swift,frootloops/swift,karwa/swift,harlanhaskins/swift,dreamsxin/swift,kperryua/swift,ken0nek/swift,arvedviehweger/swift,milseman/swift,mightydeveloper/swift,zisko/swift,swiftix/swift,MukeshKumarS/Swift,kusl/swift,dduan/swift,ken0nek/swift,therealbnut/swift,modocache/swift,uasys/swift,tardieu/swift,shajrawi/swift,danielmartin/swift,LeoShimonaka/swift,gottesmm/swift,allevato/swift,CodaFi/swift,SwiftAndroid/swift,austinzheng/swift,adrfer/swift,dduan/swift,gribozavr/swift,xwu/swift,alblue/swift,khizkhiz/swift,rudkx/swift,shahmishal/swift,hooman/swift,kusl/swift,slavapestov/swift,aschwaighofer/swift,mightydeveloper/swift,jckarter/swift,practicalswift/swift,swiftix/swift.old,OscarSwanros/swift,manavgabhawala/swift,shajrawi/swift,uasys/swift,LeoShimonaka/swift,felix91gr/swift,SwiftAndroid/swift,atrick/swift,codestergit/swift,tardieu/swift,frootloops/swift,amraboelela/swift,hughbe/swift,sdulal/swift,codestergit/swift,modocache/swift,stephentyrone/swift,tinysun212/swift-windows,dduan/swift,brentdax/swift,aschwaighofer/swift,adrfer/swift,xedin/swift,emilstahl/swift,amraboelela/swift,jtbandes/swift,kentya6/swift,gregomni/swift,tjw/swift,devincoughlin/swift,khizkhiz/swift,milseman/swift,tkremenek/swift,atrick/swift,djwbrown/swift,gmilos/swift,gregomni/swift,JGiola/swift,modocache/swift,amraboelela/swift,shahmishal/swift,JaSpa/swift,stephentyrone/swift,parkera/swift,rudkx/swift,emilstahl/swift,jopamer/swift,gottesmm/swift,karwa/swift,shajrawi/swift,calebd/swift,jtbandes/swift,sschiau/swift,uasys/swift,CodaFi/swift,xwu/swift,johnno1962d/swift,gribozavr/swift,modocache/swift,return/swift,milseman/swift,uasys/swift,codestergit/swift,gottesmm/swift,KrishMunot/swift,devincoughlin/swift,nathawes/swift,KrishMunot/swift,slavapestov/swift,SwiftAndroid/swift,austinzheng/swift,arvedviehweger/swift,jtbandes/swift,IngmarStein/swift,harlanhaskins/swift,ken0nek/swift,allevato/swift,natecook1000/swift,shahmishal/swift,russbishop/swift,russbishop/swift,swiftix/swift.old,modocache/swift,manavgabhawala/swift,LeoShimonaka/swift,KrishMunot/swift,emilstahl/swift,huonw/swift,felix91gr/swift,lorentey/swift,gmilos/swift,CodaFi/swift,karwa/swift,practicalswift/swift,atrick/swift,russbishop/swift,xwu/swift,Ivacker/swift,glessard/swift,deyton/swift,shajrawi/swift,jckarter/swift,adrfer/swift,LeoShimonaka/swift,xwu/swift,JGiola/swift,deyton/swift
Add a little utility to help update resolved crashers. Swift SVN r28923
#!/usr/bin/python
# A small utility to take the output of a Swift validation test run
# where some compiler crashers have been fixed, and move them into the
# "fixed" testsuite, removing the "--crash" in the process.

import re
import sys
import os

def execute_cmd(cmd):
    print(cmd)
    os.system(cmd)

# The regular expression we use to match compiler-crasher lines.
regex = re.compile('.*Swift :: compiler_crashers/(.*\.swift).*')

# Take the output of lit as standard input.
for line in sys.stdin:
    match = regex.match(line)
    if match:
        filename=match.group(1)

        # Move the test over to the fixed suite.
        from_filename = 'validation-test/compiler_crashers/%s' % (filename)
        to_filename = 'validation-test/compiler_crashers_fixed/%s' % (filename)
        git_mv_cmd = 'git mv %s %s' % (from_filename, to_filename)
        execute_cmd(git_mv_cmd)

        # Replace "not --crash" with "not", and remove XFAIL lines.
        sed_replace_not_cmd = 'sed -e "s/not --crash/not/" -i "" %s' % (to_filename)
        execute_cmd(sed_replace_not_cmd)

        # Remove "// XFAIL: whatever" lines.
        sed_remove_xfail_cmd = 'sed -e "s/^\\/\\/.*XFAIL.*$//g" -i "" %s' % (to_filename)
        execute_cmd(sed_remove_xfail_cmd)

        # "git add" the result.
        git_add_cmd = 'git add %s' % (to_filename)
        execute_cmd(git_add_cmd)
<commit_before><commit_msg>Add a little utility to help update resolved crashers. Swift SVN r28923<commit_after>
#!/usr/bin/python # A small utility to take the output of a Swift validation test run # where some compiler crashers have been fixed, and move them into the # "fixed" testsuite, removing the "--crash" in the process. import re import sys import os def execute_cmd(cmd): print(cmd) os.system(cmd) # The regular expression we use to match compiler-crasher lines. regex = re.compile('.*Swift :: compiler_crashers/(.*\.swift).*') # Take the output of lit as standard input. for line in sys.stdin: match = regex.match(line) if match: filename=match.group(1) # Move the test over to the fixed suite. from_filename = 'validation-test/compiler_crashers/%s' % (filename) to_filename = 'validation-test/compiler_crashers_fixed/%s' % (filename) git_mv_cmd = 'git mv %s %s' % (from_filename, to_filename) execute_cmd(git_mv_cmd) # Replace "not --crash" with "not", and remove XFAIL lines. sed_replace_not_cmd = 'sed -e "s/not --crash/not/" -i "" %s' % (to_filename) execute_cmd(sed_replace_not_cmd) # Remove "// XFAIL: whatever" lines. sed_remove_xfail_cmd = 'sed -e "s/^\\/\\/.*XFAIL.*$//g" -i "" %s' % (to_filename) execute_cmd(sed_remove_xfail_cmd) # "git add" the result. git_add_cmd = 'git add %s' % (to_filename) execute_cmd(git_add_cmd)
Add a little utility to help update resolved crashers. Swift SVN r28923#!/usr/bin/python # A small utility to take the output of a Swift validation test run # where some compiler crashers have been fixed, and move them into the # "fixed" testsuite, removing the "--crash" in the process. import re import sys import os def execute_cmd(cmd): print(cmd) os.system(cmd) # The regular expression we use to match compiler-crasher lines. regex = re.compile('.*Swift :: compiler_crashers/(.*\.swift).*') # Take the output of lit as standard input. for line in sys.stdin: match = regex.match(line) if match: filename=match.group(1) # Move the test over to the fixed suite. from_filename = 'validation-test/compiler_crashers/%s' % (filename) to_filename = 'validation-test/compiler_crashers_fixed/%s' % (filename) git_mv_cmd = 'git mv %s %s' % (from_filename, to_filename) execute_cmd(git_mv_cmd) # Replace "not --crash" with "not", and remove XFAIL lines. sed_replace_not_cmd = 'sed -e "s/not --crash/not/" -i "" %s' % (to_filename) execute_cmd(sed_replace_not_cmd) # Remove "// XFAIL: whatever" lines. sed_remove_xfail_cmd = 'sed -e "s/^\\/\\/.*XFAIL.*$//g" -i "" %s' % (to_filename) execute_cmd(sed_remove_xfail_cmd) # "git add" the result. git_add_cmd = 'git add %s' % (to_filename) execute_cmd(git_add_cmd)
<commit_before><commit_msg>Add a little utility to help update resolved crashers. Swift SVN r28923<commit_after>#!/usr/bin/python # A small utility to take the output of a Swift validation test run # where some compiler crashers have been fixed, and move them into the # "fixed" testsuite, removing the "--crash" in the process. import re import sys import os def execute_cmd(cmd): print(cmd) os.system(cmd) # The regular expression we use to match compiler-crasher lines. regex = re.compile('.*Swift :: compiler_crashers/(.*\.swift).*') # Take the output of lit as standard input. for line in sys.stdin: match = regex.match(line) if match: filename=match.group(1) # Move the test over to the fixed suite. from_filename = 'validation-test/compiler_crashers/%s' % (filename) to_filename = 'validation-test/compiler_crashers_fixed/%s' % (filename) git_mv_cmd = 'git mv %s %s' % (from_filename, to_filename) execute_cmd(git_mv_cmd) # Replace "not --crash" with "not", and remove XFAIL lines. sed_replace_not_cmd = 'sed -e "s/not --crash/not/" -i "" %s' % (to_filename) execute_cmd(sed_replace_not_cmd) # Remove "// XFAIL: whatever" lines. sed_remove_xfail_cmd = 'sed -e "s/^\\/\\/.*XFAIL.*$//g" -i "" %s' % (to_filename) execute_cmd(sed_remove_xfail_cmd) # "git add" the result. git_add_cmd = 'git add %s' % (to_filename) execute_cmd(git_add_cmd)
c66eba027465779938f6ca211595982526d05cac
CodeFights/countSumOfTwoRepresentations2.py
CodeFights/countSumOfTwoRepresentations2.py
#!/usr/local/bin/python
# Code Fights Count of Two Representations 2 Problem


def countSumOfTwoRepresentations2(n, l, r):
    return sum(1 for x in range(l, r + 1) if x <= n - x <= r)


def main():
    tests = [
        [6, 2, 4, 2],
        [6, 3, 3, 1],
        [10, 9, 11, 0],
        [24, 8, 16, 5],
        [24, 12, 12, 1],
        [93, 24, 58, 12]
    ]
    for t in tests:
        res = countSumOfTwoRepresentations2(t[0], t[1], t[2])
        ans = t[3]
        if ans == res:
            print("PASSED: countSumOfTwoRepresentations2({}, {}, {}) returned"
                  " {}"
                  .format(t[0], t[1], t[2], res))
        else:
            print(("FAILED: countSumOfTwoRepresentations2({}, {}, {}) returned"
                   " {}, answer: {}").format(t[0], t[1], t[2], res, ans))


if __name__ == '__main__':
    main()
Solve Code Fights sum of two representations 2 problem
Solve Code Fights sum of two representations 2 problem
Python
mit
HKuz/Test_Code
Solve Code Fights sum of two representations 2 problem
#!/usr/local/bin/python # Code Fights Count of Two Representations 2 Problem def countSumOfTwoRepresentations2(n, l, r): return sum(1 for x in range(l, r + 1) if x <= n - x <= r) def main(): tests = [ [6, 2, 4, 2], [6, 3, 3, 1], [10, 9, 11, 0], [24, 8, 16, 5], [24, 12, 12, 1], [93, 24, 58, 12] ] for t in tests: res = countSumOfTwoRepresentations2(t[0], t[1], t[2]) ans = t[3] if ans == res: print("PASSED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}" .format(t[0], t[1], t[2], res)) else: print(("FAILED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}, answer: {}").format(t[0], t[1], t[2], res, ans)) if __name__ == '__main__': main()
<commit_before><commit_msg>Solve Code Fights sum of two representations 2 problem<commit_after>
#!/usr/local/bin/python # Code Fights Count of Two Representations 2 Problem def countSumOfTwoRepresentations2(n, l, r): return sum(1 for x in range(l, r + 1) if x <= n - x <= r) def main(): tests = [ [6, 2, 4, 2], [6, 3, 3, 1], [10, 9, 11, 0], [24, 8, 16, 5], [24, 12, 12, 1], [93, 24, 58, 12] ] for t in tests: res = countSumOfTwoRepresentations2(t[0], t[1], t[2]) ans = t[3] if ans == res: print("PASSED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}" .format(t[0], t[1], t[2], res)) else: print(("FAILED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}, answer: {}").format(t[0], t[1], t[2], res, ans)) if __name__ == '__main__': main()
Solve Code Fights sum of two representations 2 problem#!/usr/local/bin/python # Code Fights Count of Two Representations 2 Problem def countSumOfTwoRepresentations2(n, l, r): return sum(1 for x in range(l, r + 1) if x <= n - x <= r) def main(): tests = [ [6, 2, 4, 2], [6, 3, 3, 1], [10, 9, 11, 0], [24, 8, 16, 5], [24, 12, 12, 1], [93, 24, 58, 12] ] for t in tests: res = countSumOfTwoRepresentations2(t[0], t[1], t[2]) ans = t[3] if ans == res: print("PASSED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}" .format(t[0], t[1], t[2], res)) else: print(("FAILED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}, answer: {}").format(t[0], t[1], t[2], res, ans)) if __name__ == '__main__': main()
<commit_before><commit_msg>Solve Code Fights sum of two representations 2 problem<commit_after>#!/usr/local/bin/python # Code Fights Count of Two Representations 2 Problem def countSumOfTwoRepresentations2(n, l, r): return sum(1 for x in range(l, r + 1) if x <= n - x <= r) def main(): tests = [ [6, 2, 4, 2], [6, 3, 3, 1], [10, 9, 11, 0], [24, 8, 16, 5], [24, 12, 12, 1], [93, 24, 58, 12] ] for t in tests: res = countSumOfTwoRepresentations2(t[0], t[1], t[2]) ans = t[3] if ans == res: print("PASSED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}" .format(t[0], t[1], t[2], res)) else: print(("FAILED: countSumOfTwoRepresentations2({}, {}, {}) returned" " {}, answer: {}").format(t[0], t[1], t[2], res, ans)) if __name__ == '__main__': main()
14ccbca8b2cb7318bd1dd11970cd359b36ce9b8e
corehq/apps/userreports/reports/builder/columns.py
corehq/apps/userreports/reports/builder/columns.py
class ColumnOption(object):
    """
    This class represents column options in the report builder.
    """
    aggregation_map = {
        'simple': 'simple',
        'Count per Choice': 'expand',
        'Sum': 'sum',
        'Average': 'avg'
    }

    def __init__(self, id, display, indicator_id, is_non_numeric):
        self.id = id  # The string representing this choice in the configure report form.
        self.display = display
        self.indicator_id = indicator_id
        self.is_non_numeric = is_non_numeric

    def to_column_dict(self, index, display_text, aggregation):
        return {
            "format": "default",
            "aggregation": self.aggregation_map[aggregation],
            "field": self.indicator_id,
            "column_id": "column_{}".format(index),
            "type": "field",
            "display": display_text,
            "transform": {'type': 'custom', 'custom_type': 'short_decimal_display'},
        }


class QuestionColumnOption(ColumnOption):
    def __init__(self, id, display, indicator_id, is_non_numeric, question_source):
        super(QuestionColumnOption, self).__init__(id, display, indicator_id, is_non_numeric)
        self.question_source = question_source


class CountColumn(ColumnOption):
    def __init__(self, display):
        super(CountColumn, self).__init__('computed/count', display, "count", False)

    def to_column_dict(self, index, display_text, aggregation):
        return {
            'type': 'field',
            'format': 'default',
            'aggregation': 'sum',
            'field': 'count',
            'column_id': 'column_{}'.format(index),
            'display': display_text,
        }
Add classes representing column options
Add classes representing column options We will use these instances of these classes, instead of the DataSourceProperty named tuples to represent the available columns in the report.
Python
bsd-3-clause
qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
Add classes representing column options We will use these instances of these classes, instead of the DataSourceProperty named tuples to represent the available columns in the report.
class ColumnOption(object): """ This class represents column options in the report builder. """ aggregation_map = { 'simple': 'simple', 'Count per Choice': 'expand', 'Sum': 'sum', 'Average': 'avg' } def __init__(self, id, display, indicator_id, is_non_numeric): self.id = id # The string representing this choice in the configure report form. self.display = display self.indicator_id = indicator_id self.is_non_numeric = is_non_numeric def to_column_dict(self, index, display_text, aggregation): return { "format": "default", "aggregation": self.aggregation_map[aggregation], "field": self.indicator_id, "column_id": "column_{}".format(index), "type": "field", "display": display_text, "transform": {'type': 'custom', 'custom_type': 'short_decimal_display'}, } class QuestionColumnOption(ColumnOption): def __init__(self, id, display, indicator_id, is_non_numeric, question_source): super(QuestionColumnOption, self).__init__(id, display, indicator_id, is_non_numeric) self.question_source = question_source class CountColumn(ColumnOption): def __init__(self, display): super(CountColumn, self).__init__('computed/count', display, "count", False) def to_column_dict(self, index, display_text, aggregation): return { 'type': 'field', 'format': 'default', 'aggregation': 'sum', 'field': 'count', 'column_id': 'column_{}'.format(index), 'display': display_text, }
<commit_before><commit_msg>Add classes representing column options We will use these instances of these classes, instead of the DataSourceProperty named tuples to represent the available columns in the report.<commit_after>
class ColumnOption(object): """ This class represents column options in the report builder. """ aggregation_map = { 'simple': 'simple', 'Count per Choice': 'expand', 'Sum': 'sum', 'Average': 'avg' } def __init__(self, id, display, indicator_id, is_non_numeric): self.id = id # The string representing this choice in the configure report form. self.display = display self.indicator_id = indicator_id self.is_non_numeric = is_non_numeric def to_column_dict(self, index, display_text, aggregation): return { "format": "default", "aggregation": self.aggregation_map[aggregation], "field": self.indicator_id, "column_id": "column_{}".format(index), "type": "field", "display": display_text, "transform": {'type': 'custom', 'custom_type': 'short_decimal_display'}, } class QuestionColumnOption(ColumnOption): def __init__(self, id, display, indicator_id, is_non_numeric, question_source): super(QuestionColumnOption, self).__init__(id, display, indicator_id, is_non_numeric) self.question_source = question_source class CountColumn(ColumnOption): def __init__(self, display): super(CountColumn, self).__init__('computed/count', display, "count", False) def to_column_dict(self, index, display_text, aggregation): return { 'type': 'field', 'format': 'default', 'aggregation': 'sum', 'field': 'count', 'column_id': 'column_{}'.format(index), 'display': display_text, }
Add classes representing column options We will use these instances of these classes, instead of the DataSourceProperty named tuples to represent the available columns in the report.class ColumnOption(object): """ This class represents column options in the report builder. """ aggregation_map = { 'simple': 'simple', 'Count per Choice': 'expand', 'Sum': 'sum', 'Average': 'avg' } def __init__(self, id, display, indicator_id, is_non_numeric): self.id = id # The string representing this choice in the configure report form. self.display = display self.indicator_id = indicator_id self.is_non_numeric = is_non_numeric def to_column_dict(self, index, display_text, aggregation): return { "format": "default", "aggregation": self.aggregation_map[aggregation], "field": self.indicator_id, "column_id": "column_{}".format(index), "type": "field", "display": display_text, "transform": {'type': 'custom', 'custom_type': 'short_decimal_display'}, } class QuestionColumnOption(ColumnOption): def __init__(self, id, display, indicator_id, is_non_numeric, question_source): super(QuestionColumnOption, self).__init__(id, display, indicator_id, is_non_numeric) self.question_source = question_source class CountColumn(ColumnOption): def __init__(self, display): super(CountColumn, self).__init__('computed/count', display, "count", False) def to_column_dict(self, index, display_text, aggregation): return { 'type': 'field', 'format': 'default', 'aggregation': 'sum', 'field': 'count', 'column_id': 'column_{}'.format(index), 'display': display_text, }
<commit_before><commit_msg>Add classes representing column options We will use these instances of these classes, instead of the DataSourceProperty named tuples to represent the available columns in the report.<commit_after>class ColumnOption(object): """ This class represents column options in the report builder. """ aggregation_map = { 'simple': 'simple', 'Count per Choice': 'expand', 'Sum': 'sum', 'Average': 'avg' } def __init__(self, id, display, indicator_id, is_non_numeric): self.id = id # The string representing this choice in the configure report form. self.display = display self.indicator_id = indicator_id self.is_non_numeric = is_non_numeric def to_column_dict(self, index, display_text, aggregation): return { "format": "default", "aggregation": self.aggregation_map[aggregation], "field": self.indicator_id, "column_id": "column_{}".format(index), "type": "field", "display": display_text, "transform": {'type': 'custom', 'custom_type': 'short_decimal_display'}, } class QuestionColumnOption(ColumnOption): def __init__(self, id, display, indicator_id, is_non_numeric, question_source): super(QuestionColumnOption, self).__init__(id, display, indicator_id, is_non_numeric) self.question_source = question_source class CountColumn(ColumnOption): def __init__(self, display): super(CountColumn, self).__init__('computed/count', display, "count", False) def to_column_dict(self, index, display_text, aggregation): return { 'type': 'field', 'format': 'default', 'aggregation': 'sum', 'field': 'count', 'column_id': 'column_{}'.format(index), 'display': display_text, }
936523ae5212791337a852060052bc5f379ffaf7
modules/python/test/test_copytomask.py
modules/python/test/test_copytomask.py
#!/usr/bin/env python
'''
Test for copyto with mask
'''

# Python 2/3 compatibility
from __future__ import print_function

import cv2 as cv
import numpy as np
import sys

from tests_common import NewOpenCVTests

class copytomask_test(NewOpenCVTests):

    def test_copytomask(self):
        img = self.get_sample('python/images/baboon.png', cv.IMREAD_COLOR)
        eps = 0.
        #Create mask using inRange
        valeurBGRinf = np.array([0,0,100])
        valeurBGRSup = np.array([70, 70,255])
        maskRed = cv.inRange(img, valeurBGRinf, valeurBGRSup)
        #New binding
        dstcv = cv.copyTo(img,maskRed)
        #using numpy
        mask2=maskRed.astype(bool)
        _, mask_b = np.broadcast_arrays(img, mask2[..., None])
        dstnp = np.ma.masked_array(img, np.logical_not(mask_b))
        dstnp =np.ma.filled(dstnp,[0])
        self.assertEqual(cv.norm(dstnp ,dstcv), eps)

if __name__ == '__main__':
    NewOpenCVTests.bootstrap()
Add python test for copyto with mask
Add python test for copyto with mask
Python
apache-2.0
opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv,opencv/opencv
Add python test for copyto with mask
#!/usr/bin/env python ''' Test for copyto with mask ''' # Python 2/3 compatibility from __future__ import print_function import cv2 as cv import numpy as np import sys from tests_common import NewOpenCVTests class copytomask_test(NewOpenCVTests): def test_copytomask(self): img = self.get_sample('python/images/baboon.png', cv.IMREAD_COLOR) eps = 0. #Create mask using inRange valeurBGRinf = np.array([0,0,100]) valeurBGRSup = np.array([70, 70,255]) maskRed = cv.inRange(img, valeurBGRinf, valeurBGRSup) #New binding dstcv = cv.copyTo(img,maskRed) #using numpy mask2=maskRed.astype(bool) _, mask_b = np.broadcast_arrays(img, mask2[..., None]) dstnp = np.ma.masked_array(img, np.logical_not(mask_b)) dstnp =np.ma.filled(dstnp,[0]) self.assertEqual(cv.norm(dstnp ,dstcv), eps) if __name__ == '__main__': NewOpenCVTests.bootstrap()
<commit_before><commit_msg>Add python test for copyto with mask<commit_after>
#!/usr/bin/env python ''' Test for copyto with mask ''' # Python 2/3 compatibility from __future__ import print_function import cv2 as cv import numpy as np import sys from tests_common import NewOpenCVTests class copytomask_test(NewOpenCVTests): def test_copytomask(self): img = self.get_sample('python/images/baboon.png', cv.IMREAD_COLOR) eps = 0. #Create mask using inRange valeurBGRinf = np.array([0,0,100]) valeurBGRSup = np.array([70, 70,255]) maskRed = cv.inRange(img, valeurBGRinf, valeurBGRSup) #New binding dstcv = cv.copyTo(img,maskRed) #using numpy mask2=maskRed.astype(bool) _, mask_b = np.broadcast_arrays(img, mask2[..., None]) dstnp = np.ma.masked_array(img, np.logical_not(mask_b)) dstnp =np.ma.filled(dstnp,[0]) self.assertEqual(cv.norm(dstnp ,dstcv), eps) if __name__ == '__main__': NewOpenCVTests.bootstrap()
Add python test for copyto with mask#!/usr/bin/env python ''' Test for copyto with mask ''' # Python 2/3 compatibility from __future__ import print_function import cv2 as cv import numpy as np import sys from tests_common import NewOpenCVTests class copytomask_test(NewOpenCVTests): def test_copytomask(self): img = self.get_sample('python/images/baboon.png', cv.IMREAD_COLOR) eps = 0. #Create mask using inRange valeurBGRinf = np.array([0,0,100]) valeurBGRSup = np.array([70, 70,255]) maskRed = cv.inRange(img, valeurBGRinf, valeurBGRSup) #New binding dstcv = cv.copyTo(img,maskRed) #using numpy mask2=maskRed.astype(bool) _, mask_b = np.broadcast_arrays(img, mask2[..., None]) dstnp = np.ma.masked_array(img, np.logical_not(mask_b)) dstnp =np.ma.filled(dstnp,[0]) self.assertEqual(cv.norm(dstnp ,dstcv), eps) if __name__ == '__main__': NewOpenCVTests.bootstrap()
<commit_before><commit_msg>Add python test for copyto with mask<commit_after>#!/usr/bin/env python ''' Test for copyto with mask ''' # Python 2/3 compatibility from __future__ import print_function import cv2 as cv import numpy as np import sys from tests_common import NewOpenCVTests class copytomask_test(NewOpenCVTests): def test_copytomask(self): img = self.get_sample('python/images/baboon.png', cv.IMREAD_COLOR) eps = 0. #Create mask using inRange valeurBGRinf = np.array([0,0,100]) valeurBGRSup = np.array([70, 70,255]) maskRed = cv.inRange(img, valeurBGRinf, valeurBGRSup) #New binding dstcv = cv.copyTo(img,maskRed) #using numpy mask2=maskRed.astype(bool) _, mask_b = np.broadcast_arrays(img, mask2[..., None]) dstnp = np.ma.masked_array(img, np.logical_not(mask_b)) dstnp =np.ma.filled(dstnp,[0]) self.assertEqual(cv.norm(dstnp ,dstcv), eps) if __name__ == '__main__': NewOpenCVTests.bootstrap()
003e1b5c25b5a5515f9938871516c8607a560643
project/api/migrations/0002_auto_20180224_1322.py
project/api/migrations/0002_auto_20180224_1322.py
# Generated by Django 2.0.2 on 2018-02-24 21:22

from django.db import migrations
import django.db.models.manager


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.AlterModelManagers(
            name='member',
            managers=[
                ('lows', django.db.models.manager.Manager()),
            ],
        ),
    ]
Complete members bypass for larger than chorus
Complete members bypass for larger than chorus
Python
bsd-2-clause
dbinetti/barberscore-django,dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api
Complete members bypass for larger than chorus
# Generated by Django 2.0.2 on 2018-02-24 21:22 from django.db import migrations import django.db.models.manager class Migration(migrations.Migration): dependencies = [ ('api', '0001_initial'), ] operations = [ migrations.AlterModelManagers( name='member', managers=[ ('lows', django.db.models.manager.Manager()), ], ), ]
<commit_before><commit_msg>Complete members bypass for larger than chorus<commit_after>
# Generated by Django 2.0.2 on 2018-02-24 21:22 from django.db import migrations import django.db.models.manager class Migration(migrations.Migration): dependencies = [ ('api', '0001_initial'), ] operations = [ migrations.AlterModelManagers( name='member', managers=[ ('lows', django.db.models.manager.Manager()), ], ), ]
Complete members bypass for larger than chorus# Generated by Django 2.0.2 on 2018-02-24 21:22 from django.db import migrations import django.db.models.manager class Migration(migrations.Migration): dependencies = [ ('api', '0001_initial'), ] operations = [ migrations.AlterModelManagers( name='member', managers=[ ('lows', django.db.models.manager.Manager()), ], ), ]
<commit_before><commit_msg>Complete members bypass for larger than chorus<commit_after># Generated by Django 2.0.2 on 2018-02-24 21:22 from django.db import migrations import django.db.models.manager class Migration(migrations.Migration): dependencies = [ ('api', '0001_initial'), ] operations = [ migrations.AlterModelManagers( name='member', managers=[ ('lows', django.db.models.manager.Manager()), ], ), ]
eea2d8de93806f19f431efff7afe66c1dfef1630
opps/core/tests/channel_models.py
opps/core/tests/channel_models.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from django.test import TestCase
from django.contrib.sites.models import Site

from opps.core.models.channel import Channel


class ChannelModelTest(TestCase):

    def setUp(self):
        self.site = Site.objects.filter(name=u'example.com').get()
        self.channel = Channel.objects.create(name=u'Home', slug=u'home',
                                              description=u'home page',
                                              site=self.site)
Create setup channel models test
Create setup channel models test
Python
mit
williamroot/opps,jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,opps/opps
Create setup channel models test
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.test import TestCase from django.contrib.sites.models import Site from opps.core.models.channel import Channel class ChannelModelTest(TestCase): def setUp(self): self.site = Site.objects.filter(name=u'example.com').get() self.channel = Channel.objects.create(name=u'Home', slug=u'home', description=u'home page', site=self.site)
<commit_before><commit_msg>Create setup channel models test<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from django.test import TestCase from django.contrib.sites.models import Site from opps.core.models.channel import Channel class ChannelModelTest(TestCase): def setUp(self): self.site = Site.objects.filter(name=u'example.com').get() self.channel = Channel.objects.create(name=u'Home', slug=u'home', description=u'home page', site=self.site)
Create setup channel models test#!/usr/bin/env python # -*- coding: utf-8 -*- from django.test import TestCase from django.contrib.sites.models import Site from opps.core.models.channel import Channel class ChannelModelTest(TestCase): def setUp(self): self.site = Site.objects.filter(name=u'example.com').get() self.channel = Channel.objects.create(name=u'Home', slug=u'home', description=u'home page', site=self.site)
<commit_before><commit_msg>Create setup channel models test<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from django.test import TestCase from django.contrib.sites.models import Site from opps.core.models.channel import Channel class ChannelModelTest(TestCase): def setUp(self): self.site = Site.objects.filter(name=u'example.com').get() self.channel = Channel.objects.create(name=u'Home', slug=u'home', description=u'home page', site=self.site)
11aed227058304d691ed0dc42f533ee0832ac5ae
space_allocator.py
space_allocator.py
"""space_allocator.py Usage: create_room <room_type> <room_name> Create rooms in the dojo add_person <person_name> <FELLOW|STAFF> [wants_accommodation] Adds and allocate space to a person in the dojo print_room <room_name> Prints the names of all the people in room_name on the screen. print_allocations [-o=filename] Prints a list of allocations onto the screen space_allocator.py --version Shows the program's version number and exits space_allocator.py (-h | --help) Show this help message and exit print_unallocated [-o=filename] Prints a list of unallocated people to the screen reallocate_person <person_identifier> <new_room_name> Reallocate the person with person_identifier to new_room_name load_people [-o=filename] Adds people to rooms from a txt file. See Appendix 1A for text input format save_state [--db=sqlite_database] Persists all the data stored in the app to an SQLite database load_state <sqlite_database> Loads data from a database into the application. Options: -h --help Show this screen. --version Show version. """ from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='Space allocator 1.0')
Add docopt implementation in main file
Add docopt implementation in main file
Python
mit
EdwinKato/Space-Allocator,EdwinKato/Space-Allocator
Add docopt implementation in main file
"""space_allocator.py Usage: create_room <room_type> <room_name> Create rooms in the dojo add_person <person_name> <FELLOW|STAFF> [wants_accommodation] Adds and allocate space to a person in the dojo print_room <room_name> Prints the names of all the people in room_name on the screen. print_allocations [-o=filename] Prints a list of allocations onto the screen space_allocator.py --version Shows the program's version number and exits space_allocator.py (-h | --help) Show this help message and exit print_unallocated [-o=filename] Prints a list of unallocated people to the screen reallocate_person <person_identifier> <new_room_name> Reallocate the person with person_identifier to new_room_name load_people [-o=filename] Adds people to rooms from a txt file. See Appendix 1A for text input format save_state [--db=sqlite_database] Persists all the data stored in the app to an SQLite database load_state <sqlite_database> Loads data from a database into the application. Options: -h --help Show this screen. --version Show version. """ from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='Space allocator 1.0')
<commit_before><commit_msg>Add docopt implementation in main file<commit_after>
"""space_allocator.py Usage: create_room <room_type> <room_name> Create rooms in the dojo add_person <person_name> <FELLOW|STAFF> [wants_accommodation] Adds and allocate space to a person in the dojo print_room <room_name> Prints the names of all the people in room_name on the screen. print_allocations [-o=filename] Prints a list of allocations onto the screen space_allocator.py --version Shows the program's version number and exits space_allocator.py (-h | --help) Show this help message and exit print_unallocated [-o=filename] Prints a list of unallocated people to the screen reallocate_person <person_identifier> <new_room_name> Reallocate the person with person_identifier to new_room_name load_people [-o=filename] Adds people to rooms from a txt file. See Appendix 1A for text input format save_state [--db=sqlite_database] Persists all the data stored in the app to an SQLite database load_state <sqlite_database> Loads data from a database into the application. Options: -h --help Show this screen. --version Show version. """ from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='Space allocator 1.0')
Add docopt implementation in main file"""space_allocator.py Usage: create_room <room_type> <room_name> Create rooms in the dojo add_person <person_name> <FELLOW|STAFF> [wants_accommodation] Adds and allocate space to a person in the dojo print_room <room_name> Prints the names of all the people in room_name on the screen. print_allocations [-o=filename] Prints a list of allocations onto the screen space_allocator.py --version Shows the program's version number and exits space_allocator.py (-h | --help) Show this help message and exit print_unallocated [-o=filename] Prints a list of unallocated people to the screen reallocate_person <person_identifier> <new_room_name> Reallocate the person with person_identifier to new_room_name load_people [-o=filename] Adds people to rooms from a txt file. See Appendix 1A for text input format save_state [--db=sqlite_database] Persists all the data stored in the app to an SQLite database load_state <sqlite_database> Loads data from a database into the application. Options: -h --help Show this screen. --version Show version. """ from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='Space allocator 1.0')
<commit_before><commit_msg>Add docopt implementation in main file<commit_after>"""space_allocator.py Usage: create_room <room_type> <room_name> Create rooms in the dojo add_person <person_name> <FELLOW|STAFF> [wants_accommodation] Adds and allocate space to a person in the dojo print_room <room_name> Prints the names of all the people in room_name on the screen. print_allocations [-o=filename] Prints a list of allocations onto the screen space_allocator.py --version Shows the program's version number and exits space_allocator.py (-h | --help) Show this help message and exit print_unallocated [-o=filename] Prints a list of unallocated people to the screen reallocate_person <person_identifier> <new_room_name> Reallocate the person with person_identifier to new_room_name load_people [-o=filename] Adds people to rooms from a txt file. See Appendix 1A for text input format save_state [--db=sqlite_database] Persists all the data stored in the app to an SQLite database load_state <sqlite_database> Loads data from a database into the application. Options: -h --help Show this screen. --version Show version. """ from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='Space allocator 1.0')
ea66e543df237cc066b17e3bc814a885eb60cc95
scripts/submit.py
scripts/submit.py
#!/usr/bin/env python
import getpass
import json
import requests
import sys
from argparse import ArgumentParser, FileType
from urllib.parse import urljoin


class Nudibranch(object):
    BASE_URL = 'https://borg.cs.ucsb.edu'
    PATHS = {'auth': 'session'}

    @classmethod
    def url(cls, resource, **kwargs):
        return urljoin(cls.BASE_URL, cls.PATHS[resource]).format(kwargs)

    def __init__(self):
        self.debug = True
        self.session = requests.session()

    def msg(self, message):
        """Output a debugging message."""
        if self.debug:
            print(message)

    def login(self, username=None, password=None):
        """Login to establish a valid session."""
        auth_url = self.url('auth')
        while True:
            if not username and not password:
                sys.stdout.write('Username: ')
                sys.stdout.flush()
                username = sys.stdin.readline().strip()
                password = getpass.getpass()
            response = self.request(auth_url, 'PUT', username=username,
                                    password=password)
            if response.status_code == 201:
                self.msg('logged in')
                break
            else:
                print(response.json['message'])
                username = password = None

    def request(self, url, method='get', **data):
        method = method.lower()
        if method == 'get':
            assert data is None
            response = self.session.get(url)
        else:
            response = getattr(self.session, method)(url, json.dumps(data),
                                                     verify=False)
        return response


def main():
    parser = ArgumentParser()
    parser.add_argument('-p', '--project', required=True)
    parser.add_argument('files', nargs='+', type=FileType())
    args = parser.parse_args()

    client = Nudibranch()
    client.login('admin', 'passwor')
    # Verify project authorization
    # Submit each file and get submission id
    # Notify of completed submission
    return 0


if __name__ == '__main__':
    sys.exit(main())
Add submission script that currently only performs login.
Add submission script that currently only performs login.
Python
bsd-2-clause
ucsb-cs/submit,ucsb-cs/submit,ucsb-cs/submit,ucsb-cs/submit
Add submission script that currently only performs login.
#!/usr/bin/env python import getpass import json import requests import sys from argparse import ArgumentParser, FileType from urllib.parse import urljoin class Nudibranch(object): BASE_URL = 'https://borg.cs.ucsb.edu' PATHS = {'auth': 'session'} @classmethod def url(cls, resource, **kwargs): return urljoin(cls.BASE_URL, cls.PATHS[resource]).format(kwargs) def __init__(self): self.debug = True self.session = requests.session() def msg(self, message): """Output a debugging message.""" if self.debug: print(message) def login(self, username=None, password=None): """Login to establish a valid session.""" auth_url = self.url('auth') while True: if not username and not password: sys.stdout.write('Username: ') sys.stdout.flush() username = sys.stdin.readline().strip() password = getpass.getpass() response = self.request(auth_url, 'PUT', username=username, password=password) if response.status_code == 201: self.msg('logged in') break else: print(response.json['message']) username = password = None def request(self, url, method='get', **data): method = method.lower() if method == 'get': assert data is None response = self.session.get(url) else: response = getattr(self.session, method)(url, json.dumps(data), verify=False) return response def main(): parser = ArgumentParser() parser.add_argument('-p', '--project', required=True) parser.add_argument('files', nargs='+', type=FileType()) args = parser.parse_args() client = Nudibranch() client.login('admin', 'passwor') # Verify project authorization # Submit each file and get submission id # Notify of completed submission return 0 if __name__ == '__main__': sys.exit(main())
<commit_before><commit_msg>Add submission script that currently only performs login.<commit_after>
#!/usr/bin/env python import getpass import json import requests import sys from argparse import ArgumentParser, FileType from urllib.parse import urljoin class Nudibranch(object): BASE_URL = 'https://borg.cs.ucsb.edu' PATHS = {'auth': 'session'} @classmethod def url(cls, resource, **kwargs): return urljoin(cls.BASE_URL, cls.PATHS[resource]).format(kwargs) def __init__(self): self.debug = True self.session = requests.session() def msg(self, message): """Output a debugging message.""" if self.debug: print(message) def login(self, username=None, password=None): """Login to establish a valid session.""" auth_url = self.url('auth') while True: if not username and not password: sys.stdout.write('Username: ') sys.stdout.flush() username = sys.stdin.readline().strip() password = getpass.getpass() response = self.request(auth_url, 'PUT', username=username, password=password) if response.status_code == 201: self.msg('logged in') break else: print(response.json['message']) username = password = None def request(self, url, method='get', **data): method = method.lower() if method == 'get': assert data is None response = self.session.get(url) else: response = getattr(self.session, method)(url, json.dumps(data), verify=False) return response def main(): parser = ArgumentParser() parser.add_argument('-p', '--project', required=True) parser.add_argument('files', nargs='+', type=FileType()) args = parser.parse_args() client = Nudibranch() client.login('admin', 'passwor') # Verify project authorization # Submit each file and get submission id # Notify of completed submission return 0 if __name__ == '__main__': sys.exit(main())
Add submission script that currently only performs login.#!/usr/bin/env python import getpass import json import requests import sys from argparse import ArgumentParser, FileType from urllib.parse import urljoin class Nudibranch(object): BASE_URL = 'https://borg.cs.ucsb.edu' PATHS = {'auth': 'session'} @classmethod def url(cls, resource, **kwargs): return urljoin(cls.BASE_URL, cls.PATHS[resource]).format(kwargs) def __init__(self): self.debug = True self.session = requests.session() def msg(self, message): """Output a debugging message.""" if self.debug: print(message) def login(self, username=None, password=None): """Login to establish a valid session.""" auth_url = self.url('auth') while True: if not username and not password: sys.stdout.write('Username: ') sys.stdout.flush() username = sys.stdin.readline().strip() password = getpass.getpass() response = self.request(auth_url, 'PUT', username=username, password=password) if response.status_code == 201: self.msg('logged in') break else: print(response.json['message']) username = password = None def request(self, url, method='get', **data): method = method.lower() if method == 'get': assert data is None response = self.session.get(url) else: response = getattr(self.session, method)(url, json.dumps(data), verify=False) return response def main(): parser = ArgumentParser() parser.add_argument('-p', '--project', required=True) parser.add_argument('files', nargs='+', type=FileType()) args = parser.parse_args() client = Nudibranch() client.login('admin', 'passwor') # Verify project authorization # Submit each file and get submission id # Notify of completed submission return 0 if __name__ == '__main__': sys.exit(main())
<commit_before><commit_msg>Add submission script that currently only performs login.<commit_after>#!/usr/bin/env python import getpass import json import requests import sys from argparse import ArgumentParser, FileType from urllib.parse import urljoin class Nudibranch(object): BASE_URL = 'https://borg.cs.ucsb.edu' PATHS = {'auth': 'session'} @classmethod def url(cls, resource, **kwargs): return urljoin(cls.BASE_URL, cls.PATHS[resource]).format(kwargs) def __init__(self): self.debug = True self.session = requests.session() def msg(self, message): """Output a debugging message.""" if self.debug: print(message) def login(self, username=None, password=None): """Login to establish a valid session.""" auth_url = self.url('auth') while True: if not username and not password: sys.stdout.write('Username: ') sys.stdout.flush() username = sys.stdin.readline().strip() password = getpass.getpass() response = self.request(auth_url, 'PUT', username=username, password=password) if response.status_code == 201: self.msg('logged in') break else: print(response.json['message']) username = password = None def request(self, url, method='get', **data): method = method.lower() if method == 'get': assert data is None response = self.session.get(url) else: response = getattr(self.session, method)(url, json.dumps(data), verify=False) return response def main(): parser = ArgumentParser() parser.add_argument('-p', '--project', required=True) parser.add_argument('files', nargs='+', type=FileType()) args = parser.parse_args() client = Nudibranch() client.login('admin', 'passwor') # Verify project authorization # Submit each file and get submission id # Notify of completed submission return 0 if __name__ == '__main__': sys.exit(main())
0bdb41a9e1820abf7d068e58f3cdac9d2c8f7221
sympy/logic/benchmarks/run-solvers.py
sympy/logic/benchmarks/run-solvers.py
from __future__ import print_function, division

from sympy.logic.utilities import load_file
from sympy.logic import satisfiable
import time
import os
import sys

input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1])

INPUT = [5 * i for i in range(2, 16)]
ALGORITHMS = ['dpll', 'dpll2']

results = {}
for test in INPUT:
    results[test] = {}

for test in INPUT:
    for alg in ALGORITHMS:
        file_name = "%s/input/%d.cnf" % (input_path, test)
        theory = load_file(file_name)
        start = time.time()
        assert satisfiable(theory, algorithm=alg)
        end = time.time()
        results[test][alg] = end - start
        print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg))

print("problem," + ','.join(ALGORITHMS))
for test in INPUT:
    line = "%d" % test
    for alg in ALGORITHMS:
        line += ",%f" % results[test][alg]
    print(line)
from __future__ import print_function, division

from sympy.logic.utilities import load_file
from sympy.logic import satisfiable
import time
import os
import sys

input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1])

INPUT = [5 * i for i in range(2, 16)]
ALGORITHMS = ['dpll', 'dpll2']

results = {}
for test in INPUT:
    results[test] = {}

for test in INPUT:
    for alg in ALGORITHMS:
        file_name = "%s/input/%d.cnf" % (input_path, test)
        theory = load_file(file_name)
        start = time.time()
        assert satisfiable(theory, algorithm=alg)
        end = time.time()
        results[test][alg] = end - start
        print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg))

print("problem," + ','.join(ALGORITHMS))
for test in INPUT:
    line = "%d" % test
    for alg in ALGORITHMS:
        line += ",%f" % results[test][alg]
    print(line)
Fix a white space error
Fix a white space error This was introduced in e2a415797d4adb81551c5331b1adfdb4a6ecbe94.
Python
bsd-3-clause
MridulS/sympy,cccfran/sympy,yashsharan/sympy,shipci/sympy,sahilshekhawat/sympy,Sumith1896/sympy,ga7g08/sympy,chaffra/sympy,souravsingh/sympy,sahilshekhawat/sympy,atreyv/sympy,lidavidm/sympy,postvakje/sympy,jerli/sympy,rahuldan/sympy,chaffra/sympy,Sumith1896/sympy,ga7g08/sympy,sunny94/temp,abhiii5459/sympy,sahmed95/sympy,lidavidm/sympy,dqnykamp/sympy,Gadal/sympy,MridulS/sympy,diofant/diofant,beni55/sympy,VaibhavAgarwalVA/sympy,lindsayad/sympy,skirpichev/omg,asm666/sympy,Gadal/sympy,Arafatk/sympy,grevutiu-gabriel/sympy,mafiya69/sympy,mafiya69/sympy,kaushik94/sympy,jbbskinny/sympy,Designist/sympy,lindsayad/sympy,yukoba/sympy,aktech/sympy,sunny94/temp,kaichogami/sympy,sunny94/temp,VaibhavAgarwalVA/sympy,dqnykamp/sympy,Gadal/sympy,Arafatk/sympy,kaushik94/sympy,jerli/sympy,wanglongqi/sympy,liangjiaxing/sympy,sahmed95/sympy,sampadsaha5/sympy,hargup/sympy,jbbskinny/sympy,yukoba/sympy,kevalds51/sympy,vipulroxx/sympy,kumarkrishna/sympy,madan96/sympy,sampadsaha5/sympy,Sumith1896/sympy,kumarkrishna/sympy,Vishluck/sympy,emon10005/sympy,Davidjohnwilson/sympy,hargup/sympy,cswiercz/sympy,moble/sympy,pandeyadarsh/sympy,mcdaniel67/sympy,vipulroxx/sympy,souravsingh/sympy,saurabhjn76/sympy,atsao72/sympy,sahilshekhawat/sympy,iamutkarshtiwari/sympy,Curious72/sympy,garvitr/sympy,moble/sympy,sahmed95/sympy,souravsingh/sympy,lindsayad/sympy,atsao72/sympy,beni55/sympy,Vishluck/sympy,Davidjohnwilson/sympy,pandeyadarsh/sympy,atsao72/sympy
from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line) Fix a white space error This was introduced in e2a415797d4adb81551c5331b1adfdb4a6ecbe94.
from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line)
<commit_before>from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line) <commit_msg>Fix a white space error This was introduced in e2a415797d4adb81551c5331b1adfdb4a6ecbe94.<commit_after>
from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line)
from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line) Fix a white space error This was introduced in e2a415797d4adb81551c5331b1adfdb4a6ecbe94.from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line)
<commit_before>from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line) <commit_msg>Fix a white space error This was introduced in e2a415797d4adb81551c5331b1adfdb4a6ecbe94.<commit_after>from __future__ import print_function, division from sympy.logic.utilities import load_file from sympy.logic import satisfiable import time import os import sys input_path = os.getcwd() + '/' + '/'.join(sys.argv[0].split('/')[:-1]) INPUT = [5 * i for i in range(2, 16)] ALGORITHMS = ['dpll', 'dpll2'] results = {} for test in INPUT: results[test] = {} for test in INPUT: for alg in ALGORITHMS: file_name = "%s/input/%d.cnf" % (input_path, test) theory = load_file(file_name) start = time.time() assert satisfiable(theory, algorithm=alg) end = time.time() results[test][alg] = end - start print("Test %d in time %.2f seconds for algorithm %s." % (test, end - start, alg)) print("problem," + ','.join(ALGORITHMS)) for test in INPUT: line = "%d" % test for alg in ALGORITHMS: line += ",%f" % results[test][alg] print(line)
9dc756b627e39c62ffce3af617089c331822fa33
test/test_extra_root.py
test/test_extra_root.py
from support import lib,ffi
from qcgc_test import QCGCTest
import unittest

class ExtraRootTestCase(QCGCTest):
    def test_extra_root(self):
        # XXX: Violates the condition of the extra roots but it does not matter atm
        extra_root_obj = self.allocate_ref(2)
        # Register roots
        lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs))
        lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs) + 1)

        # Test crash with null objects
        lib.qcgc_collect()

        # Allocate objects
        reachable = list()

        objects = self.gen_circular_structure(100);
        reachable.extend(objects)
        extra_root_obj.refs[0] = objects[0] # No self.set_ref as it triggers the write barrier

        objects = self.gen_circular_structure(100);
        reachable.extend(objects)
        extra_root_obj.refs[1] = objects[1] # Same

        #
        lib.qcgc_mark_all()

        for p in reachable:
            self.assertEqual(lib.qcgc_arena_get_blocktype(ffi.cast("cell_t *", p)), lib.BLOCK_BLACK)

if __name__ == "__main__":
    unittest.main()
Add test for extra roots
Add test for extra roots
Python
mit
ntruessel/qcgc,ntruessel/qcgc,ntruessel/qcgc
Add test for extra roots
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ExtraRootTestCase(QCGCTest): def test_extra_root(self): # XXX: Violates the condition of the extra roots but it does not matter atm extra_root_obj = self.allocate_ref(2) # Register roots lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs)) lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs) + 1) # Test crash with null objects lib.qcgc_collect() # Allocate objects reachable = list() objects = self.gen_circular_structure(100); reachable.extend(objects) extra_root_obj.refs[0] = objects[0] # No self.set_ref as it triggers the write barrier objects = self.gen_circular_structure(100); reachable.extend(objects) extra_root_obj.refs[1] = objects[1] # Same # lib.qcgc_mark_all() for p in reachable: self.assertEqual(lib.qcgc_arena_get_blocktype(ffi.cast("cell_t *", p)), lib.BLOCK_BLACK) if __name__ == "__main__": unittest.main()
<commit_before><commit_msg>Add test for extra roots<commit_after>
from support import lib,ffi from qcgc_test import QCGCTest import unittest class ExtraRootTestCase(QCGCTest): def test_extra_root(self): # XXX: Violates the condition of the extra roots but it does not matter atm extra_root_obj = self.allocate_ref(2) # Register roots lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs)) lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs) + 1) # Test crash with null objects lib.qcgc_collect() # Allocate objects reachable = list() objects = self.gen_circular_structure(100); reachable.extend(objects) extra_root_obj.refs[0] = objects[0] # No self.set_ref as it triggers the write barrier objects = self.gen_circular_structure(100); reachable.extend(objects) extra_root_obj.refs[1] = objects[1] # Same # lib.qcgc_mark_all() for p in reachable: self.assertEqual(lib.qcgc_arena_get_blocktype(ffi.cast("cell_t *", p)), lib.BLOCK_BLACK) if __name__ == "__main__": unittest.main()
Add test for extra roots
from support import lib, ffi
from qcgc_test import QCGCTest
import unittest


class ExtraRootTestCase(QCGCTest):
    def test_extra_root(self):
        # XXX: Violates the condition of the extra roots, but that does not
        # matter at the moment
        extra_root_obj = self.allocate_ref(2)
        # Register roots
        lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs))
        lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs) + 1)
        # Test crash with null objects
        lib.qcgc_collect()
        # Allocate objects
        reachable = list()

        objects = self.gen_circular_structure(100)
        reachable.extend(objects)
        extra_root_obj.refs[0] = objects[0]  # No self.set_ref as it triggers the write barrier

        objects = self.gen_circular_structure(100)
        reachable.extend(objects)
        extra_root_obj.refs[1] = objects[1]  # Same

        lib.qcgc_mark_all()

        for p in reachable:
            self.assertEqual(lib.qcgc_arena_get_blocktype(ffi.cast("cell_t *", p)), lib.BLOCK_BLACK)

if __name__ == "__main__":
    unittest.main()
<commit_before><commit_msg>Add test for extra roots<commit_after>
from support import lib, ffi
from qcgc_test import QCGCTest
import unittest


class ExtraRootTestCase(QCGCTest):
    def test_extra_root(self):
        # XXX: Violates the condition of the extra roots, but that does not
        # matter at the moment
        extra_root_obj = self.allocate_ref(2)
        # Register roots
        lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs))
        lib.qcgc_register_extra_root(ffi.cast("object_t **", extra_root_obj.refs) + 1)
        # Test crash with null objects
        lib.qcgc_collect()
        # Allocate objects
        reachable = list()

        objects = self.gen_circular_structure(100)
        reachable.extend(objects)
        extra_root_obj.refs[0] = objects[0]  # No self.set_ref as it triggers the write barrier

        objects = self.gen_circular_structure(100)
        reachable.extend(objects)
        extra_root_obj.refs[1] = objects[1]  # Same

        lib.qcgc_mark_all()

        for p in reachable:
            self.assertEqual(lib.qcgc_arena_get_blocktype(ffi.cast("cell_t *", p)), lib.BLOCK_BLACK)

if __name__ == "__main__":
    unittest.main()
4519910f34ca35224d9b0fdd1cea4a36a425f4da
gdrived.py
gdrived.py
#!/usr/bin/env python # # Copyright 2012 Jim Lawton. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time import daemon class GDriveDaemon(daemon.Daemon): def run(self): while True: time.sleep(1)
Add skeleton for sync daemon.
Add skeleton for sync daemon.
Python
apache-2.0
jimlawton/gdrive-linux-googlecode,babycaseny/gdrive-linux,jimlawton/gdrive-linux,jmfield2/gdrive-linux
Add skeleton for sync daemon.
#!/usr/bin/env python # # Copyright 2012 Jim Lawton. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time import daemon class GDriveDaemon(daemon.Daemon): def run(self): while True: time.sleep(1)
<commit_before><commit_msg>Add skeleton for sync daemon.<commit_after>
#!/usr/bin/env python # # Copyright 2012 Jim Lawton. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time import daemon class GDriveDaemon(daemon.Daemon): def run(self): while True: time.sleep(1)
Add skeleton for sync daemon.#!/usr/bin/env python # # Copyright 2012 Jim Lawton. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time import daemon class GDriveDaemon(daemon.Daemon): def run(self): while True: time.sleep(1)
<commit_before><commit_msg>Add skeleton for sync daemon.<commit_after>#!/usr/bin/env python # # Copyright 2012 Jim Lawton. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import sys import time import daemon class GDriveDaemon(daemon.Daemon): def run(self): while True: time.sleep(1)
0b7b471af3a4b1a2490b9a0dd2dd4b8f64453c17
test/persistence_test.py
test/persistence_test.py
import os
import shutil
import tempfile
import time

from lwr.persistence import PersistedJobStore
from lwr.queue_manager import QueueManager


def test_persistence():
    """
    Tests persistence of a manager's jobs.
    """
    staging_directory = tempfile.mkdtemp()
    try:
        persisted_job_store = PersistedJobStore(**{'shelf_filename': os.path.join(staging_directory, 'persisted_jobs')})
        queue1 = QueueManager('test', staging_directory, persisted_job_store, 0)
        queue1.setup_job_directory('4')
        touch_file = os.path.join(staging_directory, 'ran')
        queue1.launch('4', 'touch %s' % touch_file)
        time.sleep(5)
        assert not os.path.exists(touch_file)
        queue1.shutdown()
        queue2 = QueueManager('test', staging_directory, persisted_job_store, 1)
        time.sleep(5)
        assert os.path.exists(touch_file)
    finally:
        shutil.rmtree(staging_directory)
        try:
            queue2.shutdown()
        except Exception:
            pass
Add initial testing for persistence.
Add initial testing for persistence.
Python
apache-2.0
natefoo/pulsar,jmchilton/pulsar,ssorgatem/pulsar,natefoo/pulsar,jmchilton/pulsar,galaxyproject/pulsar,jmchilton/lwr,ssorgatem/pulsar,jmchilton/lwr,galaxyproject/pulsar
Add initial testing for persistence.
import os
import shutil
import tempfile
import time

from lwr.persistence import PersistedJobStore
from lwr.queue_manager import QueueManager


def test_persistence():
    """
    Tests persistence of a manager's jobs.
    """
    staging_directory = tempfile.mkdtemp()
    try:
        persisted_job_store = PersistedJobStore(**{'shelf_filename': os.path.join(staging_directory, 'persisted_jobs')})
        queue1 = QueueManager('test', staging_directory, persisted_job_store, 0)
        queue1.setup_job_directory('4')
        touch_file = os.path.join(staging_directory, 'ran')
        queue1.launch('4', 'touch %s' % touch_file)
        time.sleep(5)
        assert not os.path.exists(touch_file)
        queue1.shutdown()
        queue2 = QueueManager('test', staging_directory, persisted_job_store, 1)
        time.sleep(5)
        assert os.path.exists(touch_file)
    finally:
        shutil.rmtree(staging_directory)
        try:
            queue2.shutdown()
        except Exception:
            pass
<commit_before><commit_msg>Add initial testing for persistence.<commit_after>
import os
import shutil
import tempfile
import time

from lwr.persistence import PersistedJobStore
from lwr.queue_manager import QueueManager


def test_persistence():
    """
    Tests persistence of a manager's jobs.
    """
    staging_directory = tempfile.mkdtemp()
    try:
        persisted_job_store = PersistedJobStore(**{'shelf_filename': os.path.join(staging_directory, 'persisted_jobs')})
        queue1 = QueueManager('test', staging_directory, persisted_job_store, 0)
        queue1.setup_job_directory('4')
        touch_file = os.path.join(staging_directory, 'ran')
        queue1.launch('4', 'touch %s' % touch_file)
        time.sleep(5)
        assert not os.path.exists(touch_file)
        queue1.shutdown()
        queue2 = QueueManager('test', staging_directory, persisted_job_store, 1)
        time.sleep(5)
        assert os.path.exists(touch_file)
    finally:
        shutil.rmtree(staging_directory)
        try:
            queue2.shutdown()
        except Exception:
            pass
Add initial testing for persistence.
import os
import shutil
import tempfile
import time

from lwr.persistence import PersistedJobStore
from lwr.queue_manager import QueueManager


def test_persistence():
    """
    Tests persistence of a manager's jobs.
    """
    staging_directory = tempfile.mkdtemp()
    try:
        persisted_job_store = PersistedJobStore(**{'shelf_filename': os.path.join(staging_directory, 'persisted_jobs')})
        queue1 = QueueManager('test', staging_directory, persisted_job_store, 0)
        queue1.setup_job_directory('4')
        touch_file = os.path.join(staging_directory, 'ran')
        queue1.launch('4', 'touch %s' % touch_file)
        time.sleep(5)
        assert not os.path.exists(touch_file)
        queue1.shutdown()
        queue2 = QueueManager('test', staging_directory, persisted_job_store, 1)
        time.sleep(5)
        assert os.path.exists(touch_file)
    finally:
        shutil.rmtree(staging_directory)
        try:
            queue2.shutdown()
        except Exception:
            pass
<commit_before><commit_msg>Add initial testing for persistence.<commit_after>
import os
import shutil
import tempfile
import time

from lwr.persistence import PersistedJobStore
from lwr.queue_manager import QueueManager


def test_persistence():
    """
    Tests persistence of a manager's jobs.
    """
    staging_directory = tempfile.mkdtemp()
    try:
        persisted_job_store = PersistedJobStore(**{'shelf_filename': os.path.join(staging_directory, 'persisted_jobs')})
        queue1 = QueueManager('test', staging_directory, persisted_job_store, 0)
        queue1.setup_job_directory('4')
        touch_file = os.path.join(staging_directory, 'ran')
        queue1.launch('4', 'touch %s' % touch_file)
        time.sleep(5)
        assert not os.path.exists(touch_file)
        queue1.shutdown()
        queue2 = QueueManager('test', staging_directory, persisted_job_store, 1)
        time.sleep(5)
        assert os.path.exists(touch_file)
    finally:
        shutil.rmtree(staging_directory)
        try:
            queue2.shutdown()
        except Exception:
            pass
ffebf307137d0435d7b2f0d10ac912ca7f87f1c7
cnn/getReg.py
cnn/getReg.py
# Copyright (C) 2016 Zhixian MA <zxma_sjtu@qq.com>
"""
Transform cavity regions of samples selected from the paper arXiv-1610.03487.

Reference
---------
[1] J. Shin, J. Woo, and J. Mulchaey
"A systematic search for X-ray cavities in galaxy clusters, groups,
and elliptical galaxies"
arXiv-1610.03487
"""

import os
import argparse

import cnn_utils as utils


def main():
    """The main function"""
    # Init
    parser = argparse.ArgumentParser(description="Region transform")
    # parameters
    parser.add_argument("inpath", help="path holding the samples.")
    args = parser.parse_args()

    # Check existence of the input path
    # (os.listdir raises OSError, not IOError, on a missing path)
    try:
        samples = os.listdir(args.inpath)
    except OSError:
        print("Inpath does not exist.")
        return

    # Transform regions
    fp = open("sample_z.log", 'a')
    for s in samples:
        print("Processing on %s..." % s)
        # get redshift
        z = utils.get_redshift(s)
        if z != -1:
            # calc rate
            rate = utils.calc_rate(z)
            fp.write("%s\t%f\t%f\n" % (s, z, rate))
            # region exchange
            sample_path = args.inpath + '/' + s + '/'
            regpath = os.path.join(sample_path, 'cavities.reg')
            print(regpath)
            utils.reg_exchange(regpath, rate, unit='kpc')
    fp.close()

if __name__ == "__main__":
    main()
Transform units of cavity regions in Shin et al.'s 2016 paper.
Transform units of cavity regions in Shin et al.'s 2016 paper.
Python
mit
myinxd/cavdet,myinxd/cavdet
Transform units of cavity regions in Shin et al.'s 2016 paper.
# Copyright (C) 2016 Zhixian MA <zxma_sjtu@qq.com>
"""
Transform cavity regions of samples selected from the paper arXiv-1610.03487.

Reference
---------
[1] J. Shin, J. Woo, and J. Mulchaey
"A systematic search for X-ray cavities in galaxy clusters, groups,
and elliptical galaxies"
arXiv-1610.03487
"""

import os
import argparse

import cnn_utils as utils


def main():
    """The main function"""
    # Init
    parser = argparse.ArgumentParser(description="Region transform")
    # parameters
    parser.add_argument("inpath", help="path holding the samples.")
    args = parser.parse_args()

    # Check existence of the input path
    # (os.listdir raises OSError, not IOError, on a missing path)
    try:
        samples = os.listdir(args.inpath)
    except OSError:
        print("Inpath does not exist.")
        return

    # Transform regions
    fp = open("sample_z.log", 'a')
    for s in samples:
        print("Processing on %s..." % s)
        # get redshift
        z = utils.get_redshift(s)
        if z != -1:
            # calc rate
            rate = utils.calc_rate(z)
            fp.write("%s\t%f\t%f\n" % (s, z, rate))
            # region exchange
            sample_path = args.inpath + '/' + s + '/'
            regpath = os.path.join(sample_path, 'cavities.reg')
            print(regpath)
            utils.reg_exchange(regpath, rate, unit='kpc')
    fp.close()

if __name__ == "__main__":
    main()
<commit_before><commit_msg>Transform units of cavity regions in Shin et al.'s 2016 paper.<commit_after>
# Copyright (C) 2016 Zhixian MA <zxma_sjtu@qq.com>
"""
Transform cavity regions of samples selected from the paper arXiv-1610.03487.

Reference
---------
[1] J. Shin, J. Woo, and J. Mulchaey
"A systematic search for X-ray cavities in galaxy clusters, groups,
and elliptical galaxies"
arXiv-1610.03487
"""

import os
import argparse

import cnn_utils as utils


def main():
    """The main function"""
    # Init
    parser = argparse.ArgumentParser(description="Region transform")
    # parameters
    parser.add_argument("inpath", help="path holding the samples.")
    args = parser.parse_args()

    # Check existence of the input path
    # (os.listdir raises OSError, not IOError, on a missing path)
    try:
        samples = os.listdir(args.inpath)
    except OSError:
        print("Inpath does not exist.")
        return

    # Transform regions
    fp = open("sample_z.log", 'a')
    for s in samples:
        print("Processing on %s..." % s)
        # get redshift
        z = utils.get_redshift(s)
        if z != -1:
            # calc rate
            rate = utils.calc_rate(z)
            fp.write("%s\t%f\t%f\n" % (s, z, rate))
            # region exchange
            sample_path = args.inpath + '/' + s + '/'
            regpath = os.path.join(sample_path, 'cavities.reg')
            print(regpath)
            utils.reg_exchange(regpath, rate, unit='kpc')
    fp.close()

if __name__ == "__main__":
    main()
Transform units of cavity regions in Shin et al.'s 2016 paper.
# Copyright (C) 2016 Zhixian MA <zxma_sjtu@qq.com>
"""
Transform cavity regions of samples selected from the paper arXiv-1610.03487.

Reference
---------
[1] J. Shin, J. Woo, and J. Mulchaey
"A systematic search for X-ray cavities in galaxy clusters, groups,
and elliptical galaxies"
arXiv-1610.03487
"""

import os
import argparse

import cnn_utils as utils


def main():
    """The main function"""
    # Init
    parser = argparse.ArgumentParser(description="Region transform")
    # parameters
    parser.add_argument("inpath", help="path holding the samples.")
    args = parser.parse_args()

    # Check existence of the input path
    # (os.listdir raises OSError, not IOError, on a missing path)
    try:
        samples = os.listdir(args.inpath)
    except OSError:
        print("Inpath does not exist.")
        return

    # Transform regions
    fp = open("sample_z.log", 'a')
    for s in samples:
        print("Processing on %s..." % s)
        # get redshift
        z = utils.get_redshift(s)
        if z != -1:
            # calc rate
            rate = utils.calc_rate(z)
            fp.write("%s\t%f\t%f\n" % (s, z, rate))
            # region exchange
            sample_path = args.inpath + '/' + s + '/'
            regpath = os.path.join(sample_path, 'cavities.reg')
            print(regpath)
            utils.reg_exchange(regpath, rate, unit='kpc')
    fp.close()

if __name__ == "__main__":
    main()
<commit_before><commit_msg>Transform units of cavity regions in Shin et al.'s 2016 paper.<commit_after>
# Copyright (C) 2016 Zhixian MA <zxma_sjtu@qq.com>
"""
Transform cavity regions of samples selected from the paper arXiv-1610.03487.

Reference
---------
[1] J. Shin, J. Woo, and J. Mulchaey
"A systematic search for X-ray cavities in galaxy clusters, groups,
and elliptical galaxies"
arXiv-1610.03487
"""

import os
import argparse

import cnn_utils as utils


def main():
    """The main function"""
    # Init
    parser = argparse.ArgumentParser(description="Region transform")
    # parameters
    parser.add_argument("inpath", help="path holding the samples.")
    args = parser.parse_args()

    # Check existence of the input path
    # (os.listdir raises OSError, not IOError, on a missing path)
    try:
        samples = os.listdir(args.inpath)
    except OSError:
        print("Inpath does not exist.")
        return

    # Transform regions
    fp = open("sample_z.log", 'a')
    for s in samples:
        print("Processing on %s..." % s)
        # get redshift
        z = utils.get_redshift(s)
        if z != -1:
            # calc rate
            rate = utils.calc_rate(z)
            fp.write("%s\t%f\t%f\n" % (s, z, rate))
            # region exchange
            sample_path = args.inpath + '/' + s + '/'
            regpath = os.path.join(sample_path, 'cavities.reg')
            print(regpath)
            utils.reg_exchange(regpath, rate, unit='kpc')
    fp.close()

if __name__ == "__main__":
    main()
349cf99e67279a7d26abbff9d9ca09eed8300dda
examples/test_canvas.py
examples/test_canvas.py
from seleniumbase import BaseCase class CanvasTests(BaseCase): def get_pixel_colors(self): # Return the RGB colors of the canvas element's top left pixel x = 0 y = 0 if self.browser == "safari": x = 1 y = 1 color = self.execute_script( "return document.querySelector('canvas').getContext('2d')" ".getImageData(%s,%s,1,1).data;" % (x, y) ) if self.is_chromium(): return [color[0], color[1], color[2]] else: return [color['0'], color['1'], color['2']] def test_canvas_actions(self): self.open("https://seleniumbase.io/canvas/") self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [221, 242, 231]) # Looks greenish self.click_with_offset("canvas", 500, 350) self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [39, 42, 56]) # Blue by hamburger def test_canvas_click(self): self.open("https://seleniumbase.io/other/canvas") self.click_with_offset("canvas", 300, 200) self.sleep(1) # Not needed (Lets you see the alert pop up) alert = self.switch_to_alert() self.assert_equal(alert.text, "You clicked on the square!") self.accept_alert() self.sleep(1) # Not needed (Lets you see the alert go away)
Add example tests for canvas actions
Add example tests for canvas actions
Python
mit
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase
Add example tests for canvas actions
from seleniumbase import BaseCase class CanvasTests(BaseCase): def get_pixel_colors(self): # Return the RGB colors of the canvas element's top left pixel x = 0 y = 0 if self.browser == "safari": x = 1 y = 1 color = self.execute_script( "return document.querySelector('canvas').getContext('2d')" ".getImageData(%s,%s,1,1).data;" % (x, y) ) if self.is_chromium(): return [color[0], color[1], color[2]] else: return [color['0'], color['1'], color['2']] def test_canvas_actions(self): self.open("https://seleniumbase.io/canvas/") self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [221, 242, 231]) # Looks greenish self.click_with_offset("canvas", 500, 350) self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [39, 42, 56]) # Blue by hamburger def test_canvas_click(self): self.open("https://seleniumbase.io/other/canvas") self.click_with_offset("canvas", 300, 200) self.sleep(1) # Not needed (Lets you see the alert pop up) alert = self.switch_to_alert() self.assert_equal(alert.text, "You clicked on the square!") self.accept_alert() self.sleep(1) # Not needed (Lets you see the alert go away)
<commit_before><commit_msg>Add example tests for canvas actions<commit_after>
from seleniumbase import BaseCase class CanvasTests(BaseCase): def get_pixel_colors(self): # Return the RGB colors of the canvas element's top left pixel x = 0 y = 0 if self.browser == "safari": x = 1 y = 1 color = self.execute_script( "return document.querySelector('canvas').getContext('2d')" ".getImageData(%s,%s,1,1).data;" % (x, y) ) if self.is_chromium(): return [color[0], color[1], color[2]] else: return [color['0'], color['1'], color['2']] def test_canvas_actions(self): self.open("https://seleniumbase.io/canvas/") self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [221, 242, 231]) # Looks greenish self.click_with_offset("canvas", 500, 350) self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [39, 42, 56]) # Blue by hamburger def test_canvas_click(self): self.open("https://seleniumbase.io/other/canvas") self.click_with_offset("canvas", 300, 200) self.sleep(1) # Not needed (Lets you see the alert pop up) alert = self.switch_to_alert() self.assert_equal(alert.text, "You clicked on the square!") self.accept_alert() self.sleep(1) # Not needed (Lets you see the alert go away)
Add example tests for canvas actionsfrom seleniumbase import BaseCase class CanvasTests(BaseCase): def get_pixel_colors(self): # Return the RGB colors of the canvas element's top left pixel x = 0 y = 0 if self.browser == "safari": x = 1 y = 1 color = self.execute_script( "return document.querySelector('canvas').getContext('2d')" ".getImageData(%s,%s,1,1).data;" % (x, y) ) if self.is_chromium(): return [color[0], color[1], color[2]] else: return [color['0'], color['1'], color['2']] def test_canvas_actions(self): self.open("https://seleniumbase.io/canvas/") self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [221, 242, 231]) # Looks greenish self.click_with_offset("canvas", 500, 350) self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [39, 42, 56]) # Blue by hamburger def test_canvas_click(self): self.open("https://seleniumbase.io/other/canvas") self.click_with_offset("canvas", 300, 200) self.sleep(1) # Not needed (Lets you see the alert pop up) alert = self.switch_to_alert() self.assert_equal(alert.text, "You clicked on the square!") self.accept_alert() self.sleep(1) # Not needed (Lets you see the alert go away)
<commit_before><commit_msg>Add example tests for canvas actions<commit_after>from seleniumbase import BaseCase class CanvasTests(BaseCase): def get_pixel_colors(self): # Return the RGB colors of the canvas element's top left pixel x = 0 y = 0 if self.browser == "safari": x = 1 y = 1 color = self.execute_script( "return document.querySelector('canvas').getContext('2d')" ".getImageData(%s,%s,1,1).data;" % (x, y) ) if self.is_chromium(): return [color[0], color[1], color[2]] else: return [color['0'], color['1'], color['2']] def test_canvas_actions(self): self.open("https://seleniumbase.io/canvas/") self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [221, 242, 231]) # Looks greenish self.click_with_offset("canvas", 500, 350) self.highlight("canvas") rgb = self.get_pixel_colors() self.assert_equal(rgb, [39, 42, 56]) # Blue by hamburger def test_canvas_click(self): self.open("https://seleniumbase.io/other/canvas") self.click_with_offset("canvas", 300, 200) self.sleep(1) # Not needed (Lets you see the alert pop up) alert = self.switch_to_alert() self.assert_equal(alert.text, "You clicked on the square!") self.accept_alert() self.sleep(1) # Not needed (Lets you see the alert go away)
3942b52249a061dabb0560941aeb9dd135245de9
gooey/gui/formatters.py
gooey/gui/formatters.py
import os

from gooey.gui.util.quoting import quote


def checkbox(metadata, value):
    return metadata['commands'][0] if value else None

def radioGroup(metadata, value):
    # TODO: translated from the old widget logic; `value` is the list of
    # per-option selection states and `metadata['commands']` the options
    try:
        return metadata['commands'][value.index(True)][0]
    except ValueError:
        return None

def multiFileChooser(metadata, value):
    paths = ' '.join(quote(x) for x in value.split(os.pathsep) if x)
    if metadata['commands'] and paths:
        return u'{} {}'.format(metadata['commands'][0], paths)
    return paths or None

def textArea(metadata, value):
    if metadata['commands'] and value:
        return '{} {}'.format(metadata['commands'][0], quote(value.encode('unicode_escape')))
    else:
        return quote(value.encode('unicode_escape')) if value else ''

def commandField(metadata, value):
    if metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], value)
    else:
        return value or None

def counter(metadata, value):
    '''
    Returns str(option_string * DropDown Value)
    e.g.
    -vvvvv
    '''
    if not str(value).isdigit():
        return None
    arg = str(metadata['commands'][0]).replace('-', '')
    repeated_args = arg * int(value)
    return '-' + repeated_args

def dropdown(metadata, value):
    if value == 'Select Option':
        return None
    elif metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], quote(value))
    else:
        return quote(value) if value else ''

def listbox(meta, value):
    if meta['commands'] and value:
        return u'{} {}'.format(meta['commands'][0], ' '.join(map(quote, value)))
    else:
        return ' '.join(map(quote, value)) if value else ''

def general(metadata, value):
    if metadata.get('commands') and value:
        if not metadata.get('nargs'):
            v = quote(value)
        else:
            v = value
        return u'{0} {1}'.format(metadata['commands'][0], v)
    else:
        if not value:
            return None
        elif not metadata.get('nargs'):
            return quote(value)
        else:
            return value
Move all formatting logic into a single module
Move all formatting logic into a single module
Python
mit
chriskiehl/Gooey,codingsnippets/Gooey,partrita/Gooey
Move all formatting logic into a single module
import os

from gooey.gui.util.quoting import quote


def checkbox(metadata, value):
    return metadata['commands'][0] if value else None

def radioGroup(metadata, value):
    # TODO: translated from the old widget logic; `value` is the list of
    # per-option selection states and `metadata['commands']` the options
    try:
        return metadata['commands'][value.index(True)][0]
    except ValueError:
        return None

def multiFileChooser(metadata, value):
    paths = ' '.join(quote(x) for x in value.split(os.pathsep) if x)
    if metadata['commands'] and paths:
        return u'{} {}'.format(metadata['commands'][0], paths)
    return paths or None

def textArea(metadata, value):
    if metadata['commands'] and value:
        return '{} {}'.format(metadata['commands'][0], quote(value.encode('unicode_escape')))
    else:
        return quote(value.encode('unicode_escape')) if value else ''

def commandField(metadata, value):
    if metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], value)
    else:
        return value or None

def counter(metadata, value):
    '''
    Returns str(option_string * DropDown Value)
    e.g.
    -vvvvv
    '''
    if not str(value).isdigit():
        return None
    arg = str(metadata['commands'][0]).replace('-', '')
    repeated_args = arg * int(value)
    return '-' + repeated_args

def dropdown(metadata, value):
    if value == 'Select Option':
        return None
    elif metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], quote(value))
    else:
        return quote(value) if value else ''

def listbox(meta, value):
    if meta['commands'] and value:
        return u'{} {}'.format(meta['commands'][0], ' '.join(map(quote, value)))
    else:
        return ' '.join(map(quote, value)) if value else ''

def general(metadata, value):
    if metadata.get('commands') and value:
        if not metadata.get('nargs'):
            v = quote(value)
        else:
            v = value
        return u'{0} {1}'.format(metadata['commands'][0], v)
    else:
        if not value:
            return None
        elif not metadata.get('nargs'):
            return quote(value)
        else:
            return value
<commit_before><commit_msg>Move all formatting logic into a single module<commit_after>
import os

from gooey.gui.util.quoting import quote


def checkbox(metadata, value):
    return metadata['commands'][0] if value else None

def radioGroup(metadata, value):
    # TODO: translated from the old widget logic; `value` is the list of
    # per-option selection states and `metadata['commands']` the options
    try:
        return metadata['commands'][value.index(True)][0]
    except ValueError:
        return None

def multiFileChooser(metadata, value):
    paths = ' '.join(quote(x) for x in value.split(os.pathsep) if x)
    if metadata['commands'] and paths:
        return u'{} {}'.format(metadata['commands'][0], paths)
    return paths or None

def textArea(metadata, value):
    if metadata['commands'] and value:
        return '{} {}'.format(metadata['commands'][0], quote(value.encode('unicode_escape')))
    else:
        return quote(value.encode('unicode_escape')) if value else ''

def commandField(metadata, value):
    if metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], value)
    else:
        return value or None

def counter(metadata, value):
    '''
    Returns str(option_string * DropDown Value)
    e.g.
    -vvvvv
    '''
    if not str(value).isdigit():
        return None
    arg = str(metadata['commands'][0]).replace('-', '')
    repeated_args = arg * int(value)
    return '-' + repeated_args

def dropdown(metadata, value):
    if value == 'Select Option':
        return None
    elif metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], quote(value))
    else:
        return quote(value) if value else ''

def listbox(meta, value):
    if meta['commands'] and value:
        return u'{} {}'.format(meta['commands'][0], ' '.join(map(quote, value)))
    else:
        return ' '.join(map(quote, value)) if value else ''

def general(metadata, value):
    if metadata.get('commands') and value:
        if not metadata.get('nargs'):
            v = quote(value)
        else:
            v = value
        return u'{0} {1}'.format(metadata['commands'][0], v)
    else:
        if not value:
            return None
        elif not metadata.get('nargs'):
            return quote(value)
        else:
            return value
Move all formatting logic into a single module
import os

from gooey.gui.util.quoting import quote


def checkbox(metadata, value):
    return metadata['commands'][0] if value else None

def radioGroup(metadata, value):
    # TODO: translated from the old widget logic; `value` is the list of
    # per-option selection states and `metadata['commands']` the options
    try:
        return metadata['commands'][value.index(True)][0]
    except ValueError:
        return None

def multiFileChooser(metadata, value):
    paths = ' '.join(quote(x) for x in value.split(os.pathsep) if x)
    if metadata['commands'] and paths:
        return u'{} {}'.format(metadata['commands'][0], paths)
    return paths or None

def textArea(metadata, value):
    if metadata['commands'] and value:
        return '{} {}'.format(metadata['commands'][0], quote(value.encode('unicode_escape')))
    else:
        return quote(value.encode('unicode_escape')) if value else ''

def commandField(metadata, value):
    if metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], value)
    else:
        return value or None

def counter(metadata, value):
    '''
    Returns str(option_string * DropDown Value)
    e.g.
    -vvvvv
    '''
    if not str(value).isdigit():
        return None
    arg = str(metadata['commands'][0]).replace('-', '')
    repeated_args = arg * int(value)
    return '-' + repeated_args

def dropdown(metadata, value):
    if value == 'Select Option':
        return None
    elif metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], quote(value))
    else:
        return quote(value) if value else ''

def listbox(meta, value):
    if meta['commands'] and value:
        return u'{} {}'.format(meta['commands'][0], ' '.join(map(quote, value)))
    else:
        return ' '.join(map(quote, value)) if value else ''

def general(metadata, value):
    if metadata.get('commands') and value:
        if not metadata.get('nargs'):
            v = quote(value)
        else:
            v = value
        return u'{0} {1}'.format(metadata['commands'][0], v)
    else:
        if not value:
            return None
        elif not metadata.get('nargs'):
            return quote(value)
        else:
            return value
<commit_before><commit_msg>Move all formatting logic into a single module<commit_after>
import os

from gooey.gui.util.quoting import quote


def checkbox(metadata, value):
    return metadata['commands'][0] if value else None

def radioGroup(metadata, value):
    # TODO: translated from the old widget logic; `value` is the list of
    # per-option selection states and `metadata['commands']` the options
    try:
        return metadata['commands'][value.index(True)][0]
    except ValueError:
        return None

def multiFileChooser(metadata, value):
    paths = ' '.join(quote(x) for x in value.split(os.pathsep) if x)
    if metadata['commands'] and paths:
        return u'{} {}'.format(metadata['commands'][0], paths)
    return paths or None

def textArea(metadata, value):
    if metadata['commands'] and value:
        return '{} {}'.format(metadata['commands'][0], quote(value.encode('unicode_escape')))
    else:
        return quote(value.encode('unicode_escape')) if value else ''

def commandField(metadata, value):
    if metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], value)
    else:
        return value or None

def counter(metadata, value):
    '''
    Returns str(option_string * DropDown Value)
    e.g.
    -vvvvv
    '''
    if not str(value).isdigit():
        return None
    arg = str(metadata['commands'][0]).replace('-', '')
    repeated_args = arg * int(value)
    return '-' + repeated_args

def dropdown(metadata, value):
    if value == 'Select Option':
        return None
    elif metadata['commands'] and value:
        return u'{} {}'.format(metadata['commands'][0], quote(value))
    else:
        return quote(value) if value else ''

def listbox(meta, value):
    if meta['commands'] and value:
        return u'{} {}'.format(meta['commands'][0], ' '.join(map(quote, value)))
    else:
        return ' '.join(map(quote, value)) if value else ''

def general(metadata, value):
    if metadata.get('commands') and value:
        if not metadata.get('nargs'):
            v = quote(value)
        else:
            v = value
        return u'{0} {1}'.format(metadata['commands'][0], v)
    else:
        if not value:
            return None
        elif not metadata.get('nargs'):
            return quote(value)
        else:
            return value
bd19faaa16c92072dd1a597f35b173e20a768a5b
fpr/migrations/0017_ocr_unique_names.py
fpr/migrations/0017_ocr_unique_names.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
Add file UUID to OCR text file name
Add file UUID to OCR text file name Adds a migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. Fixes issue #66.
Python
agpl-3.0
artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin
Add file UUID to OCR text file name Adds a migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. Fixes issue #66.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
<commit_before><commit_msg>Add file UUID to OCR text file name Adds a migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. Fixes issue #66.<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
Add file UUID to OCR text file name Adds a migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. Fixes issue #66.# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
<commit_before><commit_msg>Add file UUID to OCR text file name Adds a migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. Fixes issue #66.<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations def data_migration(apps, schema_editor): """Migration that causes each OCR text file to include the UUID of its source file in its filename. This prevents OCR text files from overwriting one another when there are two identically named source files in a transfer. See https://github.com/artefactual/archivematica-fpr-admin/issues/66 """ IDCommand = apps.get_model('fpr', 'IDCommand') ocr_command = IDCommand.objects.get( uuid='5d501dbf-76bb-4569-a9db-9e367800995e') ocr_command.command = ( 'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n' 'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' 'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"') ocr_command.output_location = ( '%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt') ocr_command.save() class Migration(migrations.Migration): dependencies = [ ('fpr', '0016_update_idtools'), ] operations = [ migrations.RunPython(data_migration), ]
fb4212e1e3de5a63652561066d8a30c833607b28
alembic/versions/551bc42a839_add_role_type.py
alembic/versions/551bc42a839_add_role_type.py
"""add_role_type Revision ID: 551bc42a839 Revises: 41af1c8d394 Create Date: 2016-02-02 17:19:21.998718 """ # revision identifiers, used by Alembic. revision = '551bc42a839' down_revision = '41af1c8d394' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._role ADD COLUMN by_default boolean DEFAULT FALSE; ALTER TABLE app_name._role ADD COLUMN is_admin boolean DEFAULT FALSE; UPDATE app_name._version set version_num = '551bc42a839; """ op.add_column('_role', sa.Column('is_admin', sa.Boolean, server_default="FALSE")) op.add_column('_role', sa.Column('by_default', sa.Boolean, server_default="FALSE")) def downgrade(): op.drop_column('_role', 'is_admin') op.drop_column('_role', 'by_default')
Add alembic migration for role type
Add alembic migration for role type refs #295
Python
apache-2.0
rickmak/skygear-server,SkygearIO/skygear-server,rickmak/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,SkygearIO/skygear-server,SkygearIO/skygear-server
Add alembic migration for role type refs #295
"""add_role_type Revision ID: 551bc42a839 Revises: 41af1c8d394 Create Date: 2016-02-02 17:19:21.998718 """ # revision identifiers, used by Alembic. revision = '551bc42a839' down_revision = '41af1c8d394' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._role ADD COLUMN by_default boolean DEFAULT FALSE; ALTER TABLE app_name._role ADD COLUMN is_admin boolean DEFAULT FALSE; UPDATE app_name._version set version_num = '551bc42a839; """ op.add_column('_role', sa.Column('is_admin', sa.Boolean, server_default="FALSE")) op.add_column('_role', sa.Column('by_default', sa.Boolean, server_default="FALSE")) def downgrade(): op.drop_column('_role', 'is_admin') op.drop_column('_role', 'by_default')
<commit_before><commit_msg>Add alembic migration for role type refs #295<commit_after>
"""add_role_type Revision ID: 551bc42a839 Revises: 41af1c8d394 Create Date: 2016-02-02 17:19:21.998718 """ # revision identifiers, used by Alembic. revision = '551bc42a839' down_revision = '41af1c8d394' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._role ADD COLUMN by_default boolean DEFAULT FALSE; ALTER TABLE app_name._role ADD COLUMN is_admin boolean DEFAULT FALSE; UPDATE app_name._version set version_num = '551bc42a839; """ op.add_column('_role', sa.Column('is_admin', sa.Boolean, server_default="FALSE")) op.add_column('_role', sa.Column('by_default', sa.Boolean, server_default="FALSE")) def downgrade(): op.drop_column('_role', 'is_admin') op.drop_column('_role', 'by_default')
Add alembic migration for role type refs #295"""add_role_type Revision ID: 551bc42a839 Revises: 41af1c8d394 Create Date: 2016-02-02 17:19:21.998718 """ # revision identifiers, used by Alembic. revision = '551bc42a839' down_revision = '41af1c8d394' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._role ADD COLUMN by_default boolean DEFAULT FALSE; ALTER TABLE app_name._role ADD COLUMN is_admin boolean DEFAULT FALSE; UPDATE app_name._version set version_num = '551bc42a839; """ op.add_column('_role', sa.Column('is_admin', sa.Boolean, server_default="FALSE")) op.add_column('_role', sa.Column('by_default', sa.Boolean, server_default="FALSE")) def downgrade(): op.drop_column('_role', 'is_admin') op.drop_column('_role', 'by_default')
<commit_before><commit_msg>Add alembic migration for role type refs #295<commit_after>"""add_role_type Revision ID: 551bc42a839 Revises: 41af1c8d394 Create Date: 2016-02-02 17:19:21.998718 """ # revision identifiers, used by Alembic. revision = '551bc42a839' down_revision = '41af1c8d394' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._role ADD COLUMN by_default boolean DEFAULT FALSE; ALTER TABLE app_name._role ADD COLUMN is_admin boolean DEFAULT FALSE; UPDATE app_name._version set version_num = '551bc42a839; """ op.add_column('_role', sa.Column('is_admin', sa.Boolean, server_default="FALSE")) op.add_column('_role', sa.Column('by_default', sa.Boolean, server_default="FALSE")) def downgrade(): op.drop_column('_role', 'is_admin') op.drop_column('_role', 'by_default')
5c063317bbf1c67b1e17992ec338b00451735296
src/website/migrations/0005_auto_20180621_1659.py
src/website/migrations/0005_auto_20180621_1659.py
# Generated by Django 2.0.6 on 2018-06-21 07:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0004_auto_20170713_2346'), ] operations = [ migrations.AlterField( model_name='user', name='last_name', field=models.CharField(blank=True, max_length=150, verbose_name='last name'), ), ]
Add migrations for website app
Add migrations for website app
Python
apache-2.0
PLUS-POSTECH/study.plus.or.kr,PLUS-POSTECH/study.plus.or.kr,PLUS-POSTECH/study.plus.or.kr
Add migrations for website app
# Generated by Django 2.0.6 on 2018-06-21 07:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0004_auto_20170713_2346'), ] operations = [ migrations.AlterField( model_name='user', name='last_name', field=models.CharField(blank=True, max_length=150, verbose_name='last name'), ), ]
<commit_before><commit_msg>Add migrations for website app<commit_after>
# Generated by Django 2.0.6 on 2018-06-21 07:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0004_auto_20170713_2346'), ] operations = [ migrations.AlterField( model_name='user', name='last_name', field=models.CharField(blank=True, max_length=150, verbose_name='last name'), ), ]
Add migrations for website app# Generated by Django 2.0.6 on 2018-06-21 07:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0004_auto_20170713_2346'), ] operations = [ migrations.AlterField( model_name='user', name='last_name', field=models.CharField(blank=True, max_length=150, verbose_name='last name'), ), ]
<commit_before><commit_msg>Add migrations for website app<commit_after># Generated by Django 2.0.6 on 2018-06-21 07:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('website', '0004_auto_20170713_2346'), ] operations = [ migrations.AlterField( model_name='user', name='last_name', field=models.CharField(blank=True, max_length=150, verbose_name='last name'), ), ]
be7a803888793a09ac714220be08f3914034c1bb
go/apps/jsbox/tests/test_views.py
go/apps/jsbox/tests/test_views.py
from django.test.client import Client from django.core.urlresolvers import reverse from go.apps.tests.base import DjangoGoApplicationTestCase class JsBoxTestCase(DjangoGoApplicationTestCase): def setUp(self): super(JsBoxTestCase, self).setUp() self.setup_riak_fixtures() self.client = Client() self.client.login(username='username', password='password') def test_new_conversation(self): # render the form self.assertEqual(len(self.conv_store.list_conversations()), 1) response = self.client.get(reverse('jsbox:new')) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:new'), { 'subject': 'the subject', 'message': 'the message', 'delivery_class': 'sms', 'delivery_tag_pool': 'longcode', }) self.assertEqual(len(self.conv_store.list_conversations()), 2) conversation = self.get_latest_conversation() self.assertEqual(conversation.delivery_class, 'sms') self.assertEqual(conversation.delivery_tag_pool, 'longcode') self.assertEqual(conversation.delivery_tag, None) self.assertEqual(conversation.metadata, None) self.assertRedirects(response, reverse('jsbox:edit', kwargs={ 'conversation_key': conversation.key, })) def test_edit_conversation(self): # render the form [conversation_key] = self.conv_store.list_conversations() kwargs = {'conversation_key': conversation_key} response = self.client.get(reverse('jsbox:edit', kwargs=kwargs)) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:edit', kwargs=kwargs), { 'jsbox-javascript': 'x = 1;', 'jsbox-source_url': '', 'jsbox-update_from_source': '0', }) self.assertRedirects(response, reverse('jsbox:people', kwargs=kwargs)) conversation = self.get_latest_conversation() self.assertEqual(conversation.metadata, { 'jsbox': { 'javascript': 'x = 1;', 'source_url': '', }, })
Add basic tests for views.
Add basic tests for views.
Python
bsd-3-clause
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
Add basic tests for views.
from django.test.client import Client from django.core.urlresolvers import reverse from go.apps.tests.base import DjangoGoApplicationTestCase class JsBoxTestCase(DjangoGoApplicationTestCase): def setUp(self): super(JsBoxTestCase, self).setUp() self.setup_riak_fixtures() self.client = Client() self.client.login(username='username', password='password') def test_new_conversation(self): # render the form self.assertEqual(len(self.conv_store.list_conversations()), 1) response = self.client.get(reverse('jsbox:new')) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:new'), { 'subject': 'the subject', 'message': 'the message', 'delivery_class': 'sms', 'delivery_tag_pool': 'longcode', }) self.assertEqual(len(self.conv_store.list_conversations()), 2) conversation = self.get_latest_conversation() self.assertEqual(conversation.delivery_class, 'sms') self.assertEqual(conversation.delivery_tag_pool, 'longcode') self.assertEqual(conversation.delivery_tag, None) self.assertEqual(conversation.metadata, None) self.assertRedirects(response, reverse('jsbox:edit', kwargs={ 'conversation_key': conversation.key, })) def test_edit_conversation(self): # render the form [conversation_key] = self.conv_store.list_conversations() kwargs = {'conversation_key': conversation_key} response = self.client.get(reverse('jsbox:edit', kwargs=kwargs)) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:edit', kwargs=kwargs), { 'jsbox-javascript': 'x = 1;', 'jsbox-source_url': '', 'jsbox-update_from_source': '0', }) self.assertRedirects(response, reverse('jsbox:people', kwargs=kwargs)) conversation = self.get_latest_conversation() self.assertEqual(conversation.metadata, { 'jsbox': { 'javascript': 'x = 1;', 'source_url': '', }, })
<commit_before><commit_msg>Add basic tests for views.<commit_after>
from django.test.client import Client from django.core.urlresolvers import reverse from go.apps.tests.base import DjangoGoApplicationTestCase class JsBoxTestCase(DjangoGoApplicationTestCase): def setUp(self): super(JsBoxTestCase, self).setUp() self.setup_riak_fixtures() self.client = Client() self.client.login(username='username', password='password') def test_new_conversation(self): # render the form self.assertEqual(len(self.conv_store.list_conversations()), 1) response = self.client.get(reverse('jsbox:new')) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:new'), { 'subject': 'the subject', 'message': 'the message', 'delivery_class': 'sms', 'delivery_tag_pool': 'longcode', }) self.assertEqual(len(self.conv_store.list_conversations()), 2) conversation = self.get_latest_conversation() self.assertEqual(conversation.delivery_class, 'sms') self.assertEqual(conversation.delivery_tag_pool, 'longcode') self.assertEqual(conversation.delivery_tag, None) self.assertEqual(conversation.metadata, None) self.assertRedirects(response, reverse('jsbox:edit', kwargs={ 'conversation_key': conversation.key, })) def test_edit_conversation(self): # render the form [conversation_key] = self.conv_store.list_conversations() kwargs = {'conversation_key': conversation_key} response = self.client.get(reverse('jsbox:edit', kwargs=kwargs)) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:edit', kwargs=kwargs), { 'jsbox-javascript': 'x = 1;', 'jsbox-source_url': '', 'jsbox-update_from_source': '0', }) self.assertRedirects(response, reverse('jsbox:people', kwargs=kwargs)) conversation = self.get_latest_conversation() self.assertEqual(conversation.metadata, { 'jsbox': { 'javascript': 'x = 1;', 'source_url': '', }, })
Add basic tests for views.from django.test.client import Client from django.core.urlresolvers import reverse from go.apps.tests.base import DjangoGoApplicationTestCase class JsBoxTestCase(DjangoGoApplicationTestCase): def setUp(self): super(JsBoxTestCase, self).setUp() self.setup_riak_fixtures() self.client = Client() self.client.login(username='username', password='password') def test_new_conversation(self): # render the form self.assertEqual(len(self.conv_store.list_conversations()), 1) response = self.client.get(reverse('jsbox:new')) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:new'), { 'subject': 'the subject', 'message': 'the message', 'delivery_class': 'sms', 'delivery_tag_pool': 'longcode', }) self.assertEqual(len(self.conv_store.list_conversations()), 2) conversation = self.get_latest_conversation() self.assertEqual(conversation.delivery_class, 'sms') self.assertEqual(conversation.delivery_tag_pool, 'longcode') self.assertEqual(conversation.delivery_tag, None) self.assertEqual(conversation.metadata, None) self.assertRedirects(response, reverse('jsbox:edit', kwargs={ 'conversation_key': conversation.key, })) def test_edit_conversation(self): # render the form [conversation_key] = self.conv_store.list_conversations() kwargs = {'conversation_key': conversation_key} response = self.client.get(reverse('jsbox:edit', kwargs=kwargs)) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:edit', kwargs=kwargs), { 'jsbox-javascript': 'x = 1;', 'jsbox-source_url': '', 'jsbox-update_from_source': '0', }) self.assertRedirects(response, reverse('jsbox:people', kwargs=kwargs)) conversation = self.get_latest_conversation() self.assertEqual(conversation.metadata, { 'jsbox': { 'javascript': 'x = 1;', 'source_url': '', }, })
<commit_before><commit_msg>Add basic tests for views.<commit_after>from django.test.client import Client from django.core.urlresolvers import reverse from go.apps.tests.base import DjangoGoApplicationTestCase class JsBoxTestCase(DjangoGoApplicationTestCase): def setUp(self): super(JsBoxTestCase, self).setUp() self.setup_riak_fixtures() self.client = Client() self.client.login(username='username', password='password') def test_new_conversation(self): # render the form self.assertEqual(len(self.conv_store.list_conversations()), 1) response = self.client.get(reverse('jsbox:new')) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:new'), { 'subject': 'the subject', 'message': 'the message', 'delivery_class': 'sms', 'delivery_tag_pool': 'longcode', }) self.assertEqual(len(self.conv_store.list_conversations()), 2) conversation = self.get_latest_conversation() self.assertEqual(conversation.delivery_class, 'sms') self.assertEqual(conversation.delivery_tag_pool, 'longcode') self.assertEqual(conversation.delivery_tag, None) self.assertEqual(conversation.metadata, None) self.assertRedirects(response, reverse('jsbox:edit', kwargs={ 'conversation_key': conversation.key, })) def test_edit_conversation(self): # render the form [conversation_key] = self.conv_store.list_conversations() kwargs = {'conversation_key': conversation_key} response = self.client.get(reverse('jsbox:edit', kwargs=kwargs)) self.assertEqual(response.status_code, 200) # post the form response = self.client.post(reverse('jsbox:edit', kwargs=kwargs), { 'jsbox-javascript': 'x = 1;', 'jsbox-source_url': '', 'jsbox-update_from_source': '0', }) self.assertRedirects(response, reverse('jsbox:people', kwargs=kwargs)) conversation = self.get_latest_conversation() self.assertEqual(conversation.metadata, { 'jsbox': { 'javascript': 'x = 1;', 'source_url': '', }, })
481df944700297300892bd14783310aad14c093c
test/selenium/src/lib/page/modal/delete_object.py
test/selenium/src/lib/page/modal/delete_object.py
# Copyright (C) 2016 Google Inc. # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> """Modals for deleting objects""" from lib import base from lib import decorator from lib.constants import locator class DeleteObjectModal(base.Modal): """A generic modal for deleting an object""" _page_model_cls_after_redirect = None _locator = locator.ModalDeleteObject def __init__(self, driver): super(DeleteObjectModal, self).__init__(driver) self.title_modal = base.Label( driver, self._locator.MODAL_TITLE) self.confirmation_text = base.Label( driver, self._locator.CONFIRMATION_TEXT) self.title_object = base.Label( driver, self._locator.OBJECT_TITLE) self.button_delete = base.Button( driver, self._locator.BUTTON_DELETE) @decorator.wait_for_redirect def confirm_delete(self): """ Returns: lib.page.dashboard.Dashboard """ self.button_delete.click()
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
Remove redirect from delete modal page object
Remove redirect from delete modal page object (cherry picked from commit 480ecdb)
Python
apache-2.0
VinnieJohns/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib import decorator
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    @decorator.wait_for_redirect
    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
Remove redirect from delete modal page object (cherry picked from commit 480ecdb)
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
<commit_before># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib import decorator
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    @decorator.wait_for_redirect
    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
<commit_msg>Remove redirect from delete modal page object (cherry picked from commit 480ecdb)<commit_after>
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib import decorator
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    @decorator.wait_for_redirect
    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
Remove redirect from delete modal page object (cherry picked from commit 480ecdb)# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
<commit_before># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib import decorator
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    @decorator.wait_for_redirect
    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
<commit_msg>Remove redirect from delete modal page object (cherry picked from commit 480ecdb)<commit_after># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

"""Modals for deleting objects"""

from lib import base
from lib.constants import locator


class DeleteObjectModal(base.Modal):
    """A generic modal for deleting an object"""
    _page_model_cls_after_redirect = None
    _locator = locator.ModalDeleteObject

    def __init__(self, driver):
        super(DeleteObjectModal, self).__init__(driver)
        self.title_modal = base.Label(
            driver, self._locator.MODAL_TITLE)
        self.confirmation_text = base.Label(
            driver, self._locator.CONFIRMATION_TEXT)
        self.title_object = base.Label(
            driver, self._locator.OBJECT_TITLE)
        self.button_delete = base.Button(
            driver, self._locator.BUTTON_DELETE)

    def confirm_delete(self):
        """
        Returns:
            lib.page.dashboard.Dashboard
        """
        self.button_delete.click()
6031700ba86611809619409c2a83d551f622e391
hardware/gpio/TrafficLights_gpiozero.py
hardware/gpio/TrafficLights_gpiozero.py
from gpiozero import TrafficLights
import time


def british_lights_cycle(lights, cycle_time):
    lights.off()

    # Red
    lights.red.on()
    time.sleep(cycle_time)

    # Red and Amber
    lights.amber.on()
    time.sleep(cycle_time)

    # Green
    lights.red.off()
    lights.amber.off()
    lights.green.on()
    time.sleep(cycle_time)

    # Amber
    lights.green.off()
    lights.amber.on()
    time.sleep(cycle_time)

    # Red
    lights.amber.off()
    lights.red.on()
    time.sleep(cycle_time)


lights = TrafficLights(17, 23, 25)
british_lights_cycle(lights, 2)
lights.off()
print "done!"
Add GPIO-zero-based demo of 4Tronix Pi-Stop
Add GPIO-zero-based demo of 4Tronix Pi-Stop
Python
mit
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
Add GPIO-zero-based demo of 4Tronix Pi-Stop
from gpiozero import TrafficLights
import time


def british_lights_cycle(lights, cycle_time):
    lights.off()

    # Red
    lights.red.on()
    time.sleep(cycle_time)

    # Red and Amber
    lights.amber.on()
    time.sleep(cycle_time)

    # Green
    lights.red.off()
    lights.amber.off()
    lights.green.on()
    time.sleep(cycle_time)

    # Amber
    lights.green.off()
    lights.amber.on()
    time.sleep(cycle_time)

    # Red
    lights.amber.off()
    lights.red.on()
    time.sleep(cycle_time)


lights = TrafficLights(17, 23, 25)
british_lights_cycle(lights, 2)
lights.off()
print "done!"
<commit_before><commit_msg>Add GPIO-zero-based demo of 4Tronix Pi-Stop<commit_after>
from gpiozero import TrafficLights
import time


def british_lights_cycle(lights, cycle_time):
    lights.off()

    # Red
    lights.red.on()
    time.sleep(cycle_time)

    # Red and Amber
    lights.amber.on()
    time.sleep(cycle_time)

    # Green
    lights.red.off()
    lights.amber.off()
    lights.green.on()
    time.sleep(cycle_time)

    # Amber
    lights.green.off()
    lights.amber.on()
    time.sleep(cycle_time)

    # Red
    lights.amber.off()
    lights.red.on()
    time.sleep(cycle_time)


lights = TrafficLights(17, 23, 25)
british_lights_cycle(lights, 2)
lights.off()
print "done!"
Add GPIO-zero-based demo of 4Tronix Pi-Stopfrom gpiozero import TrafficLights
import time


def british_lights_cycle(lights, cycle_time):
    lights.off()

    # Red
    lights.red.on()
    time.sleep(cycle_time)

    # Red and Amber
    lights.amber.on()
    time.sleep(cycle_time)

    # Green
    lights.red.off()
    lights.amber.off()
    lights.green.on()
    time.sleep(cycle_time)

    # Amber
    lights.green.off()
    lights.amber.on()
    time.sleep(cycle_time)

    # Red
    lights.amber.off()
    lights.red.on()
    time.sleep(cycle_time)


lights = TrafficLights(17, 23, 25)
british_lights_cycle(lights, 2)
lights.off()
print "done!"
<commit_before><commit_msg>Add GPIO-zero-based demo of 4Tronix Pi-Stop<commit_after>from gpiozero import TrafficLights
import time


def british_lights_cycle(lights, cycle_time):
    lights.off()

    # Red
    lights.red.on()
    time.sleep(cycle_time)

    # Red and Amber
    lights.amber.on()
    time.sleep(cycle_time)

    # Green
    lights.red.off()
    lights.amber.off()
    lights.green.on()
    time.sleep(cycle_time)

    # Amber
    lights.green.off()
    lights.amber.on()
    time.sleep(cycle_time)

    # Red
    lights.amber.off()
    lights.red.on()
    time.sleep(cycle_time)


lights = TrafficLights(17, 23, 25)
british_lights_cycle(lights, 2)
lights.off()
print "done!"
f1d956a646b86da5fcb97cf918495ceaa1bbaa9b
ideascube/conf/idb_fra_canopecayenne.py
ideascube/conf/idb_fra_canopecayenne.py
from .idb import * # pragma: no flakes

CUSTOM_CARDS = [
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'create',
        'url': 'http://etherpad.ideasbox.lan',
        'title': 'Etherpad',
        'description': 'A collaborative text editor',
        # The name of a Font Awesome glyph
        'fa': 'font',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'learn',
        'url': 'http://moodle.ideasbox.lan',
        'title': 'Moodle',
        'description': 'Online courses',
        # The name of a Font Awesome glyph
        'fa': 'graduation-cap',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
]
Add conf file for Canope Guyane with Moodle and Etherpad card
Add conf file for Canope Guyane with Moodle and Etherpad card
Python
agpl-3.0
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
Add conf file for Canope Guyane with Moodle and Etherpad card
from .idb import * # pragma: no flakes

CUSTOM_CARDS = [
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'create',
        'url': 'http://etherpad.ideasbox.lan',
        'title': 'Etherpad',
        'description': 'A collaborative text editor',
        # The name of a Font Awesome glyph
        'fa': 'font',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'learn',
        'url': 'http://moodle.ideasbox.lan',
        'title': 'Moodle',
        'description': 'Online courses',
        # The name of a Font Awesome glyph
        'fa': 'graduation-cap',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
]
<commit_before><commit_msg>Add conf file for Canope Guyane with Moodle and Etherpad card<commit_after>
from .idb import * # pragma: no flakes

CUSTOM_CARDS = [
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'create',
        'url': 'http://etherpad.ideasbox.lan',
        'title': 'Etherpad',
        'description': 'A collaborative text editor',
        # The name of a Font Awesome glyph
        'fa': 'font',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'learn',
        'url': 'http://moodle.ideasbox.lan',
        'title': 'Moodle',
        'description': 'Online courses',
        # The name of a Font Awesome glyph
        'fa': 'graduation-cap',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
]
Add conf file for Canope Guyane with Moodle and Etherpad cardfrom .idb import * # pragma: no flakes

CUSTOM_CARDS = [
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'create',
        'url': 'http://etherpad.ideasbox.lan',
        'title': 'Etherpad',
        'description': 'A collaborative text editor',
        # The name of a Font Awesome glyph
        'fa': 'font',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'learn',
        'url': 'http://moodle.ideasbox.lan',
        'title': 'Moodle',
        'description': 'Online courses',
        # The name of a Font Awesome glyph
        'fa': 'graduation-cap',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
]
<commit_before><commit_msg>Add conf file for Canope Guyane with Moodle and Etherpad card<commit_after>from .idb import * # pragma: no flakes

CUSTOM_CARDS = [
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'create',
        'url': 'http://etherpad.ideasbox.lan',
        'title': 'Etherpad',
        'description': 'A collaborative text editor',
        # The name of a Font Awesome glyph
        'fa': 'font',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
    {
        # Must be one of create, discover, info, learn, manage, read
        'category': 'learn',
        'url': 'http://moodle.ideasbox.lan',
        'title': 'Moodle',
        'description': 'Online courses',
        # The name of a Font Awesome glyph
        'fa': 'graduation-cap',
        # True if the card should only be visible by the staff
        'is_staff': False
    },
]
32fb188b7569fae39979b8b0482e618fe5187d26
tests/test_ansi.py
tests/test_ansi.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Test the cprint function."""

from colorise.nix.color_functions import to_ansi

import pytest


@pytest.mark.skip_on_windows
def test_ansi():
    assert to_ansi(34, '95') == '\x1b[34;95m'
    assert to_ansi(0) == '\x1b[0m'
    assert to_ansi() == ''
Test ansi escape sequence function
Test ansi escape sequence function
Python
bsd-3-clause
MisanthropicBit/colorise
Test ansi escape sequence function
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Test the cprint function."""

from colorise.nix.color_functions import to_ansi

import pytest


@pytest.mark.skip_on_windows
def test_ansi():
    assert to_ansi(34, '95') == '\x1b[34;95m'
    assert to_ansi(0) == '\x1b[0m'
    assert to_ansi() == ''
<commit_before><commit_msg>Test ansi escape sequence function<commit_after>
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Test the cprint function."""

from colorise.nix.color_functions import to_ansi

import pytest


@pytest.mark.skip_on_windows
def test_ansi():
    assert to_ansi(34, '95') == '\x1b[34;95m'
    assert to_ansi(0) == '\x1b[0m'
    assert to_ansi() == ''
Test ansi escape sequence function#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Test the cprint function."""

from colorise.nix.color_functions import to_ansi

import pytest


@pytest.mark.skip_on_windows
def test_ansi():
    assert to_ansi(34, '95') == '\x1b[34;95m'
    assert to_ansi(0) == '\x1b[0m'
    assert to_ansi() == ''
<commit_before><commit_msg>Test ansi escape sequence function<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Test the cprint function."""

from colorise.nix.color_functions import to_ansi

import pytest


@pytest.mark.skip_on_windows
def test_ansi():
    assert to_ansi(34, '95') == '\x1b[34;95m'
    assert to_ansi(0) == '\x1b[0m'
    assert to_ansi() == ''
e2a5ead8838d484a2216a84b174418ec18fc21f0
extras/gallery_sync.py
extras/gallery_sync.py
#!/usr/bin/env python

"""Script to upload pictures to the gallery.

This script scans a local picture folder to determine which patients
have not yet been created in the gallery. It then creates the missing
patients.
"""

from getpass import getpass

import requests


API_URL = 'http://localhost:8000/gallery/api/patients/'
API_USER = 'chathan'
API_PASSWORD = getpass('API Password: ')


def get_patient_list():
    """Get a list of patients from the gallery api"""
    response = requests.get(API_URL, auth=(API_USER, API_PASSWORD))
    response.raise_for_status()

    return response.json()


if __name__ == '__main__':
    print(get_patient_list())
Add method to get list of patients from API.
Add method to get list of patients from API.
Python
mit
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
Add method to get list of patients from API.
#!/usr/bin/env python

"""Script to upload pictures to the gallery.

This script scans a local picture folder to determine which patients
have not yet been created in the gallery. It then creates the missing
patients.
"""

from getpass import getpass

import requests


API_URL = 'http://localhost:8000/gallery/api/patients/'
API_USER = 'chathan'
API_PASSWORD = getpass('API Password: ')


def get_patient_list():
    """Get a list of patients from the gallery api"""
    response = requests.get(API_URL, auth=(API_USER, API_PASSWORD))
    response.raise_for_status()

    return response.json()


if __name__ == '__main__':
    print(get_patient_list())
<commit_before><commit_msg>Add method to get list of patients from API.<commit_after>
#!/usr/bin/env python

"""Script to upload pictures to the gallery.

This script scans a local picture folder to determine which patients
have not yet been created in the gallery. It then creates the missing
patients.
"""

from getpass import getpass

import requests


API_URL = 'http://localhost:8000/gallery/api/patients/'
API_USER = 'chathan'
API_PASSWORD = getpass('API Password: ')


def get_patient_list():
    """Get a list of patients from the gallery api"""
    response = requests.get(API_URL, auth=(API_USER, API_PASSWORD))
    response.raise_for_status()

    return response.json()


if __name__ == '__main__':
    print(get_patient_list())
Add method to get list of patients from API.#!/usr/bin/env python

"""Script to upload pictures to the gallery.

This script scans a local picture folder to determine which patients
have not yet been created in the gallery. It then creates the missing
patients.
"""

from getpass import getpass

import requests


API_URL = 'http://localhost:8000/gallery/api/patients/'
API_USER = 'chathan'
API_PASSWORD = getpass('API Password: ')


def get_patient_list():
    """Get a list of patients from the gallery api"""
    response = requests.get(API_URL, auth=(API_USER, API_PASSWORD))
    response.raise_for_status()

    return response.json()


if __name__ == '__main__':
    print(get_patient_list())
<commit_before><commit_msg>Add method to get list of patients from API.<commit_after>#!/usr/bin/env python

"""Script to upload pictures to the gallery.

This script scans a local picture folder to determine which patients
have not yet been created in the gallery. It then creates the missing
patients.
"""

from getpass import getpass

import requests


API_URL = 'http://localhost:8000/gallery/api/patients/'
API_USER = 'chathan'
API_PASSWORD = getpass('API Password: ')


def get_patient_list():
    """Get a list of patients from the gallery api"""
    response = requests.get(API_URL, auth=(API_USER, API_PASSWORD))
    response.raise_for_status()

    return response.json()


if __name__ == '__main__':
    print(get_patient_list())
5e874fdc4d19f30123b74c00c162bab12e086735
ugly_code.py
ugly_code.py
def pretty_function(string_to_count):
    raise NotImplementedError()


def Ugly_Function(string_to_count):
    Cpy_Of_String = str(string_to_count)
    Result = {}#holds result
    for c in Cpy_Of_String:
        if (c in Result.keys()) == False:
            Result[c] = [c]
        elif (c in Result.keys()) == True:
            a = Result[c]
            a.append(c)
            Result[c] = a
    NormalResults = {}
    for r, v in Result.items():
        i = 0
        for item in v:
            i += 1
        NormalResults[r] = i
    print(NormalResults)
    Result2= {}
    for c in Cpy_Of_String:
        if (c.lower() in Result2.keys()) == False:
            Result2[c.lower()] = [c.lower()]
        elif (c.lower() in Result2.keys()) == True:
            a = Result2[c.lower()]
            a.append(c.lower())
            Result2[c.lower()] = a
    smallresult = {}
    for r, v in Result2.items():
        i = 0
        for item in v:
            i += 1
        smallresult[r] = i
    print(smallresult)


if __name__ == '__main__':
    string_to_count = 'Otters are COOL animals! The coolest!'
    Ugly_Function(string_to_count)
    # pretty_function(string_to_count)
Add some ugly code to make pretty and more efficient
Add some ugly code to make pretty and more efficient
Python
mit
coolshop-com/coolshop-application-assignment
Add some ugly code to make pretty and more efficient
def pretty_function(string_to_count):
    raise NotImplementedError()


def Ugly_Function(string_to_count):
    Cpy_Of_String = str(string_to_count)
    Result = {}#holds result
    for c in Cpy_Of_String:
        if (c in Result.keys()) == False:
            Result[c] = [c]
        elif (c in Result.keys()) == True:
            a = Result[c]
            a.append(c)
            Result[c] = a
    NormalResults = {}
    for r, v in Result.items():
        i = 0
        for item in v:
            i += 1
        NormalResults[r] = i
    print(NormalResults)
    Result2= {}
    for c in Cpy_Of_String:
        if (c.lower() in Result2.keys()) == False:
            Result2[c.lower()] = [c.lower()]
        elif (c.lower() in Result2.keys()) == True:
            a = Result2[c.lower()]
            a.append(c.lower())
            Result2[c.lower()] = a
    smallresult = {}
    for r, v in Result2.items():
        i = 0
        for item in v:
            i += 1
        smallresult[r] = i
    print(smallresult)


if __name__ == '__main__':
    string_to_count = 'Otters are COOL animals! The coolest!'
    Ugly_Function(string_to_count)
    # pretty_function(string_to_count)
<commit_before><commit_msg>Add some ugly code to make pretty and more efficient<commit_after>
def pretty_function(string_to_count):
    raise NotImplementedError()


def Ugly_Function(string_to_count):
    Cpy_Of_String = str(string_to_count)
    Result = {}#holds result
    for c in Cpy_Of_String:
        if (c in Result.keys()) == False:
            Result[c] = [c]
        elif (c in Result.keys()) == True:
            a = Result[c]
            a.append(c)
            Result[c] = a
    NormalResults = {}
    for r, v in Result.items():
        i = 0
        for item in v:
            i += 1
        NormalResults[r] = i
    print(NormalResults)
    Result2= {}
    for c in Cpy_Of_String:
        if (c.lower() in Result2.keys()) == False:
            Result2[c.lower()] = [c.lower()]
        elif (c.lower() in Result2.keys()) == True:
            a = Result2[c.lower()]
            a.append(c.lower())
            Result2[c.lower()] = a
    smallresult = {}
    for r, v in Result2.items():
        i = 0
        for item in v:
            i += 1
        smallresult[r] = i
    print(smallresult)


if __name__ == '__main__':
    string_to_count = 'Otters are COOL animals! The coolest!'
    Ugly_Function(string_to_count)
    # pretty_function(string_to_count)
Add some ugly code to make pretty and more efficientdef pretty_function(string_to_count):
    raise NotImplementedError()


def Ugly_Function(string_to_count):
    Cpy_Of_String = str(string_to_count)
    Result = {}#holds result
    for c in Cpy_Of_String:
        if (c in Result.keys()) == False:
            Result[c] = [c]
        elif (c in Result.keys()) == True:
            a = Result[c]
            a.append(c)
            Result[c] = a
    NormalResults = {}
    for r, v in Result.items():
        i = 0
        for item in v:
            i += 1
        NormalResults[r] = i
    print(NormalResults)
    Result2= {}
    for c in Cpy_Of_String:
        if (c.lower() in Result2.keys()) == False:
            Result2[c.lower()] = [c.lower()]
        elif (c.lower() in Result2.keys()) == True:
            a = Result2[c.lower()]
            a.append(c.lower())
            Result2[c.lower()] = a
    smallresult = {}
    for r, v in Result2.items():
        i = 0
        for item in v:
            i += 1
        smallresult[r] = i
    print(smallresult)


if __name__ == '__main__':
    string_to_count = 'Otters are COOL animals! The coolest!'
    Ugly_Function(string_to_count)
    # pretty_function(string_to_count)
<commit_before><commit_msg>Add some ugly code to make pretty and more efficient<commit_after>def pretty_function(string_to_count):
    raise NotImplementedError()


def Ugly_Function(string_to_count):
    Cpy_Of_String = str(string_to_count)
    Result = {}#holds result
    for c in Cpy_Of_String:
        if (c in Result.keys()) == False:
            Result[c] = [c]
        elif (c in Result.keys()) == True:
            a = Result[c]
            a.append(c)
            Result[c] = a
    NormalResults = {}
    for r, v in Result.items():
        i = 0
        for item in v:
            i += 1
        NormalResults[r] = i
    print(NormalResults)
    Result2= {}
    for c in Cpy_Of_String:
        if (c.lower() in Result2.keys()) == False:
            Result2[c.lower()] = [c.lower()]
        elif (c.lower() in Result2.keys()) == True:
            a = Result2[c.lower()]
            a.append(c.lower())
            Result2[c.lower()] = a
    smallresult = {}
    for r, v in Result2.items():
        i = 0
        for item in v:
            i += 1
        smallresult[r] = i
    print(smallresult)


if __name__ == '__main__':
    string_to_count = 'Otters are COOL animals! The coolest!'
    Ugly_Function(string_to_count)
    # pretty_function(string_to_count)
48cb07f0a464bde537be409318448095cabfe10e
useradmin.py
useradmin.py
#!/usr/bin/python
import argparse
import datetime
import json
import sys
import requests


def listusers(server):
    try:
        url = 'http://' + server + '/nhlplayoffs/api/v2.0/players'
        headers = {'content-type': 'application/json'}
        r = requests.get(url, headers=headers)
        if not r.ok:
            print('Invalid request!!!!')
            return False
        players = r.json()['players']
        for player in players:
            print("\033[0;94m{n}\033[0m".format(n=player['name']))
            print("\t\033[1;30mEmail:\033[0m{e}".format(e=player['email']))
        return True
    except Exception as e:
        print(e)
        return False


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Manage the nhlpool players')
    parser.add_argument('cmd', metavar='cmd', help='The command to execute')
    parser.add_argument('root_psw', metavar='password', default='', nargs='?', help='The root password')
    parser.add_argument('-s', '--server', metavar='server', default='debug', nargs='?', help='The server to use')
    args = parser.parse_args()
    if args.server == 'prod':
        print('Using production server')
        server = 'nhlpool.herokuapp.com/'
    else:
        print('Using debug server')
        server = 'localhost:5000'
    cmd = args.cmd
    if cmd == 'list':
        listusers(server)
    else:
        print('Invalid command!!!')
Add user admin script with list function
Add user admin script with list function
Python
mit
fjacob21/nhlplayoffs,fjacob21/nhlplayoffs,fjacob21/nhlplayoffs
Add user admin script with list function
#!/usr/bin/python
import argparse
import datetime
import json
import sys
import requests


def listusers(server):
    try:
        url = 'http://' + server + '/nhlplayoffs/api/v2.0/players'
        headers = {'content-type': 'application/json'}
        r = requests.get(url, headers=headers)
        if not r.ok:
            print('Invalid request!!!!')
            return False
        players = r.json()['players']
        for player in players:
            print("\033[0;94m{n}\033[0m".format(n=player['name']))
            print("\t\033[1;30mEmail:\033[0m{e}".format(e=player['email']))
        return True
    except Exception as e:
        print(e)
        return False


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Manage the nhlpool players')
    parser.add_argument('cmd', metavar='cmd', help='The command to execute')
    parser.add_argument('root_psw', metavar='password', default='', nargs='?', help='The root password')
    parser.add_argument('-s', '--server', metavar='server', default='debug', nargs='?', help='The server to use')
    args = parser.parse_args()
    if args.server == 'prod':
        print('Using production server')
        server = 'nhlpool.herokuapp.com/'
    else:
        print('Using debug server')
        server = 'localhost:5000'
    cmd = args.cmd
    if cmd == 'list':
        listusers(server)
    else:
        print('Invalid command!!!')
<commit_before><commit_msg>Add user admin script with list function<commit_after>
#!/usr/bin/python
import argparse
import datetime
import json
import sys
import requests


def listusers(server):
    try:
        url = 'http://' + server + '/nhlplayoffs/api/v2.0/players'
        headers = {'content-type': 'application/json'}
        r = requests.get(url, headers=headers)
        if not r.ok:
            print('Invalid request!!!!')
            return False
        players = r.json()['players']
        for player in players:
            print("\033[0;94m{n}\033[0m".format(n=player['name']))
            print("\t\033[1;30mEmail:\033[0m{e}".format(e=player['email']))
        return True
    except Exception as e:
        print(e)
        return False


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Manage the nhlpool players')
    parser.add_argument('cmd', metavar='cmd', help='The command to execute')
    parser.add_argument('root_psw', metavar='password', default='', nargs='?', help='The root password')
    parser.add_argument('-s', '--server', metavar='server', default='debug', nargs='?', help='The server to use')
    args = parser.parse_args()
    if args.server == 'prod':
        print('Using production server')
        server = 'nhlpool.herokuapp.com/'
    else:
        print('Using debug server')
        server = 'localhost:5000'
    cmd = args.cmd
    if cmd == 'list':
        listusers(server)
    else:
        print('Invalid command!!!')
Add user admin script with list function#!/usr/bin/python
import argparse
import datetime
import json
import sys
import requests


def listusers(server):
    try:
        url = 'http://' + server + '/nhlplayoffs/api/v2.0/players'
        headers = {'content-type': 'application/json'}
        r = requests.get(url, headers=headers)
        if not r.ok:
            print('Invalid request!!!!')
            return False
        players = r.json()['players']
        for player in players:
            print("\033[0;94m{n}\033[0m".format(n=player['name']))
            print("\t\033[1;30mEmail:\033[0m{e}".format(e=player['email']))
        return True
    except Exception as e:
        print(e)
        return False


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Manage the nhlpool players')
    parser.add_argument('cmd', metavar='cmd', help='The command to execute')
    parser.add_argument('root_psw', metavar='password', default='', nargs='?', help='The root password')
    parser.add_argument('-s', '--server', metavar='server', default='debug', nargs='?', help='The server to use')
    args = parser.parse_args()
    if args.server == 'prod':
        print('Using production server')
        server = 'nhlpool.herokuapp.com/'
    else:
        print('Using debug server')
        server = 'localhost:5000'
    cmd = args.cmd
    if cmd == 'list':
        listusers(server)
    else:
        print('Invalid command!!!')
<commit_before><commit_msg>Add user admin script with list function<commit_after>#!/usr/bin/python
import argparse
import datetime
import json
import sys
import requests


def listusers(server):
    try:
        url = 'http://' + server + '/nhlplayoffs/api/v2.0/players'
        headers = {'content-type': 'application/json'}
        r = requests.get(url, headers=headers)
        if not r.ok:
            print('Invalid request!!!!')
            return False
        players = r.json()['players']
        for player in players:
            print("\033[0;94m{n}\033[0m".format(n=player['name']))
            print("\t\033[1;30mEmail:\033[0m{e}".format(e=player['email']))
        return True
    except Exception as e:
        print(e)
        return False


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Manage the nhlpool players')
    parser.add_argument('cmd', metavar='cmd', help='The command to execute')
    parser.add_argument('root_psw', metavar='password', default='', nargs='?', help='The root password')
    parser.add_argument('-s', '--server', metavar='server', default='debug', nargs='?', help='The server to use')
    args = parser.parse_args()
    if args.server == 'prod':
        print('Using production server')
        server = 'nhlpool.herokuapp.com/'
    else:
        print('Using debug server')
        server = 'localhost:5000'
    cmd = args.cmd
    if cmd == 'list':
        listusers(server)
    else:
        print('Invalid command!!!')
111813afa21ac0d7e4b537f9eb1295cbc0357413
examples/load_hgpackage.py
examples/load_hgpackage.py
#!/usr/bin/env python

from pymoku import Moku

import sys, logging

logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
log = logging.getLogger()
log.setLevel(logging.DEBUG)

if len(sys.argv) != 3:
    print "Usage %s <ip> <packname>" % sys.argv[0]
    exit(1)

m = Moku(sys.argv[1])

pack = sys.argv[2]
sha = pack + '.sha256'

try:
    m._send_file('p', pack)
except IOError:
    log.exception("Can't load pack")
    m.close()
    exit(1)

try:
    m._send_file('p', sha)
except IOError:
    log.warning("Can't load signature, this will not be deployable in release mode")
finally:
    m.close()
Add simple script to upload hg pack files to a device
PM-86: Add simple script to upload hg pack files to a device
Python
mit
liquidinstruments/pymoku
PM-86: Add simple script to upload hg pack files to a device
#!/usr/bin/env python

from pymoku import Moku

import sys, logging

logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
log = logging.getLogger()
log.setLevel(logging.DEBUG)

if len(sys.argv) != 3:
    print "Usage %s <ip> <packname>" % sys.argv[0]
    exit(1)

m = Moku(sys.argv[1])

pack = sys.argv[2]
sha = pack + '.sha256'

try:
    m._send_file('p', pack)
except IOError:
    log.exception("Can't load pack")
    m.close()
    exit(1)

try:
    m._send_file('p', sha)
except IOError:
    log.warning("Can't load signature, this will not be deployable in release mode")
finally:
    m.close()
<commit_before><commit_msg>PM-86: Add simple script to upload hg pack files to a device<commit_after>
#!/usr/bin/env python

from pymoku import Moku

import sys, logging

logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
log = logging.getLogger()
log.setLevel(logging.DEBUG)

if len(sys.argv) != 3:
    print "Usage %s <ip> <packname>" % sys.argv[0]
    exit(1)

m = Moku(sys.argv[1])

pack = sys.argv[2]
sha = pack + '.sha256'

try:
    m._send_file('p', pack)
except IOError:
    log.exception("Can't load pack")
    m.close()
    exit(1)

try:
    m._send_file('p', sha)
except IOError:
    log.warning("Can't load signature, this will not be deployable in release mode")
finally:
    m.close()
PM-86: Add simple script to upload hg pack files to a device#!/usr/bin/env python

from pymoku import Moku

import sys, logging

logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
log = logging.getLogger()
log.setLevel(logging.DEBUG)

if len(sys.argv) != 3:
    print "Usage %s <ip> <packname>" % sys.argv[0]
    exit(1)

m = Moku(sys.argv[1])

pack = sys.argv[2]
sha = pack + '.sha256'

try:
    m._send_file('p', pack)
except IOError:
    log.exception("Can't load pack")
    m.close()
    exit(1)

try:
    m._send_file('p', sha)
except IOError:
    log.warning("Can't load signature, this will not be deployable in release mode")
finally:
    m.close()
<commit_before><commit_msg>PM-86: Add simple script to upload hg pack files to a device<commit_after>#!/usr/bin/env python

from pymoku import Moku

import sys, logging

logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
log = logging.getLogger()
log.setLevel(logging.DEBUG)

if len(sys.argv) != 3:
    print "Usage %s <ip> <packname>" % sys.argv[0]
    exit(1)

m = Moku(sys.argv[1])

pack = sys.argv[2]
sha = pack + '.sha256'

try:
    m._send_file('p', pack)
except IOError:
    log.exception("Can't load pack")
    m.close()
    exit(1)

try:
    m._send_file('p', sha)
except IOError:
    log.warning("Can't load signature, this will not be deployable in release mode")
finally:
    m.close()
bd94ce78b29d918db59f2702fd76e3207840d46f
pyscf/pbc/ci/test/test_ci.py
pyscf/pbc/ci/test/test_ci.py
import pyscf.pbc.cc.test.make_test_cell as make_test_cell
from pyscf.pbc import gto, scf, ci
from pyscf.pbc.ci import KCIS
import unittest

cell = make_test_cell.test_cell_n3()
cell.mesh = [29] * 3
cell.build()

kmf_n3_none = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv=None)
kmf_n3_none.kernel()
kmf_n3_ewald = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv='ewald')
kmf_n3_ewald.kernel()


def tearDownModule():
    global cell, kmf_n3_none, kmf_n3_ewald
    del cell, kmf_n3_none, kmf_n3_ewald


class KnownValues(unittest.TestCase):
    def test_n3_cis(self):
        ehf_bench = [-8.651923514149, -10.530905169078]
        ekrhf = kmf_n3_none.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[0], 6)
        ekrhf = kmf_n3_ewald.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[1], 6)

        # KCIS
        myci = ci.KCIS(kmf_n3_none)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.223920101177)
        self.assertAlmostEqual(eci[0][1], 0.223920101177)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.291182202333)
        self.assertAlmostEqual(eci[0][1], 0.330573456724)

        myci = ci.KCIS(kmf_n3_ewald, keep_exxdiv=True)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.693665750383)
        self.assertAlmostEqual(eci[0][1], 0.693665750384)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.760927568875)
        self.assertAlmostEqual(eci[0][1], 0.800318837778)
Add unit test for KCIS (defined in pbc/ci/kcis_rhf.py)
Add unit test for KCIS (defined in pbc/ci/kcis_rhf.py)
Python
apache-2.0
sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf
Add unit test for KCIS (defined in pbc/ci/kcis_rhf.py)
import pyscf.pbc.cc.test.make_test_cell as make_test_cell
from pyscf.pbc import gto, scf, ci
from pyscf.pbc.ci import KCIS
import unittest

cell = make_test_cell.test_cell_n3()
cell.mesh = [29] * 3
cell.build()

kmf_n3_none = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv=None)
kmf_n3_none.kernel()
kmf_n3_ewald = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv='ewald')
kmf_n3_ewald.kernel()


def tearDownModule():
    global cell, kmf_n3_none, kmf_n3_ewald
    del cell, kmf_n3_none, kmf_n3_ewald


class KnownValues(unittest.TestCase):
    def test_n3_cis(self):
        ehf_bench = [-8.651923514149, -10.530905169078]
        ekrhf = kmf_n3_none.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[0], 6)
        ekrhf = kmf_n3_ewald.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[1], 6)

        # KCIS
        myci = ci.KCIS(kmf_n3_none)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.223920101177)
        self.assertAlmostEqual(eci[0][1], 0.223920101177)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.291182202333)
        self.assertAlmostEqual(eci[0][1], 0.330573456724)

        myci = ci.KCIS(kmf_n3_ewald, keep_exxdiv=True)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.693665750383)
        self.assertAlmostEqual(eci[0][1], 0.693665750384)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.760927568875)
        self.assertAlmostEqual(eci[0][1], 0.800318837778)
<commit_before><commit_msg>Add unit test for KCIS (defined in pbc/ci/kcis_rhf.py)<commit_after>
import pyscf.pbc.cc.test.make_test_cell as make_test_cell
from pyscf.pbc import gto, scf, ci
from pyscf.pbc.ci import KCIS
import unittest

cell = make_test_cell.test_cell_n3()
cell.mesh = [29] * 3
cell.build()

kmf_n3_none = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv=None)
kmf_n3_none.kernel()
kmf_n3_ewald = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv='ewald')
kmf_n3_ewald.kernel()


def tearDownModule():
    global cell, kmf_n3_none, kmf_n3_ewald
    del cell, kmf_n3_none, kmf_n3_ewald


class KnownValues(unittest.TestCase):
    def test_n3_cis(self):
        ehf_bench = [-8.651923514149, -10.530905169078]
        ekrhf = kmf_n3_none.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[0], 6)
        ekrhf = kmf_n3_ewald.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[1], 6)

        # KCIS
        myci = ci.KCIS(kmf_n3_none)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.223920101177)
        self.assertAlmostEqual(eci[0][1], 0.223920101177)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.291182202333)
        self.assertAlmostEqual(eci[0][1], 0.330573456724)

        myci = ci.KCIS(kmf_n3_ewald, keep_exxdiv=True)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.693665750383)
        self.assertAlmostEqual(eci[0][1], 0.693665750384)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.760927568875)
        self.assertAlmostEqual(eci[0][1], 0.800318837778)
Add unit test for KCIS (defined in pbc/ci/kcis_rhf.py)import pyscf.pbc.cc.test.make_test_cell as make_test_cell
from pyscf.pbc import gto, scf, ci
from pyscf.pbc.ci import KCIS
import unittest

cell = make_test_cell.test_cell_n3()
cell.mesh = [29] * 3
cell.build()

kmf_n3_none = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv=None)
kmf_n3_none.kernel()
kmf_n3_ewald = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv='ewald')
kmf_n3_ewald.kernel()


def tearDownModule():
    global cell, kmf_n3_none, kmf_n3_ewald
    del cell, kmf_n3_none, kmf_n3_ewald


class KnownValues(unittest.TestCase):
    def test_n3_cis(self):
        ehf_bench = [-8.651923514149, -10.530905169078]
        ekrhf = kmf_n3_none.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[0], 6)
        ekrhf = kmf_n3_ewald.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[1], 6)

        # KCIS
        myci = ci.KCIS(kmf_n3_none)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.223920101177)
        self.assertAlmostEqual(eci[0][1], 0.223920101177)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.291182202333)
        self.assertAlmostEqual(eci[0][1], 0.330573456724)

        myci = ci.KCIS(kmf_n3_ewald, keep_exxdiv=True)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.693665750383)
        self.assertAlmostEqual(eci[0][1], 0.693665750384)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.760927568875)
        self.assertAlmostEqual(eci[0][1], 0.800318837778)
<commit_before><commit_msg>Add unit test for KCIS (defined in pbc/ci/kcis_rhf.py)<commit_after>import pyscf.pbc.cc.test.make_test_cell as make_test_cell
from pyscf.pbc import gto, scf, ci
from pyscf.pbc.ci import KCIS
import unittest

cell = make_test_cell.test_cell_n3()
cell.mesh = [29] * 3
cell.build()

kmf_n3_none = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv=None)
kmf_n3_none.kernel()
kmf_n3_ewald = scf.KRHF(cell, kpts=cell.make_kpts([2,1,1]), exxdiv='ewald')
kmf_n3_ewald.kernel()


def tearDownModule():
    global cell, kmf_n3_none, kmf_n3_ewald
    del cell, kmf_n3_none, kmf_n3_ewald


class KnownValues(unittest.TestCase):
    def test_n3_cis(self):
        ehf_bench = [-8.651923514149, -10.530905169078]
        ekrhf = kmf_n3_none.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[0], 6)
        ekrhf = kmf_n3_ewald.e_tot
        self.assertAlmostEqual(ekrhf, ehf_bench[1], 6)

        # KCIS
        myci = ci.KCIS(kmf_n3_none)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.223920101177)
        self.assertAlmostEqual(eci[0][1], 0.223920101177)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.291182202333)
        self.assertAlmostEqual(eci[0][1], 0.330573456724)

        myci = ci.KCIS(kmf_n3_ewald, keep_exxdiv=True)
        eci, v = myci.kernel(nroots=2, kptlist=[0])
        self.assertAlmostEqual(eci[0][0], 0.693665750383)
        self.assertAlmostEqual(eci[0][1], 0.693665750384)
        eci, v = myci.kernel(nroots=2, kptlist=[1])
        self.assertAlmostEqual(eci[0][0], 0.760927568875)
        self.assertAlmostEqual(eci[0][1], 0.800318837778)
f6e2bdeb8662cdb44db9de91d0cbebf6fc41a231
Variance.py
Variance.py
import math


def listVariance(base, new):
    for i in new:
        if i in base:
            return 0
    return 1


def valueVariance(base, new):
    return math.fabs(base-new)


compareVec = [
    listVariance,
    listVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance
]


def getVariance(base, new):
    results = []
    for i in range(len(base)):
        # selectively map our operators based on input type
        if type(base[i]) == list:
            results.append(compareVec[i](base[i], new[i]))
        else:
            results.append(compareVec[i](base[i], new[i]))
    return results


if __name__ == '__main__':
    print "No need to run this directly"
    test1 = [['artist1', 'artist2', 'artist3'],['genre1', 'genre2', 'genre3'],0,0,0,0,0,0,0,0]
    test2 = [['artist1'],['genre1', 'genre2'],1,2,3,4,5,6,7,8]
    print getVariance(test1, test2)

'''
The expected difference of the above should be:
0.33333333
0.66666666
1
2
3
4
5
6
7
'''
Create initial vector variance helpers file
Create initial vector variance helpers file
Python
mit
SLongofono/448_Project4,SLongofono/448_Project4
Create initial vector variance helpers file
import math


def listVariance(base, new):
    for i in new:
        if i in base:
            return 0
    return 1


def valueVariance(base, new):
    return math.fabs(base-new)


compareVec = [
    listVariance,
    listVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance
]


def getVariance(base, new):
    results = []
    for i in range(len(base)):
        # selectively map our operators based on input type
        if type(base[i]) == list:
            results.append(compareVec[i](base[i], new[i]))
        else:
            results.append(compareVec[i](base[i], new[i]))
    return results


if __name__ == '__main__':
    print "No need to run this directly"
    test1 = [['artist1', 'artist2', 'artist3'],['genre1', 'genre2', 'genre3'],0,0,0,0,0,0,0,0]
    test2 = [['artist1'],['genre1', 'genre2'],1,2,3,4,5,6,7,8]
    print getVariance(test1, test2)

'''
The expected difference of the above should be:
0.33333333
0.66666666
1
2
3
4
5
6
7
'''
<commit_before><commit_msg>Create initial vector variance helpers file<commit_after>
import math


def listVariance(base, new):
    for i in new:
        if i in base:
            return 0
    return 1


def valueVariance(base, new):
    return math.fabs(base-new)


compareVec = [
    listVariance,
    listVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance
]


def getVariance(base, new):
    results = []
    for i in range(len(base)):
        # selectively map our operators based on input type
        if type(base[i]) == list:
            results.append(compareVec[i](base[i], new[i]))
        else:
            results.append(compareVec[i](base[i], new[i]))
    return results


if __name__ == '__main__':
    print "No need to run this directly"
    test1 = [['artist1', 'artist2', 'artist3'],['genre1', 'genre2', 'genre3'],0,0,0,0,0,0,0,0]
    test2 = [['artist1'],['genre1', 'genre2'],1,2,3,4,5,6,7,8]
    print getVariance(test1, test2)

'''
The expected difference of the above should be:
0.33333333
0.66666666
1
2
3
4
5
6
7
'''
Create initial vector variance helpers fileimport math


def listVariance(base, new):
    for i in new:
        if i in base:
            return 0
    return 1


def valueVariance(base, new):
    return math.fabs(base-new)


compareVec = [
    listVariance,
    listVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance
]


def getVariance(base, new):
    results = []
    for i in range(len(base)):
        # selectively map our operators based on input type
        if type(base[i]) == list:
            results.append(compareVec[i](base[i], new[i]))
        else:
            results.append(compareVec[i](base[i], new[i]))
    return results


if __name__ == '__main__':
    print "No need to run this directly"
    test1 = [['artist1', 'artist2', 'artist3'],['genre1', 'genre2', 'genre3'],0,0,0,0,0,0,0,0]
    test2 = [['artist1'],['genre1', 'genre2'],1,2,3,4,5,6,7,8]
    print getVariance(test1, test2)

'''
The expected difference of the above should be:
0.33333333
0.66666666
1
2
3
4
5
6
7
'''
<commit_before><commit_msg>Create initial vector variance helpers file<commit_after>import math


def listVariance(base, new):
    for i in new:
        if i in base:
            return 0
    return 1


def valueVariance(base, new):
    return math.fabs(base-new)


compareVec = [
    listVariance,
    listVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance,
    valueVariance
]


def getVariance(base, new):
    results = []
    for i in range(len(base)):
        # selectively map our operators based on input type
        if type(base[i]) == list:
            results.append(compareVec[i](base[i], new[i]))
        else:
            results.append(compareVec[i](base[i], new[i]))
    return results


if __name__ == '__main__':
    print "No need to run this directly"
    test1 = [['artist1', 'artist2', 'artist3'],['genre1', 'genre2', 'genre3'],0,0,0,0,0,0,0,0]
    test2 = [['artist1'],['genre1', 'genre2'],1,2,3,4,5,6,7,8]
    print getVariance(test1, test2)

'''
The expected difference of the above should be:
0.33333333
0.66666666
1
2
3
4
5
6
7
'''
a71aa49833030795cfe6b187c5497cb5d67ae263
portal/migrations/versions/c3d394d9dfd1_.py
portal/migrations/versions/c3d394d9dfd1_.py
"""empty message Revision ID: c3d394d9dfd1 Revises: ('3c81fc76164c', 'c19bff0f70ab') Create Date: 2021-01-06 12:53:14.809274 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'c3d394d9dfd1' down_revision = ('3c81fc76164c', 'c19bff0f70ab') def upgrade(): pass def downgrade(): pass
Add merge heads needed migration
Add merge heads needed migration
Python
bsd-3-clause
uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal
Add merge heads needed migration
"""empty message Revision ID: c3d394d9dfd1 Revises: ('3c81fc76164c', 'c19bff0f70ab') Create Date: 2021-01-06 12:53:14.809274 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'c3d394d9dfd1' down_revision = ('3c81fc76164c', 'c19bff0f70ab') def upgrade(): pass def downgrade(): pass
<commit_before><commit_msg>Add merge heads needed migration<commit_after>
"""empty message Revision ID: c3d394d9dfd1 Revises: ('3c81fc76164c', 'c19bff0f70ab') Create Date: 2021-01-06 12:53:14.809274 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'c3d394d9dfd1' down_revision = ('3c81fc76164c', 'c19bff0f70ab') def upgrade(): pass def downgrade(): pass
Add merge heads needed migration"""empty message

Revision ID: c3d394d9dfd1
Revises: ('3c81fc76164c', 'c19bff0f70ab')
Create Date: 2021-01-06 12:53:14.809274

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'c3d394d9dfd1'
down_revision = ('3c81fc76164c', 'c19bff0f70ab')


def upgrade():
    pass


def downgrade():
    pass
<commit_before><commit_msg>Add merge heads needed migration<commit_after>"""empty message

Revision ID: c3d394d9dfd1
Revises: ('3c81fc76164c', 'c19bff0f70ab')
Create Date: 2021-01-06 12:53:14.809274

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'c3d394d9dfd1'
down_revision = ('3c81fc76164c', 'c19bff0f70ab')


def upgrade():
    pass


def downgrade():
    pass
85f8bb3e46c5c79af6ba1e246ad5938642feadcc
test/test_i18n.py
test/test_i18n.py
# -*- coding: utf8 -*-
###
# Copyright (c) 2012, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

from supybot.test import *
from supybot.i18n import PluginInternationalization, internationalizeDocstring
import supybot.conf as conf
import supybot.i18n as i18n

msg_en = 'The operation succeeded.'
msg_fr = 'Opération effectuée avec succès.'

_ = PluginInternationalization()

@internationalizeDocstring
def foo():
    'The operation succeeded.'
    pass

class I18nTestCase(SupyTestCase):
    def testPluginInternationalization(self):
        self.assertEqual(_(msg_en), msg_en)
        with conf.supybot.language.context('fr'):
            self.assertEqual(_(msg_en), msg_fr)
        conf.supybot.language.setValue('en')
        self.assertEqual(_(msg_en), msg_en)

    def testDocstring(self):
        self.assertEqual(foo.__doc__, msg_en)
        with conf.supybot.language.context('fr'):
            self.assertEqual(foo.__doc__, msg_fr)
        i18n.reloadLocales()
        self.assertEqual(foo.__doc__, msg_en)
Add unit tests for i18n.
Add unit tests for i18n.
Python
bsd-3-clause
Ban3/Limnoria,Ban3/Limnoria,ProgVal/Limnoria-test,ProgVal/Limnoria-test
Add unit tests for i18n.
# -*- coding: utf8 -*-
###
# Copyright (c) 2012, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

from supybot.test import *
from supybot.i18n import PluginInternationalization, internationalizeDocstring
import supybot.conf as conf
import supybot.i18n as i18n

msg_en = 'The operation succeeded.'
msg_fr = 'Opération effectuée avec succès.'

_ = PluginInternationalization()

@internationalizeDocstring
def foo():
    'The operation succeeded.'
    pass

class I18nTestCase(SupyTestCase):
    def testPluginInternationalization(self):
        self.assertEqual(_(msg_en), msg_en)
        with conf.supybot.language.context('fr'):
            self.assertEqual(_(msg_en), msg_fr)
        conf.supybot.language.setValue('en')
        self.assertEqual(_(msg_en), msg_en)

    def testDocstring(self):
        self.assertEqual(foo.__doc__, msg_en)
        with conf.supybot.language.context('fr'):
            self.assertEqual(foo.__doc__, msg_fr)
        i18n.reloadLocales()
        self.assertEqual(foo.__doc__, msg_en)
<commit_before><commit_msg>Add unit tests for i18n.<commit_after>
# -*- coding: utf8 -*-
###
# Copyright (c) 2012, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

from supybot.test import *
from supybot.i18n import PluginInternationalization, internationalizeDocstring
import supybot.conf as conf
import supybot.i18n as i18n

msg_en = 'The operation succeeded.'
msg_fr = 'Opération effectuée avec succès.'

_ = PluginInternationalization()

@internationalizeDocstring
def foo():
    'The operation succeeded.'
    pass

class I18nTestCase(SupyTestCase):
    def testPluginInternationalization(self):
        self.assertEqual(_(msg_en), msg_en)
        with conf.supybot.language.context('fr'):
            self.assertEqual(_(msg_en), msg_fr)
        conf.supybot.language.setValue('en')
        self.assertEqual(_(msg_en), msg_en)

    def testDocstring(self):
        self.assertEqual(foo.__doc__, msg_en)
        with conf.supybot.language.context('fr'):
            self.assertEqual(foo.__doc__, msg_fr)
        i18n.reloadLocales()
        self.assertEqual(foo.__doc__, msg_en)
Add unit tests for i18n.# -*- coding: utf8 -*- ### # Copyright (c) 2012, Valentin Lorentz # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### from supybot.test import * from supybot.i18n import PluginInternationalization, internationalizeDocstring import supybot.conf as conf import supybot.i18n as i18n msg_en = 'The operation succeeded.' msg_fr = 'Opération effectuée avec succès.' _ = PluginInternationalization() @internationalizeDocstring def foo(): 'The operation succeeded.' pass class I18nTestCase(SupyTestCase): def testPluginInternationalization(self): self.assertEqual(_(msg_en), msg_en) with conf.supybot.language.context('fr'): self.assertEqual(_(msg_en), msg_fr) conf.supybot.language.setValue('en') self.assertEqual(_(msg_en), msg_en) def testDocstring(self): self.assertEqual(foo.__doc__, msg_en) with conf.supybot.language.context('fr'): self.assertEqual(foo.__doc__, msg_fr) i18n.reloadLocales() self.assertEqual(foo.__doc__, msg_en)
<commit_before><commit_msg>Add unit tests for i18n.<commit_after># -*- coding: utf8 -*- ### # Copyright (c) 2012, Valentin Lorentz # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### from supybot.test import * from supybot.i18n import PluginInternationalization, internationalizeDocstring import supybot.conf as conf import supybot.i18n as i18n msg_en = 'The operation succeeded.' msg_fr = 'Opération effectuée avec succès.' _ = PluginInternationalization() @internationalizeDocstring def foo(): 'The operation succeeded.' pass class I18nTestCase(SupyTestCase): def testPluginInternationalization(self): self.assertEqual(_(msg_en), msg_en) with conf.supybot.language.context('fr'): self.assertEqual(_(msg_en), msg_fr) conf.supybot.language.setValue('en') self.assertEqual(_(msg_en), msg_en) def testDocstring(self): self.assertEqual(foo.__doc__, msg_en) with conf.supybot.language.context('fr'): self.assertEqual(foo.__doc__, msg_fr) i18n.reloadLocales() self.assertEqual(foo.__doc__, msg_en)
30cb67f454165a72287ba511adba0d8606f3ae72
utils/esicog.py
utils/esicog.py
import esipy from discord.ext import commands import time ESI_SWAGGER_JSON = 'https://esi.evetech.net/latest/swagger.json' ESI_APP: esipy.App = None ESI_CLIENT: esipy.EsiClient = None def get_esi_app(): global ESI_APP if not ESI_APP: ESI_APP = esipy.App.create(url=ESI_SWAGGER_JSON) return ESI_APP def get_esi_client(): global ESI_CLIENT if not ESI_CLIENT: ESI_CLIENT = esipy.EsiClient() return ESI_CLIENT class EsiCog: def __init__(self, bot: commands.Bot): self._esi_app_task = bot.loop.run_in_executor(None, get_esi_app) self._esi_client_task = bot.loop.run_in_executor(None, get_esi_client) async def get_esi_app(self, bot: commands.Bot): return await self._esi_app_task async def get_esi_client(self, bot: commands.Bot): return await self._esi_client_task
Add base class for cogs using ESIpy
Add base class for cogs using ESIpy
Python
mit
randomic/antinub-gregbot
Add base class for cogs using ESIpy
import esipy from discord.ext import commands import time ESI_SWAGGER_JSON = 'https://esi.evetech.net/latest/swagger.json' ESI_APP: esipy.App = None ESI_CLIENT: esipy.EsiClient = None def get_esi_app(): global ESI_APP if not ESI_APP: ESI_APP = esipy.App.create(url=ESI_SWAGGER_JSON) return ESI_APP def get_esi_client(): global ESI_CLIENT if not ESI_CLIENT: ESI_CLIENT = esipy.EsiClient() return ESI_CLIENT class EsiCog: def __init__(self, bot: commands.Bot): self._esi_app_task = bot.loop.run_in_executor(None, get_esi_app) self._esi_client_task = bot.loop.run_in_executor(None, get_esi_client) async def get_esi_app(self, bot: commands.Bot): return await self._esi_app_task async def get_esi_client(self, bot: commands.Bot): return await self._esi_client_task
<commit_before><commit_msg>Add base class for cogs using ESIpy<commit_after>
import esipy from discord.ext import commands import time ESI_SWAGGER_JSON = 'https://esi.evetech.net/latest/swagger.json' ESI_APP: esipy.App = None ESI_CLIENT: esipy.EsiClient = None def get_esi_app(): global ESI_APP if not ESI_APP: ESI_APP = esipy.App.create(url=ESI_SWAGGER_JSON) return ESI_APP def get_esi_client(): global ESI_CLIENT if not ESI_CLIENT: ESI_CLIENT = esipy.EsiClient() return ESI_CLIENT class EsiCog: def __init__(self, bot: commands.Bot): self._esi_app_task = bot.loop.run_in_executor(None, get_esi_app) self._esi_client_task = bot.loop.run_in_executor(None, get_esi_client) async def get_esi_app(self, bot: commands.Bot): return await self._esi_app_task async def get_esi_client(self, bot: commands.Bot): return await self._esi_client_task
Add base class for cogs using ESIpyimport esipy from discord.ext import commands import time ESI_SWAGGER_JSON = 'https://esi.evetech.net/latest/swagger.json' ESI_APP: esipy.App = None ESI_CLIENT: esipy.EsiClient = None def get_esi_app(): global ESI_APP if not ESI_APP: ESI_APP = esipy.App.create(url=ESI_SWAGGER_JSON) return ESI_APP def get_esi_client(): global ESI_CLIENT if not ESI_CLIENT: ESI_CLIENT = esipy.EsiClient() return ESI_CLIENT class EsiCog: def __init__(self, bot: commands.Bot): self._esi_app_task = bot.loop.run_in_executor(None, get_esi_app) self._esi_client_task = bot.loop.run_in_executor(None, get_esi_client) async def get_esi_app(self, bot: commands.Bot): return await self._esi_app_task async def get_esi_client(self, bot: commands.Bot): return await self._esi_client_task
<commit_before><commit_msg>Add base class for cogs using ESIpy<commit_after>import esipy from discord.ext import commands import time ESI_SWAGGER_JSON = 'https://esi.evetech.net/latest/swagger.json' ESI_APP: esipy.App = None ESI_CLIENT: esipy.EsiClient = None def get_esi_app(): global ESI_APP if not ESI_APP: ESI_APP = esipy.App.create(url=ESI_SWAGGER_JSON) return ESI_APP def get_esi_client(): global ESI_CLIENT if not ESI_CLIENT: ESI_CLIENT = esipy.EsiClient() return ESI_CLIENT class EsiCog: def __init__(self, bot: commands.Bot): self._esi_app_task = bot.loop.run_in_executor(None, get_esi_app) self._esi_client_task = bot.loop.run_in_executor(None, get_esi_client) async def get_esi_app(self, bot: commands.Bot): return await self._esi_app_task async def get_esi_client(self, bot: commands.Bot): return await self._esi_client_task
75015c024520af76d454e48cc59eb76c23c35aee
soccer/gameplay/timeout_behavior.py
soccer/gameplay/timeout_behavior.py
import composite_behavior import behavior import constants import robocup import time import main import enum import logging class TimeoutBehavior(composite_behavior.CompositeBehavior): class State(enum.Enum): timeout = 1 def __init__(self, subbehavior, timeout): super().__init__(continuous=False) self.add_state(TimeoutBehavior.State.timeout, behavior.Behavior.State.failed) self.add_transition(behavior.Behavior.State.start, behavior.Behavior.State.running, lambda: True, 'immediately') self.add_transition( behavior.Behavior.State.running, TimeoutBehavior.State.timeout, lambda: self.timeout_exceeded(), 'Subbehavior timed out') self._behavior = subbehavior self.add_subbehavior(subbehavior, 'toTimeout') self._timeout = timeout self.start_time = time.time() @property def behavior(self): return self._behavior @property def timeout(self): return self._timeout def timeout_exceeded(self): if time.time() - self.start_time > self.timeout: return True return False def on_enter_failed(self): self.remove_all_subbehaviors()
Add a timeout wrapper play
Add a timeout wrapper play
Python
apache-2.0
JNeiger/robocup-software,RoboJackets/robocup-software
Add a timeout wrapper play
import composite_behavior import behavior import constants import robocup import time import main import enum import logging class TimeoutBehavior(composite_behavior.CompositeBehavior): class State(enum.Enum): timeout = 1 def __init__(self, subbehavior, timeout): super().__init__(continuous=False) self.add_state(TimeoutBehavior.State.timeout, behavior.Behavior.State.failed) self.add_transition(behavior.Behavior.State.start, behavior.Behavior.State.running, lambda: True, 'immediately') self.add_transition( behavior.Behavior.State.running, TimeoutBehavior.State.timeout, lambda: self.timeout_exceeded(), 'Subbehavior timed out') self._behavior = subbehavior self.add_subbehavior(subbehavior, 'toTimeout') self._timeout = timeout self.start_time = time.time() @property def behavior(self): return self._behavior @property def timeout(self): return self._timeout def timeout_exceeded(self): if time.time() - self.start_time > self.timeout: return True return False def on_enter_failed(self): self.remove_all_subbehaviors()
<commit_before><commit_msg>Add a timeout wrapper play<commit_after>
import composite_behavior import behavior import constants import robocup import time import main import enum import logging class TimeoutBehavior(composite_behavior.CompositeBehavior): class State(enum.Enum): timeout = 1 def __init__(self, subbehavior, timeout): super().__init__(continuous=False) self.add_state(TimeoutBehavior.State.timeout, behavior.Behavior.State.failed) self.add_transition(behavior.Behavior.State.start, behavior.Behavior.State.running, lambda: True, 'immediately') self.add_transition( behavior.Behavior.State.running, TimeoutBehavior.State.timeout, lambda: self.timeout_exceeded(), 'Subbehavior timed out') self._behavior = subbehavior self.add_subbehavior(subbehavior, 'toTimeout') self._timeout = timeout self.start_time = time.time() @property def behavior(self): return self._behavior @property def timeout(self): return self._timeout def timeout_exceeded(self): if time.time() - self.start_time > self.timeout: return True return False def on_enter_failed(self): self.remove_all_subbehaviors()
Add a timeout wrapper playimport composite_behavior import behavior import constants import robocup import time import main import enum import logging class TimeoutBehavior(composite_behavior.CompositeBehavior): class State(enum.Enum): timeout = 1 def __init__(self, subbehavior, timeout): super().__init__(continuous=False) self.add_state(TimeoutBehavior.State.timeout, behavior.Behavior.State.failed) self.add_transition(behavior.Behavior.State.start, behavior.Behavior.State.running, lambda: True, 'immediately') self.add_transition( behavior.Behavior.State.running, TimeoutBehavior.State.timeout, lambda: self.timeout_exceeded(), 'Subbehavior timed out') self._behavior = subbehavior self.add_subbehavior(subbehavior, 'toTimeout') self._timeout = timeout self.start_time = time.time() @property def behavior(self): return self._behavior @property def timeout(self): return self._timeout def timeout_exceeded(self): if time.time() - self.start_time > self.timeout: return True return False def on_enter_failed(self): self.remove_all_subbehaviors()
<commit_before><commit_msg>Add a timeout wrapper play<commit_after>import composite_behavior import behavior import constants import robocup import time import main import enum import logging class TimeoutBehavior(composite_behavior.CompositeBehavior): class State(enum.Enum): timeout = 1 def __init__(self, subbehavior, timeout): super().__init__(continuous=False) self.add_state(TimeoutBehavior.State.timeout, behavior.Behavior.State.failed) self.add_transition(behavior.Behavior.State.start, behavior.Behavior.State.running, lambda: True, 'immediately') self.add_transition( behavior.Behavior.State.running, TimeoutBehavior.State.timeout, lambda: self.timeout_exceeded(), 'Subbehavior timed out') self._behavior = subbehavior self.add_subbehavior(subbehavior, 'toTimeout') self._timeout = timeout self.start_time = time.time() @property def behavior(self): return self._behavior @property def timeout(self): return self._timeout def timeout_exceeded(self): if time.time() - self.start_time > self.timeout: return True return False def on_enter_failed(self): self.remove_all_subbehaviors()
d91807ff8c8cd652e220a4add65d47e0afbcdb47
back_office/migrations/0007_teacher_nationality.py
back_office/migrations/0007_teacher_nationality.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('master_data', '0001_initial'), ('back_office', '0006_auto_20160111_1315'), ] operations = [ migrations.AddField( model_name='teacher', name='nationality', field=models.ForeignKey(verbose_name='Nationality', to='master_data.Nationality', null=True), ), ]
Make migration for Teacher model after adding nationality field to it
Make migration for Teacher model after adding nationality field to it
Python
mit
EmadMokhtar/halaqat
Make migration for Teacher model after adding nationality field to it
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('master_data', '0001_initial'), ('back_office', '0006_auto_20160111_1315'), ] operations = [ migrations.AddField( model_name='teacher', name='nationality', field=models.ForeignKey(verbose_name='Nationality', to='master_data.Nationality', null=True), ), ]
<commit_before><commit_msg>Make migration for Teacher model after adding nationality field to it<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('master_data', '0001_initial'), ('back_office', '0006_auto_20160111_1315'), ] operations = [ migrations.AddField( model_name='teacher', name='nationality', field=models.ForeignKey(verbose_name='Nationality', to='master_data.Nationality', null=True), ), ]
Make migration for Teacher model after adding nationality field to it# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('master_data', '0001_initial'),
        ('back_office', '0006_auto_20160111_1315'),
    ]

    operations = [
        migrations.AddField(
            model_name='teacher',
            name='nationality',
            field=models.ForeignKey(verbose_name='Nationality', to='master_data.Nationality', null=True),
        ),
    ]
<commit_before><commit_msg>Make migration for Teacher model after adding nationality field to it<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('master_data', '0001_initial'),
        ('back_office', '0006_auto_20160111_1315'),
    ]

    operations = [
        migrations.AddField(
            model_name='teacher',
            name='nationality',
            field=models.ForeignKey(verbose_name='Nationality', to='master_data.Nationality', null=True),
        ),
    ]
6609836c0d36930a73074ec4f0cf136d887ee0f9
src/data_preparation/get_dhis_data.py
src/data_preparation/get_dhis_data.py
import pandas as pd from blsqpy.postgres_hook import PostgresHook from blsqpy.dhis2 import Dhis2 pgHook = PostgresHook('local_pdss') dhis = Dhis2(pgHook) dhis.build_de_cc_table() dhis.get_data de_maps_id = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 1) de_maps_name = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 0) declared_data = dhis.get_data(de_maps_id.dec.to_list()) validated_data = dhis.get_data(de_maps_id.val.to_list()) declared_data.to_csv("data/raw/pdss_declared.csv") validated_data.to_csv("data/raw/pdss_validated.csv")
Add data extraction from dhis
Add data extraction from dhis
Python
mit
grlurton/orbf_data_validation
Add data extraction from dhis
import pandas as pd from blsqpy.postgres_hook import PostgresHook from blsqpy.dhis2 import Dhis2 pgHook = PostgresHook('local_pdss') dhis = Dhis2(pgHook) dhis.build_de_cc_table() dhis.get_data de_maps_id = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 1) de_maps_name = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 0) declared_data = dhis.get_data(de_maps_id.dec.to_list()) validated_data = dhis.get_data(de_maps_id.val.to_list()) declared_data.to_csv("data/raw/pdss_declared.csv") validated_data.to_csv("data/raw/pdss_validated.csv")
<commit_before><commit_msg>Add data extraction from dhis<commit_after>
import pandas as pd from blsqpy.postgres_hook import PostgresHook from blsqpy.dhis2 import Dhis2 pgHook = PostgresHook('local_pdss') dhis = Dhis2(pgHook) dhis.build_de_cc_table() dhis.get_data de_maps_id = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 1) de_maps_name = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 0) declared_data = dhis.get_data(de_maps_id.dec.to_list()) validated_data = dhis.get_data(de_maps_id.val.to_list()) declared_data.to_csv("data/raw/pdss_declared.csv") validated_data.to_csv("data/raw/pdss_validated.csv")
Add data extraction from dhisimport pandas as pd from blsqpy.postgres_hook import PostgresHook from blsqpy.dhis2 import Dhis2 pgHook = PostgresHook('local_pdss') dhis = Dhis2(pgHook) dhis.build_de_cc_table() dhis.get_data de_maps_id = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 1) de_maps_name = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 0) declared_data = dhis.get_data(de_maps_id.dec.to_list()) validated_data = dhis.get_data(de_maps_id.val.to_list()) declared_data.to_csv("data/raw/pdss_declared.csv") validated_data.to_csv("data/raw/pdss_validated.csv")
<commit_before><commit_msg>Add data extraction from dhis<commit_after>import pandas as pd from blsqpy.postgres_hook import PostgresHook from blsqpy.dhis2 import Dhis2 pgHook = PostgresHook('local_pdss') dhis = Dhis2(pgHook) dhis.build_de_cc_table() dhis.get_data de_maps_id = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 1) de_maps_name = pd.read_excel("credes_monitoring/data/raw/mapping_sheet.xlsx", 0) declared_data = dhis.get_data(de_maps_id.dec.to_list()) validated_data = dhis.get_data(de_maps_id.val.to_list()) declared_data.to_csv("data/raw/pdss_declared.csv") validated_data.to_csv("data/raw/pdss_validated.csv")
c12bf811be03d8bf9f1d91adea5104f538210910
share/gdb/python/libport/__init__.py
share/gdb/python/libport/__init__.py
# Copyright (C) 2011, Gostai S.A.S. # # This software is provided "as is" without warranty of any kind, # either expressed or implied, including but not limited to the # implied warranties of fitness for a particular purpose. # # See the LICENSE file for more information. """This file define the libport gdb module""" import libport.tools import libport.printers
Add module index for libport instrumentation for GDB.
Add module index for libport instrumentation for GDB. * share/gdb/python/libport/__init__.py: New.
Python
bsd-3-clause
aldebaran/libport
Add module index for libport instrumentation for GDB. * share/gdb/python/libport/__init__.py: New.
# Copyright (C) 2011, Gostai S.A.S. # # This software is provided "as is" without warranty of any kind, # either expressed or implied, including but not limited to the # implied warranties of fitness for a particular purpose. # # See the LICENSE file for more information. """This file define the libport gdb module""" import libport.tools import libport.printers
<commit_before><commit_msg>Add module index for libport instrumentation for GDB. * share/gdb/python/libport/__init__.py: New.<commit_after>
# Copyright (C) 2011, Gostai S.A.S. # # This software is provided "as is" without warranty of any kind, # either expressed or implied, including but not limited to the # implied warranties of fitness for a particular purpose. # # See the LICENSE file for more information. """This file define the libport gdb module""" import libport.tools import libport.printers
Add module index for libport instrumentation for GDB. * share/gdb/python/libport/__init__.py: New.# Copyright (C) 2011, Gostai S.A.S. # # This software is provided "as is" without warranty of any kind, # either expressed or implied, including but not limited to the # implied warranties of fitness for a particular purpose. # # See the LICENSE file for more information. """This file define the libport gdb module""" import libport.tools import libport.printers
<commit_before><commit_msg>Add module index for libport instrumentation for GDB. * share/gdb/python/libport/__init__.py: New.<commit_after># Copyright (C) 2011, Gostai S.A.S. # # This software is provided "as is" without warranty of any kind, # either expressed or implied, including but not limited to the # implied warranties of fitness for a particular purpose. # # See the LICENSE file for more information. """This file define the libport gdb module""" import libport.tools import libport.printers
9358b83c1dc0c6e0306416705d28a48f478878db
test/tests/python-imports/container.py
test/tests/python-imports/container.py
import curses import dbm import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform if platform.python_implementation() != 'PyPy' and platform.python_version_tuple()[0] != '2': # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import curses import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform isNotPypy = platform.python_implementation() != 'PyPy' isCaveman = platform.python_version_tuple()[0] == '2' if isCaveman: import gdbm else: import dbm.gnu if isNotPypy: # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
Reorder python import test to use gnu dbm imports instead of generic dbm
Reorder python import test to use gnu dbm imports instead of generic dbm
Python
apache-2.0
nodejs-docker-bot/official-images,robfrank/official-images,mattrobenolt/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,chorrell/official-images,pesho/docker-official-images,31z4/official-images,jperrin/official-images,docker-solr/official-images,emilevauge/official-images,davidl-zend/official-images,dinogun/official-images,docker-flink/official-images,thresheek/official-images,docker-library/official-images
import curses import dbm import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform if platform.python_implementation() != 'PyPy' and platform.python_version_tuple()[0] != '2': # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') Reorder python import test to use gnu dbm imports instead of generic dbm
import curses import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform isNotPypy = platform.python_implementation() != 'PyPy' isCaveman = platform.python_version_tuple()[0] == '2' if isCaveman: import gdbm else: import dbm.gnu if isNotPypy: # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
<commit_before>import curses import dbm import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform if platform.python_implementation() != 'PyPy' and platform.python_version_tuple()[0] != '2': # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') <commit_msg>Reorder python import test to use gnu dbm imports instead of generic dbm<commit_after>
import curses import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform isNotPypy = platform.python_implementation() != 'PyPy' isCaveman = platform.python_version_tuple()[0] == '2' if isCaveman: import gdbm else: import dbm.gnu if isNotPypy: # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
import curses import dbm import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform if platform.python_implementation() != 'PyPy' and platform.python_version_tuple()[0] != '2': # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') Reorder python import test to use gnu dbm imports instead of generic dbmimport curses import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform isNotPypy = platform.python_implementation() != 'PyPy' isCaveman = platform.python_version_tuple()[0] == '2' if isCaveman: import gdbm else: import dbm.gnu if isNotPypy: # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
<commit_before>import curses import dbm import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform if platform.python_implementation() != 'PyPy' and platform.python_version_tuple()[0] != '2': # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') <commit_msg>Reorder python import test to use gnu dbm imports instead of generic dbm<commit_after>import curses import readline import bz2 assert(bz2.decompress(bz2.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import platform isNotPypy = platform.python_implementation() != 'PyPy' isCaveman = platform.python_version_tuple()[0] == '2' if isCaveman: import gdbm else: import dbm.gnu if isNotPypy: # PyPy and Python 2 don't support lzma import lzma assert(lzma.decompress(lzma.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS') import zlib assert(zlib.decompress(zlib.compress(b'IT WORKS IT WORKS IT WORKS')) == b'IT WORKS IT WORKS IT WORKS')
6b218f9e6c714af7c63bcc683a55b8d98f8a1506
WR31/ain_diff.py
WR31/ain_diff.py
import digihw import sarcli from time import sleep GPIO_CLI = 'gpio ain' def cli(cmd): s = sarcli.open() s.write(cmd) resp = s.read() s.close() return resp.strip('\r\n').strip('\r\nOK') def python_ain(): return digihw.wr31_ain_get_value() if __name__ == "__main__": for x in range(20): ain = python_ain() print "Loop %s" % x print "DIGIHW Value: %s" % (ain,) resp = cli(GPIO_CLI) print "CLI Value: %s" % (resp,) sleep(2)
Create initial script for gathering AIN value from CLI.
Create initial script for gathering AIN value from CLI.
Python
mpl-2.0
digidotcom/transport_examples
Create initial script for gathering AIN value from CLI.
import digihw import sarcli from time import sleep GPIO_CLI = 'gpio ain' def cli(cmd): s = sarcli.open() s.write(cmd) resp = s.read() s.close() return resp.strip('\r\n').strip('\r\nOK') def python_ain(): return digihw.wr31_ain_get_value() if __name__ == "__main__": for x in range(20): ain = python_ain() print "Loop %s" % x print "DIGIHW Value: %s" % (ain,) resp = cli(GPIO_CLI) print "CLI Value: %s" % (resp,) sleep(2)
<commit_before><commit_msg>Create initial script for gathering AIN value from CLI.<commit_after>
import digihw import sarcli from time import sleep GPIO_CLI = 'gpio ain' def cli(cmd): s = sarcli.open() s.write(cmd) resp = s.read() s.close() return resp.strip('\r\n').strip('\r\nOK') def python_ain(): return digihw.wr31_ain_get_value() if __name__ == "__main__": for x in range(20): ain = python_ain() print "Loop %s" % x print "DIGIHW Value: %s" % (ain,) resp = cli(GPIO_CLI) print "CLI Value: %s" % (resp,) sleep(2)
Create initial script for gathering AIN value from CLI.import digihw import sarcli from time import sleep GPIO_CLI = 'gpio ain' def cli(cmd): s = sarcli.open() s.write(cmd) resp = s.read() s.close() return resp.strip('\r\n').strip('\r\nOK') def python_ain(): return digihw.wr31_ain_get_value() if __name__ == "__main__": for x in range(20): ain = python_ain() print "Loop %s" % x print "DIGIHW Value: %s" % (ain,) resp = cli(GPIO_CLI) print "CLI Value: %s" % (resp,) sleep(2)
<commit_before><commit_msg>Create initial script for gathering AIN value from CLI.<commit_after>import digihw import sarcli from time import sleep GPIO_CLI = 'gpio ain' def cli(cmd): s = sarcli.open() s.write(cmd) resp = s.read() s.close() return resp.strip('\r\n').strip('\r\nOK') def python_ain(): return digihw.wr31_ain_get_value() if __name__ == "__main__": for x in range(20): ain = python_ain() print "Loop %s" % x print "DIGIHW Value: %s" % (ain,) resp = cli(GPIO_CLI) print "CLI Value: %s" % (resp,) sleep(2)