#!/usr/bin/env python
# Source: ofirshi/SmartMove — bin/Debug/SmartConnector/smartconnector.py
import sys
import argparse
import json
import os
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '.')))
from cpapi import APIClient, APIClientArgs
# Write a status line to the console and to the log file.
# res_action - API response from the server; when it failed, its errors and
#              warnings are reported instead of the message
# message - informational text for the user
# error - warning text shown with the WARN marker
# ---
# returns: nothing
def printStatus(res_action, message, error=None):
    line = ""
    if res_action is not None and res_action.success is False:
        # both error and warning entries are reported with the WARN marker
        for section in ('errors', 'warnings'):
            for entry in res_action.data.get(section, []):
                line += "WARN:" + "\t" + entry['message'] + "\n"
        if line == "":
            # no itemized errors/warnings: fall back to the top-level message
            line = "WARN:" + "\t" + res_action.data['message'] + "\n"
    elif message is not None:
        line += "\t" + message + "\n"
    elif error is not None:
        line += "WARN:" + "\t" + error + "\n"
    if line != "":
        print(line.rstrip())
        # file_log is a module-level file object opened elsewhere in this script
        file_log.write(line)
        file_log.flush()
# Print the informational "process ..." banner surrounded by delimiters.
# objectsType - string describing the type of objects being processed
# ---
# returns: nothing
def printMessageProcessObjects(objectsType):
    for bannerLine in ("==========", "process " + objectsType + " ...", ""):
        printStatus(None, bannerLine)
# Publish pending updates to the database and advance the update counter.
# counter - number of pending updates; when it reaches args.threshold the
#           changes are published
# isForced - publish unconditionally (used at the end of a processing pass)
# ---
# returns: the updated counter (reset to 0 after a successful publish)
def publishUpdate(counter, isForced):
    counter = max(counter, 0) + 1
    if not (isForced or counter >= args.threshold):
        return counter
    if not isForced:
        printStatus(None, "")
    printStatus(None, "----------")
    printStatus(None, "publishing to database...")
    res_publish = client.api_call("publish", {})
    if res_publish.success:
        counter = 0
    # on failure printStatus reports the server's warnings instead
    printStatus(res_publish, "publish is completed")
    printStatus(None, "----------")
    if isForced:
        printStatus(None, "")
    return counter
def isNameDuplicated(res_add_obj):
    """Check whether the server response reports a duplicated object name.

    res_add_obj - API response whose .data dict may carry an 'errors' list.

    Returns True if any error message states that another object with the
    same name already exists, False otherwise.
    """
    # NOTE: the original used a local flag shadowing the function name and
    # kept scanning after a match; any() stops at the first hit instead.
    return any(
        msg['message'].startswith("More than one object named")
        and msg['message'].endswith("exists.")
        for msg in res_add_obj.data.get('errors', [])
    )
def isIpDuplicated(res_add_obj):
    """Check whether the server response warns about a duplicated IP.

    res_add_obj - API response whose .data dict may carry a 'warnings' list.

    Returns True if any warning message starts with one of the known
    "duplicated IP" prefixes, False otherwise.
    """
    # Known warning prefixes; presumably different management server
    # versions word the message differently — keep all three variants.
    messagePrefixes = (
        "Multiple objects have the same IP address",
        "More than one network have the same IP",
        "More than one network has the same IP",
    )
    # NOTE: the original used a local flag shadowing the function name.
    return any(
        msg['message'].startswith(messagePrefixes)
        for msg in res_add_obj.data.get('warnings', [])
    )
# Check whether an object returned by the server belongs to the "global" domain.
# serverObject - JSON presentation of the object
# ---
# returns: True if the object comes from the "global" domain, False otherwise
def isServerObjectGlobal(serverObject):
    domainType = serverObject['domain']['domain-type']
    return domainType == "global domain"
# Check whether an object returned by the server belongs to a "local" domain.
# serverObject - JSON presentation of the object
# ---
# returns: True if the object comes from a "local" domain, False otherwise
def isServerObjectLocal(serverObject):
    domainType = serverObject['domain']['domain-type']
    return domainType == "domain"
# adding "new" object to server
# adjusting the name if object with the name exists at server: <initial_object_name>_<postfix>
# client - client object
# apiCommand - short string which indicates what should be done
# payload - JSON representation of "new" object
# userObjectNamePostfix - postfix as number
# changeName=True - True: to try to add object and adjust the name; False: to try to add object and NOT adjust the name
# ---
# returns: added object from server in JSON format, None - otherwise
def addUserObjectToServer(client, apiCommand, payload, userObjectNamePostfix=1, changeName=True):
isObjectAdded = False
userObjectNameInitial = ""
if changeName:
userObjectNameInitial = payload['name']
addedObject = None
while not isObjectAdded:
res_add_obj = client.api_call(apiCommand, payload)
printStatus(res_add_obj, None)
if res_add_obj.success is False:
if not changeName:
break
if isNameDuplicated(res_add_obj):
payload['name'] = userObjectNameInitial + '_' + str(userObjectNamePostfix)
userObjectNamePostfix += 1
else:
break
else:
addedObject = res_add_obj.data
isObjectAdded = True
return addedObject
# adding to server the object which contains fields with IP: hosts, networks
# adjusting the name if an object with the same name exists at server: <initial_object_name>_<postfix>
# reusing the object from the server side if an object with the same IP already exists there
# client - client object
# payload - JSON representation of "new" object
# userObjectType - the type of object: host or network
# userObjectIp - IP which will be used as filter in request to server
# mergedObjectsNamesMap - the map which contains name of user's object (key) and name of resulting object (value)
# ---
# returns: updated mergedObjectsNamesMap
def addCpObjectWithIpToServer(client, payload, userObjectType, userObjectIp, mergedObjectsNamesMap):
    printStatus(None, "processing " + userObjectType + ": " + payload['name'])
    userObjectNameInitial = payload['name']
    userObjectNamePostfix = 1
    isFinished = False
    isIgnoreWarnings = False
    while not isFinished:
        payload["ignore-warnings"] = isIgnoreWarnings
        res_add_obj_with_ip = client.api_call("add-" + userObjectType, payload)
        # printStatus shows the REPORT line only on success; on failure it
        # prints the response's errors/warnings instead
        printStatus(res_add_obj_with_ip, "REPORT: " + userObjectNameInitial + " is added as " + payload['name'])
        if res_add_obj_with_ip.success is False:
            if isIpDuplicated(res_add_obj_with_ip) and not isIgnoreWarnings:
                # an object with the same IP exists on the server: look it up and reuse it
                res_get_obj_with_ip = client.api_query("show-objects", payload={"filter": userObjectIp, "ip-only": True,
                                                                               "type": userObjectType})
                printStatus(res_get_obj_with_ip, None)
                if res_get_obj_with_ip.success is True:
                    if len(res_get_obj_with_ip.data) > 0:
                        # choose the replacement object by domain preference
                        # (isReplaceFromGlobalFirst is a module-level flag);
                        # when no preferred match stops the loop, the last
                        # candidate seen is kept
                        for serverObject in res_get_obj_with_ip.data:
                            if isServerObjectLocal(serverObject) and not isReplaceFromGlobalFirst:
                                mergedObjectsNamesMap[userObjectNameInitial] = serverObject['name']
                                break
                            if isServerObjectGlobal(serverObject) and isReplaceFromGlobalFirst:
                                mergedObjectsNamesMap[userObjectNameInitial] = serverObject['name']
                                break
                            mergedObjectsNamesMap[userObjectNameInitial] = serverObject['name']
                        printStatus(None, "REPORT: " + "CP object " + mergedObjectsNamesMap[
                            userObjectNameInitial] + " is used instead of " + userObjectNameInitial)
                        isFinished = True
                    else:
                        # nothing matched despite the duplication warning:
                        # retry the add while ignoring warnings
                        isIgnoreWarnings = True
                else:
                    isFinished = True
            elif isNameDuplicated(res_add_obj_with_ip):
                # duplicated name: retry as <initial>_<postfix>
                payload['name'] = userObjectNameInitial + '_' + str(userObjectNamePostfix)
                userObjectNamePostfix += 1
            else:
                isFinished = True
        else:
            mergedObjectsNamesMap[userObjectNameInitial] = payload['name']
            isFinished = True
    return mergedObjectsNamesMap
# Replace a group's members by their merged (server-side) names, then add
# the group to the server, adjusting its name on duplicates:
# <initial_object_name>_<postfix>.
# client - client object
# apiCommand - short string which indicates what should be done
# userGroup - group which will be processed and added to server
# mergedObjectsMap - map of objects which will be used for replacing
# mergedGroupsNamesMap - map of already-processed groups used for replacing
# ---
# returns: the added group in JSON format, or None when it was not added
def processGroupWithMembers(client, apiCommand, userGroup, mergedObjectsMap, mergedGroupsNamesMap):
    members = userGroup['Members']
    for idx in range(len(members)):
        memberName = members[idx]
        if memberName in mergedObjectsMap:
            members[idx] = mergedObjectsMap[memberName]
        elif memberName in mergedGroupsNamesMap:
            members[idx] = mergedGroupsNamesMap[memberName]
    groupPayload = {
        "name": userGroup['Name'],
        "members": members,
        "comments": userGroup['Comments'],
        "tags": userGroup['Tags']
    }
    return addUserObjectToServer(client, apiCommand, groupPayload)
# Process the CheckPoint DNS Domains and add them to the server,
# adjusting the name on duplicates: <initial_object_name>_<postfix>.
# client - client object
# userDomains - the list of domains which will be processed and added to server
# ---
# returns: mergedDomainsNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processDomains(client, userDomains):
    printMessageProcessObjects("domains...")
    publishCounter = 0
    mergedDomainsNamesMap = {}
    if not userDomains:
        return mergedDomainsNamesMap
    for userDomain in userDomains:
        initialName = userDomain['Name']
        printStatus(None, "processing domain: " + userDomain['Name'])
        domainPayload = {
            "name": userDomain['Name'],
            "is-sub-domain": userDomain['IsSubDomain'],
            "comments": userDomain['Comments'],
            "tags": userDomain['Tags']
        }
        addedDomain = addUserObjectToServer(client, "add-dns-domain", domainPayload)
        if addedDomain is None:
            printStatus(None, "REPORT: " + initialName + ' is not added.')
        else:
            mergedDomainsNamesMap[initialName] = addedDomain['name']
            printStatus(None, "REPORT: " + initialName + " is added as " + addedDomain['name'])
            publishCounter = publishUpdate(publishCounter, False)
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedDomainsNamesMap
# Process the CheckPoint Hosts and add them to the server.
# The name is adjusted when a host with the same name exists:
# <initial_object_name>_<postfix>; when a host with the same IP exists,
# the server-side object is reused instead.
# client - client object
# userHosts - the list of hosts which will be processed and added to server
# ---
# returns: mergedHostsNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processHosts(client, userHosts):
    printMessageProcessObjects("hosts")
    publishCounter = 0
    mergedHostsNamesMap = {}
    if not userHosts:
        return mergedHostsNamesMap
    for userHost in userHosts:
        hostPayload = {
            "name": userHost['Name'],
            "ip-address": userHost['IpAddress'],
            "comments": userHost['Comments'],
            "tags": userHost['Tags']
        }
        # a grown map signals the host was added or replaced successfully
        sizeBefore = len(mergedHostsNamesMap)
        mergedHostsNamesMap = addCpObjectWithIpToServer(client, hostPayload, "host", userHost['IpAddress'],
                                                        mergedHostsNamesMap)
        if len(mergedHostsNamesMap) > sizeBefore:
            publishCounter = publishUpdate(publishCounter, False)
        else:
            printStatus(None, "REPORT: " + userHost['Name'] + ' is not added.')
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedHostsNamesMap
# Process the CheckPoint Networks and add them to the server.
# The name is adjusted when a network with the same name exists:
# <initial_object_name>_<postfix>; when a network with the same subnet
# exists, the server-side object is reused instead.
# client - client object
# userNetworks - the list of networks which will be processed and added to server
# ---
# returns: mergedNetworksNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processNetworks(client, userNetworks):
    printMessageProcessObjects("networks")
    publishCounter = 0
    mergedNetworksNamesMap = {}
    if not userNetworks:
        return mergedNetworksNamesMap
    for userNetwork in userNetworks:
        networkPayload = {
            "name": userNetwork['Name'],
            "subnet4": userNetwork['Subnet'],
            "subnet-mask": userNetwork['Netmask'],
            "comments": userNetwork['Comments'],
            "tags": userNetwork['Tags']
        }
        # a grown map signals the network was added or replaced successfully
        sizeBefore = len(mergedNetworksNamesMap)
        mergedNetworksNamesMap = addCpObjectWithIpToServer(client, networkPayload, "network", userNetwork['Subnet'],
                                                           mergedNetworksNamesMap)
        if len(mergedNetworksNamesMap) > sizeBefore:
            publishCounter = publishUpdate(publishCounter, False)
        else:
            printStatus(None, "REPORT: " + userNetwork['Name'] + ' is not added.')
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedNetworksNamesMap
# processing and adding to server the CheckPoint Ranges
# adjusting the name if a range with the same name exists at server: <initial_object_name>_<postfix>
# if a range with the same start/end IP already exists then the Range object from server will be used instead
# client - client object
# userRanges - the list of ranges which will be processed and added to server
# ---
# returns: mergedRangesNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processRanges(client, userRanges):
    printMessageProcessObjects("ranges")
    publishCounter = 0
    mergedRangesNamesMap = {}
    if len(userRanges) == 0:
        return mergedRangesNamesMap
    # server ranges are bucketed by domain so the merge below can give either
    # global or local objects precedence
    serverRangesMap = {}
    serverRangesMapGlobal = {}
    serverRangesMapLocal = {}
    printStatus(None, "reading address ranges from server")
    res_get_ranges = client.api_query("show-address-ranges")
    printStatus(res_get_ranges, None)
    for serverRange in res_get_ranges.data:
        # key is "<first-ip>_<last-ip>"; only the first object per key and
        # domain bucket is kept
        key = serverRange['ipv4-address-first'] + '_' + serverRange['ipv4-address-last']
        if isServerObjectGlobal(serverRange) and key not in serverRangesMapGlobal:
            serverRangesMapGlobal[key] = serverRange['name']
        elif isServerObjectLocal(serverRange) and key not in serverRangesMapLocal:
            serverRangesMapLocal[key] = serverRange['name']
        elif key not in serverRangesMapGlobal and key not in serverRangesMapLocal and key not in serverRangesMap:
            serverRangesMap[key] = serverRange['name']
    printStatus(None, "")
    # merge the buckets into one map; the bucket merged last wins on key
    # collisions, so isReplaceFromGlobalFirst (module-level flag) decides
    # whether global or local objects take precedence
    if sys.version_info >= (3, 0):
        serverRangesMap = serverRangesMap.copy()
        if isReplaceFromGlobalFirst:
            serverRangesMap.update(serverRangesMapLocal)
            serverRangesMap.update(serverRangesMapGlobal)
        else:
            serverRangesMap.update(serverRangesMapGlobal)
            serverRangesMap.update(serverRangesMapLocal)
    else:
        # Python 2 allowed concatenating dict.items() lists directly
        if isReplaceFromGlobalFirst:
            serverRangesMap = dict(
                serverRangesMap.items() + serverRangesMapLocal.items() + serverRangesMapGlobal.items())
        else:
            serverRangesMap = dict(
                serverRangesMap.items() + serverRangesMapGlobal.items() + serverRangesMapLocal.items())
    for userRange in userRanges:
        printStatus(None, "processing range: " + userRange['Name'])
        userRangeNameInitial = userRange['Name']
        key = userRange['RangeFrom'] + '_' + userRange['RangeTo']
        if key in serverRangesMap:
            # a range with the same first/last IP exists: reuse it
            printStatus(None, None,
                        "More than one range has the same ip: '" + userRange['RangeFrom'] + "' and '" + userRange[
                            'RangeTo'] + "'")
            mergedRangesNamesMap[userRangeNameInitial] = serverRangesMap[key]
            printStatus(None, "REPORT: " + "CP object " + mergedRangesNamesMap[
                userRangeNameInitial] + " is used instead of " + userRangeNameInitial)
        else:
            # de-duplicate the name locally against the known server ranges
            # before attempting the add
            userRangeNamePostfix = 1
            if userRange['Name'] in serverRangesMap.values():
                printStatus(None, None, "More than one object named '" + userRange['Name'] + "' exists.")
            while userRange['Name'] in serverRangesMap.values():
                userRange['Name'] = userRangeNameInitial + '_' + str(userRangeNamePostfix)
                userRangeNamePostfix += 1
            payload = {
                "name": userRange['Name'],
                "ip-address-first": userRange['RangeFrom'],
                "ip-address-last": userRange['RangeTo'],
                "comments": userRange['Comments'],
                "tags": userRange['Tags'],
                "ignore-warnings": True
            }
            addedRange = addUserObjectToServer(client, "add-address-range", payload, userRangeNamePostfix)
            if addedRange is not None:
                mergedRangesNamesMap[userRangeNameInitial] = addedRange['name']
                # remember the freshly added range so later duplicates reuse it
                key = addedRange['ipv4-address-first'] + '_' + addedRange['ipv4-address-last']
                serverRangesMap[key] = addedRange['name']
                printStatus(None, "REPORT: " + userRangeNameInitial + " is added as " + addedRange['name'])
                publishCounter = publishUpdate(publishCounter, False)
            else:
                printStatus(None, "REPORT: " + userRangeNameInitial + ' is not added.')
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedRangesNamesMap
# Process the CheckPoint Network Groups (plain groups and groups with
# exclusion) and add them to the server, adjusting names on duplicates:
# <initial_object_name>_<postfix>.
# client - client object
# userNetworkGroups - the list of network groups which will be processed and added to server
# mergedNetworkObjectsMap - map of network objects which will be used for replacing
# ---
# returns: mergedGroupsNamesDict dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processNetGroups(client, userNetworkGroups, mergedNetworkObjectsMap):
    printMessageProcessObjects("network groups")
    publishCounter = 0
    mergedGroupsNamesDict = {}
    if not userNetworkGroups:
        return mergedGroupsNamesDict
    for userNetworkGroup in userNetworkGroups:
        initialName = userNetworkGroup['Name']
        if userNetworkGroup['TypeName'] == 'CheckPoint_GroupWithExclusion':
            printStatus(None, "processing network group with exclusion: " + userNetworkGroup['Name'])
            # replace the include/except parts by already-merged group names
            for part in ('Include', 'Except'):
                if userNetworkGroup[part] in mergedGroupsNamesDict:
                    userNetworkGroup[part] = mergedGroupsNamesDict[userNetworkGroup[part]]
            addedNetworkGroup = addUserObjectToServer(
                client,
                "add-group-with-exclusion",
                {
                    "name": userNetworkGroup['Name'],
                    "include": userNetworkGroup['Include'],
                    "except": userNetworkGroup['Except'],
                    "comments": userNetworkGroup['Comments'],
                    "tags": userNetworkGroup['Tags']
                }
            )
        else:
            printStatus(None, "processing network group: " + userNetworkGroup['Name'])
            addedNetworkGroup = processGroupWithMembers(client, "add-group", userNetworkGroup,
                                                        mergedNetworkObjectsMap, mergedGroupsNamesDict)
        if addedNetworkGroup is None:
            printStatus(None, "REPORT: " + initialName + " is not added.")
        else:
            mergedGroupsNamesDict[initialName] = addedNetworkGroup['name']
            printStatus(None, "REPORT: " + initialName + " is added as " + addedNetworkGroup['name'])
            publishCounter = publishUpdate(publishCounter, False)
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedGroupsNamesDict
# processing and adding to server the CheckPoint Simple Gateways
# adjusting the name if simple gateway with the name exists at server: <initial_object_name>_<postfix>
# client - client object
# userSimpleGateways - the list of simple gateways which will be processed and added to server
# ---
# returns: mergedSimpleGatewaysNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processSimpleGateways(client, userSimpleGateways):
    printMessageProcessObjects("simple gateways")
    publishCounter = 0
    mergedSimpleGatewaysNamesMap = {}
    if len(userSimpleGateways) == 0:
        return mergedSimpleGatewaysNamesMap
    for userSimpleGateway in userSimpleGateways:
        # fixed typo in the user-facing message: "getway" -> "gateway"
        printStatus(None, "processing simple gateway: " + userSimpleGateway['Name'])
        userSimpleGatewayNameInitial = userSimpleGateway['Name']
        addedSimpleGateway = addUserObjectToServer(
            client,
            "add-simple-gateway",
            {
                "name": userSimpleGateway['Name'],
                "ip-address": userSimpleGateway['IpAddress'],
                "comments": userSimpleGateway['Comments'],
                "tags": userSimpleGateway['Tags']
            }
        )
        if addedSimpleGateway is not None:
            mergedSimpleGatewaysNamesMap[userSimpleGatewayNameInitial] = addedSimpleGateway['name']
            printStatus(None, "REPORT: " + userSimpleGatewayNameInitial + " is added as " + addedSimpleGateway['name'])
            publishCounter = publishUpdate(publishCounter, False)
        else:
            printStatus(None, "REPORT: " + userSimpleGatewayNameInitial + ' is not added.')
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedSimpleGatewaysNamesMap
# Process the CheckPoint Security Zones and add them to the server,
# adjusting the name on duplicates: <initial_object_name>_<postfix>.
# client - client object
# userZones - the list of zones which will be processed and added to server
# ---
# returns: mergedZonesNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processZones(client, userZones):
    printMessageProcessObjects("zones")
    publishCounter = 0
    mergedZonesNamesMap = {}
    if not userZones:
        return mergedZonesNamesMap
    for userZone in userZones:
        initialName = userZone['Name']
        printStatus(None, "processing zone: " + userZone['Name'])
        zonePayload = {
            "name": userZone['Name'],
            "comments": userZone['Comments'],
            "tags": userZone['Tags']
        }
        addedZone = addUserObjectToServer(client, "add-security-zone", zonePayload)
        if addedZone is None:
            printStatus(None, "REPORT: " + initialName + ' is not added.')
        else:
            mergedZonesNamesMap[initialName] = addedZone['name']
            printStatus(None, "REPORT: " + initialName + " is added as " + addedZone['name'])
            publishCounter = publishUpdate(publishCounter, False)
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedZonesNamesMap
# Build the lookup key used by the services dictionaries.
# serverService - service in JSON format
# ---
# returns: string key; "" when none of the known fields is present
def provideServerServiceKey(serverService):
    if 'port' in serverService:
        # TCP / UDP / SCTP services are keyed by their port
        return serverService['port']
    if 'icmp-type' in serverService:
        # ICMP services are keyed by type, optionally suffixed with the code
        icmpKey = str(serverService['icmp-type'])
        if 'icmp-code' in serverService and serverService['icmp-code'] != 'null':
            icmpKey += "_" + str(serverService['icmp-code'])
        return icmpKey
    if 'ip-protocol' in serverService:
        # "Other" services are keyed by their IP protocol
        return serverService['ip-protocol']
    return ""
# processing and adding to server the CheckPoint Services (TCP, UDP, SCTP, ICMP or Other)
# adjusting the name if a service with the same name exists at server: <initial_object_name>_<postfix>
# if a service with the same port/type/protocol exists then the Service object from server will be used instead
# client - client object
# userServices - the list of services which will be processed and added to server
# userServiceType - the type of service which should be processed
# ---
# returns: mergedServicesMap dictionary
# the map contains name of user's object (key) and uid of resulting object (value)
def processServices(client, userServices, userServiceType):
    printMessageProcessObjects(userServiceType + " services")
    publishCounter = 0
    mergedServicesMap = {}
    # server services are bucketed by domain so the merge below can give
    # either global or local objects precedence
    serverServicesMap = {}
    serverServicesMapGlobal = {}
    serverServicesMapLocal = {}
    printStatus(None, "reading " + userServiceType + " services from server")
    res_get_services = client.api_query("show-services-" + userServiceType)
    printStatus(res_get_services, None)
    for serverService in res_get_services.data:
        # every existing server service is exposed to callers by name -> uid
        mergedServicesMap[serverService['name']] = serverService['uid']
        key = provideServerServiceKey(serverService)
        # a port-keyed service with no specific protocol overrides an earlier
        # entry for the same key
        isServiceReplacing = False
        if 'port' in serverService and ('protocol' not in serverService or serverService['protocol'] == 'null'):
            isServiceReplacing = True
        if isServerObjectGlobal(serverService) and (key not in serverServicesMapGlobal or isServiceReplacing):
            serverServicesMapGlobal[key] = (serverService['name'], serverService['uid'])
        elif isServerObjectLocal(serverService) and (key not in serverServicesMapLocal or isServiceReplacing):
            serverServicesMapLocal[key] = (serverService['name'], serverService['uid'])
        elif not isServerObjectGlobal(serverService) and not isServerObjectLocal(serverService) and (
                key not in serverServicesMap or isServiceReplacing):
            serverServicesMap[key] = (serverService['name'], serverService['uid'])
    printStatus(None, "")
    # merge the buckets into one map; the bucket merged last wins on key
    # collisions, so isReplaceFromGlobalFirst (module-level flag) decides
    # whether global or local objects take precedence
    if sys.version_info >= (3, 0):
        serverServicesMap = serverServicesMap.copy()
        if isReplaceFromGlobalFirst:
            serverServicesMap.update(serverServicesMapLocal)
            serverServicesMap.update(serverServicesMapGlobal)
        else:
            serverServicesMap.update(serverServicesMapGlobal)
            serverServicesMap.update(serverServicesMapLocal)
    else:
        # Python 2 allowed concatenating dict.items() lists directly
        if isReplaceFromGlobalFirst:
            serverServicesMap = dict(
                serverServicesMap.items() + serverServicesMapLocal.items() + serverServicesMapGlobal.items())
        else:
            serverServicesMap = dict(
                serverServicesMap.items() + serverServicesMapGlobal.items() + serverServicesMapLocal.items())
    if len(userServices) == 0:
        return mergedServicesMap
    for userService in userServices:
        printStatus(None, "processing " + userServiceType + " service: " + userService['Name'])
        userServiceNameInitial = userService['Name']
        # build the same kind of key as provideServerServiceKey does, from
        # the user-side field names
        key = ""
        duplicationValueMessagePostfix = ""
        if 'Port' in userService:
            key = userService['Port']
            duplicationValueMessagePostfix = "port: " + userService['Port']
        elif 'Type' in userService:
            key = userService['Type']
            duplicationValueMessagePostfix = "type: " + userService['Type']
            if 'Code' in userService and userService['Code'] != 'null':
                key += "_" + userService['Code']
                duplicationValueMessagePostfix = "type / code: " + userService['Type'] + " / " + userService['Code']
        elif 'IpProtocol' in userService:
            key = userService['IpProtocol']
            duplicationValueMessagePostfix = "ip-protocol: " + userService['IpProtocol']
        if key in serverServicesMap:
            # a service with the same port/type/protocol exists: reuse its uid
            printStatus(None, None,
                        "More than one " + userServiceType + " service has the same " + duplicationValueMessagePostfix)
            mergedServicesMap[userServiceNameInitial] = serverServicesMap[key][1]
            printStatus(None, "REPORT: " + "CP object " + serverServicesMap[key][
                0] + " is used instead of " + userServiceNameInitial)
        else:
            # de-duplicate the name locally against the known server services
            userServiceNamePostfix = 1
            serverServicesNames = [serverServiceNameUid[0] for serverServiceNameUid in serverServicesMap.values()]
            if userService['Name'] in serverServicesNames:
                printStatus(None, None, "More than one object named '" + userService['Name'] + "' exists.")
            while userService['Name'] in serverServicesNames:
                userService['Name'] = userServiceNameInitial + '_' + str(userServiceNamePostfix)
                userServiceNamePostfix += 1
            payload = {}
            payload["name"] = userService['Name']
            payload["comments"] = userService['Comments']
            payload["tags"] = userService['Tags']
            payload["ignore-warnings"] = True
            if 'Port' in userService:
                payload["port"] = userService['Port']
                payload["source-port"] = userService['SourcePort']
                payload["session-timeout"] = userService['SessionTimeout']
            elif 'Type' in userService:
                payload["icmp-type"] = userService['Type']
                if 'Code' in userService and userService['Code'] != 'null':
                    payload["icmp-code"] = userService['Code']
            elif 'IpProtocol' in userService:
                payload["ip-protocol"] = userService['IpProtocol']
                payload["match-for-any"] = True
            addedService = addUserObjectToServer(client, "add-service-" + userServiceType, payload,
                                                 userServiceNamePostfix)
            if addedService is not None:
                mergedServicesMap[userServiceNameInitial] = addedService['uid']
                # remember the freshly added service so later duplicates reuse it
                key = provideServerServiceKey(addedService)
                serverServicesMap[key] = (addedService['name'], addedService['uid'])
                printStatus(None, "REPORT: " + userServiceNameInitial + " is added as " + addedService['name'])
                publishCounter = publishUpdate(publishCounter, False)
            else:
                printStatus(None, "REPORT: " + userServiceNameInitial + ' is not added.')
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedServicesMap
# Process the CheckPoint Service Groups and add them to the server,
# adjusting the name on duplicates: <initial_object_name>_<postfix>.
# client - client object
# userServicesGroups - the list of service groups which will be processed and added to server
# mergedServicesMap - map of service objects which will be used for replacing
# ---
# returns: mergedServicesGroupsNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processServicesGroups(client, userServicesGroups, mergedServicesMap):
    printMessageProcessObjects("services groups")
    publishCounter = 0
    mergedServicesGroupsNamesMap = {}
    if not userServicesGroups:
        return mergedServicesGroupsNamesMap
    for userServicesGroup in userServicesGroups:
        initialName = userServicesGroup['Name']
        printStatus(None, "processing services group: " + userServicesGroup['Name'])
        addedServicesGroup = processGroupWithMembers(client, "add-service-group", userServicesGroup,
                                                     mergedServicesMap, mergedServicesGroupsNamesMap)
        if addedServicesGroup is None:
            printStatus(None, "REPORT: " + initialName + " is not added.")
        else:
            mergedServicesGroupsNamesMap[initialName] = addedServicesGroup['name']
            printStatus(None, "REPORT: " + initialName + " is added as " + addedServicesGroup['name'])
            publishCounter = publishUpdate(publishCounter, False)
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedServicesGroupsNamesMap
# Process the CheckPoint Time Groups and add them to the server,
# adjusting the name on duplicates: <initial_object_name>_<postfix>.
# client - client object
# userTimesGroups - the list of time groups which will be processed and added to server
# ---
# returns: mergedTimesGroupsNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processTimesGroups(client, userTimesGroups):
    printMessageProcessObjects("times groups")
    publishCounter = 0
    mergedTimesGroupsNamesMap = {}
    if not userTimesGroups:
        return mergedTimesGroupsNamesMap
    for userTimesGroup in userTimesGroups:
        initialName = userTimesGroup['Name']
        printStatus(None, "processing times group: " + userTimesGroup['Name'])
        timesGroupPayload = {
            "name": userTimesGroup['Name'],
            "members": userTimesGroup['Members'],
            "comments": userTimesGroup['Comments'],
            "tags": userTimesGroup['Tags']
        }
        addedTimesGroup = addUserObjectToServer(client, "add-time-group", timesGroupPayload)
        if addedTimesGroup is None:
            printStatus(None, "REPORT: " + initialName + ' is not added.')
        else:
            mergedTimesGroupsNamesMap[initialName] = addedTimesGroup['name']
            printStatus(None, "REPORT: " + initialName + " is added as " + addedTimesGroup['name'])
            publishCounter = publishUpdate(publishCounter, False)
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedTimesGroupsNamesMap
# processing and adding to server the CheckPoint Time objects
# adjusting the name if time object with the name exists at server: <initial_object_name>_<postfix>
# client - client object
# userTimes - the list of time objects which will be processed and added to server
# ---
# returns: mergedTimesNamesMap dictionary
# the map contains name of user's object (key) and name of resulting object (value)
def processTimes(client, userTimes):
    printMessageProcessObjects("times")
    publishCounter = 0
    mergedTimesNamesMap = {}
    if len(userTimes) == 0:
        return mergedTimesNamesMap
    # weekdays are presented as numbers [0..6] in userTime['RecurrenceWeekdays']
    weekdays = {0: "Sun", 1: "Mon", 2: "Tue", 3: "Wed", 4: "Thu", 5: "Fri", 6: "Sat"}
    # JSON time objects contain "RecurrencePattern" as a number; any other
    # value maps to None, matching the original nested conditional
    recurrencePatterns = {1: "Daily", 2: "Weekly", 3: "Monthly"}
    for userTime in userTimes:
        printStatus(None, "processing time: " + userTime['Name'])
        userTimeNameInitial = userTime['Name']
        # up to three hour ranges; missing boundaries default to "00:00"
        hoursRanges = []
        for index in (1, 2, 3):
            rangeFrom = userTime['HoursRangesFrom_%d' % index]
            rangeTo = userTime['HoursRangesTo_%d' % index]
            hoursRanges.append({
                "enabled": userTime['HoursRangesEnabled_%d' % index],
                "from": rangeFrom if rangeFrom is not None else "00:00",
                "to": rangeTo if rangeTo is not None else "00:00",
                "index": index
            })
        # FIX: build a fresh payload per time object. The original reused one
        # dict across iterations, and addUserObjectToServer mutates the 'name'
        # entry on duplicates, so the shared dict was a latent bug.
        payload = {
            "name": userTime['Name'],
            "comments": userTime['Comments'],
            "start-now": userTime['StartNow'],
            "start": {
                "date": userTime['StartDate'],
                "time": userTime['StartTime']
            },
            "end-never": userTime['EndNever'],
            "end": {
                "date": userTime['EndDate'],
                "time": userTime['EndTime']
            },
            "hours-ranges": hoursRanges,
            "recurrence": {
                "pattern": recurrencePatterns.get(userTime['RecurrencePattern']),
                "weekdays": [weekdays[day] for day in userTime['RecurrenceWeekdays']]
            },
            "tags": userTime['Tags']
        }
        addedTime = addUserObjectToServer(client, "add-time", payload)
        if addedTime is not None:
            mergedTimesNamesMap[userTimeNameInitial] = addedTime['name']
            printStatus(None, "REPORT: " + userTimeNameInitial + " is added as " + addedTime['name'])
            publishCounter = publishUpdate(publishCounter, False)
        else:
            printStatus(None, "REPORT: " + userTimeNameInitial + ' is not added.')
        printStatus(None, "")
    publishUpdate(publishCounter, True)
    return mergedTimesNamesMap
# processing and adding to server the CheckPoint Access Rules
# the rules are added in reverse order: the last item of the list goes first and the first item goes last
# client - client object
# userRules - the list of access rules which will be processed and added to server
# userLayerName - the name of layer where access rules will be added
# skipCleanUpRule - the flag which indicates to exclude "Clean up" rule from layer or not; "Clean up" rule is the last rule in the layer always
# mergedNetworkObjectsMap - map of all network objects (groups included) which will be used for replacing
# mergedServiceObjectsMap - map of all services objects (groups included) which will be used for replacing
# mergedTimesGroupsNamesMap - map of time groups objects which will be used for replacing
# mergedTimesNamesMap - map of time objects which will be used for replacing
# ---
# returns: nothing
def addAccessRules(client, userRules, userLayerName, skipCleanUpRule, mergedNetworkObjectsMap, mergedServiceObjectsMap,
                   mergedTimesGroupsNamesMap, mergedTimesNamesMap):
    if userRules is None:
        return
    # JSON access rules contain "action" as a number which maps to these SmartMove values
    # (hoisted out of the loop: the table is loop-invariant)
    actions = {0: "accept", 1: "drop", 2: "reject", 3: "apply layer"}
    publishCounter = 0
    printStatus(None, "processing access rules to " + userLayerName + " layer")
    printStatus(None, "")
    userRulesStartPosition = -2 if skipCleanUpRule else -1
    # userRules[userRulesStartPosition::-1]:
    # -1 step means iterate backwards; userRulesStartPosition is the start point
    # (next-to-last element when the trailing "Clean up" rule is skipped); no end point - all elements
    for i, userRule in enumerate(userRules[userRulesStartPosition::-1]):
        printStatus(None, "processing access rule: #" + str(len(userRules) - i) + ", " + (
            userRule['Name'] if userRule['Name'] is not None else ""))
        # replace object names with their merged (server-side) names when a mapping exists
        sources = [mergedNetworkObjectsMap.get(source['Name'], source['Name'])
                   for source in userRule['Source']]
        destinations = [mergedNetworkObjectsMap.get(destination['Name'], destination['Name'])
                        for destination in userRule['Destination']]
        services = [mergedServiceObjectsMap.get(service['Name'], service['Name'])
                    for service in userRule['Service']]
        # time-ranges are supported along with time-groups; a time-group mapping takes precedence
        times = [mergedTimesGroupsNamesMap.get(time['Name'], mergedTimesNamesMap.get(time['Name'], time['Name']))
                 for time in userRule['Time']]
        payload = {
            "layer": userRule['Layer'],
            "position": "top",
            "name": userRule['Name'],
            "action": actions[userRule['Action']],
            "destination": destinations,
            "destination-negate": userRule['DestinationNegated'],
            "enabled": userRule['Enabled'],
            "service": services,
            "source": sources,
            "source-negate": userRule['SourceNegated'],
            "time": times,
            "track": {"type": "None" if userRule['Track'] == 0 else "Log"},
            "comments": userRule['Comments']
        }
        if userRule['Action'] == 3:
            # "apply layer" actions reference an inline (sub-policy) layer by name
            payload["inline-layer"] = userRule['SubPolicyName']
        if userRule['ConversionComments'].strip() != "":
            payload["custom-fields"] = {"field-1": userRule['ConversionComments']}
        addedRule = addUserObjectToServer(client, "add-access-rule", payload, changeName=False)
        if addedRule is not None:
            printStatus(None, "REPORT: access rule is added")
            publishCounter = publishUpdate(publishCounter, False)
        else:
            printStatus(None, "REPORT: access rule is not added")
        printStatus(None, "")
    # force a final publish for any updates below the threshold
    publishUpdate(publishCounter, True)
# processing and adding to server the CheckPoint Package with Layers and Access Rules
# client - client object
# userPackage - the package which contains layers and access rules
# mergedNetworkObjectsMap - map of all network objects (groups included) which will be used for replacing
# mergedServiceObjectsMap - map of all services objects (groups included) which will be used for replacing
# mergedTimesGroupsNamesMap - map of time groups objects which will be used for replacing
# mergedTimesNamesMap - map of time objects which will be used for replacing
# ---
# returns: added package in JSON format (None when nothing was added)
def processPackage(client, userPackage, mergedNetworkObjectsMap, mergedServiceObjectsMap, mergedTimesGroupsNamesMap,
                   mergedTimesNamesMap):
    printMessageProcessObjects("package")
    addedPackage = None
    if userPackage is not None:
        publishCounter = 0
        printStatus(None, "processing package: " + userPackage['Name'])
        # package name is kept as-is (changeName=False): layers and rules reference it by name
        addedPackage = addUserObjectToServer(
            client,
            "add-package",
            {
                "name": userPackage['Name'],
                "threat-prevention": False,
                "tags": userPackage['Tags']
            },
            changeName=False
        )
        if addedPackage is None:
            # without the package there is nothing to attach layers/rules to
            printStatus(None, "REPORT: " + userPackage['Name'] + " package is not added")
            return addedPackage
        printStatus(None, "REPORT: " + userPackage['Name'] + " package is added")
        printStatus(None, "")
        # force-publish so the package exists server-side before layers are added
        publishCounter = publishUpdate(publishCounter, True)
        if userPackage['SubPolicies'] is not None:
            for userSubLayer in userPackage['SubPolicies']:
                printStatus(None, "processing access layer: " + userSubLayer['Name'])
                addedSubLayer = addUserObjectToServer(
                    client,
                    "add-access-layer",
                    {
                        "name": userSubLayer['Name'],
                        "add-default-rule": False,
                        "applications-and-url-filtering": userSubLayer['ApplicationsAndUrlFiltering'],
                        "comments": userSubLayer['Comments'],
                        "tags": userSubLayer['Tags']
                    },
                    changeName=False
                )
                if addedSubLayer is None:
                    # skip this layer's rules; they would have nowhere to go
                    printStatus(None, "REPORT: " + userSubLayer['Name'] + " layer is not added")
                    continue
                printStatus(None, "REPORT: " + userSubLayer['Name'] + " layer is added")
                printStatus(None, "")
                publishCounter = publishUpdate(publishCounter, True)
                # sub-policy layers keep their "Clean up" rule (skipCleanUpRule=False)
                addAccessRules(client, userSubLayer['Rules'], userSubLayer['Name'], False, mergedNetworkObjectsMap,
                               mergedServiceObjectsMap, mergedTimesGroupsNamesMap, mergedTimesNamesMap)
        if userPackage['ParentLayer'] is not None:
            # the parent layer's trailing "Clean up" rule is excluded (skipCleanUpRule=True)
            addAccessRules(client, userPackage['ParentLayer']['Rules'], "parent", True, mergedNetworkObjectsMap,
                           mergedServiceObjectsMap, mergedTimesGroupsNamesMap, mergedTimesNamesMap)
    return addedPackage
# resolving the object name of a NAT rule field through a merged-objects map
# userNatObject - the JSON object of a NAT rule field (may be None)
# mergedObjectsMap - map of object names which will be used for replacing
# ---
# returns: the (possibly replaced) object name, or "" when the object is absent
def _resolveNatObjectName(userNatObject, mergedObjectsMap):
    if userNatObject is None:
        return ""
    name = userNatObject['Name']
    return mergedObjectsMap.get(name, name)
# processing and adding to server the CheckPoint NAT rules
# NAT rules are added only if package has been added
# client - client object
# addedPackage - added package in JSON format
# userNatRules - the list of NAT rules which will be processed and added to server
# mergedNetworkObjectsMap - map of all network objects (groups included) which will be used for replacing
# mergedServiceObjectsMap - map of all services objects (groups included) which will be used for replacing
# ---
# returns: nothing
def processNatRules(client, addedPackage, userNatRules, mergedNetworkObjectsMap, mergedServiceObjectsMap):
    printMessageProcessObjects("nat rules")
    if addedPackage is None:
        # fixed grammar of the report message ("can not been added")
        printStatus(None, "REPORT: nat rules can not be added because package was not added")
        return
    publishCounter = 0
    for i, userNatRule in enumerate(userNatRules):
        printStatus(None, "processing nat rule: #" + str(i))
        # original and translated fields are all resolved the same way through the merged maps
        sourceOrig = _resolveNatObjectName(userNatRule['Source'], mergedNetworkObjectsMap)
        destinationOrig = _resolveNatObjectName(userNatRule['Destination'], mergedNetworkObjectsMap)
        serviceOrig = _resolveNatObjectName(userNatRule['Service'], mergedServiceObjectsMap)
        sourceTrans = _resolveNatObjectName(userNatRule['TranslatedSource'], mergedNetworkObjectsMap)
        destinationTrans = _resolveNatObjectName(userNatRule['TranslatedDestination'], mergedNetworkObjectsMap)
        serviceTrans = _resolveNatObjectName(userNatRule['TranslatedService'], mergedServiceObjectsMap)
        payload = {
            "package": userNatRule['Package'],
            "position": "bottom",
            "comments": userNatRule['Comments'],
            "enabled": userNatRule['Enabled'],
            # SmartMove encodes the NAT method as a number: 0 = static, anything else = hide
            "method": "static" if userNatRule['Method'] == 0 else "hide",
            "original-source": sourceOrig,
            "original-destination": destinationOrig,
            "original-service": serviceOrig,
            "translated-source": sourceTrans,
            "translated-destination": destinationTrans,
            "translated-service": serviceTrans
        }
        addedNatRule = addUserObjectToServer(client, "add-nat-rule", payload, changeName=False)
        if addedNatRule is not None:
            printStatus(None, "REPORT: nat rule is added")
            publishCounter = publishUpdate(publishCounter, False)
        else:
            printStatus(None, "REPORT: nat rule is not added")
        printStatus(None, "")
    # force a final publish for any updates below the threshold
    publishUpdate(publishCounter, True)
# START
# command-line interface definition
args_parser = argparse.ArgumentParser()
args_parser._optionals.title = "arguments"  # rename argparse's default "optional arguments" section header
args_parser.add_argument('-r', '--root', action="store_true",
                         help="If administrator logged into the management server and wants to receive SuperUser permissions, 'login-as-root' feature might be used. " +
                              "In this case providing additional login credentials is not required.")
args_parser.add_argument('-m', '--management', default='127.0.0.1',
                         help="Management server IP address or name. Default: 127.0.0.1")
args_parser.add_argument('--port', type=int,
                         help="Server port. Default: 443")
args_parser.add_argument('-u', '--user',
                         help="User name")
# NOTE(review): the help text below looks garbled ("<PASSWORD>") - likely an anonymization artifact; verify intended wording
args_parser.add_argument('-p', '--password',
                         help="<PASSWORD> password")
args_parser.add_argument('-f', '--file', default='cp_objects.json',
                         help="JSON file with CheckPoint Objects. Default: cp_objects.json")
args_parser.add_argument('-t', '--threshold', type=int, default=100,
                         help="Parameter specifies maximum number of Check Point objects/rules to add before starting publish operation. Default: 100")
args_parser.add_argument('-d', '--domain', default=None,
                         help="The name/uid of the domain you want to log into in an MDS environment.")
args_parser.add_argument('--replace-from-global-first', default="false",
                         help="The argument indicates that SmartConnector should use 'Global' objects at first, by default it uses 'Local' objects. [true, false]")
args = args_parser.parse_args()
# build the log file name: "smartconnector[_<json file base name>].log"
file_name_log = "smartconnector"
if args.file != "cp_objects.json":
    file_name_log += "_" + os.path.splitext(args.file)[0]
file_name_log += ".log"
# start every run with a fresh log file
if os.path.exists(file_name_log):
    os.remove(file_name_log)
file_log = open(file_name_log, "w+")
# argument validation: each branch reports exactly one configuration problem and shows usage
if not args.root and args.user is None:
    print("")
    printStatus(None, None, "No user or root option is specified.")
    print("")
    args_parser.print_help()
elif args.root and args.user is not None:
    print("")
    printStatus(None, None, "Command contains ambiguous parameters. User is unexpected when logging in as root.")
    print("")
    args_parser.print_help()
elif args.root and args.management != '127.0.0.1':
    print("")
    printStatus(None, None, "Command contains ambiguous parameters. Management is unexpected when logging in as root.")
    print("")
    args_parser.print_help()
elif not args.root and args.password is None:
    print("")
    printStatus(None, None, "No password option is specified.")
    print("")
    args_parser.print_help()
elif not os.path.isfile(args.file):
    print("")
    # NOTE(review): message has a grammar typo ("does not exists") - runtime string left unchanged here
    printStatus(None, None, "The file does not exists")
    print("")
    args_parser.print_help()
elif args.replace_from_global_first.lower() != "true" and args.replace_from_global_first.lower() != "false":
    print("")
    printStatus(None, None,
                "smartconnector.py: error: argument --replace-from-global-first: invalid boolean value: '" + args.replace_from_global_first + "'")
    print("")
    args_parser.print_help()
else:
    # arguments are valid: convert the textual flag (validated above) to a real boolean
    if args.replace_from_global_first.lower() == "true":
        isReplaceFromGlobalFirst = True
    elif args.replace_from_global_first.lower() == "false":
        isReplaceFromGlobalFirst = False
    # echo the effective configuration to console and log
    # NOTE(review): isReplaceFromGlobalFirst is only logged below - presumably consumed elsewhere; verify
    printStatus(None, "Input arguments:")
    printStatus(None, "root flag is set" if args.root else "root flag is not set")
    printStatus(None, "management: " + args.management)
    printStatus(None,
                "port: " + str(args.port) if args.port is not None else "port: is not set, default value will be used")
    printStatus(None, "domain: " + args.domain if args.domain is not None else "domain: is not set")
    printStatus(None, "user: " + args.user if args.user is not None else "user: is not set")
    printStatus(None, "password: ***" if args.password is not None else "password: is not set")
    printStatus(None, "file: " + args.file)
    printStatus(None, "threshold: " + str(args.threshold))
    printStatus(None, "replace-from-global-first: " + str(isReplaceFromGlobalFirst))
    printStatus(None, "===========================================")
    printStatus(None, "reading and parsing processes are started for JSON file: " + args.file)
    with open(args.file) as json_file:
        json_data = json.load(json_file)
    # define lists of CheckPoint Objects
    userDomains = []
    userHosts = []
    userNetworks = []
    userRanges = []
    userNetGroups = []
    userSimpleGateways = []
    userZones = []
    userServicesTcp = []
    userServicesUdp = []
    userServicesSctp = []  # is not used in Cisco
    userServicesIcmp = []  # is not used in Cisco
    userServicesOther = []
    userServicesGroups = []
    userTimesGroups = []
    userTimes = []
    userPackage = None
    userNatRules = []
    # bucket every parsed JSON object by its SmartMove type name
    for jsonObject in json_data:
        if jsonObject is None or 'TypeName' not in jsonObject:
            continue
        if jsonObject['TypeName'] == 'CheckPoint_Domain':
            userDomains.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_Host':
            userHosts.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_Network':
            userNetworks.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_Range':
            userRanges.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_NetworkGroup' or jsonObject[
                'TypeName'] == 'CheckPoint_GroupWithExclusion':
            userNetGroups.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_SimpleGateway':
            userSimpleGateways.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_Zone':
            userZones.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_TcpService':
            userServicesTcp.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_UdpService':
            userServicesUdp.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_SctpService':
            userServicesSctp.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_IcmpService':
            userServicesIcmp.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_OtherService':
            userServicesOther.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_ServiceGroup':
            userServicesGroups.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_TimeGroup':
            userTimesGroups.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_Time':
            userTimes.append(jsonObject)
        if jsonObject['TypeName'] == 'CheckPoint_Package':
            userPackage = jsonObject
        if jsonObject['TypeName'] == 'CheckPoint_NAT_Rule':
            userNatRules.append(jsonObject)
    printStatus(None, "reading and parsing processes are completed for JSON file: " + args.file)
    client_args = None
    if args.port is not None:
        client_args = APIClientArgs(server=args.management, port=args.port)
    else:
        client_args = APIClientArgs(server=args.management)
    with APIClient(client_args) as client:
        client.debug_file = "api_calls.json"  # full API trace for troubleshooting
        printStatus(None, "checking fingerprint")
        if client.check_fingerprint() is False:
            printStatus(None, "Could not get the server's fingerprint - Check connectivity with the server.")
        else:
            if args.root:
                msg = "login as root to "
                if args.domain is not None:
                    msg += args.domain + " domain of local server"
                else:
                    msg += "local server"
                printStatus(None, msg)
                login_res = client.login_as_root(domain=args.domain)
            else:
                msg = "login as " + args.user + " to "
                if args.domain is not None:
                    msg += args.domain + " domain of " + args.management + " server"
                else:
                    msg += args.management + " server"
                printStatus(None, msg)
                login_res = client.login(args.user, args.password, domain=args.domain)
            if login_res.success is False:
                printStatus(None, "Login failed: {}".format(login_res.error_message))
            else:
                printStatus(None, "")
                # network objects go first: groups, gateways and rules reference them by name
                mergedNetworkObjectsMap = {}
                mergedNetworkObjectsMap.update(processDomains(client, userDomains))
                mergedNetworkObjectsMap.update(processHosts(client, userHosts))
                mergedNetworkObjectsMap.update(processNetworks(client, userNetworks))
                mergedNetworkObjectsMap.update(processRanges(client, userRanges))
                mergedNetworkObjectsMap.update(processNetGroups(client, userNetGroups, mergedNetworkObjectsMap))
                mergedNetworkObjectsMap.update(processSimpleGateways(client, userSimpleGateways))
                mergedNetworkObjectsMap.update(processZones(client, userZones))
                mergedServicesObjectsMap = {}
                mergedServicesObjectsMap.update(processServices(client, userServicesTcp, "tcp"))
                mergedServicesObjectsMap.update(processServices(client, userServicesUdp, "udp"))
                mergedServicesObjectsMap.update(processServices(client, userServicesSctp, "sctp"))
                mergedServicesObjectsMap.update(processServices(client, userServicesIcmp, "icmp"))
                mergedServicesObjectsMap.update(processServices(client, userServicesOther, "other"))
                mergedServicesObjectsMap.update(
                    processServicesGroups(client, userServicesGroups, mergedServicesObjectsMap))
                mergedTimesGroupsMap = processTimesGroups(client, userTimesGroups)
                mergedTimesMap = processTimes(client, userTimes)
                # the package (layers + access rules) and NAT rules rely on all maps built above
                addedPackage = processPackage(client, userPackage, mergedNetworkObjectsMap, mergedServicesObjectsMap,
                                              mergedTimesGroupsMap, mergedTimesMap)
                processNatRules(client, addedPackage, userNatRules, mergedNetworkObjectsMap, mergedServicesObjectsMap)
        printStatus(None, "==========")
file_log.close()
# END
|
uriegel/registry-changes-detector | binding.gyp | <gh_stars>1-10
{
"targets": [{
"target_name": "registry-changes-detector",
"sources": [
'addon.cpp'
],
'include_dirs': [
"<!@(node -p \"require('node-addon-api').include\")",
"<!@(node -p \"var a = require('node-addon-api').include; var b = a.substr(0, a.length - 15); b + 'event-source-base' + a[a.length-1]\")"
],
'dependencies': ["<!(node -p \"require('node-addon-api').gyp\")"],
"cflags": ["-Wall", "-std=c++17"],
'cflags_cc': ["-Wall", "-std=c++17"],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions' ],
'link_settings': {
"libraries": [
]
},
'conditions': [
['OS=="win"', {
'defines': ['WINDOWS'],
"msvs_settings": {
"VCCLCompilerTool": {
"ExceptionHandling": 1
}
},
"sources": [
'registry.cpp'
]
}]
]
}]
}
|
QuiNovas/tfe-dependency-manager | src/lambda_function/function.py | import boto3
import hmac
import json
import logging.config
import re
import requests
from boto3.dynamodb.conditions import Attr
from hashlib import sha512
from os import environ
BASE_TFE_API_URL = 'https://app.terraform.io/api/v2'
CURRENT_STATE_VERSION_API_URL = BASE_TFE_API_URL + '/workspaces/{}/current-state-version'
SHOW_WORKSPACE_API_URL = BASE_TFE_API_URL + '/organizations/{}/workspaces/{}'
CREATE_RUN_API_URL = BASE_TFE_API_URL + '/runs'
API_REQUEST_HEADERS = {
'Authorization': 'Bearer {}'.format(environ['API_TOKEN']),
'Content-Type': 'application/vnd.api+json'
}
API_RESPONSE_HEADERS = {
'Content-Type': 'text/plain'
}
WORKSPACE_DEPENDENCIES_TABLE = boto3.resource('dynamodb').Table(environ['WORKSPACE_DEPENDENCIES_TABLE'])
ORGANIZATION_FIELD = 'organization_name'
WORKSPACE_FIELD = 'workspace_name'
WORKSPACE_ID_FIELD = 'workspace_id'
REMOTE_WORKSPACES_FIELD = 'remote_workspaces'
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def handler(event, context):
    # Lambda entry point for TFE run notifications: verifies the HMAC signature
    # (when configured), then for 'run:completed' notifications registers the
    # workspace's remote-state dependencies and queues runs for dependent workspaces.
    body = event['body']
    # signature verification runs when either side expects it (token configured OR header present)
    if 'NOTIFICATION_TOKEN' in environ or 'X-Tfe-Notification-Signature' in event['headers']:
        if not 'NOTIFICATION_TOKEN' in environ:
            logger.error('Missing NOTIFICATION_TOKEN in environment')
            return {
                'statusCode': requests.codes.server_error,
                'headers': API_RESPONSE_HEADERS,
                'body': 'Missing NOTIFICATION_TOKEN in environment',
            }
        elif not 'X-Tfe-Notification-Signature' in event['headers']:
            logger.warning('Missing X-Tfe-Notification-Signature header')
            return {
                'statusCode': requests.codes.bad_request,
                'headers': API_RESPONSE_HEADERS,
                'body': 'Missing X-Tfe-Notification-Signature header',
            }
        # NOTE(review): plain != comparison of HMAC digests is not constant-time;
        # consider hmac.compare_digest for timing-attack resistance
        elif hmac.new(str.encode(environ['NOTIFICATION_TOKEN']), str.encode(body), sha512).hexdigest() != event['headers'].get('X-Tfe-Notification-Signature'):
            logger.warning('Invalid X-Tfe-Notification-Signature header: {}'.format(event['headers'].get('X-Tfe-Notification-Signature')))
            return {
                'statusCode': requests.codes.bad_request,
                'headers': API_RESPONSE_HEADERS,
                'body': 'Invalid X-Tfe-Notification-Signature header {}'.format(event['headers'].get('X-Tfe-Notification-Signature')),
            }
    notification_body = json.loads(body)
    for notification in notification_body['notifications']:
        if notification['trigger'] == 'run:completed':
            organization = notification_body[ORGANIZATION_FIELD]
            workspace = notification_body[WORKSPACE_FIELD]
            logger.info('Run completed notification received for {}/{}'.format(organization, workspace))
            remote_workspaces = _get_remote_workspaces(notification_body[WORKSPACE_ID_FIELD])
            _register_workspace_dependencies(organization, workspace, remote_workspaces)
            _run_dependent_workspaces(organization, workspace)
            # NOTE(review): returns after the first 'run:completed' notification; any later
            # notifications in the same payload are ignored - confirm this is intended
            return {
                'statusCode': requests.codes.accepted,
                'headers': API_RESPONSE_HEADERS,
                'body': 'Registered and running dependencies for {}/{}'.format(organization, workspace),
            }
    return {
        'statusCode': requests.codes.ok,
    }
def _get_remote_workspaces(workspace_id):
    # Download the workspace's current Terraform state and collect the names of
    # remote workspaces referenced via terraform_remote_state data sources.
    logger.info('Getting current state version for {}'.format(workspace_id))
    response = requests.get(
        CURRENT_STATE_VERSION_API_URL.format(workspace_id),
        headers=API_REQUEST_HEADERS
    )
    response.raise_for_status()
    current_state_version = response.json()
    logger.info('Getting current state for workspace {}'.format(workspace_id))
    # the state body is fetched from the pre-signed download URL (no auth header required)
    response = requests.get(current_state_version['data']['attributes']['hosted-state-download-url'])
    response.raise_for_status()
    state = response.json()
    remote_workspaces = set()
    for resource in state['resources']:
        if resource['type'] == 'terraform_remote_state':
            for instance in resource['instances']:
                if instance['attributes']['backend'] == 'remote':
                    remote_workspaces.add(instance['attributes']['config']['value']['workspaces']['name'])
                    # NOTE(review): stops after the first remote-backend instance of this
                    # resource - confirm a resource cannot carry several remote workspaces
                    break
    return remote_workspaces
def _register_workspace_dependencies(organization, workspace, remote_workspaces):
    """Persist the set of remote workspaces that organization/workspace depends on.

    :param organization: TFE organization name
    :param workspace: TFE workspace name
    :param remote_workspaces: set of remote workspace names the workspace references
    """
    logger.info(
        'Registering remote workspaces {} for workspace {}/{}'.format(
            remote_workspaces if remote_workspaces else None,
            organization,
            workspace
        )
    )
    # DynamoDB rejects empty sets, so the attribute is written only when
    # there is at least one dependency (same behavior as the previous
    # duplicated put_item branches, now collapsed into one call)
    item = {
        ORGANIZATION_FIELD: organization,
        WORKSPACE_FIELD: workspace,
    }
    if remote_workspaces:
        item[REMOTE_WORKSPACES_FIELD] = remote_workspaces
    WORKSPACE_DEPENDENCIES_TABLE.put_item(Item=item)
def _run_dependent_workspaces(organization, workspace, exclusive_start_key=None):
    """Scan the dependency table and queue a run for every workspace that depends
    on organization/workspace, following DynamoDB scan pagination recursively.

    :param organization: TFE organization name of the workspace that just ran
    :param workspace: TFE workspace name of the workspace that just ran
    :param exclusive_start_key: pagination cursor from a previous scan page
    """
    logger.info('Scanning for dependent workspaces for workspace {}/{}'.format(organization, workspace))
    # the two scan branches differed only by ExclusiveStartKey; build kwargs once
    scan_kwargs = {
        'ProjectionExpression': '{},{}'.format(ORGANIZATION_FIELD, WORKSPACE_FIELD),
        'FilterExpression': Attr(REMOTE_WORKSPACES_FIELD).contains('{}/{}'.format(organization, workspace)),
        'ConsistentRead': True,
    }
    if exclusive_start_key:
        scan_kwargs['ExclusiveStartKey'] = exclusive_start_key
    response = WORKSPACE_DEPENDENCIES_TABLE.scan(**scan_kwargs)
    for item in response.get('Items', []):
        _run_dependent_workspace(item[ORGANIZATION_FIELD], item[WORKSPACE_FIELD], '{}/{}'.format(organization, workspace))
    # LastEvaluatedKey marks an incomplete scan; recurse for the next page
    if 'LastEvaluatedKey' in response:
        _run_dependent_workspaces(organization, workspace, exclusive_start_key=response['LastEvaluatedKey'])
def _run_dependent_workspace(organization, workspace, originating_workspace):
    """Queue a TFE run for organization/workspace because originating_workspace changed.

    Deletes the stale dependency record when the workspace no longer exists in TFE;
    other failures are logged but not retried.
    """
    workspace_response = requests.get(
        SHOW_WORKSPACE_API_URL.format(organization, workspace),
        headers=API_REQUEST_HEADERS
    )
    if workspace_response.status_code == requests.codes.ok:
        logger.info('Creating run for workspace {}/{}'.format(organization, workspace))
        run_response = requests.post(
            CREATE_RUN_API_URL,
            headers=API_REQUEST_HEADERS,
            data=json.dumps(
                {
                    'data': {
                        'attributes': {
                            'message': 'Queued automatically from {}'.format(originating_workspace),
                        },
                        'type': 'runs',
                        'relationships': {
                            'workspace': {
                                'data': {
                                    'type': 'workspaces',
                                    'id': workspace_response.json()['data']['id'],
                                },
                            },
                        },
                    },
                }
            )
        )
        if run_response.status_code != requests.codes.created:
            logger.warning(
                'Failed to create run for workspace {}/{}, code {}, message {}'.format(
                    organization, workspace, run_response.status_code, json.dumps(run_response.json())
                )
            )
    elif workspace_response.status_code == requests.codes.not_found:
        # the workspace was removed from TFE; drop the stale dependency record
        # (fixed typo in log message: "deleteing" -> "deleting")
        logger.info('Workspace {}/{} does not exist, deleting'.format(organization, workspace))
        WORKSPACE_DEPENDENCIES_TABLE.delete_item(
            Key={
                ORGANIZATION_FIELD: organization,
                WORKSPACE_FIELD: workspace
            }
        )
    else:
        logger.warning(
            'Failed to get Workspace ID for workspace {}/{}, code {}, message {}'.format(
                organization, workspace, workspace_response.status_code, workspace_response.text
            )
        )
|
makaramkd/thenewboston-python-client | tnb/validators.py | from tnb.base_client import BaseClient
class Validator(BaseClient):
    """HTTP client for the thenewboston validator node API."""

    def fetch_accounts(self, offset: int = 0, limit: int = 50) -> dict:
        """Retrieve the paginated list of accounts known to the validator.

        :param offset: The offset to start at. Default: 0
        :param limit: The limit of results to retrieve. Default: 50
        Return response as a Python object
        """
        return self.fetch("/accounts", params={"offset": offset, "limit": limit})

    def fetch_account_balance(self, account_number: str) -> dict:
        """Retrieve the balance of a single account.

        :param account_number: The account number of the account
        Return response as a Python object
        """
        return self.fetch(f"/accounts/{account_number}/balance")

    def fetch_account_balance_lock(self, account_number: str) -> dict:
        """Retrieve the balance lock of a single account.

        :param account_number: The account number of the account
        Return response as a Python object
        """
        return self.fetch(f"/accounts/{account_number}/balance_lock")

    def fetch_confirmation_block(self, block_identifier: str) -> dict:
        """Retrieve a (valid) confirmation block by its identifier.

        :param block_identifier: ID for the block
        Return response as a Python object
        """
        return self.fetch(f"/confirmation_blocks/{block_identifier}/valid")

    def fetch_validator_config(self) -> dict:
        """Retrieve this validator's configuration.

        Return response as a Python object
        """
        return self.fetch("/config")

    def connection_requests(
        self, address: str, port: int, protocol: str, node_id: str, signature: str
    ) -> dict:
        """Send a connection request to the validator.

        :param address: The IP address of requesting node
        :param port: The port of requesting node
        :param protocol: The protocol of requesting node
        :param node_id: The Node Identifier of the requesting node
        :param signature: The signature is signed by requesting node Identifier
        signing key
        Return response as a Python object
        """
        connection_message = {
            "ip_address": address,
            "port": port,
            "protocol": protocol,
        }
        return self.post(
            "/connection_requests",
            body={
                "message": connection_message,
                "node_identifier": node_id,
                "signature": signature,
            },
        )

    def fetch_banks(self, offset: int = 0, limit: int = 50) -> dict:
        """Retrieve the paginated list of banks known to the validator.

        :param offset: The offset to start at. Default: 0
        :param limit: The limit of results to retrieve. Default: 50
        Return response as a Python object
        """
        return self.fetch("/banks", params={"offset": offset, "limit": limit})

    def fetch_bank(self, node_id: str) -> dict:
        """Retrieve a single bank by node identifier.

        :param node_id: Node identifier
        Return response as a Python object
        """
        return self.fetch(f"/banks/{node_id}")

    def patch_bank(self, trust: float, node_id: str, signature: str) -> dict:
        """Update the trust level assigned to a bank.

        :param trust: Trust level as float
        :param node_id: Node identifier
        :param signature: Message signed by signature key
        Return response as a Python object
        """
        return self.patch(
            f"/banks/{node_id}",
            body={
                "message": {"trust": trust},
                "node_identifier": node_id,
                "signature": signature,
            },
        )

    def fetch_validators(self, offset: int = 0, limit: int = 50) -> dict:
        """Retrieve the paginated list of validators.

        :param offset: The offset to start at. Default: 0
        :param limit: The limit of results to retrieve. Default: 50
        Return response as a Python object
        """
        return self.fetch("/validators", params={"offset": offset, "limit": limit})

    def fetch_validator(self, node_id: str) -> dict:
        """Retrieve a single validator by node identifier.

        :param node_id: Node identifier
        Return response as a Python object
        """
        return self.fetch(f"/validators/{node_id}")

    def patch_validators(self, node_id: str, trust: float, signature: str) -> dict:
        """Update the trust level assigned to a validator.

        :param node_id: Node identifier of the Bank
        :param trust: The value assigned to trust level of an account
        :param signature: The signature is signed by Bank's Node Identifier
        Signing Key
        Return response as a Python object
        """
        return self.patch(
            f"/validators/{node_id}",
            body={
                "message": {"trust": trust},
                "node_identifier": node_id,
                "signature": signature,
            },
        )

    def post_upgrade_request(
        self,
        validator_node_identifier: str,
        node_identifier: str,
        signature: str,
    ) -> dict:
        """Post an upgrade notice to a validator and get the result status code.

        :param validator_node_identifier: Node identifier of bank receiving
        the request
        :param node_identifier: Node identifier of Validator sending
        the request
        :param signature: Signature of the message
        Return response as a Python object
        """
        return self.post(
            "/upgrade_request",
            body={
                "message": {"validator_node_identifier": validator_node_identifier},
                "node_identifier": node_identifier,
                "signature": signature,
            },
        )
|
leferrad/tensorflow-mobilenet | scripts/make_inferences.py | from __future__ import print_function
from mobilenet.core import MobileNetV1Restored
from mobilenet.fileio import get_logger
from mobilenet.imagenet import LabelLookup
import os
import argparse
logger = get_logger(name="make_inferences", level='debug')
if __name__ == '__main__':
    # CLI: run a restored MobileNet over a directory of images and log the top-10 labels
    parser = argparse.ArgumentParser()
    parser.add_argument('-pb', '--pb-path', dest='pb_path', required=True,
                        help="Path to inference graph in Protobuf format (i.e. file ending with .pb)")
    parser.add_argument('-img', '--img', dest='img_path', required=True,
                        help="Path to directory with images to use for predictions")
    args = parser.parse_args()
    pb_path = args.pb_path
    img_path = args.img_path
    if not os.path.exists(pb_path):
        logger.error("Argument '-pb' not valid! Please enter the correct path to the MobileNet protobuf model.")
        exit(1)  # fixed: exit with a non-zero status on invalid arguments (was exit(0))
    try:
        logger.info("Processing pb_file '%s' ...", pb_path)
        mobilenet_model = MobileNetV1Restored(img_size=224, model_factor=1.0)
        _ = mobilenet_model.restore_session_from_frozen_graph(filename=pb_path)
        predictions = mobilenet_model.predict_on_images(img_path)
        label_lookup = LabelLookup()
        for fn, prediction in predictions.items():
            # fixed: tuple parameter unpacking in lambdas (lambda (i, p): -p) was removed in
            # Python 3 (PEP 3113); sorting by probability descending yields the same order
            top_predictions = sorted(enumerate(prediction), key=lambda pair: pair[1], reverse=True)[:10]
            top_predictions = [(label_lookup.id_to_string(i), p) for (i, p) in top_predictions]
            logger.info("Top %i predictions for the image given by '%s':", 10, fn)
            for c, (l, p) in enumerate(top_predictions, start=1):
                logger.info("%i. %s (prob=%.5f)", c, l, p)
            logger.info("\n")
        logger.info("DONE")
    except Exception as e:
        logger.error("Failed to process pb_file '%s': %s", pb_path, str(e))
|
leferrad/tensorflow-mobilenet | scripts/convert_tflite_model.py | from __future__ import print_function
"""Script to convert a model in Protobuf format into a one in TFLite format"""
from mobilenet.core import MobileNetV1Restored
from mobilenet.fileio import get_logger
import os
import argparse
# BUG FIX: the logger name was copy-pasted from make_inferences.py.
logger = get_logger(name="convert_tflite_model", level='debug')
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # BUG FIX: required=True was commented out, so omitting either argument made
    # pb_path/output_filename None and crashed later (os.path.exists(None) raises
    # TypeError). Both arguments are used unconditionally, so they are required.
    parser.add_argument('-pb', '--pb-path', dest='pb_path', required=True,
                        help="Path to inference graph in Protobuf format (i.e. file ending with .pb)")
    parser.add_argument('-out', '--out-fn', dest='output_filename', required=True,
                        help="Path to output filename for the resulting model converted")
    args = parser.parse_args()
    pb_path = args.pb_path
    output_filename = args.output_filename
    if not os.path.exists(pb_path):
        logger.error("Argument '-pb' not valid! Please enter the correct path to the MobileNet protobuf model.")
        # BUG FIX: exit non-zero on bad input (was exit(0)).
        exit(1)
    try:
        logger.info("Processing pb_file '%s' ...", pb_path)
        mobilenet_model = MobileNetV1Restored(img_size=224, model_factor=1.0)
        _ = mobilenet_model.restore_session_from_frozen_graph(filename=pb_path)
        success = mobilenet_model.convert_tflite_format(output_filename=output_filename)
        if success:
            logger.info("Conversion achieved successfully! Model was saved in '%s'", output_filename)
        else:
            logger.error("Conversion to TF Lite format couldn't been achieved. "
                         "Please check the logs for more information")
    except Exception as e:
        # BUG FIX: typo 'ocurred' -> 'occurred' in the error message.
        logger.error("An error has occurred! %s", str(e))
|
leferrad/tensorflow-mobilenet | mobilenet/toco.py | <filename>mobilenet/toco.py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TensorFlow Lite tooling helper functionality.
EXPERIMENTAL: APIs here are unstable and likely to change without notice.
@@toco_convert
@@toco_convert_protos
NOTE: this was adapted from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/lite/python/lite.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.lite.toco import model_flags_pb2 as _model_flags_pb2
from tensorflow.contrib.lite.toco import toco_flags_pb2 as _toco_flags_pb2
from tensorflow.contrib.lite.toco import types_pb2 as _types_pb2
from tensorflow.python.framework import dtypes as _dtypes
# Enum types from the protobuf promoted to the API
# -- tensor element types TOCO accepts/produces:
FLOAT = _types_pb2.FLOAT
INT32 = _types_pb2.INT32
INT64 = _types_pb2.INT64
STRING = _types_pb2.STRING
QUANTIZED_UINT8 = _types_pb2.QUANTIZED_UINT8
# -- graph container formats TOCO can read/write:
TENSORFLOW_GRAPHDEF = _toco_flags_pb2.TENSORFLOW_GRAPHDEF
TFLITE = _toco_flags_pb2.TFLITE
GRAPHVIZ_DOT = _toco_flags_pb2.GRAPHVIZ_DOT
def toco_convert_protos(model_flags_str, toco_flags_str, input_data_str, output_filename):
    """Convert `input_data_str` according to model and toco parameters.

    Unless you know what you are doing consider using
    the more friendly @{tf.contrib.lite.toco_convert}}.

    Args:
      model_flags_str: Serialized proto describing model properties, see
        `toco/model_flags.proto`.
      toco_flags_str: Serialized proto describing conversion properties, see
        `toco/toco_flags.proto`.
      input_data_str: Input data in serialized form (e.g. a graphdef is common)
      output_filename: Path where the converted model bytes are written.

    Returns:
      bool: True when conversion succeeded and the result was written to
      `output_filename`, False otherwise. NOTE: unlike the upstream lite.py
      this adaptation catches every error and prints it instead of raising
      RuntimeError.
    """
    # I need to do this import here, otherwise somehow it is not done outer this function
    from tensorflow.contrib.lite.toco.python import tensorflow_wrap_toco
    success = True
    try:
        output_str = tensorflow_wrap_toco.TocoConvert(model_flags_str, toco_flags_str, input_data_str)
        with open(output_filename, "wb") as f:
            f.write(output_str)
    except Exception as e:
        print("An error has occurred while converting model with TOCO!: %s" % str(e))
        success = False
    return success
def _tensor_name(x):
return x.name.split(":")[0]
def toco_convert(input_data,
                 input_tensors,
                 output_tensors,
                 output_filename,
                 inference_type=FLOAT,
                 input_format=TENSORFLOW_GRAPHDEF,
                 output_format=TFLITE,
                 quantized_input_stats=None,
                 drop_control_dependency=True):
    """Convert a model using TOCO from `input_format` to `output_format`.

    Typically this converts a TensorFlow GraphDef to TFLite, which is what the
    default `input_format` and `output_format` express.

    Args:
      input_data: Input data (i.e. often `sess.graph_def`).
      input_tensors: List of input tensors. Type and shape are computed using
        `foo.get_shape()` and `foo.dtype`.
      output_tensors: List of output tensors (only .name is used from this).
      output_filename: Path where the converted model is written.
      inference_type: Currently must be `{FLOAT, QUANTIZED_UINT8}`.
      input_format: Type of data to read (currently must be TENSORFLOW_GRAPHDEF).
      output_format: Type of data to write (currently must be TFLITE or
        GRAPHVIZ_DOT)
      quantized_input_stats: For each member of input_tensors the mean and
        std deviation of training data. Only needed if `inference_type` is
        `QUANTIZED_UINT8`.
      drop_control_dependency: Drops control dependencies silently. This is due
        to tf lite not supporting control dependencies.

    Returns:
      bool: whether the conversion (delegated to `toco_convert_protos`) succeeded.

    Raises:
      ValueError: If an input tensor's type is unknown.
    """
    # Map supported TF dtypes to their TOCO counterparts.
    dtype_to_tflite = {_dtypes.float32: FLOAT,
                       _dtypes.int32: INT32,
                       _dtypes.int64: INT64}
    toco = _toco_flags_pb2.TocoFlags()
    toco.input_format = input_format
    toco.output_format = output_format
    toco.drop_control_dependency = drop_control_dependency
    toco.inference_type = inference_type
    model = _model_flags_pb2.ModelFlags()
    for idx, tensor in enumerate(input_tensors):
        if tensor.dtype not in dtype_to_tflite:
            raise ValueError("Tensors %s not known type %r" % (tensor.name,
                                                               tensor.dtype))
        tflite_type = dtype_to_tflite[tensor.dtype]
        input_array = model.input_arrays.add()
        if inference_type == QUANTIZED_UINT8:
            # Float inputs are fed as quantized uint8 and need mean/std stats.
            if tflite_type == FLOAT:
                tflite_type = QUANTIZED_UINT8
            input_array.mean, input_array.std = quantized_input_stats[idx]
        input_array.name = _tensor_name(tensor)
        input_array.shape.dims.extend(int(dim) for dim in tensor.get_shape())
        toco.inference_input_type = tflite_type
    for tensor in output_tensors:
        model.output_arrays.append(_tensor_name(tensor))
    return toco_convert_protos(model.SerializeToString(),
                               toco.SerializeToString(),
                               input_data.SerializeToString(),
                               output_filename)
|
leferrad/tensorflow-mobilenet | mobilenet/imagenet.py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adapted from https://github.com/tensorflow/models/blob/master/research/slim/datasets/imagenet.py"""
from mobilenet.fileio import load_json_as_dict, save_dict_as_json, get_logger
from mobilenet.paths import paths
from six.moves import urllib
import os
import re
# 1000 ILSVRC classes plus the reserved id 0 (background).
NUM_CLASSES = 1001
# Synset metadata files published with the TF inception example.
DATA_BASE_URL = 'https://raw.githubusercontent.com/tensorflow/models/master/research/inception/inception/data/'
SYNSET_URL = '{}/imagenet_lsvrc_2015_synsets.txt'.format(DATA_BASE_URL)
SYNSET_TO_HUMAN_URL = '{}/imagenet_metadata.txt'.format(DATA_BASE_URL)
logger = get_logger(name=__name__, level='debug')
def create_readable_names_for_imagenet_labels():
    """Create a dict mapping label id to human readable string.

    Returns:
        labels_to_names: dictionary where keys are integers from 0 to 1000
        and values are human-readable names.

    We retrieve a synset file, which contains a list of valid synset labels used
    by ILSVRC competition. There is one synset per line, eg.
        # n01440764
        # n01443537
    We also retrieve a synset_to_human_file, which contains a mapping from synsets
    to human-readable names for every synset in Imagenet. These are stored in a
    tsv format, as follows:
        # n02119247 black fox
        # n02119359 silver fox
    We assign each synset (in alphabetical order) an integer, starting from 1
    (since 0 is reserved for the background class).

    Code is based on
    https://github.com/tensorflow/models/blob/master/research/inception/inception/data/build_imagenet_data.py#L463
    """
    # pylint: disable=g-line-too-long
    filename, _ = urllib.request.urlretrieve(SYNSET_URL)
    # BUG FIX: the downloaded files were opened without ever being closed
    # (open(...).readlines()); use `with` so the handles are released.
    with open(filename) as f:
        synset_list = [s.strip() for s in f]
    assert len(synset_list) == 1000
    filename, _ = urllib.request.urlretrieve(SYNSET_TO_HUMAN_URL)
    with open(filename) as f:
        synset_to_human_list = f.readlines()
    assert len(synset_to_human_list) == 21842
    synset_to_human = {}
    for s in synset_to_human_list:
        parts = s.strip().split('\t')
        assert len(parts) == 2
        synset, human = parts
        synset_to_human[synset] = human
    # There are 1000 classes plus the 0 class (background)
    labels_to_names = {0: 'background'}
    for label_index, synset in enumerate(synset_list, start=1):
        labels_to_names[label_index] = synset_to_human[synset]
    return labels_to_names
def load_imagenet_labels():
    """Return the ImageNet class names as a list indexed by label id.

    Prefers the cached imagenet/labels.json file; otherwise falls back to
    downloading the synset metadata via create_readable_names_for_imagenet_labels().

    :return: list of str, where index 0 is 'background'
    """
    path_to_labels = os.path.join(paths['imagenet'], 'labels.json')
    labels = None
    if os.path.exists(path_to_labels):
        labels = load_json_as_dict(path_to_labels)
        # BUG FIX: load_json_as_dict returns None on unreadable/corrupt JSON and
        # the old code then crashed on labels.keys(); only convert when loaded.
        if labels is not None:
            # JSON object keys are strings; order them numerically into a list.
            labels = [labels[k] for k in sorted(labels.keys(), key=lambda k: int(k))]
    if labels is None:
        # BUG FIX: this branch used to return a dict {id: name} while the cached
        # branch returned a list -- normalize both to a list for a consistent API.
        names_by_id = create_readable_names_for_imagenet_labels()
        labels = [names_by_id[i] for i in sorted(names_by_id)]
        #save_dict_as_json(labels, filename=path_to_labels, pretty_print=True)
    return labels
class LabelManager(object):
    """Resolves integer node ids to human-readable ImageNet labels.

    NOTE(review): the original code never populated `node_id_to_uid` (the
    node-id -> synset mapping, presumably parsed from the label map proto),
    so `load()` always crashed with AttributeError. It is initialized empty
    here so the class degrades to the background-only mapping; restoring the
    proto parsing is still a TODO.
    """

    def __init__(self):
        # Only the metadata files this class actually reads.
        paths_to_load = ['labels', 'words', 'hierarchy', 'synset_to_human_label_map']
        # BUG FIX: the original used Python-2-only tuple-parameter lambda syntax
        # (`lambda (k, p): ...`, a SyntaxError on py3) AND tested the path value
        # against the key list (always False, yielding an empty dict).
        # Filter by key instead.
        self.paths = {k: p for k, p in paths.items() if k in paths_to_load}
        self.label_lookup = None
        # BUG FIX: was never initialized anywhere, crashing load(); see docstring.
        self.node_id_to_uid = {}
        self.load()

    @staticmethod
    def _parse_2col_txt_into_dict(filename):
        """Parse a two-column (wnid<TAB>text) file into a {wnid: text} dict."""
        result = {}
        p = re.compile(r'[n\d]*[ \S,]*')
        # BUG FIX: close the file deterministically (was open(...).readlines()).
        with open(filename, 'r') as lines_txt:
            for line in lines_txt:
                parsed_items = p.findall(line)
                k = parsed_items[0]
                v = parsed_items[2]  # index 1 is the empty match between columns
                result[k] = v
        return result

    def load(self):
        """Build node_id_to_name from the synset-to-human mapping."""
        # Asserting existence of all used paths
        for p in self.paths.values():
            if not os.path.exists(p):
                logger.error('File does not exist %s', p)
                return
        # Loads mapping from string UID to human-readable string
        self.uid_to_human = self._parse_2col_txt_into_dict(self.paths['synset_to_human_label_map'])
        # Loads the final mapping of integer node ID to human-readable string
        self.node_id_to_name = {}
        for key, val in self.node_id_to_uid.items():
            if val not in self.uid_to_human:
                logger.error('Failed to locate: %s', val)
            name = self.uid_to_human[val]
            self.node_id_to_name[key] = name
        # Finally, add the class 0
        self.node_id_to_name[0] = 'background'

    def id_to_string(self, node_id):
        """Return the label for `node_id`, or '' when unknown."""
        return self.node_id_to_name.get(node_id, '')

    def __getitem__(self, item):
        return self.id_to_string(item)
class LabelLookup(object):
    """Maps integer label ids to human-readable names read from a labels file.

    The file holds one label per line; the line number is the label id.
    """

    def __init__(self, path_to_labels=None):
        # Fall back to the project's default labels file when none is given.
        if path_to_labels is None:
            path_to_labels = paths['labels']
        self.path = path_to_labels
        if not os.path.exists(self.path):
            logger.error("Path to labels '%s' does not exist!", self.path)
            return
        self.label_lookup = {}
        with open(self.path, 'r') as handle:
            self.label_lookup = {idx: raw.strip() for idx, raw in enumerate(handle)}

    def id_to_string(self, i):
        """Return the label for id `i`, or None when unknown."""
        return self.label_lookup.get(i)

    def __getitem__(self, item):
        return self.id_to_string(item)
class WNIDLookup(object):
    """Maps WordNet ids (wnids) to labels and back from a two-column words file.

    With cache=True the whole file is loaded into a dict up front; otherwise
    every query streams through the file.
    """

    def __init__(self, path_to_words=None, cache=False):
        if path_to_words is None:
            path_to_words = paths['words']
        self.path = path_to_words
        if not os.path.exists(self.path):
            logger.error("Path to words mapping '%s' does not exist!", self.path)
            return
        self.flag_cache = cache
        self.wnid_lookup = {}
        self.regex = re.compile(r'[n\d]*[ \S,]*')
        if self.flag_cache:
            self.load_words()

    def generator_lines(self):
        """Yield (wnid, label) pairs parsed from the words file."""
        with open(self.path, 'r') as handle:
            for raw in handle:
                pieces = self.regex.findall(raw)
                if pieces:
                    # pieces[1] is the empty match on the column separator.
                    yield pieces[0], pieces[2]

    def load_words(self):
        """Populate the in-memory wnid -> label cache."""
        self.wnid_lookup.update(self.generator_lines())

    def get_label_from_wnid(self, wnid):
        """Return the label for `wnid`, or None when unknown."""
        if self.flag_cache:
            return self.wnid_lookup.get(wnid)
        for key, label in self.generator_lines():
            if key == wnid:
                return label

    def get_wnid_from_label(self, label):
        """Return the wnid whose label equals `label`, or None when unknown."""
        if self.flag_cache:
            for key, lab in self.wnid_lookup.items():
                if lab == label:
                    return key
            return None
        for key, lab in self.generator_lines():
            if lab == label:
                return key
class HierarchyLookup(object):
    """Walks the WordNet 'is-a' hierarchy (child -> parents) for wnids.

    The hierarchy file holds one 'parent child' pair per line.
    """

    def __init__(self, path_to_hierarchy=None, cache=True):
        if path_to_hierarchy is None:
            path_to_hierarchy = paths['hierarchy']
        self.path = path_to_hierarchy
        if not os.path.exists(self.path):
            logger.error("Path to hierarchy file '%s' does not exist!", self.path)
            return
        self.flag_cache = cache
        self.parent_lookup = {}
        # Kept for interface compatibility; not used by this class's parsing.
        self.regex = re.compile(r'[n\d]*[ \S,]*')
        if self.flag_cache:
            self.load_hierarchy_lookup()

    def generator_lines(self):
        """Yield [parent_wnid, child_wnid] pairs from the hierarchy file."""
        with open(self.path, 'r') as lines:
            for line in lines:
                yield line.strip().split(' ')

    def load_hierarchy_lookup(self):
        """Build the child -> [parents] lookup in memory."""
        for w1, w2 in self.generator_lines():
            if w2 not in self.parent_lookup:
                self.parent_lookup[w2] = [w1]
            else:
                self.parent_lookup[w2].append(w1)

    def get_full_hierarchy(self, item, flat=True, depth=-1):
        """Return the ancestor chain of `item`, root first.

        :param item: str, wnid to start from
        :param flat: bool, flatten the per-level tuples into one list
        :param depth: int, max levels to climb (-1 means unlimited)
        :return: list of wnids (flat) or list of per-level tuples
        """
        result = [tuple([item])]
        level = tuple([item])
        d = 0
        if depth == -1:
            depth = float("inf")
        while d < depth:
            # BUG FIX: `reduce` was used here without importing it, which is a
            # NameError on Python 3; flatten with a comprehension instead.
            parents = [p for it in level for p in self.parent_lookup.get(it, [])]
            if len(parents) == 0:
                break
            result = [tuple(parents)] + result
            level = parents
            d += 1
        if flat:
            result = [x for tup in result for x in tup]
        return result
if __name__ == '__main__':
    # Smoke test: print the full label table, then walk the WordNet hierarchy
    # of a sample wnid and resolve every ancestor to its human-readable label.
    # Requires the imagenet data files referenced in mobilenet.paths to exist.
    node_lookup = LabelLookup()
    print("Labels: %s" % str(node_lookup.label_lookup.items()))
    wnid = 'n02124075'  # sample wnid -- presumably a cat synset; confirm in words.txt
    hier_lookup = HierarchyLookup()
    # Climb at most 5 levels; flat=True gives a root-first list of wnids.
    hier = hier_lookup.get_full_hierarchy(wnid, flat=True, depth=5)
    wnid_lookup = WNIDLookup(cache=True)
    # Print from the leaf upward.
    for w in reversed(hier):
        print("wnid=%s, label=%s" % (w, wnid_lookup.get_label_from_wnid(w)))
leferrad/tensorflow-mobilenet | mobilenet/paths.py | <gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
# Absolute location of this module file; every project path is derived from it.
CWD = os.path.abspath(__file__)


def _resolve(*parts):
    # Helper: absolute, normalized join of path components.
    return os.path.abspath(os.path.join(*parts))


def get_mobilenet_path():
    """Directory containing this module (the mobilenet package)."""
    return _resolve(CWD, os.path.pardir)


def get_root_path():
    """Repository root (parent of the mobilenet package)."""
    return _resolve(get_mobilenet_path(), os.path.pardir)


def get_scripts_path():
    """<root>/scripts directory."""
    return _resolve(get_root_path(), 'scripts')


def get_imagenet_path():
    """<root>/imagenet directory holding label/metadata files."""
    return _resolve(get_root_path(), 'imagenet')


def get_gloss_path():
    """Path to imagenet/gloss.txt."""
    return _resolve(get_imagenet_path(), 'gloss.txt')


def get_label_map_proto_path():
    """Path to the ILSVRC-2012 label map proto text file."""
    return _resolve(get_imagenet_path(), 'imagenet_2012_challenge_label_map_proto.pbtxt')


def get_synset_to_human_label_map_path():
    """Path to the synset -> human-readable label mapping file."""
    return _resolve(get_imagenet_path(), 'imagenet_synset_to_human_label_map.txt')


def get_words_mapping_path():
    """Path to the WordNet words.txt mapping."""
    return _resolve(get_imagenet_path(), 'words.txt')


def get_words_hierarchy_path():
    """Path to the WordNet is-a hierarchy file."""
    return _resolve(get_imagenet_path(), 'wordnet.is_a.txt')


def get_labels_path():
    """Path to imagenet/labels.txt."""
    return _resolve(get_imagenet_path(), 'labels.txt')


# Central registry of every filesystem path the project reads.
paths = {'root': get_root_path(),
         'mobilenet': get_mobilenet_path(),
         'imagenet': get_imagenet_path(),
         'scripts': get_scripts_path(),
         'gloss': get_gloss_path(),
         'label_map_proto': get_label_map_proto_path(),
         'synset_to_human_label_map': get_synset_to_human_label_map_path(),
         'words': get_words_mapping_path(),
         'hierarchy': get_words_hierarchy_path(),
         'labels': get_labels_path()
         }
if __name__ == '__main__':
    # BUG FIX: dict.iteritems() is Python 2 only (AttributeError on Python 3);
    # items() works on both.
    for k, p in paths.items():
        print(k+": "+p)
leferrad/tensorflow-mobilenet | mobilenet/fileio.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import logging
import logging.handlers
import tarfile
import os
import json
from sys import stdout
from six.moves import urllib
def get_logger(name='mobilenet', level='debug'):
    """
    Function to obtain a normal logger
    :param name: string
    :param level: string, which can be 'info' or 'debug'
    :return: logging.Logger
    """
    levels = {'info': logging.INFO,
              'debug': logging.DEBUG}
    # If the level is not supported, then force it to be info
    if level not in levels:
        level = 'info'
    level = levels[level]
    logger = logging.getLogger(name)
    logger.setLevel(level)
    # BUG FIX: logging.getLogger returns the same object for the same name, so
    # each call used to stack one more StreamHandler and duplicate every log
    # line; only attach a handler the first time.
    if not logger.handlers:
        # create formatter
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        # create console handler and set level to debug
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        # add formatter to ch
        ch.setFormatter(formatter)
        # add ch to logger
        logger.addHandler(ch)
    return logger
logger = get_logger(name=__name__, level='debug')
def compress_tar_files(files, filename):
    """Compress `files` into a gzipped tarball, storing entries by basename.

    :param files: str or list of str, path(s) to include
    :param filename: str, destination .tar.gz path
    :return: bool, True on success
    """
    if isinstance(files, list) is False:
        files = [files]
    try:
        with tarfile.open(filename, "w:gz") as tar:
            for f in files:
                tar.add(f, arcname=os.path.basename(f))
        successful = True
    # BUG FIX: was a bare `except:` that swallowed every exception (including
    # KeyboardInterrupt/SystemExit); catch only I/O and tar failures.
    except (OSError, tarfile.TarError):
        successful = False
    return successful
def uncompress_tar_files(filename):
    """Extract a tarball into the directory that contains it.

    :param filename: str, path to the tar archive (any supported compression)
    :return: bool, True on success
    """
    try:
        with tarfile.open(filename, "r:*") as tar:
            # NOTE(review): extractall trusts member paths; only use on trusted
            # tarballs (here: TF model releases) to avoid path traversal.
            tar.extractall(path=os.path.dirname(filename))
        successful = True
    # BUG FIX: narrow the former bare `except:` to I/O and tar errors.
    except (OSError, tarfile.TarError):
        successful = False
    return successful
class MobileNetDefaultFile(object):
    """Constants describing the default pre-trained MobileNet v1 release."""
    # Base URL of the official TensorFlow model-zoo tarballs.
    MODEL_REPO_BASE_URL = 'http://download.tensorflow.org/models/'
    # Release date tag that appears in the published tarball names.
    MODEL_DATE = '2017_06_14'
    # Filename templates -- presumably filled with (depth multiplier, image
    # size) and, for the tarball, the release date; confirm against callers.
    MODEL_BASE_FMT = 'mobilenet_v1_{}_{}'
    MODEL_DL_FMT = MODEL_BASE_FMT + '_{}.tar.gz'
    MODEL_PB_FMT = MODEL_BASE_FMT + '.pb'
    # Default depth multiplier (as it appears in filenames) and input size.
    MODEL_FACTOR = '0.50'
    IMG_SIZE = 224
def download_and_uncompress_tarball(data_dir, filename):
    """Download `filename` from the TF model zoo into `data_dir` and extract it.

    Skips the download when the tarball is already cached, but always extracts.

    :param data_dir: str, destination directory for the tarball
    :param filename: str, tarball name as published in the model repo
    :return: bool, True when download (or cache hit) and extraction both succeed
    """
    def _progress(count, block_size, total_size):
        # urlretrieve reporthook: draw an in-place percentage progress line.
        stdout.write('\r>> Downloading %s %.1f%%' % (filename, float(count * block_size) / float(total_size) * 100.0))
        stdout.flush()
    # NOTE(review): os.path.join on a URL is only safe here because the base URL
    # ends with '/'; urllib.parse.urljoin would be the robust choice.
    tarball_url = os.path.join(MobileNetDefaultFile.MODEL_REPO_BASE_URL, filename)
    filepath = os.path.join(data_dir, filename)
    success = False
    if not os.path.exists(filepath):
        try:
            logger.info("Downloading file from %s ...", tarball_url)
            filepath, _ = urllib.request.urlretrieve(tarball_url, filepath, _progress)
            print()  # newline after the '\r' progress line
            statinfo = os.stat(filepath)
            logger.info("Successfully downloaded '%s': %s bytes.", filename, str(statinfo.st_size))
            success = True
        except Exception as e:
            logger.info("Error occurred while downloading model: %s", str(e))
    else:
        # Cached tarball: skip the download but still extract below.
        logger.info("Tarball '%s' already exists -- not downloading", filename)
        success = True
    if success:
        logger.info("Extracting tarball '%s'...", filename)
        success = uncompress_tar_files(filepath)
    return success
def is_valid_slim_directory(slim_directory):
    """
    An easy way to figure out if the TensorFlow Slim's directory is correct or not,
    by comparing their sub-folders with the expected ones.
    :param slim_directory: str
    :return: boolean
    """
    required = {'nets', 'preprocessing', 'datasets'}  # The most important ones
    return required.issubset(os.listdir(slim_directory))
def save_dict_as_json(dictobj, filename, pretty_print=True):
    """Serialize `dictobj` to `filename` as JSON.

    :param dictobj: dict to store
    :param filename: str, destination path
    :param pretty_print: bool, sort keys and indent when True
    :return: bool, True on success
    """
    try:
        with open(filename, 'w') as f:
            if pretty_print is True:
                json.dump(dictobj, f, sort_keys=True, indent=4)
            else:
                json.dump(dictobj, f)
        successful = True
    # BUG FIX: narrow the former bare `except:`; OSError covers file errors,
    # TypeError/ValueError cover non-serializable content.
    except (OSError, TypeError, ValueError):
        successful = False
    return successful
def load_json_as_dict(filename):
    """Load a JSON file, returning None when it is missing or invalid.

    :param filename: str, path to the JSON file
    :return: dict (or whatever the JSON root is), or None on failure
    """
    try:
        with open(filename, 'r') as f:
            dictobj = json.load(f)
    # BUG FIX: narrow the former bare `except:`; ValueError covers
    # json.JSONDecodeError on malformed content, OSError covers file errors.
    except (OSError, ValueError):
        dictobj = None
    return dictobj
leferrad/tensorflow-mobilenet | mobilenet/core.py | <gh_stars>1-10
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""MobileNet v1.
MobileNet is a general architecture and can be used for multiple use cases.
Depending on the use case, it can use different input layer size and different
head (for example: embeddings, localization and classification).
As described in https://arxiv.org/abs/1704.04861.
MobileNets: Efficient Convolutional Neural Networks for
Mobile Vision Applications
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
<NAME>, <NAME>, <NAME>
100% Mobilenet V1 (base) with input size 224x224:
See mobilenet_v1()
Layer params macs
--------------------------------------------------------------------------------
MobilenetV1/Conv2d_0/Conv2D: 864 10,838,016
MobilenetV1/Conv2d_1_depthwise/depthwise: 288 3,612,672
MobilenetV1/Conv2d_1_pointwise/Conv2D: 2,048 25,690,112
MobilenetV1/Conv2d_2_depthwise/depthwise: 576 1,806,336
MobilenetV1/Conv2d_2_pointwise/Conv2D: 8,192 25,690,112
MobilenetV1/Conv2d_3_depthwise/depthwise: 1,152 3,612,672
MobilenetV1/Conv2d_3_pointwise/Conv2D: 16,384 51,380,224
MobilenetV1/Conv2d_4_depthwise/depthwise: 1,152 903,168
MobilenetV1/Conv2d_4_pointwise/Conv2D: 32,768 25,690,112
MobilenetV1/Conv2d_5_depthwise/depthwise: 2,304 1,806,336
MobilenetV1/Conv2d_5_pointwise/Conv2D: 65,536 51,380,224
MobilenetV1/Conv2d_6_depthwise/depthwise: 2,304 451,584
MobilenetV1/Conv2d_6_pointwise/Conv2D: 131,072 25,690,112
MobilenetV1/Conv2d_7_depthwise/depthwise: 4,608 903,168
MobilenetV1/Conv2d_7_pointwise/Conv2D: 262,144 51,380,224
MobilenetV1/Conv2d_8_depthwise/depthwise: 4,608 903,168
MobilenetV1/Conv2d_8_pointwise/Conv2D: 262,144 51,380,224
MobilenetV1/Conv2d_9_depthwise/depthwise: 4,608 903,168
MobilenetV1/Conv2d_9_pointwise/Conv2D: 262,144 51,380,224
MobilenetV1/Conv2d_10_depthwise/depthwise: 4,608 903,168
MobilenetV1/Conv2d_10_pointwise/Conv2D: 262,144 51,380,224
MobilenetV1/Conv2d_11_depthwise/depthwise: 4,608 903,168
MobilenetV1/Conv2d_11_pointwise/Conv2D: 262,144 51,380,224
MobilenetV1/Conv2d_12_depthwise/depthwise: 4,608 225,792
MobilenetV1/Conv2d_12_pointwise/Conv2D: 524,288 25,690,112
MobilenetV1/Conv2d_13_depthwise/depthwise: 9,216 451,584
MobilenetV1/Conv2d_13_pointwise/Conv2D: 1,048,576 51,380,224
--------------------------------------------------------------------------------
Total: 3,185,088 567,716,352
75% Mobilenet V1 (base) with input size 128x128:
See mobilenet_v1_075()
Layer params macs
--------------------------------------------------------------------------------
MobilenetV1/Conv2d_0/Conv2D: 648 2,654,208
MobilenetV1/Conv2d_1_depthwise/depthwise: 216 884,736
MobilenetV1/Conv2d_1_pointwise/Conv2D: 1,152 4,718,592
MobilenetV1/Conv2d_2_depthwise/depthwise: 432 442,368
MobilenetV1/Conv2d_2_pointwise/Conv2D: 4,608 4,718,592
MobilenetV1/Conv2d_3_depthwise/depthwise: 864 884,736
MobilenetV1/Conv2d_3_pointwise/Conv2D: 9,216 9,437,184
MobilenetV1/Conv2d_4_depthwise/depthwise: 864 221,184
MobilenetV1/Conv2d_4_pointwise/Conv2D: 18,432 4,718,592
MobilenetV1/Conv2d_5_depthwise/depthwise: 1,728 442,368
MobilenetV1/Conv2d_5_pointwise/Conv2D: 36,864 9,437,184
MobilenetV1/Conv2d_6_depthwise/depthwise: 1,728 110,592
MobilenetV1/Conv2d_6_pointwise/Conv2D: 73,728 4,718,592
MobilenetV1/Conv2d_7_depthwise/depthwise: 3,456 221,184
MobilenetV1/Conv2d_7_pointwise/Conv2D: 147,456 9,437,184
MobilenetV1/Conv2d_8_depthwise/depthwise: 3,456 221,184
MobilenetV1/Conv2d_8_pointwise/Conv2D: 147,456 9,437,184
MobilenetV1/Conv2d_9_depthwise/depthwise: 3,456 221,184
MobilenetV1/Conv2d_9_pointwise/Conv2D: 147,456 9,437,184
MobilenetV1/Conv2d_10_depthwise/depthwise: 3,456 221,184
MobilenetV1/Conv2d_10_pointwise/Conv2D: 147,456 9,437,184
MobilenetV1/Conv2d_11_depthwise/depthwise: 3,456 221,184
MobilenetV1/Conv2d_11_pointwise/Conv2D: 147,456 9,437,184
MobilenetV1/Conv2d_12_depthwise/depthwise: 3,456 55,296
MobilenetV1/Conv2d_12_pointwise/Conv2D: 294,912 4,718,592
MobilenetV1/Conv2d_13_depthwise/depthwise: 6,912 110,592
MobilenetV1/Conv2d_13_pointwise/Conv2D: 589,824 9,437,184
--------------------------------------------------------------------------------
Total: 1,800,144 106,002,432
"""
# Tensorflow mandates these.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# From this repo
from mobilenet.fileio import get_logger
from mobilenet.imagenet import load_imagenet_labels
from mobilenet.toco import toco_convert, FLOAT, QUANTIZED_UINT8
from scipy.misc import imread, imresize
import numpy as np
from collections import namedtuple
import functools
import time
import os
from tensorflow.python.framework import graph_util
import tensorflow as tf
# Alias for the TF-Slim high-level API used throughout this module.
slim = tf.contrib.slim
# Conv and DepthSepConv namedtuple define layers of the MobileNet architecture
# Conv defines 3x3 convolution layers
# DepthSepConv defines 3x3 depthwise convolution followed by 1x1 convolution.
# stride is the stride of the convolution
# depth is the number of channels or filters in a layer
Conv = namedtuple('Conv', ['kernel', 'stride', 'depth'])
DepthSepConv = namedtuple('DepthSepConv', ['kernel', 'stride', 'depth'])
# _CONV_DEFS specifies the MobileNet body: one entry per layer, matching the
# per-layer table in the module docstring.
_CONV_DEFS = [
    Conv(kernel=[3, 3], stride=2, depth=32),
    DepthSepConv(kernel=[3, 3], stride=1, depth=64),
    DepthSepConv(kernel=[3, 3], stride=2, depth=128),
    DepthSepConv(kernel=[3, 3], stride=1, depth=128),
    DepthSepConv(kernel=[3, 3], stride=2, depth=256),
    DepthSepConv(kernel=[3, 3], stride=1, depth=256),
    DepthSepConv(kernel=[3, 3], stride=2, depth=512),
    DepthSepConv(kernel=[3, 3], stride=1, depth=512),
    DepthSepConv(kernel=[3, 3], stride=1, depth=512),
    DepthSepConv(kernel=[3, 3], stride=1, depth=512),
    DepthSepConv(kernel=[3, 3], stride=1, depth=512),
    DepthSepConv(kernel=[3, 3], stride=1, depth=512),
    DepthSepConv(kernel=[3, 3], stride=2, depth=1024),
    DepthSepConv(kernel=[3, 3], stride=1, depth=1024)
]
def mobilenet_v1_base(inputs,
                      final_endpoint='Conv2d_13_pointwise',
                      min_depth=8,
                      depth_multiplier=1.0,
                      conv_defs=None,
                      output_stride=None,
                      scope=None):
    """Mobilenet v1.

    Constructs a Mobilenet v1 network from inputs to the given final endpoint.

    Args:
      inputs: a tensor of shape [batch_size, height, width, channels].
      final_endpoint: specifies the endpoint to construct the network up to. It
        can be one of ['Conv2d_0', 'Conv2d_1_pointwise', 'Conv2d_2_pointwise',
        'Conv2d_3_pointwise', 'Conv2d_4_pointwise', 'Conv2d_5_pointwise',
        'Conv2d_6_pointwise', 'Conv2d_7_pointwise', 'Conv2d_8_pointwise',
        'Conv2d_9_pointwise', 'Conv2d_10_pointwise', 'Conv2d_11_pointwise',
        'Conv2d_12_pointwise', 'Conv2d_13_pointwise'].
      min_depth: Minimum depth value (number of channels) for all convolution ops.
        Enforced when depth_multiplier < 1, and not an active constraint when
        depth_multiplier >= 1.
      depth_multiplier: Float multiplier for the depth (number of channels)
        for all convolution ops. The value must be greater than zero. Typical
        usage will be to set this value in (0, 1) to reduce the number of
        parameters or computation cost of the model.
      conv_defs: A list of ConvDef namedtuples specifying the net architecture.
      output_stride: An integer that specifies the requested ratio of input to
        output spatial resolution. If not None, then we invoke atrous convolution
        if necessary to prevent the network from reducing the spatial resolution
        of the activation maps. Allowed values are 8 (accurate fully convolutional
        mode), 16 (fast fully convolutional mode), 32 (classification mode).
      scope: Optional variable_scope.

    Returns:
      tensor_out: output tensor corresponding to the final_endpoint.
      end_points: a set of activations for external use, for example summaries or
        losses.

    Raises:
      ValueError: if final_endpoint is not set to one of the predefined values,
                  or depth_multiplier <= 0, or the target output_stride is not
                  allowed.
    """
    # Scale a layer's nominal depth by depth_multiplier, never below min_depth.
    depth = lambda d: max(int(d * depth_multiplier), min_depth)
    end_points = {}
    # Used to find thinned depths for each layer.
    if depth_multiplier <= 0:
        raise ValueError('depth_multiplier is not greater than zero.')
    if conv_defs is None:
        conv_defs = _CONV_DEFS
    if output_stride is not None and output_stride not in [8, 16, 32]:
        raise ValueError('Only allowed output_stride values are 8, 16, 32.')
    with tf.variable_scope(scope, 'MobilenetV1', [inputs]):
        with slim.arg_scope([slim.conv2d, slim.separable_conv2d], padding='SAME'):
            # The current_stride variable keeps track of the output stride of the
            # activations, i.e., the running product of convolution strides up to the
            # current network layer. This allows us to invoke atrous convolution
            # whenever applying the next convolution would result in the activations
            # having output stride larger than the target output_stride.
            current_stride = 1
            # The atrous convolution rate parameter.
            rate = 1
            net = inputs
            for i, conv_def in enumerate(conv_defs):
                end_point_base = 'Conv2d_%d' % i
                if output_stride is not None and current_stride == output_stride:
                    # If we have reached the target output_stride, then we need to employ
                    # atrous convolution with stride=1 and multiply the atrous rate by the
                    # current unit's stride for use in subsequent layers.
                    layer_stride = 1
                    layer_rate = rate
                    rate *= conv_def.stride
                else:
                    layer_stride = conv_def.stride
                    layer_rate = 1
                    current_stride *= conv_def.stride
                if isinstance(conv_def, Conv):
                    end_point = end_point_base
                    net = slim.conv2d(net, depth(conv_def.depth), conv_def.kernel,
                                      stride=conv_def.stride,
                                      normalizer_fn=slim.batch_norm,
                                      scope=end_point)
                    end_points[end_point] = net
                    # Return early as soon as the requested endpoint is built.
                    if end_point == final_endpoint:
                        return net, end_points
                elif isinstance(conv_def, DepthSepConv):
                    end_point = end_point_base + '_depthwise'
                    # By passing filters=None
                    # separable_conv2d produces only a depthwise convolution layer
                    net = slim.separable_conv2d(net, None, conv_def.kernel,
                                                depth_multiplier=1,
                                                stride=layer_stride,
                                                rate=layer_rate,
                                                normalizer_fn=slim.batch_norm,
                                                scope=end_point)
                    end_points[end_point] = net
                    if end_point == final_endpoint:
                        return net, end_points
                    # Pointwise 1x1 convolution following the depthwise step.
                    end_point = end_point_base + '_pointwise'
                    net = slim.conv2d(net, depth(conv_def.depth), [1, 1],
                                      stride=1,
                                      normalizer_fn=slim.batch_norm,
                                      scope=end_point)
                    end_points[end_point] = net
                    if end_point == final_endpoint:
                        return net, end_points
                else:
                    # NOTE(review): Conv/DepthSepConv have no `ltype` field, so this
                    # error path would itself raise AttributeError if ever reached.
                    raise ValueError('Unknown convolution type %s for layer %d'
                                     % (conv_def.ltype, i))
    raise ValueError('Unknown final endpoint %s' % final_endpoint)
def mobilenet_v1(inputs,
                 num_classes=1000,
                 dropout_keep_prob=0.999,
                 is_training=True,
                 min_depth=8,
                 depth_multiplier=1.0,
                 conv_defs=None,
                 prediction_fn=tf.contrib.layers.softmax,
                 spatial_squeeze=True,
                 reuse=None,
                 scope='MobilenetV1'):
  """Mobilenet v1 model for classification.

  Args:
    inputs: a tensor of shape [batch_size, height, width, channels].
    num_classes: number of predicted classes.
    dropout_keep_prob: the percentage of activation values that are retained.
    is_training: whether is training or not.
    min_depth: Minimum depth value (number of channels) for all convolution ops.
      Enforced when depth_multiplier < 1, and not an active constraint when
      depth_multiplier >= 1.
    depth_multiplier: Float multiplier for the depth (number of channels)
      for all convolution ops. The value must be greater than zero. Typical
      usage will be to set this value in (0, 1) to reduce the number of
      parameters or computation cost of the model.
    conv_defs: A list of ConvDef namedtuples specifying the net architecture.
    prediction_fn: a function to get predictions out of logits.
    spatial_squeeze: if True, logits is of shape is [B, C], if false logits is
      of shape [B, 1, 1, C], where B is batch_size and C is number of classes.
    reuse: whether or not the network and its variables should be reused. To be
      able to reuse 'scope' must be given.
    scope: Optional variable_scope.

  Returns:
    logits: the pre-softmax activations, a tensor of size
      [batch_size, num_classes]
    end_points: a dictionary from components of the network to the corresponding
      activation.

  Raises:
    ValueError: Input rank is invalid.
  """
  input_shape = inputs.get_shape().as_list()
  # Only NHWC rank-4 inputs are supported.
  if len(input_shape) != 4:
    raise ValueError('Invalid input tensor rank, expected 4, was: %d' %
                     len(input_shape))
  with tf.variable_scope(scope, 'MobilenetV1', [inputs, num_classes],
                         reuse=reuse) as scope:
    # batch_norm/dropout switch between train and inference behaviour.
    with slim.arg_scope([slim.batch_norm, slim.dropout],
                        is_training=is_training):
      # Backbone: stack of Conv / DepthSepConv layers defined by conv_defs.
      net, end_points = mobilenet_v1_base(inputs, scope=scope,
                                          min_depth=min_depth,
                                          depth_multiplier=depth_multiplier,
                                          conv_defs=conv_defs)
      with tf.variable_scope('Logits'):
        # Global average pooling; kernel is shrunk for inputs smaller than 7x7.
        kernel_size = _reduced_kernel_size_for_small_input(net, [7, 7])
        net = slim.avg_pool2d(net, kernel_size, padding='VALID',
                              scope='AvgPool_1a')
        end_points['AvgPool_1a'] = net
        # 1 x 1 x 1024
        net = slim.dropout(net, keep_prob=dropout_keep_prob, scope='Dropout_1b')
        # Classifier head: 1x1 conv without activation/normalization.
        logits = slim.conv2d(net, num_classes, [1, 1], activation_fn=None,
                             normalizer_fn=None, scope='Conv2d_1c_1x1')
        if spatial_squeeze:
          # Drop the 1x1 spatial dims so logits is [B, num_classes].
          logits = tf.squeeze(logits, [1, 2], name='SpatialSqueeze')
      end_points['Logits'] = logits
      if prediction_fn:
        end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
  return logits, end_points
# Default input resolution the released MobileNet V1 checkpoints use.
mobilenet_v1.default_image_size = 224
def wrapped_partial(func, *args, **kwargs):
  """functools.partial that also copies ``func``'s metadata onto the wrapper."""
  wrapper = functools.partial(func, *args, **kwargs)
  # update_wrapper returns the wrapper it just decorated.
  return functools.update_wrapper(wrapper, func)
# Width-multiplier presets matching the officially released checkpoints.
mobilenet_v1_075 = wrapped_partial(mobilenet_v1, depth_multiplier=0.75)
mobilenet_v1_050 = wrapped_partial(mobilenet_v1, depth_multiplier=0.50)
mobilenet_v1_025 = wrapped_partial(mobilenet_v1, depth_multiplier=0.25)
def _reduced_kernel_size_for_small_input(input_tensor, kernel_size):
"""Define kernel size which is automatically reduced for small input.
If the shape of the input images is unknown at graph construction time this
function assumes that the input images are large enough.
Args:
input_tensor: input tensor of size [batch_size, height, width, channels].
kernel_size: desired kernel size of length 2: [kernel_height, kernel_width]
Returns:
a tensor with the kernel size.
"""
shape = input_tensor.get_shape().as_list()
if shape[1] is None or shape[2] is None:
kernel_size_out = kernel_size
else:
kernel_size_out = [min(shape[1], kernel_size[0]),
min(shape[2], kernel_size[1])]
return kernel_size_out
def mobilenet_v1_arg_scope(is_training=True,
                           weight_decay=0.00004,
                           stddev=0.09,
                           regularize_depthwise=False):
  """Defines the default MobilenetV1 arg scope.

  Args:
    is_training: Whether or not we're training the model.
    weight_decay: The weight decay to use for regularizing the model.
    stddev: The standard deviation of the truncated normal weight initializer.
    regularize_depthwise: Whether or not apply regularization on depthwise.

  Returns:
    An `arg_scope` to use for the mobilenet v1 model.
  """
  batch_norm_params = {
      'is_training': is_training,
      'center': True,
      'scale': True,
      'decay': 0.9997,
      'epsilon': 0.001,
  }
  # Set weight_decay for weights in Conv and DepthSepConv layers.
  weights_init = tf.truncated_normal_initializer(stddev=stddev)
  regularizer = tf.contrib.layers.l2_regularizer(weight_decay)
  # Depthwise filters have few parameters; regularizing them is optional.
  if regularize_depthwise:
    depthwise_regularizer = regularizer
  else:
    depthwise_regularizer = None
  # Nested scopes: defaults for all convs, then batch-norm params, then
  # per-layer-type regularizers; the innermost scope captures them all.
  with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                      weights_initializer=weights_init,
                      activation_fn=tf.nn.relu6, normalizer_fn=slim.batch_norm):
    with slim.arg_scope([slim.batch_norm], **batch_norm_params):
      with slim.arg_scope([slim.conv2d], weights_regularizer=regularizer):
        with slim.arg_scope([slim.separable_conv2d],
                            weights_regularizer=depthwise_regularizer) as sc:
          return sc
# ---------------------------------------------------------------------------------------------------------------------
# Added in this repo
# Module-level logger supplied by the project's get_logger helper.
logger = get_logger(name=__name__, level='debug')
class MobileNetDefaultFile(object):
    """Constants describing the default downloadable MobileNet V1 release."""
    # Base URL of the official TensorFlow model zoo.
    MODEL_REPO_BASE_URL = 'http://download.tensorflow.org/models/'
    # Release-date tag embedded in the published tarball names.
    MODEL_DATE = '2017_06_14'
    # Filename stem: mobilenet_v1_<width factor>_<image size>
    MODEL_BASE_FMT = 'mobilenet_v1_{}_{}'
    # Downloadable tarball name: stem + '_<date>.tar.gz'
    MODEL_DL_FMT = MODEL_BASE_FMT + '_{}.tar.gz'
    # Frozen-graph file name: stem + '.pb'
    MODEL_PB_FMT = MODEL_BASE_FMT + '.pb'
    # Default width multiplier, kept as a string as it appears in filenames.
    MODEL_FACTOR = '0.50'
    # Default input resolution.
    IMG_SIZE = 224
class MobileNetV1Restored(object):
    """MobileNet V1 wrapper that restores a model from a checkpoint or a
    frozen graph and exposes prediction / embedding / export helpers.

    Holds an interactive TF session plus the input, output (softmax) and
    embedding (global-average-pool) tensors of the restored graph.
    """

    def __init__(self, img_size=224, model_factor=1.0, weight_decay=0.0, num_classes=1001,
                 input_node_name='input', output_node_name='output',
                 logs_directory='/tmp/tf-logs/'):
        self.img_size = img_size
        self.model_factor = model_factor
        self.weight_decay = weight_decay
        self.num_classes = num_classes
        # Store node names for useful tensors (prefixed with the graph name
        # used by load_frozen_graph).
        self.input_node_name = 'mobilenet/%s:0' % input_node_name
        self.output_node_name = 'mobilenet/%s:0' % output_node_name
        # TODO: a smarter way to figure out the embeddings tensor name?
        self.embedding_node_name = 'mobilenet/MobilenetV1/Logits/AvgPool_1a/AvgPool:0'
        # Filled in by the restore_session_* methods.
        self.input_tensor = None
        self.output_tensor = None
        self.embedding_tensor = None
        # ImageNet class-index -> readable-name mapping (project helper).
        self.labels = load_imagenet_labels()
        self.logs_directory = logs_directory
        if not os.path.exists(self.logs_directory):
            os.mkdir(self.logs_directory)
        # Start an interactive session
        self.sess = tf.InteractiveSession()

    @staticmethod
    def load_frozen_graph(pb_file, graph_name='mobilenet'):
        # Parse a GraphDef protobuf and import it into a fresh graph whose
        # node names are prefixed with ``graph_name``.
        graph = tf.Graph()
        graph_def = tf.GraphDef()
        with open(pb_file, "rb") as f:
            graph_def.ParseFromString(f.read())
        with graph.as_default():
            tf.import_graph_def(
                graph_def,
                input_map=None,
                return_elements=None,
                name=graph_name,
                op_dict=None,
                producer_op_list=None
            )
        return graph

    def restore_session_from_checkpoint(self, filename, tensorboard=True):
        """Rebuild the inference graph and restore weights from ``filename``.

        Returns the default graph; optionally dumps it for TensorBoard.
        """
        # tf.reset_default_graph()
        # Insert Input placeholder for images
        self.input_tensor = tf.placeholder(tf.float32, shape=(None, self.img_size, self.img_size, 3),
                                           name=self.input_node_name)
        arg_scope = mobilenet_v1_arg_scope(weight_decay=self.weight_decay)
        with slim.arg_scope(arg_scope):
            logits, end_points = mobilenet_v1(self.input_tensor, num_classes=self.num_classes,
                                              is_training=False,
                                              depth_multiplier=self.model_factor)
        # Name the softmax output so it can be found after freezing.
        predictions = tf.contrib.layers.softmax(logits)
        self.output_tensor = tf.identity(predictions, name=self.output_node_name)
        self.embedding_tensor = end_points['AvgPool_1a']
        # We retrieve the protobuf graph definition
        graph = tf.get_default_graph()
        # Add ops to restore all the variables
        rest_var = slim.get_variables_to_restore()  # TODO: change this! maybe it can optimize the final network
        saver = tf.train.Saver(rest_var)
        saver.restore(self.sess, filename)
        if tensorboard:
            tf.summary.FileWriter(self.logs_directory, graph=graph)
        return graph

    def restore_session_from_frozen_graph(self, filename, tensorboard=True):
        """Load a frozen .pb graph and bind the I/O tensors from it.

        NOTE(review): assumes the first/last operations of the imported graph
        are the input/output nodes — confirm for non-default exports.
        """
        graph = self.load_frozen_graph(filename, graph_name='mobilenet')
        self.sess = tf.InteractiveSession(graph=graph)
        self.input_node_name = graph.get_operations()[0].name+':0'
        self.output_node_name = graph.get_operations()[-1].name+':0'
        self.input_tensor = graph.get_tensor_by_name(self.input_node_name)
        self.output_tensor = graph.get_tensor_by_name(self.output_node_name)
        self.embedding_tensor = graph.get_tensor_by_name(self.embedding_node_name)
        # Recover the input resolution from the placeholder shape.
        self.img_size = int(self.input_tensor.shape[1])
        # TODO: extract model_factor from graph
        if tensorboard:
            tf.summary.FileWriter(self.logs_directory, graph=graph)
        return graph

    def freeze_inference_graph(self, input_checkpoint, output_filename='frozen_graph.pb'):
        """Restore from ``input_checkpoint`` and serialize a constant-folded
        graph next to the checkpoint as ``output_filename``."""
        graph = self.restore_session_from_checkpoint(input_checkpoint)
        input_graph_def = graph.as_graph_def()
        # We retrieve our checkpoint fullpath
        basedir = os.path.dirname(input_checkpoint)
        output_filename = os.path.join(basedir, output_filename)
        # We use a built-in TF helper to export variables to constants
        output_graph_def = graph_util.convert_variables_to_constants(
            self.sess,  # The session is used to retrieve the weights
            input_graph_def,  # The graph_def is used to retrieve the nodes
            self.output_node_name.split(",")  # The output node names are used to select the useful nodes
        )
        # Finally we serialize and dump the output graph to the filesystem
        with tf.gfile.GFile(output_filename, "wb") as f:
            f.write(output_graph_def.SerializeToString())
        logger.info("%i ops in the final graph.", len(output_graph_def.node))

    def get_output(self, nparr, get_embedding=False):
        """Run inference on ``nparr``; return predictions (and optionally the
        embedding vector) with singleton dimensions squeezed out."""
        if self.sess is None:
            raise ValueError("TensorFlow session was not initialized! "
                             "Restore a session before predicting.")
        if get_embedding:
            prediction, embedding = self.sess.run((self.output_tensor, self.embedding_tensor),
                                                  {self.input_tensor: nparr})
            prediction = np.squeeze(prediction)
            embedding = np.squeeze(embedding)
            output = prediction, embedding
        else:
            prediction = self.sess.run(self.output_tensor, {self.input_tensor: nparr})
            prediction = np.squeeze(prediction)
            output = prediction
        return output

    def predict_on_images(self, images_path):
        """Predict every .jpg/.png under ``images_path``; return {filename: prediction}."""
        # Now load model and make inferences
        predictions = []
        t_ini = time.time()
        for fn in os.listdir(images_path):
            # Only process image files.
            if not fn.endswith('.jpg') and not fn.endswith('.png'):
                continue
            logger.info("Processing image '%s' and making prediction...", fn)
            img = self.load_and_prepare_image(os.path.join(images_path, fn), img_size=self.img_size)
            tic = time.time()
            prediction = self.get_output(img, get_embedding=False)
            toc = time.time()
            logger.info("Prediction made in %.4f sec", toc-tic)
            predictions.append((fn, prediction))
        t_end = time.time()
        logger.info("Elapsed time on making %i predictions: %.4f sec",
                    len(predictions), t_end - t_ini)
        return dict(predictions)

    def prediction_to_classes(self, prediction, n_top=10):
        """Map a probability vector to its ``n_top`` (label, probability) pairs."""
        top_predictions = sorted(enumerate(prediction), key=lambda i_p: -i_p[1])[:n_top]
        return [(self.labels[i], p) for (i, p) in top_predictions]

    def convert_tflite_format(self, output_filename, quantized=False):
        """Export the current graph to TFLite via TOCO."""
        inference_type = QUANTIZED_UINT8 if quantized else FLOAT
        # NOTE(review): both branches below yield None, so ``quantized``
        # currently has no effect on the input stats — the condition should
        # probably test ``quantized`` and supply real (mean, std) values.
        quantized_input_stats = None if QUANTIZED_UINT8 else None  # TODO: fill this!
        success = toco_convert(input_data=self.sess.graph_def, input_tensors=[self.input_tensor],
                               output_tensors=[self.output_tensor, self.embedding_tensor],
                               inference_type=inference_type, quantized_input_stats=quantized_input_stats,
                               output_filename=output_filename)
        return success

    @staticmethod
    def load_and_prepare_image(filename, img_size=224):
        """Load an image, resize to ``img_size`` square, add a batch dim and
        scale pixel values to roughly [-1, 1].

        :param filename: path of the image file to load.
        :param img_size: target square resolution.
        :return: float32 array of shape (1, img_size, img_size, channels).
        """
        img = imread(filename, flatten=False)
        img = imresize(img, (img_size, img_size))
        img = img.astype(np.float32)
        img = np.expand_dims(img, 0)
        # Preprocess
        img = (img - 127.) / 127.
        return img

    @staticmethod
    def read_tensor_from_image_file(file_name, input_height=224, input_width=224, input_mean=-127, input_std=127):
        """Decode + resize + normalize an image using TF ops; returns the
        evaluated numpy result (runs a throwaway session)."""
        input_name = "file_reader"
        output_name = "normalized"
        file_reader = tf.read_file(file_name, input_name)
        # Pick the decoder from the file extension; default to JPEG.
        if file_name.endswith(".png"):
            image_reader = tf.image.decode_png(file_reader, channels=3, name='png_reader')
        elif file_name.endswith(".gif"):
            image_reader = tf.squeeze(tf.image.decode_gif(file_reader, name='gif_reader'))
        elif file_name.endswith(".bmp"):
            image_reader = tf.image.decode_bmp(file_reader, name='bmp_reader')
        else:
            image_reader = tf.image.decode_jpeg(file_reader, channels=3, name='jpeg_reader')
        float_caster = tf.cast(image_reader, tf.float32)
        dims_expander = tf.expand_dims(float_caster, 0)
        resized = tf.image.resize_bilinear(dims_expander, [input_height, input_width])
        # NOTE(review): input_mean=-127 means pixels are shifted by +127 then
        # divided by 127 — scaling to ~[1, 3], not [-1, 1]; confirm intended.
        normalized = tf.divide(tf.subtract(resized, [input_mean]), [input_std])
        sess = tf.Session()
        result = sess.run(normalized)
        return result
|
leferrad/tensorflow-mobilenet | scripts/freeze_graph.py | from __future__ import print_function
from mobilenet.core import MobileNetDefaultFile, MobileNetV1Restored
from mobilenet.fileio import download_and_uncompress_tarball, get_logger
from mobilenet.imagenet import create_readable_names_for_imagenet_labels
import os
import re
import json
import argparse
logger = get_logger(name="freeze_mobilenet", level='debug')
def create_label_json_file(json_fn):
    """Write the readable ImageNet label mapping to ``json_fn`` and return it."""
    labels = create_readable_names_for_imagenet_labels()
    with open(json_fn, 'w') as fp:
        json.dump(labels, fp, sort_keys=True, indent=4, separators=(',', ': '))
    return labels
if __name__ == '__main__':
    # CLI: freeze a MobileNet checkpoint into a frozen_graph.pb, downloading
    # a default checkpoint when no path is supplied.
    parser = argparse.ArgumentParser()
    parser.add_argument('-ckpt', '--checkpoint-path', dest='checkpoint_path',
                        default='',
                        help="Path to directory with the checkpoints of the MobileNet model")
    args = parser.parse_args()
    checkpoint_path = args.checkpoint_path
    if checkpoint_path == '':
        # Then we have to download a default model
        checkpoint_path = '/tmp/mobilenet'
        logger.info("Checkpoint path is not valid. Setting default: %s", checkpoint_path)
        if not os.path.exists(checkpoint_path):
            logger.info("Creating non existing directory: %s", checkpoint_path)
            os.mkdir(checkpoint_path)
        # Model properties
        factor = MobileNetDefaultFile.MODEL_FACTOR
        img_size = MobileNetDefaultFile.IMG_SIZE
        logger.info("Setting default model properties...")
        model_dl = MobileNetDefaultFile.MODEL_DL_FMT.format(factor, img_size,
                                                            MobileNetDefaultFile.MODEL_DATE)
        logger.info("Setting default file to download: %s", model_dl)
        success = download_and_uncompress_tarball(checkpoint_path, filename=model_dl)
        if not success:
            # NOTE(review): exits with status 0 on failure; a nonzero code
            # would be more conventional for error paths.
            logger.error("Model couldn't been downloaded and extracted. Exiting...")
            exit(0)
    else:
        if not os.path.exists(checkpoint_path):
            logger.error("Argument '-ckpt' not valid! Please enter the correct path to the MobileNet checkpoint. "
                         "You can omit this argument to allow the script to download a default model")
            exit(0)
        # Recover factor/img_size from a checkpoint filename such as
        # 'mobilenet_v1_0.50_224.ckpt' (numbers: ['1', '0', '50', '224']).
        regex = r'mobilenet_v1_[0-9].[0-9]+_[0-9]+.*'
        match = False
        for fn in os.listdir(checkpoint_path):
            if re.match(regex, fn):
                numbers = re.findall(r'[0-9]+', fn)
                factor = numbers[1]+'.'+numbers[2]
                img_size = numbers[3]
                match = True
                break
        if not match:
            logger.error("Argument '-ckpt' doesn't have an expected format for the MobileNet model. "
                         "Please assert that the checkpoint file matches with the following expression: %s", str(regex))
            exit(0)
    logger.info("Model properties: factor=%s, img_size=%s", factor, img_size)
    # Derive the checkpoint filename from the frozen-graph naming scheme.
    model_pb = MobileNetDefaultFile.MODEL_PB_FMT.format(factor, img_size)
    checkpoint_file = model_pb.replace('.pb', '.ckpt')
    checkpoint_file = os.path.join(checkpoint_path, checkpoint_file)
    try:
        if os.path.exists(checkpoint_file+'.meta'):
            logger.info("Processing checkpoint file '%s' ...", checkpoint_file)
            mobilenet_model = MobileNetV1Restored(img_size=int(img_size), model_factor=float(factor))
            mobilenet_model.freeze_inference_graph(checkpoint_file, output_filename='frozen_graph.pb')
            create_label_json_file('/tmp/mobilenet/labels.json')
        else:
            logger.info("Skipping not existing meta file '%s'...", checkpoint_file)
            pass
    except Exception as e:
        logger.error("Failed to process meta_file '%s': %s", checkpoint_file, str(e))
mdshaffer8/TeachersAid | config.py | <filename>config.py
import os
from os.path import join, isfile
from werkzeug.utils import secure_filename
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    """Central Flask configuration; secrets come from environment variables
    with development-friendly fallbacks."""
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'app.db')
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # Mail settings for Flask-Mail (Gmail SMTP over STARTTLS).
    MAIL_SERVER = 'smtp.googlemail.com'
    MAIL_PORT = 587
    MAIL_USE_TLS = True
    MAIL_USERNAME = os.environ.get('EMAIL_USER')
    # Fixed: credential lookup was mangled/invalid; read it from the
    # environment exactly like MAIL_USERNAME above.
    MAIL_PASSWORD = os.environ.get('EMAIL_PASS')
    # UPLOAD_FOLDER = '/static/img'
    # ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
    # Flask-Uploads destinations/URLs for the image upload set.
    UPLOADS_DEFAULT_DEST = basedir + '/teachersaid/static/img/'
    UPLOADS_DEFAULT_URL = 'http://localhost:5000/static/img/'
    UPLOADED_IMAGES_DEST = basedir + '/teachersaid/static/img/'
    UPLOADED_IMAGES_URL = 'http://localhost:5000/static/img/'
mdshaffer8/TeachersAid | migrations/versions/89f0e7905ca9_uploads.py | <filename>migrations/versions/89f0e7905ca9_uploads.py<gh_stars>0
"""uploads
Revision ID: 89f0e7905ca9
Revises: <PASSWORD>
Create Date: 2018-06-07 10:42:40.640721
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '89f0e7905ca9'
down_revision = '<PASSWORD>'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: add nullable image filename/URL columns to ``post``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('post', sa.Column('image_filename', sa.String(), nullable=True))
    op.add_column('post', sa.Column('image_url', sa.String(), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop the image columns added by this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('post', 'image_url')
    op.drop_column('post', 'image_filename')
    # ### end Alembic commands ###
|
mdshaffer8/TeachersAid | teachersaid/__init__.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
from flask_mail import Mail
from flask_uploads import UploadSet, IMAGES, configure_uploads
from flask_migrate import Migrate
from config import Config
# Create the Flask app and bind every extension at import time
# (configuration comes from config.Config).
app = Flask(__name__)
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)
# Endpoint unauthenticated users are redirected to.
login_manager.login_view = 'login'
# Bootstrap alert category for the "please log in" flash message.
login_manager.login_message_category = 'info'
mail = Mail(app)
# Flask-Uploads image set; destinations are configured in config.Config.
images = UploadSet('images', IMAGES)
configure_uploads(app, images)

# Imported last, presumably to avoid circular imports (routes/models need
# ``app``/``db`` from this module) — confirm before reordering.
from teachersaid import routes, models
mdshaffer8/TeachersAid | old/bluprint/main/routes.py | from flask import render_template, request, Blueprint
from teachersaid.models import Post
main = Blueprint('main', __name__)


@main.route("/")
@main.route("/home")
def home():
    """Render the landing page."""
    return render_template('home.html', title='Home')


@main.route("/list")
def list():
    """Render a paginated list of posts, newest first (5 per page)."""
    # NOTE(review): shadows the builtin ``list``; renaming would change the
    # Flask endpoint name ('main.list'), so it is left as-is.
    page = request.args.get('page', 1, type=int)
    posts = Post.query.order_by(Post.date_posted.desc()).paginate(page=page, per_page=5)
    return render_template('list.html', posts=posts)


@main.route("/test")
def about():
    """Render the test page."""
    return render_template('test.html', title='Test')
fionahiklas/udp-broadcast-examples | udp-client.py | <filename>udp-client.py<gh_stars>0
import socket, traceback
host = '255.255.255.255' # Bind to all interfaces
port = 2081
print "Creating socker on port: ", port
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#print "Setting REUSEADDR option"
#s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
print "Setting SO_BROADCAST option"
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
print "Connecting to host: ", host
s.connect((host, port))
try:
print "Going to send data ..."
s.send("I am here")
print "... sent"
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
|
fionahiklas/udp-broadcast-examples | udp-server.py | import socket, traceback
host = '0.0.0.0' # Bind to all interfaces
port = 2081
print "Creating socker on port: ", port
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#print "Setting REUSEADDR option"
#s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
print "Setting SO_BROADCAST option"
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
print "Binding to host: ", host
s.bind((host, port))
print "Entering loop ..."
while 1:
try:
print "... waiting for data ..."
message, address = s.recvfrom(8192)
print "Got data from", address
# Acknowledge it.
s.sendto("I am here", address)
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
|
aaronbenz/flask-restplus | benchmarks/swagger.bench.py | from minibench import Benchmark
from faker import Faker
from flask import Flask
from flask_restplus import fields, Api, Resource
from flask_restplus.swagger import Swagger
fake = Faker()

# Shared API instance the benchmarked resources register against.
api = Api()

# Schema models used by every resource below.
person = api.model('Person', {
    'name': fields.String,
    'age': fields.Integer
})

family = api.model('Family', {
    'name': fields.String,
    'father': fields.Nested(person),
    'mother': fields.Nested(person),
    'children': fields.List(fields.Nested(person))
})
# Stub CRUD resources — bodies are empty because only the generated Swagger
# specification (not the handlers) is being benchmarked.
@api.route('/families', endpoint='families')
class Families(Resource):
    @api.marshal_with(family)
    def get(self):
        '''List all families'''
        pass

    @api.marshal_with(family)
    @api.response(201, 'Family created')
    def post(self):
        '''Create a new family'''
        pass


@api.route('/families/<name>/', endpoint='family')
@api.response(404, 'Family not found')
class Family(Resource):
    @api.marshal_with(family)
    def get(self):
        '''Get a family given its name'''
        pass

    @api.marshal_with(family)
    def put(self):
        '''Update a family given its name'''
        pass


@api.route('/persons', endpoint='persons')
class Persons(Resource):
    @api.marshal_with(person)
    def get(self):
        '''List all persons'''
        pass

    @api.marshal_with(person)
    @api.response(201, 'Person created')
    def post(self):
        '''Create a new person'''
        pass


@api.route('/persons/<name>/', endpoint='person')
@api.response(404, 'Person not found')
class Person(Resource):
    @api.marshal_with(person)
    def get(self):
        '''Get a person given its name'''
        pass

    @api.marshal_with(person)
    def put(self):
        '''Update a person given its name'''
        pass
class SwaggerBenchmark(Benchmark):
    '''Swagger serialization benchmark for a full API'''
    # Number of iterations per bench_* method.
    times = 1000

    def before_class(self):
        # Bind the module-level API to a fresh Flask app once per run.
        self.app = Flask(__name__)
        api.init_app(self.app)

    def bench_swagger_specs(self):
        # Full (uncached) serialization of the Swagger specification.
        with self.app.test_request_context('/'):
            return Swagger(api).as_dict()

    def bench_swagger_specs_cached(self):
        # Same specification through the API's cached __schema__ property.
        with self.app.test_request_context('/'):
            return api.__schema__
|
aaronbenz/flask-restplus | tests/test_payload.py | <filename>tests/test_payload.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import flask_restplus as restplus
from . import TestCase
class PayloadTestCase(TestCase):
    """Integration tests for request-payload validation (constructor flag,
    decorator override, app config) and for ``api.payload`` access."""

    def post(self, url, data):
        # Helper: POST ``data`` JSON-encoded and return the raw response.
        with self.app.test_client() as client:
            return client.post(url, data=json.dumps(data),
                               headers={'content-type': 'application/json'})

    def assert_errors(self, response, *errors):
        # Helper: assert a 400 whose JSON body reports each field in ``errors``.
        self.assertEquals(response.status_code, 400)
        out = json.loads(response.data.decode('utf8'))
        self.assertIn('message', out)
        self.assertIn('errors', out)
        for error in errors:
            self.assertIn(error, out['errors'])

    def test_validation_false_on_constructor(self):
        api = restplus.Api(self.app, validate=False)
        fields = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer,
            'birthdate': restplus.fields.DateTime,
        })

        @api.route('/validation/')
        class ValidationOff(restplus.Resource):
            @api.expect(fields)
            def post(self):
                return {}

        # Missing required 'name' is accepted because validation is off.
        data = {}
        response = self.post('/validation/', data)
        self.assertEquals(response.status_code, 200)
        out = json.loads(response.data.decode('utf8'))
        self.assertEquals(out, {})

    def test_validation_false_on_constructor_with_override(self):
        api = restplus.Api(self.app, validate=False)
        fields = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer,
            'birthdate': restplus.fields.DateTime,
        })

        @api.route('/validation/')
        class ValidationOn(restplus.Resource):
            # Per-endpoint validate=True overrides the constructor default.
            @api.expect(fields, validate=True)
            def post(self):
                return {}

        data = {}
        response = self.post('/validation/', data)
        self.assert_errors(response, 'name')

    def test_validation_true_on_constructor(self):
        api = restplus.Api(self.app, validate=True)
        fields = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer,
            'birthdate': restplus.fields.DateTime,
        })

        @api.route('/validation/')
        class ValidationOff(restplus.Resource):
            @api.expect(fields)
            def post(self):
                return {}

        data = {}
        response = self.post('/validation/', data)
        self.assert_errors(response, 'name')

    def test_validation_true_on_constructor_with_override(self):
        api = restplus.Api(self.app, validate=True)
        fields = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer,
            'birthdate': restplus.fields.DateTime,
        })

        @api.route('/validation/')
        class ValidationOff(restplus.Resource):
            @api.expect(fields, validate=False)
            def post(self):
                return {}

        data = {}
        response = self.post('/validation/', data)
        # NOTE(review): despite validate=False on the decorator this expects
        # a validation error — i.e. the constructor flag wins here.
        self.assert_errors(response, 'name')

    def _setup_api_format_checker_tests(self, format_checker=None):
        # Register a resource whose model uses a custom 'ipv4' string format.
        class IPAddress(restplus.fields.Raw):
            __schema_type__ = 'string'
            __schema_format__ = 'ipv4'

        api = restplus.Api(self.app, format_checker=format_checker)
        model = api.model('MyModel', {'ip': IPAddress(required=True)})

        @api.route('/format_checker/')
        class TestResource(restplus.Resource):
            @api.expect(model, validate=True)
            def post(self):
                return {}

    def test_format_checker_none_on_constructor(self):
        # Without a FormatChecker the malformed IP passes validation.
        self._setup_api_format_checker_tests()
        data = {'ip': '192.168.1'}
        response = self.post('/format_checker/', data)
        self.assertEquals(response.status_code, 200)
        out = json.loads(response.data.decode('utf8'))
        self.assertEquals(out, {})

    def test_format_checker_object_on_constructor(self):
        from jsonschema import FormatChecker
        # With a FormatChecker the malformed IP is rejected as bad 'ipv4'.
        self._setup_api_format_checker_tests(format_checker=FormatChecker())
        data = {'ip': '192.168.1'}
        response = self.post('/format_checker/', data)
        self.assertEquals(response.status_code, 400)
        out = json.loads(response.data.decode('utf8'))
        self.assertIn('ipv4', out['errors']['ip'])

    def test_validation_false_in_config(self):
        with self.settings(RESTPLUS_VALIDATE=False):
            api = restplus.Api(self.app)
            fields = api.model('Person', {
                'name': restplus.fields.String(required=True),
                'age': restplus.fields.Integer,
                'birthdate': restplus.fields.DateTime,
            })

            @api.route('/validation/')
            class ValidationOff(restplus.Resource):
                @api.expect(fields)
                def post(self):
                    return {}

            data = {}
            response = self.post('/validation/', data)
            self.assertEquals(response.status_code, 200)
            out = json.loads(response.data.decode('utf8'))
            self.assertEquals(out, {})

    def test_validation_in_config(self):
        with self.settings(RESTPLUS_VALIDATE=True):
            api = restplus.Api(self.app)
            fields = api.model('Person', {
                'name': restplus.fields.String(required=True),
                'age': restplus.fields.Integer,
                'birthdate': restplus.fields.DateTime,
            })

            @api.route('/validation/')
            class ValidationOn(restplus.Resource):
                @api.expect(fields)
                def post(self):
                    return {}

            data = {}
            response = self.post('/validation/', data)
            self.assert_errors(response, 'name')

    def test_api_payload(self):
        api = restplus.Api(self.app, validate=True)
        fields = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer,
            'birthdate': restplus.fields.DateTime,
        })

        @api.route('/validation/')
        class Payload(restplus.Resource):
            # Captures api.payload so the test can inspect it afterwards.
            payload = None

            @api.expect(fields)
            def post(self):
                Payload.payload = api.payload
                return {}

        data = {
            'name': '<NAME>',
            'age': 15,
        }
        response = self.post('/validation/', data)
        self.assertEquals(response.status_code, 200)
        self.assertEquals(Payload.payload, data)

    def test_validation_with_inheritance(self):
        '''It should perform validation with inheritance (allOf/$ref)'''
        api = restplus.Api(self.app, validate=True)
        fields = api.model('Parent', {
            'name': restplus.fields.String(required=True),
        })
        child_fields = api.inherit('Child', fields, {
            'age': restplus.fields.Integer,
        })

        @api.route('/validation/')
        class Inheritance(restplus.Resource):
            @api.expect(child_fields)
            def post(self):
                return {}

        response = self.post('/validation/', {
            'name': '<NAME>',
            'age': 15,
        })
        self.assertEquals(response.status_code, 200)

        response = self.post('/validation/', {
            'age': '15',
        })
        self.assert_errors(response, 'name', 'age')

    def test_validation_on_list(self):
        '''It should perform validation on lists'''
        api = restplus.Api(self.app, validate=True)
        person = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer(required=True),
        })
        family = api.model('Family', {
            'name': restplus.fields.String(required=True),
            'members': restplus.fields.List(restplus.fields.Nested(person))
        })

        @api.route('/validation/')
        class List(restplus.Resource):
            @api.expect(family)
            def post(self):
                return {}

        # Errors are reported per list element with dotted paths.
        response = self.post('/validation/', {
            'name': 'Doe',
            'members': [{'name': 'Jonn'}, {'age': 42}]
        })
        self.assert_errors(response, 'members.0.age', 'members.1.name')

    def test_validation_with_propagate(self):
        # Validation errors must still yield 400s with PROPAGATE_EXCEPTIONS.
        self.app.config['PROPAGATE_EXCEPTIONS'] = True
        api = restplus.Api(self.app, validate=True)
        fields = api.model('Person', {
            'name': restplus.fields.String(required=True),
            'age': restplus.fields.Integer,
            'birthdate': restplus.fields.DateTime,
        })

        @api.route('/validation/')
        class ValidationOff(restplus.Resource):
            @api.expect(fields)
            def post(self):
                return {}

        data = {}
        response = self.post('/validation/', data)
        self.assert_errors(response, 'name')

    def test_empty_payload(self):
        # A completely empty body must not crash an endpoint without @expect.
        api = restplus.Api(self.app, validate=True)

        @api.route('/empty/')
        class Payload(restplus.Resource):
            def post(self):
                return {}

        with self.app.test_client() as client:
            response = client.post('/empty/', data='',
                                   headers={'content-type': 'application/json'})
            self.assertEquals(response.status_code, 200)
|
aaronbenz/flask-restplus | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# flake8: noqa
from __future__ import unicode_literals
import re
import sys
from setuptools import setup, find_packages
RE_REQUIREMENT = re.compile(r'^\s*-r\s*(?P<filename>.*)$')

# (pattern, replacement) pairs applied in order to make the README/CHANGELOG
# renderable by PyPI's reStructuredText subset.
PYPI_RST_FILTERS = (
    # Replace code-blocks
    (r'\.\.\s? code-block::\s*(\w|\+)+', '::'),
    # Remove all badges
    (r'\.\. image:: .*', ''),
    (r'\s+:target: .*', ''),
    (r'\s+:alt: .*', ''),
    # Replace Python crossreferences by simple monospace
    (r':(?:class|func|meth|mod|attr|obj|exc|data|const):`~(?:\w+\.)*(\w+)`', r'``\1``'),
    (r':(?:class|func|meth|mod|attr|obj|exc|data|const):`([^`]+)`', r'``\1``'),
    # replace doc references
    (r':doc:`(.+) <(.*)>`', r'`\1 <http://flask-restplus.readthedocs.org/en/stable\2.html>`_'),
    # replace issues references
    (r':issue:`(.+)`', r'`#\1 <https://github.com/noirbizarre/flask-restplus/issues/\1>`_'),
    # Drop unrecognized currentmodule
    (r'\.\. currentmodule:: .*', ''),
)


def rst(filename):
    '''
    Load rst file and sanitize it for PyPI.
    Remove unsupported github tags:
     - code-block directive
     - all badges
    '''
    # Fixed: close the file deterministically instead of leaking the handle.
    with open(filename) as f:
        content = f.read()
    for regex, replacement in PYPI_RST_FILTERS:
        content = re.sub(regex, replacement, content)
    return content
# Sanitized README + changelog become the PyPI long description.
long_description = '\n'.join((
    rst('README.rst'),
    rst('CHANGELOG.rst'),
    ''
))

# Execute __about__.py to obtain __version__/__description__ without importing
# the package (which would pull in its runtime dependencies).
# Fixed: close the source file instead of leaking the handle.
with open('flask_restplus/__about__.py') as f:
    exec(compile(f.read(), 'flask_restplus/__about__.py', 'exec'))

tests_require = ['nose', 'rednose', 'blinker', 'tzlocal']
install_requires = ['Flask>=0.8', 'six>=1.3.0', 'jsonschema', 'pytz', 'aniso8601>=0.82']
doc_require = ['sphinx', 'alabaster', 'sphinx_issues']
dev_requires = ['flake8', 'minibench', 'tox', 'invoke'] + tests_require + doc_require

# Backports needed on Python < 2.7.
if sys.version_info[0:2] < (2, 7):
    install_requires += ['ordereddict']
    tests_require += ['unittest2']

# Probe for the stdlib mock (Python 3.3+); fall back to the PyPI backport.
# Fixed: was a bare ``except`` that would also hide unrelated errors.
try:
    from unittest.mock import Mock  # noqa
except ImportError:
    tests_require += ['mock']

setup(
    name='flask-restplus',
    version=__version__,
    description=__description__,
    long_description=long_description,
    url='https://github.com/noirbizarre/flask-restplus',
    author='<NAME>',
    author_email='<EMAIL>',
    packages=find_packages(exclude=['tests', 'tests.*']),
    include_package_data=True,
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require={
        'test': tests_require,
        'doc': doc_require,
        'dev': dev_requires,
    },
    license='MIT',
    use_2to3=True,
    zip_safe=False,
    keywords='',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Programming Language :: Python',
        'Environment :: Web Environment',
        'Operating System :: OS Independent',
        'Intended Audience :: Developers',
        'Topic :: System :: Software Distribution',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: MIT License',
    ],
)
|
aaronbenz/flask-restplus | tasks.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from invoke import run, task
from os.path import join, abspath, dirname
ROOT = abspath(join(dirname(__file__)))
def lrun(cmd, *args, **kwargs):
    '''Run a command ensuring cwd is project root'''
    rooted = 'cd {0} && {1}'.format(ROOT, cmd)
    return run(rooted, *args, **kwargs)
@task
def clean(docs=False, bytecode=False, extra=''):
    '''Cleanup all build artifacts'''
    artefacts = ['build', 'dist', 'cover', 'docs/_build', '**/*.pyc', '*.egg-info', '.tox']
    for artefact in artefacts:
        print('Removing {0}'.format(artefact))
        lrun('rm -rf {0}'.format(artefact))
@task
def demo():
    '''Run the demo'''
    lrun('python examples/todo.py')

@task
def test():
    '''Run tests suite'''
    # pty=True preserves colored output from nose.
    lrun('nosetests --force-color', pty=True)

@task
def cover():
    '''Run tests suite with coverage'''
    lrun('nosetests --force-color --with-coverage --cover-html', pty=True)

@task
def tox():
    '''Run tests against Python versions'''
    # NOTE(review): this uses run() directly (not lrun) unlike the other
    # tasks — presumably intentional; confirm tox should run from the
    # caller's cwd.
    run('tox', pty=True)

@task
def qa():
    '''Run a quality report'''
    lrun('flake8 flask_restplus')

@task
def doc():
    '''Build the documentation'''
    lrun('cd doc && make html', pty=True)

@task
def assets():
    '''Fetch web assets'''
    lrun('bower install')

@task
def dist():
    '''Package for distribution'''
    lrun('python setup.py sdist bdist_wheel', pty=True)

# Aggregate task: runs the listed tasks in order; default when none given.
@task(tox, doc, qa, assets, dist, default=True)
def all():
    '''Run tests, reports and packaging'''
    pass
|
aaronbenz/flask-restplus | benchmarks/marshal.bench.py | from minibench import Benchmark
from faker import Faker
from flask import Flask
from flask_restplus import marshal, fields
fake = Faker()
# Marshalling schema for a single flat person record.
person_fields = {
    'name': fields.String,
    'age': fields.Integer
}
# Nested schema: two person objects plus a list of person children.
family_fields = {
    'father': fields.Nested(person_fields),
    'mother': fields.Nested(person_fields),
    'children': fields.List(fields.Nested(person_fields))
}
def person():
    '''Build a fake person payload matching ``person_fields``.'''
    name = fake.name()
    age = fake.pyint()
    return {'name': name, 'age': age}
def family():
    '''Build a fake nested family payload matching ``family_fields``.'''
    members = {'father': person(), 'mother': person()}
    members['children'] = [person(), person()]
    return members
class MarshalBenchmark(Benchmark):
    """Micro-benchmarks for flask_restplus.marshal, with and without field masks."""
    # minibench: number of times each bench_* method is executed.
    times = 1000
    def before_class(self):
        # A minimal Flask app so that the masked benchmarks can fake a
        # request carrying the X-Fields mask header.
        self.app = Flask(__name__)
        self.app.config.setdefault('RESTPLUS_MASK_HEADER', 'X-Fields')
    def bench_marshal_simple(self):
        # Flat payload against the flat schema.
        return marshal(person(), person_fields)
    def bench_marshal_nested(self):
        # Nested payload (dicts + list of dicts) against the nested schema.
        return marshal(family(), family_fields)
    def bench_marshal_simple_with_mask(self):
        # Same as bench_marshal_simple, restricted by an X-Fields mask.
        with self.app.test_request_context('/', headers={'X-Fields': 'name'}):
            return marshal(person(), person_fields)
    def bench_marshal_nested_with_mask(self):
        # Nested marshalling restricted by a nested X-Fields mask.
        with self.app.test_request_context('/', headers={'X-Fields': 'father,children{name}'}):
            return marshal(family(), family_fields)
|
mpg-age-bioinformatics/b | bit/rsync.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import getpass
from os.path import expanduser
from subprocess import Popen, PIPE, STDOUT
import stat
import tempfile
import bit.config as config
def get_remote_config(sshLogin,remotePass):
    # Copy the remote user's ~/.bit_config to the local home directory as
    # ~/.bit_config.<remote_address>, then restrict its permissions to the
    # owner. Exits the process if the remote file does not exist.
    #
    # sshLogin: "<user>@<host>" login string.
    # remotePass: password for sshLogin, fed to sshpass.
    #
    # NOTE(review): the password is interpolated into the shell command line
    # and is therefore visible in the process list while scp runs — confirm
    # this is acceptable for the deployment environment.
    remote_address=sshLogin.split("@")[1]
    call="sshpass -p "+remotePass+" scp "+sshLogin+":~/.bit_config ~/.bit_config."+remote_address
    #print(call)
    os.system(call)
    uhome=expanduser("~")+"/"
    if os.path.isfile(uhome+".bit_config.%s" %remote_address) :
        # Owner-only read/write/execute on the fetched config.
        os.chmod(uhome+".bit_config.%s" %remote_address, stat.S_IRWXU )
    else:
        print("Could not find ~/.bit_config on remote server.\nPlease run 'bit --config' on remote server.")
        sys.exit(0)
def read_remote_config(sshLogin,remotePass,forceImport=None):
    # Ensure a local copy of the remote server's bit config exists, then
    # parse and return it.
    #
    # forceImport: when truthy, re-fetch the remote config even if a local
    # copy is already present.
    #
    # NOTE(review): existence is checked against the cwd-relative path
    # ".bit_config.<host>", while get_remote_config saves the copy under the
    # user's home directory — unless the process runs from $HOME these are
    # different files; confirm intended behaviour.
    remote_address=sshLogin.split("@")[1]
    if os.path.isfile(".bit_config.%s" %remote_address):
        if not forceImport:
            print("Using previously collected remote config.")
        else:
            get_remote_config(sshLogin,remotePass)
    else:
        get_remote_config(sshLogin,remotePass)
    remote_config=config.read_bitconfig(showit=None,bit_config=".bit_config.%s" %remote_address)
    return remote_config
def list_local_sync(base_destination,list_of_files):
    # Build an upload plan for pushing local project files to a destination
    # root.
    #
    # base_destination: destination root folder (e.g. the remote projects path).
    # list_of_files: local files/folders; all must belong to one project under
    #   the configured local projects path, otherwise the process exits.
    #
    # Returns a 4-tuple:
    #   upload_dic       - {local file path: destination path}
    #   subfolders       - sorted, de-duplicated destination folders to create
    #   base_destination - destination path of the project folder itself
    #   parent_folder    - group folder name, with a trailing "/"
    # check if files all come from the same project folder
    configdic=config.read_bitconfig()
    local_path=configdic["local_path"]
    size_local=len(local_path.split("/"))
    parent_folder=[]
    check_project=[]
    for i in list_of_files:
        f=os.path.abspath(i)
        # Layout is <local_path>/<group>/<project>/..., so group and project
        # names are the path components immediately after local_path.
        parent_folder.append(f.split("/")[size_local])
        check_project.append(f.split("/")[size_local+1])
    check_project=list(set(check_project))
    if len(check_project) > 1:
        # Refuse mixed-project uploads.
        print("Found more than one project:\n")
        for p in check_project:
            print(p)
        sys.stdout.flush()
        sys.exit(0)
    else:
        project_name=check_project[0]
        parent_folder=parent_folder[0]+"/"
    target_project=parent_folder+project_name
    base_destination=base_destination+"/"+parent_folder+"/"+project_name
    upload_dic={}
    subfolders=[base_destination]
    # Also register every ancestor prefix of base_destination so parents can
    # be created first.
    check=base_destination.split("/")
    for i in range(len(check)):
        c="/".join(check[:i-len(check)])
        subfolders.append(c)
    for f in list_of_files:
        full=os.path.abspath(f)
        if CheckFoldersCon(local_path,full):
            if os.path.isdir(full):
                # Walk the folder: skip hidden subfolders (name starts with
                # ".") and any file whose name contains a skipped folder name.
                subfol=base_destination+"/"+os.path.basename(full)
                subfolders.append(subfol)
                for root, directories, filenames in os.walk(full):
                    bad_dirs=[]
                    for directory in directories:
                        if os.path.basename(directory)[0] != ".":
                            subdir=os.path.join(root, directory).split(full)[-1]
                            subdir=subfol+subdir
                            subfolders.append(subdir)
                        else:
                            bad_dirs.append(os.path.basename(directory))
                    for filename in filenames:
                        if not any(x in filename for x in bad_dirs):
                            subfile=os.path.join(root,filename)
                            if os.path.isfile(subfile):
                                upload_dic[subfile]=subfol+subfile.split(full)[-1]
            elif os.path.isfile(full):
                # Mirror the file's path relative to the project folder.
                tfile=base_destination+full.split(local_path+"/"+target_project )[1]
                upload_dic[full]=tfile
                subfolders.append( tfile.split(os.path.basename(tfile))[0] )
            else:
                # Neither a file nor a folder on disk: send flat into the
                # project root.
                upload_dic[full]=base_destination+"/"+os.path.basename(full)
        else:
            print("%s is either a link to %s or is not on your projects path. This file will not be syncronized." %(f, full))
            sys.stdout.flush()
    subfolders=list(set(subfolders))
    subfolders=[ xx for xx in subfolders if len(xx) > 0 ]
    subfolders.sort()
    #print(upload_dic,"\n",subfolders)
    return upload_dic, subfolders, base_destination, parent_folder
def list_local_for_remote_sync(base_destination,list_of_files):
    # Like list_local_sync, but used when pulling FROM a remote server:
    # folders are mapped as single entries (their contents are enumerated
    # remotely by the caller instead of walking the local disk).
    #
    # Returns (upload_dic, subfolders, base_destination, parent_folder);
    # unlike list_local_sync, parent_folder carries NO trailing "/".
    # check if files all come from the same project folder
    configdic=config.read_bitconfig()
    local_path=configdic["local_path"]
    size_local=len(local_path.split("/"))
    parent_folder=[]
    check_project=[]
    for i in list_of_files:
        f=os.path.abspath(i)
        # Layout is <local_path>/<group>/<project>/...
        parent_folder.append(f.split("/")[size_local])
        check_project.append(f.split("/")[size_local+1])
    check_project=list(set(check_project))
    if len(check_project) > 1:
        print("Found more than one project:\n")
        for p in check_project:
            print(p)
        sys.stdout.flush()
        sys.exit(0)
    else:
        project_name=check_project[0]
        parent_folder=parent_folder[0]
    target_project=parent_folder+"/"+project_name
    base_destination=base_destination+parent_folder+"/"+project_name
    upload_dic={}
    subfolders=[base_destination]
    # Register every ancestor prefix of base_destination as well.
    check=base_destination.split("/")
    for i in range(len(check)):
        c="/".join(check[:i-len(check)])
        subfolders.append(c)
    for f in list_of_files:
        full=os.path.abspath(f)
        if CheckFoldersCon(local_path,full):
            if os.path.isdir(full):
                # Folders are mapped one-to-one; no local walk here.
                subfol=base_destination+"/"+os.path.basename(full)
                upload_dic[full]=base_destination+"/"+os.path.basename(full)
                subfolders.append(subfol)
            elif os.path.isfile(full):
                # Mirror the file's path relative to the project folder.
                tfile=base_destination+full.split(local_path+"/"+target_project )[1]
                upload_dic[full]=tfile
                subfolders.append( tfile.split(os.path.basename(tfile))[0] )
            else:
                upload_dic[full]=base_destination+"/"+os.path.basename(full)
        else:
            print("%s is either a link to %s or is not on your projects path. This file will not be syncronized." %(f, full))
            sys.stdout.flush()
    subfolders=list(set(subfolders))
    subfolders=[ xx for xx in subfolders if len(xx) > 0 ]
    subfolders.sort()
    return upload_dic, subfolders, base_destination, parent_folder
def CheckFoldersCon(base,files_to_check):
    '''Return True when *files_to_check* lies under the *base* path prefix.'''
    return files_to_check.startswith(base)
def rsync_to(sshLogin,rsync_files,forceImport=None,sync_to=True,sync_from=False):#,n_processors=1):
    # Prepare rsync upload commands for pushing local project files to a
    # remote bit server. Creates the needed remote folder structure (with
    # ownership and ACLs) over ssh, then returns one rsync command string per
    # file for the caller to execute (e.g. in a worker pool).
    #
    # sshLogin: "<user>@<host>"; rsync_files: local files/folders to upload.
    remotePass=str(getpass.getpass(prompt="Please give in your password for %s: " %sshLogin.split("@")[1] ))
    remote_config=read_remote_config(sshLogin,remotePass,forceImport)
    remote_path=remote_config["local_path"]
    sync_dic, subfolders, path_to_project, parent_folder=list_local_sync(remote_path,\
        rsync_files)
    # Folders that must exist remotely before rsync runs.
    # NOTE(review): "ff not in remote_path" is a substring test that drops
    # ancestors of remote_path from the list — confirm that is the intent.
    create_subfolders=[ ff for ff in subfolders if ff not in remote_path ]
    create_subfolders=" ".join(create_subfolders)
    if remote_config["user_group"]:
        # Shell snippets opening the group/project folders to each configured
        # user via ACLs.
        remote_group=" ".join(remote_config["user_group"])
        remote_group_group="; chmod 700 "+remote_path+parent_folder+" ; for us in "+remote_group+" ; do setfacl -m user:${us}:rwx "+remote_path+parent_folder+" ; done "
        remote_group_project="; chmod 700 "+path_to_project+" ; for us in "+remote_group+" ; do setfacl -m user:${us}:rwx "+path_to_project+" ; done "
    else:
        remote_group_group="; echo Not_using_acls "
        remote_group_project="; echo Not_using_acls "
    # Remote one-liner: create group and project folders owned by the manager
    # (owner of the remote projects tree), then every required subfolder.
    create_subfolders="\'MANAGER=$(ls -ld "+remote_path+" | awk \"{ print \\$3 }\" ); if [ ! -d "+remote_path+"/"+parent_folder+" ]; then mkdir -p "+remote_path+"/"+parent_folder+remote_group_group+"; chown $MANAGER "+remote_path+"/"+parent_folder+"; fi; if [ ! -d "+path_to_project+" ]; then mkdir -p "+path_to_project+remote_group_project+"; chown $MANAGER "+path_to_project+"; fi; for f in "+create_subfolders+"; do if [ ! -d $f ]; then mkdir -p $f; fi; done\'"
    create_subfolders="sshpass -p "+str(remotePass)+" ssh "+str(sshLogin)+" "+create_subfolders
    os.system(create_subfolders)
    # def SENDFILES(f,sync_dic=sync_dic,remotePass=remotePass,sshLogin=sshLogin):
    #     call='rsync -tlzhPL --rsh="sshpass -p %s ssh -o \
    #     StrictHostKeyChecking=no -l %s" %s %s:%s' %(str(remotePass), \
    #     str(sshLogin.split("@")[0]), f, str(sshLogin.split("@")[1]), \
    #     sync_dic[f])
    #     print(f)
    #     sys.stdout.flush()
    #     print(call)
    #     #os.system(call)
    #     return f
    #pool = mp.Pool(n_processors)
    funclist = []
    for f in sync_dic:
        # One rsync command per file; the caller executes these.
        call='rsync -rtlzhP --rsh="sshpass -p %s ssh -o StrictHostKeyChecking=no -l %s" %s %s:%s' \
            %(str(remotePass), str(sshLogin.split("@")[0]), f, str(sshLogin.split("@")[1]), sync_dic[f])
        #print(call)
        funclist.append(call)
        #out=pool.apply_async(SENDFILES,[call])
        #funclist.append(out)
        #call='rsync -tlzhPL --rsh="sshpass -p %s ssh -o \
        #StrictHostKeyChecking=no -l %s" %s %s:%s' %(str(remotePass), \
        #str(sshLogin.split("@")[0]), f, str(sshLogin.split("@")[1]), \
        #sync_dic[f])
        #os.system(call)
    #results=[]
    #for ff in funclist:
    #    res=ff.get()
    #    results.append(res)
    #    #print( res )
    #    #sys.stdout.flush()
    #print(results)
    return funclist
def rsync_from(sshLogin,rsync_files,forceImport=None,sync_to=False,sync_from=True):
    # Prepare rsync commands to pull files from a remote bit server.
    #
    # sshLogin: "<user>@<host>"; rsync_files: local-style paths identifying
    # what to fetch. Enumerates matching files/folders remotely, recreates the
    # local folder structure (with permissions/ACLs at group/project level)
    # and returns the list of rsync command strings for the caller to run.
    remotePass=str(getpass.getpass(prompt="Please give in your password for %s: " %sshLogin.split("@")[1] ))
    configdic=config.read_bitconfig()
    local_path=configdic["local_path"]
    remote_config=read_remote_config(sshLogin,remotePass,forceImport)
    remote_path=remote_config["local_path"]
    # get remote dirs and subdirs
    sync_dic, subfolders, path_to_project, parent_folder=list_local_for_remote_sync(\
        remote_path, rsync_files)
    check_remote=[]
    for f in sync_dic:
        check_remote.append(sync_dic[f])
    check_remote=" ".join(check_remote)
    # check if files exist on remote
    # NOTE(review): NamedTemporaryFile defaults to binary mode; on Python 3
    # readlines() yields bytes and s.strip("\n") would raise TypeError —
    # presumably this path targets Python 2; confirm.
    temp=tempfile.NamedTemporaryFile()
    check_remote_files="\'for f in "+check_remote+" ; do if [ -f $f ]; then echo $f; fi; done\' > "+temp.name
    check_remote_files="sshpass -p "+str(remotePass)+" ssh "+str(sshLogin)+" "+check_remote_files
    os.system(check_remote_files)
    res=temp.readlines()
    temp.close()
    resFiles=[ s.strip("\n") for s in res ]
    # check if folders exist on remote
    temp=tempfile.NamedTemporaryFile()
    check_remote_folder="\'for f in "+check_remote+" ; do if [ -d $f ]; then echo $f; find $f -type d -print; fi; done\' > "+temp.name
    check_remote_folder="sshpass -p "+str(remotePass)+" ssh "+str(sshLogin)+" "+check_remote_folder
    os.system(check_remote_folder)
    res=temp.readlines()
    temp.close()
    resFolders=[ s.strip("\n")+"/" for s in res ]
    # List the (symlink-resolved) files inside every remote folder found.
    list_folders_contents=" ".join(resFolders)
    temp=tempfile.NamedTemporaryFile()
    check_remote_folder="\'for f in "+list_folders_contents+" ; do cd $f; for c in $(ls); do cc=$(readlink -f $c); if [ -f $cc ]; then echo $cc; fi; done; done\' > "+temp.name
    check_remote_folder="sshpass -p "+str(remotePass)+" ssh "+str(sshLogin)+" "+check_remote_folder
    os.system(check_remote_folder)
    res=temp.readlines()
    temp.close()
    resAllFiles=[ s.strip("\n") for s in res ]
    res=[resFiles, resAllFiles] #resFolders
    res=[item for sublist in res for item in sublist]
    res=list(set(res))
    showres="\n".join(res)
    print("The following targets could be found on the remote server:\n%s" %showres)
    sys.stdout.flush()
    # Map each remote path back onto the local projects tree.
    inv_sync_dic={}
    for r in res:
        inv_sync_dic[r]=local_path+"/"+r.split(remote_path)[1]
    lenLocalPath=len(os.path.abspath(local_path).split("/"))
    for remfold in resFolders:
        ltf=local_path+"/"+remfold.split(remote_path)[1]
        if not os.path.exists(ltf):
            os.makedirs(ltf)
            # Only group- and project-level folders get permissions and ACLs.
            if len(os.path.abspath(ltf).split("/")) in [lenLocalPath+1,lenLocalPath+2]:
                if configdic["user_group"]:
                    os.chmod(ltf, stat.S_IRWXU)
                    user_group=configdic["user_group"]
                    # NOTE(review): elsewhere (bit/__init__.py) user_group is a
                    # comma-separated string that gets .split(",") first;
                    # iterating it directly here would loop per character —
                    # confirm the expected type.
                    try:
                        for use in user_group:
                            call=["setfacl","-m","user:%s:rwx" %use,ltf]
                            out=Popen(call, stdout=PIPE, stdin=PIPE, stderr=STDOUT)
                            print(out.communicate()[0].rstrip())
                    except:
                        print("Failed to setfacls.")
                        sys.stdout.flush()
                    local_path_owner=os.stat(local_path)
                    local_path_owner=local_path_owner.st_uid
                    os.chown(ltf,local_path_owner,-1)
    sync_from_calls=[]
    for f in inv_sync_dic:
        call='rsync -tlzhP --rsh="sshpass -p %s ssh -o StrictHostKeyChecking=no -l %s" %s:%s %s' %(str(remotePass), str(sshLogin.split("@")[0]), str(sshLogin.split("@")[1]), f, inv_sync_dic[f] )
        sync_from_calls.append(call)
        #os.system(call)
    return sync_from_calls
|
mpg-age-bioinformatics/b | bit/automation.py | <reponame>mpg-age-bioinformatics/b
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email.mime.text import MIMEText
from email import encoders
import re
import subprocess
from subprocess import Popen, PIPE, STDOUT
import requests
import json
import sys
import os
from bit.config import read_bitconfig
# Map of group / facility names to the short prefix used in project labels
# (e.g. projects of "Adam_Antebi" are labelled "AA_<project>").
groups_dic={"Adam_Antebi":"AA",
            "Aleksandra_Filipovska":"AF",
            "Anne_Schaefer":"AS",
            "Bioinformatics":"bit",
            "Constantinos_Demetriades":"CD",
            "CRISPR_Screening":"CS",
            "Dario_Valenzano":"DV",
            "Ivan_Matic":"IM",
            "James_Stewart":"JS",
            "Lena_Pernas":"LPe",
            "Linda_Partridge":"LP",
            "Martin_Denzel":"MD",
            "Martin_Graef":"MG",
            "Metabolomics":"met",
            "Nils_Larson":"NL",
            "Peter_Tessarz":"PT",
            "Phenotyping":"Ph",
            "Proteomics":"Prot",
            "Ron_Jachimowicz":"RJ",
            "Sara_Wickstroem":"SW",
            "Thomas_Langer":"TL"}
def isnotebook():
    '''Detect whether the code is running inside a Jupyter notebook.'''
    try:
        shell_name = get_ipython().__class__.__name__
    except NameError:
        # get_ipython is not defined outside of IPython at all.
        return False
    if shell_name == 'ZMQInteractiveShell':
        return True   # Jupyter notebook or qtconsole
    if shell_name == 'TerminalInteractiveShell':
        return False  # Terminal running IPython
    return False      # Some other shell type
def read_config(config_file="/beegfs/group_bit/data/projects/departments/Bioinformatics/bit_automation/automation.config"):
    """Read the automation config file and merge in selected bit config values.

    The file is expected to contain KEY=VALUE lines. Lines are now split on
    the FIRST '=' only, so values may themselves contain '=' (e.g. tokens);
    the original ``line.split("=")`` truncated such values and crashed on
    lines without any '='.

    Returns a dict of configuration values.
    """
    config_dic={}
    with open(config_file, "r") as file_in:
        for line in file_in:
            key, _, value = line.rstrip("\n").partition("=")
            config_dic[key]=value
    # Credentials come from the user's bit config, not the automation file.
    bit_config=read_bitconfig()
    config_dic["github_user"]=bit_config["github_user"]
    config_dic["github_pass"]=bit_config["github_pass"]
    config_dic["DAVIDUSER"]=bit_config["DAVIDUSER"]
    return config_dic
def send_email(subject, body="", EMAIL_TOKEN=None,
               attach=None,
               toaddr=[],
               fromaddr="<EMAIL>",
               project_type=None,
               config_dic=None):
    """Send a notification email via the institute's SMTP server.

    subject/body: message content; "[<project_type>]" is prefixed to the
        subject unless project_type == "empty".
    EMAIL_TOKEN: SMTP password; falls back to config_dic["EMAIL_TOKEN"].
    attach: optional path of a file to attach.
    toaddr: extra recipients (the static maintainer address is always added).
    """
    if not EMAIL_TOKEN:
        EMAIL_TOKEN=config_dic["EMAIL_TOKEN"]
    if not project_type:
        project_type="["+config_dic["project_type"]+"]"
    elif project_type=="empty":
        project_type=""
    else:
        project_type="["+project_type+"]"
    msg = MIMEMultipart()
    static_receiver=["<EMAIL>"]
    toaddr=static_receiver+toaddr
    msg['From'] = fromaddr
    msg['To'] = ", ".join(toaddr)
    # [:1] instead of [0]: the original raised IndexError on an empty subject.
    if str(subject)[:1] != "[":
        subject=" "+subject
    msg['Subject'] = "{project_type}{subject}".format(project_type=project_type,subject=subject)
    msg.attach(MIMEText(body, 'plain'))
    if attach:
        part = MIMEBase('application', 'octet-stream')
        # ``with`` closes the attachment handle; the original leaked it.
        with open(str(attach), "rb") as attachment:
            part.set_payload(attachment.read())
        encoders.encode_base64(part)
        # BUG FIX: the original referenced the undefined name ``report_file``
        # here, raising NameError whenever an attachment was supplied.
        part.add_header('Content-Disposition', "attachment; filename= %s" % os.path.basename(str(attach)))
        msg.attach(part)
    server = smtplib.SMTP_SSL('mail.age.mpg.de', 465)
    server.login(fromaddr, EMAIL_TOKEN)
    text = msg.as_string()
    server.sendmail(fromaddr, toaddr, text)
    server.quit()
    print( "Email sent to {toaddr}".format(toaddr=toaddr) )
    sys.stdout.flush()
def check_email(email,config_dic,submission_file):
    '''Extract valid contact addresses from a comma separated string.

    Aborts the run (email notification + exit 1) when no valid address is
    found; otherwise returns the list of matched addresses.
    '''
    candidates = str(email).strip().split(",")
    matches = ( re.search("([^@|\s]+@[^@]+\.[^@|\s]+)", c, re.I) for c in candidates )
    email = [ m.group(1) for m in matches if m ]
    if not email:
        print("Contact email not provided." )
        sys.stdout.flush()
        send_email("contact email not provided.",
                   "For {submission_file} contact email not provided.".format(submission_file=submission_file),
                   config_dic=config_dic)
        sys.exit(1)
    return email
def check_group(group,submission_file,email,config_dic=None,project_type=None):
    """Validate that *group* is one of the known groups.

    Aborts the run (email notification + exit 1) when the group is unknown;
    returns True otherwise.

    BUG FIX: the failure branch referenced the undefined names ``config_dic``
    and ``project_type`` (NameError at runtime). They are now accepted as
    optional, backward-compatible parameters and forwarded to send_email,
    which derives the token from config_dic itself.
    """
    if group not in groups_dic:
        print("Group {group} does not exist.".format(group=group) )
        sys.stdout.flush()
        send_email("group does not exist",
                   "For {submission_file} group {group} does not exist.".format(submission_file=submission_file,group=group),
                   toaddr=email,
                   config_dic=config_dic,
                   project_type=project_type)
        sys.exit(1)
    return True
def check_project_exists(folder,config_dic,group,project_title, email):
    '''Abort the run when the project *folder* already exists.

    Returns True when the folder is free; otherwise notifies by email and
    exits with status 1.
    '''
    # Guard clause: nothing to do when the folder does not exist yet.
    if not os.path.isdir(folder):
        return True
    send_email("[{group_prefix}_{project_title}] project already exists".format(group_prefix=groups_dic[group],project_title=project_title),
               "{project_title} already exists. Please submit again using a different project name.\n{folder}".format(project_title=project_title,folder=folder),
               toaddr=email,
               config_dic=config_dic )
    print("{folder} already exists".format(folder=folder))
    sys.stdout.flush()
    sys.exit(1)
def check_source_files(files,md5file,store_age_folder,GET_RAW,project_title,email,config_dic,metadata):
    # Verify that every raw data file (and the optional md5sums file) exists
    # under store_age_folder. On a missing file: email the requester and the
    # maintainers, then abort with exit code 1.
    #
    # GET_RAW: existence is only enforced when raw data is requested.
    # metadata: dict providing at least "Group" and "Project title".
    # Returns True when all checks pass.
    for f in files:
        # NOTE: "&" is a bitwise AND of two bools here — it works, but "and"
        # is what is meant.
        if ( not os.path.exists(store_age_folder+f) ) & ( GET_RAW ):
            send_email("[{group_prefix}_{project_title}] raw data file missing".format(group_prefix=groups_dic[metadata["Group"]],project_title=project_title),
                "{f} missing".format(f=f),
                toaddr=email,
                config_dic=config_dic)
            print("{f} missing".format(f=store_age_folder+f))
            sys.stdout.flush()
            sys.exit(1)
    if md5file:
        if ( not os.path.exists(store_age_folder+md5file) ) & ( GET_RAW ):
            # NOTE(review): "f" below is the stale loop variable from the loop
            # above (or undefined when files is empty) — confirm the intended
            # message content.
            send_email("[{group_prefix}_{project_title}] md5sums file missing".format(group_prefix=groups_dic[metadata["Group"]],project_title=metadata["Project title"]),
                "{f} missing".format(f=f),
                toaddr=email,
                config_dic=config_dic)
            print("md5sums file missing".format(f=f))
            sys.stdout.flush()
            sys.exit(1)
    return True
def verify_md5sum(md5sums,md5file_in):
    """Confirm expected md5 digests against an md5sums file.

    md5sums: {file name: expected md5 hex digest}.
    md5file_in: path to a standard md5sums file (one "<digest>  <name>" line
        per file).

    Returns False when every entry is confirmed, otherwise the first file
    (in dict order) whose digest could not be matched.
    """
    # Read the sums file once instead of reopening it for every entry.
    with open(md5file_in, "r") as md5file:
        lines = md5file.readlines()
    for fname in list(md5sums.keys()):
        md5check = False
        for line in lines:
            # Both the file name and its expected digest must appear on the
            # same line for it to count as confirmed.
            if fname in line and md5sums[fname] in line:
                md5check = True
        if not md5check:
            return fname
    return False
def md5sumcheck(file,config_dic,group,project_title,project_archive,email,md5file):
    '''Compute the md5 of *file* and confirm it against the project md5sums.

    Aborts the run (email notification + exit 1) when the digest cannot be
    confirmed; returns True otherwise.
    '''
    sys.stdout.flush()
    # Run md5sum on the file and capture its stdout.
    proc = Popen(["md5sum", file], stdout=PIPE, stdin=PIPE, stderr=PIPE)
    captured = proc.communicate()[0].decode('utf-8').rstrip()
    proc.stdout.close()
    proc.stdin.close()
    proc.stderr.close()
    try:
        proc.kill()
    except:
        pass
    # md5sum output is "<digest>  <path>"; keep only the digest.
    digest = str(captured).split(" ")[0]
    missing_file = verify_md5sum({ os.path.basename(file): digest }, project_archive+md5file)
    if missing_file:
        send_email("[{group_prefix}_{project_title}] md5sum could not be comfirmed".format(group_prefix=groups_dic[group],project_title=project_title),
                   "{file} missing".format(file=missing_file),
                   toaddr=email,
                   config_dic=config_dic)
        print("{file} md5 missing".format(file=missing_file))
        sys.stdout.flush()
        sys.exit(1)
    return True
def make_github_repo( repo_name, user, token, config_dic):
    # Create a private, auto-initialised repository in the
    # mpg-age-bioinformatics organisation on the MPG GitHub Enterprise host.
    # On failure: print the response, email the maintainers and exit(1).
    # Returns the ``requests`` response object on success (HTTP 201).
    url = 'https://github.molgen.mpg.de/api/v3/orgs/mpg-age-bioinformatics/repos'
    repo = { "name" : repo_name ,
             "private" : 'true' ,
             "auto_init": 'true' }
    response = requests.post( url, data=json.dumps(repo), auth=( user, token ))#, headers=headers)
    if response.status_code == 201:
        print('Successfully created Repository "%s"' % repo_name )
    else:
        print('Could not create Repository "%s"' % repo_name)
        print('Response:', response.content)
        send_email("[{repo_name}] repository could not be created".format(repo_name=repo_name),
            config_dic=config_dic)
        sys.stdout.flush()
        sys.exit(1)
    return response
def make_github_issue(title, repo_name, user, token, config_dic, body=None, assignee=None ):
    '''Create an issue on github.com using the given parameters.'''
    # On failure: print the response, email the maintainers and exit(1).
    # Returns the ``requests`` response object on success (HTTP 201).
    # Our url to create issues via POST
    url = 'https://github.molgen.mpg.de/api/v3/repos/mpg-age-bioinformatics/{repo_name}/issues'.format(repo_name=repo_name)
    # NOTE(review): the ``body`` argument is accepted but never added to the
    # payload below, so issue bodies are silently dropped — confirm intent.
    issue = {'title': title,
             'assignee': assignee}
    # Add the issue to our repository
    response = requests.post( url, data=json.dumps(issue), headers={"Accept": "application/vnd.github.v3+json"}, auth=( user, token ))#, headers=headers)
    if response.status_code == 201:
        print('Successfully created Issue "%s"' % title )
    else:
        print('Could not create Issue "%s"' % title)
        print('Response:', response.content)
        print(response.text)
        send_email("[{repo}] could not create Issue".format(repo=repo_name),
            config_dic=config_dic)
        sys.stdout.flush()
        sys.exit(1)
    return response
def make_github_card(make_issue_response, repo_name, user, token, config_dic):
    '''Create an card for an issue on github.com using the given parameters.'''
    # make_issue_response: the response returned by make_github_issue; its
    # JSON body provides the issue id to attach the card to.
    # On failure: print the response, email the maintainers and exit(1).
    # Our url to create issues via POST
    # Hard-coded project column id (301) on the GitHub Enterprise instance.
    url = 'https://github.molgen.mpg.de/api/v3/projects/columns/301/cards'
    issue_response=json.loads(make_issue_response.text)
    issue_id=issue_response["id"]
    card = {'content_id': issue_id,
            "content_type":"Issue"}
    # Add the issue to our repository
    response = requests.post( url, data=json.dumps(card), headers={"Accept": "application/vnd.github.inertia-preview+json"}, auth=( user, token ))#, headers=headers)
    if response.status_code == 201:
        print('Successfully created card.' )
    else:
        print('Could not create card.')
        print('Response:', response.content)
        print(response.text)
        send_email("[{repo}] could not create card".format(repo=repo_name),
            config_dic=config_dic)
        sys.stdout.flush()
        sys.exit(1)
    return response
def git_clone(local_name,github_repo):
    """Clone *github_repo* into *local_name* and open group permissions.

    Grants the ``group_bit`` group default rwx ACLs and group write bits
    recursively on the clone.

    Returns the exit status of the final ``chmod`` call (note: the statuses
    of ``git clone`` and ``setfacl`` are overwritten, matching the original
    behaviour). Dead commented-out init/pull code removed.
    """
    git="<EMAIL>:mpg-age-bioinformatics/{github_repo}.git".format(github_repo=github_repo)
    out=subprocess.call(['git','clone',git, local_name ])
    out=subprocess.call(['setfacl', '-Rdm', 'g:group_bit:rwx', local_name ])
    out=subprocess.call(['chmod', '-R','g+w', local_name ])
    return out
def _run_git(call):
    # Shared runner for the git_* helpers below: execute *call*, print its
    # stdout, close all pipes and reap the process. git reports its own
    # errors on stderr; no exception is raised on failure, matching the
    # original per-function behaviour. This replaces six identical copies of
    # the same Popen boilerplate.
    out = Popen(call, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    print(str(out.communicate()[0].decode('utf-8').rstrip()))
    out.stdout.close()
    out.stdin.close()
    out.stderr.close()
    try:
        out.kill()
    except Exception:
        # Process already terminated after communicate(); nothing to do.
        pass

def git_fetch(github_repo):
    '''Fetch *github_repo* from the organisation remote.'''
    git="<EMAIL>:mpg-age-bioinformatics/{github_repo}.git".format(github_repo=github_repo)
    _run_git(["git","fetch",git])

def git_merge(message):
    '''Merge FETCH_HEAD into the current branch using *message*.'''
    _run_git(["git","merge","FETCH_HEAD","-m",message])

def git_pull(github_repo):
    '''Pull *github_repo* from the organisation remote.'''
    git="<EMAIL>:mpg-age-bioinformatics/{github_repo}.git".format(github_repo=github_repo)
    _run_git(["git","pull",git])

def git_add():
    '''Stage all changes in the current working directory.'''
    _run_git(["git","add","-A", "."])

def git_commit(message):
    '''Commit staged changes with *message*.'''
    _run_git(["git","commit","-m", message])

def git_push(github_repo):
    '''Push all branches to the organisation remote for *github_repo*.'''
    git="<EMAIL>:mpg-age-bioinformatics/{github_repo}.git".format(github_repo=github_repo)
    _run_git(["git","push",git,"--all"])
def git_sync(local_name, github_repo, message):
    """Add, commit, fetch, merge and push *local_name* against *github_repo*.

    *message* is used for both the commit and the merge. The caller's
    working directory is now restored even if one of the git steps raises
    (the original left the process chdir'ed into *local_name* on error).
    """
    cwd = os.getcwd()
    os.chdir(local_name)
    try:
        git_add()
        git_commit(message)
        git_fetch(github_repo)
        git_merge(message)
        git_push(github_repo)
    finally:
        os.chdir(cwd)
def main():
    # Command line helper: send a notification email, optionally containing a
    # link to project results.
    import argparse
    parser = argparse.ArgumentParser(description="automation helper",
        formatter_class = argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("-e", "--email", help="Email subject.")
    parser.add_argument("-p", "--project", help="Project type. eg. RNAseq.", default="empty")
    parser.add_argument("-t", "--to", help="Receivers.",default=None)
    parser.add_argument("-l", "--link", help="Link to results.",default=None)
    args = parser.parse_args()
    config_dic=read_config()
    # With a link, send a standard results notification body; else empty.
    if args.link:
        body="Hi,\n\nyou can find the results for this project here:\n\n{link}\n\nThis is an automatically generated email.".format(link=args.link)
    else:
        body=""
    # Receivers are passed as a comma separated string.
    if args.to:
        to=str(args.to).split(",")
    else:
        to=[]
    send_email(args.email, body=body,
        attach=None,
        toaddr=to,
        fromaddr="<EMAIL>",
        project_type=args.project,
        config_dic=config_dic)
    sys.exit(0)
|
mpg-age-bioinformatics/b | bit/__init__.py | <gh_stars>1-10
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from subprocess import Popen, PIPE, STDOUT
import stat
import shlex
import bit.config as config
import bit.git as git
import bit.owncloud as oc
import bit.rsync as rsync
import multiprocessing as mp
def worker(call):
    """Execute *call* in a subprocess and return a formatted report string.

    call: a shell-style command string (tokenised with shlex, run without a
        shell). The report contains a separator banner, the last token of the
        command (typically the target file), then stdout and stderr.

    BUG FIX: subprocess.communicate() returns bytes on Python 3; the original
    concatenated them with str and raised TypeError. The streams are now
    decoded before building the report.
    """
    out = Popen(shlex.split(call), stdout=PIPE, stdin=PIPE, stderr=PIPE)
    stdout, stderr = out.communicate()
    out.stdout.close()
    out.stdin.close()
    out.stderr.close()
    try:
        out.kill()
    except Exception:
        # Process already finished after communicate(); ignore.
        pass
    if isinstance(stdout, bytes):
        stdout = stdout.decode('utf-8', 'replace')
    if isinstance(stderr, bytes):
        stderr = stderr.decode('utf-8', 'replace')
    return "\n********************\n"+call.split(" ")[-1]+"\n"+stdout+"\n"+stderr
def main():
import argparse
parser = argparse.ArgumentParser(description="bit, [b]ermuda [i]nformation [t]riangle.\
bit is a git-based tool for the management of code and data. It uses git for code versioning\
and ownCloud for storing and exchanging data. It saves storage by avoiding versioning\
of data while logging changes in associated git wikis.",\
formatter_class = argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-i", "--input", nargs='*', help="Input files")
parser.add_argument("-s", "--subfolder", help="Subfolder to be created.", default=None)
parser.add_argument("-m", "--message",nargs='*', help="Message to write on log file.", default=None)
parser.add_argument("-d", "--pick_a_date", help="Pick an existing date folder to transfer data to/from. Format=YYYY-MM-DD", default=None)
parser.add_argument("-c", "--create_folder", help="Create dropbox folder for user to upload data.", action="store_true")
parser.add_argument("-g", "--getfolder", help="Downloads a folder as zip file. Requires --pick_a_date. Defaults base_folder=upload:download to download", action="store_true")
parser.add_argument("-t", "--days_to_share", help="Number of days you wish to share this folder further.", default=21)
parser.add_argument("--issue", help="Issue to comment on with --message and owncloud data links", default=None)
parser.add_argument("--scripts",help="Needs -i and -m. Simultaneously sync the scripts.user folder when uploading data.", action="store_true")
parser.add_argument("--start", help="Project name of the format. PI_PROJECT_NAME. Initiates a project. This will create the required local folders and respective git repositories.", default=None)
parser.add_argument("--stdfolders",nargs='*', help="Folders to be created in addition to scripts.user and and wiki.user when a project is started.", default=["tmp","slurm_logs"])
parser.add_argument("--adduser",help="Add a user to a project creating his scripts.user and wiki.user folder",action="store_true")
parser.add_argument("--sync", nargs='*', help="Files or folders to syncronize with remote server using rsync over ssh.",default=None)
parser.add_argument("--sync_to", help="Destination server to sync to in the form: <user_name>@<server.address>", default=None)
parser.add_argument("--sync_from", help="Destination server to sync from in the form: <user_name>@<server.address>", default=None)
parser.add_argument("--cpus",help="Number of CPUs/channels to open for rsync.", default=1)
parser.add_argument("--forceRemote", help="If syncing from or to a remoter server force the import of a remote 'bit_config'.", action="store_true")
parser.add_argument("--gitnossh", help="Use password instead of git SSH keys.", action="store_false")
parser.add_argument("--config", help="Generate a config file.", action="store_true")
args = parser.parse_args()
if args.sync:
if args.sync_to:
calls=rsync.rsync_to(args.sync_to, args.sync, forceImport=args.forceRemote, \
sync_to=True, sync_from=False)
elif args.sync_from:
calls=rsync.rsync_from(args.sync_from, args.sync, forceImport=args.forceRemote, \
sync_to=False, sync_from=True)
pool=mp.Pool(int(args.cpus))
funclist=[]
for call in calls:
out=pool.apply_async(worker,[call])
funclist.append(out)
results=[]
for ff in funclist:
res=ff.get()
print(res)
results.append(res)
if args.config:
print("Setting up your config file.")
sys.stdout.flush()
config.make_bitconfig()
sys.exit(0)
# initate a project
if args.start:
configdic=config.read_bitconfig()
for r in config.start_reqs:
if r != "user_group":
while configdic[r] == None:
configdic=config.check_reqs([r],configdic,config_file=None, gitssh=None)
local_path=os.path.abspath(configdic["local_path"])
full_path=os.path.abspath(args.start)
project_name=os.path.basename(full_path)
# check format projects_folder/group_head/project_name
if len(full_path.split("/")) != len(local_path.split("/"))+2:
print("The path (%s) to this project does not obey the structure and/or defined local path (%s). Check the reference structure:\n%s" \
%(full_path,local_path,config.structure) )
sys.stdout.flush()
sys.exit(0)
# have the user rechecking that the the string for the project name is really correct
checks=None
while checks not in ["Y","N"]:
checks=str(input("Is the label %s in agreement with the structure PF_project_name where PF stands for the initials of the Parent_Folder? (Y/N) " \
%project_name )) or None
if checks=="N":
sys.exit(0)
# create the repo
github_api=config.get_github_api(configdic["github_address"])
# github_api=github_api+configdic["github_organization"]+"/repos"
# create_call=["curl","-u",configdic["github_user"]+":"+configdic["github_pass"]\
# ,github_api,"-d",'{"name":"'+project_name+'","private": true,\
# "auto_init": true }']
# p = Popen(create_call, stdout=PIPE, stdin=PIPE, stderr=STDOUT)
# print(p.communicate()[0].decode('utf-8').rstrip())
# sys.stdout.flush()
response = git.make_github_repo(github_api, project_name, configdic)
response = git.make_github_issue(github_api, project_name, project_name, configdic, configdic["github_user"] )
response = git.make_github_card(response, github_api, configdic, "77")
# !!removing the need for wiki!!
# clone the repo and the wiki by initiating this user
#input("\n\n*************\n\nPlease go to %s/%s/%s/wiki and click on 'Create the first page' and then 'Save Page'.\n\nPress Enter once you have saved the first wiki page.\n\n*************\n\n" \
#%(configdic["github_address"],configdic["github_organization"],project_name) )
config.init_user(full_path,configdic["github_address"],configdic["github_organization"],\
project_name,github_user=configdic["github_user"],\
github_pass=configdic["github_pass"],gitssh=args.gitnossh)
# create additional folders
for f in args.stdfolders:
if not os.path.exists(full_path+"/"+f):
os.makedirs(full_path+"/"+f)
if configdic["user_group"]:
os.chmod(full_path, stat.S_IRWXU)
user_group=configdic["user_group"].split(",")
try:
for use in user_group:
call=["setfacl","-m","user:%s:rwx" %use,full_path]
out=Popen(call, stdout=PIPE, stdin=PIPE, stderr=STDOUT)
print(out.communicate()[0].decode('utf-8').rstrip())
except:
print("Failed to setfacls.")
sys.stdout.flush()
local_path_owner=os.stat(local_path)
local_path_owner=local_path_owner.st_uid
#os.chown(full_path,local_path_owner,-1)
sys.exit(0)
if args.adduser:
configdic=config.read_bitconfig()
for r in config.start_reqs:
while configdic[r] == None:
configdic=config.check_reqs([r],configdic,config_file=None, gitssh=args.gitnossh)
local_path=os.path.abspath(configdic["local_path"])
if args.start:
full_path=os.path.abspath(args.start)
else:
full_path=os.path.abspath(os.getcwd())
project_name=os.path.basename(full_path)
# check format projects_folder/group_head/project_name
if len(full_path.split("/")) != len(local_path.split("/"))+2:
print("The path (%s) to this project does not obey the structure and/or defined local path (%s). Check the reference structure:\n%s" %(full_path,local_path,config.structure))
sys.stdout.flush()
sys.exit(0)
config.init_user(full_path,configdic["github_address"],configdic["github_organization"],project_name,github_user=configdic["github_user"],github_pass=configdic["github_pass"],gitssh=args.gitnossh)
sys.exit(0)
if args.input:
if not args.message:
print("ERROR\nYou need to use -m to leave a message in the logs.")
sys.exit()
oc.ownCloud_upload(input_files=args.input,message=args.message,gitssh=args.gitnossh,days_to_share=args.days_to_share,scripts=args.scripts,issue=args.issue, subfolder=args.subfolder,pick_a_date=args.pick_a_date)
sys.exit(0)
if args.create_folder:
oc.ownCloud_create_folder(gitssh=args.gitnossh,pick_a_date=args.pick_a_date,days_to_share=args.days_to_share)
sys.exit(0)
if args.getfolder:
if not args.pick_a_date:
print("--getfolder implies --pick_a_date.\nPlease use -d in combination with -g.\nThank you!")
sys.exit(0)
oc.ownCloud_download(gitssh=args.gitnossh,pick_a_date=args.pick_a_date)
sys.exit(0)
sys.exit(0)
|
mpg-age-bioinformatics/b | bit/owncloud.py | <reponame>mpg-age-bioinformatics/b<gh_stars>1-10
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import datetime
import os
import sys
import getpass
import bit.config as config
import bit.git as git
import bit._owncloud as owncloud
def list_upload(base_destination, list_of_files):
    """Map local files/folders to their remote ownCloud destinations.

    Returns a tuple ``(upload_dic, subfolders)`` where ``upload_dic`` maps
    each absolute local file path to its remote path under
    ``base_destination``, and ``subfolders`` is the sorted list of every
    remote directory that must exist before uploading (including all
    parent prefixes of ``base_destination``).
    """
    upload_dic = {}
    subfolders = [base_destination]
    # every parent prefix of the destination must be created on the server
    parts = base_destination.split("/")
    for depth in range(len(parts)):
        subfolders.append("/".join(parts[:depth]))
    for entry in list_of_files:
        full = os.path.abspath(entry)
        if os.path.isdir(full):
            subfol = base_destination + "/" + os.path.basename(full)
            subfolders.append(subfol)
            for root, directories, filenames in os.walk(full):
                hidden = []
                for directory in directories:
                    name = os.path.basename(directory)
                    if name[0] != ".":
                        # path of the subdirectory relative to the walked root,
                        # re-anchored below the remote destination
                        relative = os.path.join(root, directory).split(full)[-1]
                        subfolders.append(subfol + relative)
                    else:
                        hidden.append(name)
                for filename in filenames:
                    # substring match against hidden dir names, as before
                    if not any(marker in filename for marker in hidden):
                        subfile = os.path.join(root, filename)
                        if os.path.isfile(subfile):
                            upload_dic[subfile] = subfol + subfile.split(full)[-1]
        elif os.path.isfile(full):
            upload_dic[full] = base_destination + "/" + os.path.basename(full)
    # drop the empty prefix, de-duplicate, and present deterministically
    subfolders = sorted(set(xx for xx in subfolders if len(xx) > 0))
    return upload_dic, subfolders
def get_ownCloud_links(link_info, http):
    """Extract and print the private and public links from a share response.

    ``link_info`` is the (stringified) share object returned by
    ``share_file_with_link``; its repr is parsed for the ``path=`` and
    ``url=`` fields. Returns the private web-UI link.
    """
    info = str(link_info)
    remote_path = info.split("path=")[1].split(",")[0]
    # percent-encode the slashes for the files-app "dir" query parameter
    encoded = "%2F".join(remote_path.split("/"))
    public = info.split("url=")[1].split(",")[0]
    private = http + "/index.php/apps/files?dir=" + encoded
    print("\nYour link:\n%s" % private)
    print("Public link:\n%s\n" % public)
    return private
def get_owncloud_base_folder(configdic, project_name, getfolder=None, pick_a_date=None, create_folder=None, subfolder=None):
    """Compose the remote path ``<base>/<project_name>/<date>[/<subfolder>]``.

    Downloads and folder creation use the configured download folder;
    uploads use the upload folder. ``pick_a_date`` overrides today's date.
    Exits if ``getfolder`` is requested without a date.
    """
    if getfolder:
        if not pick_a_date:
            print("--getfolder implies --pick_a_date.\nPlease use -d in combination with -g.\nThank you!")
            sys.exit()
        base_folder = configdic["owncloud_download_folder"]
    elif create_folder:
        base_folder = configdic["owncloud_download_folder"]
    else:
        base_folder = configdic["owncloud_upload_folder"]
    # default the date stamp to today when the caller did not pick one
    stamp = str(datetime.date.today()) if pick_a_date is None else str(pick_a_date)
    if subfolder:
        stamp = stamp + "/" + str(subfolder)
    return base_folder + "/" + project_name + "/" + stamp
def ownCloud_upload(input_files=None, message=None, gitssh=None, days_to_share=None,
                    scripts=None, issue=None, subfolder=None, pick_a_date=None):
    """Upload files to ownCloud, share them with an expiring link, and log it.

    All ``input_files`` must live under the same project folder
    (``<local_path>/<parent>/<project>``). After uploading, the share link
    is appended to the project's ``uploads.md`` in the user's wiki (or
    scripts) clone and pushed; optionally the scripts repo is synced and a
    comment is written on a GitHub issue.

    Bug fixes vs. previous revision:
    - ``message`` is already a plain string after normalisation below; the
      old log line applied ``" ".join(message)`` which space-interleaved
      every character.
    - the file handle opened to probe each file's size was never closed.
    """
    # normalise the commit/log message to a single string
    if type(message) == list:
        message = [str(xx) for xx in message]
        message = " ".join(message)
    else:
        message = str(message)
    configdic = config.read_bitconfig()
    # prompt for any config values still missing; with ssh the github
    # credentials are not required
    for r in config.requirements:
        if not gitssh:
            if r not in ["user_group"]:
                while configdic[r] == None:
                    configdic = config.check_reqs([r], configdic, config_file=None, gitssh=None)
        else:
            if r not in ["github_user", "github_pass", "user_group"]:
                while configdic[r] == None:
                    configdic = config.check_reqs([r], configdic, config_file=None, gitssh=gitssh)
    local_path = os.path.abspath(configdic["local_path"])
    # check if files all come from the same project folder
    size_local = len(local_path.split("/"))
    parent_folder = []
    check_project = []
    for i in input_files:
        f = os.path.abspath(i)
        parent_folder.append(f.split("/")[size_local])
        check_project.append(f.split("/")[size_local + 1])
    check_project = list(set(check_project))
    if len(check_project) > 1:
        print("Found more than one project:\n")
        for p in check_project:
            print(p)
        sys.stdout.flush()
        sys.exit(0)
    else:
        project_name = check_project[0]
        parent_folder = parent_folder[0]
    target_project = parent_folder + "/" + project_name
    base_destination = get_owncloud_base_folder(configdic, target_project, subfolder=subfolder, pick_a_date=pick_a_date)
    upload_dic, subfolders = list_upload(base_destination, input_files)
    # login to owncloud
    try:
        oc = owncloud.Client(configdic["owncloud_address"])
        oc.login(configdic["owncloud_user"], configdic["owncloud_pass"])
    except:
        print("Could not login to ownCloud.\nPlease make sure you are giving \
the right address to your owncloud and using the right login credentials.")
        sys.exit(0)
    # create required subfolders in ownCloud (mkdir only when missing)
    for fold in subfolders:
        try:
            oc.file_info(fold)
        except:
            oc.mkdir(fold)
    # Upload files
    if len(upload_dic) > 1:
        print("Uploading %s files.." % str(len(upload_dic)))
        sys.stdout.flush()
    else:
        print("Uploading %s file.." % str(len(upload_dic)))
        sys.stdout.flush()
    skipped_files = []
    for f in upload_dic:
        # probe the size; close the handle afterwards (previously leaked)
        file_handle = open(f, 'r', 8192)
        file_handle.seek(0, os.SEEK_END)
        size = file_handle.tell()
        file_handle.close()
        if size == 0:
            skipped_files.append(os.path.basename(f))
            print("\t%s is empty. Skipping .. " % str(f))
            sys.stdout.flush()
            continue
        # files above ~1.75 GiB go through the chunked uploader
        if size > 1879048192:
            print("\t%s\t(chunked)" % str(upload_dic[f]))
            sys.stdout.flush()
            oc.put_file(upload_dic[f], f)
        else:
            print("\t%s" % str(upload_dic[f]))
            sys.stdout.flush()
            oc.put_file(upload_dic[f], f, chunked=False)
    print("Finished uploading.")
    # Time stamp for expiration date
    tshare = datetime.date.today()
    tshare = tshare + datetime.timedelta(days=int(days_to_share))
    tshare = time.mktime(tshare.timetuple())
    link_info = oc.share_file_with_link(base_destination, expiration=tshare)
    private_link = get_ownCloud_links(link_info, configdic["owncloud_address"])
    oc.logout()
    # Go to wiki folder and make a git sync
    print("Logging changes..")
    sys.stdout.flush()
    user_name = getpass.getuser()
    wikidir = local_path + "/" + target_project + "/wiki." + user_name
    scriptsdir = local_path + "/" + target_project + "/scripts." + user_name
    if os.path.isdir(wikidir):
        logdir = wikidir
        log_project = project_name + ".wiki"
    elif os.path.isdir(scriptsdir):
        logdir = scriptsdir
        log_project = project_name
    else:
        print("Could not find wiki." + user_name + " nor scripts." + user_name)
        sys.exit(1)
    os.chdir(logdir)
    files_to_add = os.listdir(logdir)
    git.git_sync(files_to_add, "bit sync", configdic["github_address"],
                 configdic["github_organization"], log_project,
                 github_user=configdic["github_user"], github_pass=configdic["github_pass"],
                 gitssh=gitssh)
    # Write log file
    if len(skipped_files) > 0:
        skipped_files = ", ".join(skipped_files)
        skipped_files = "\n\n(skipped: %s)" % skipped_files
    else:
        skipped_files = ""
    logfile = "uploads.md"
    # BUG FIX: use `message` directly; it is already a joined string
    logtext = "\n\n##### [" + base_destination.split("/")[3] + "\t::\t" + user_name + "](" + private_link + ") : " \
        + message + "\n" + \
        str(datetime.datetime.now()).split(".")[0] + ", " + str(", ".join(input_files)) \
        + skipped_files
    log = open(logfile, "a")
    log.write(logtext)
    log.close()
    # push the log
    git.git_add(["uploads.md"])
    git.git_commit(message)
    git.git_push(configdic["github_address"], configdic["github_organization"],
                 log_project, github_user=configdic["github_user"],
                 github_pass=configdic["github_pass"], gitssh=gitssh)
    if scripts:
        print("Syncronizing your code..")
        sys.stdout.flush()
        os.chdir(local_path + "/" + target_project + "/scripts." + user_name)
        git.git_sync(["-A"], message, configdic["github_address"],
                     configdic["github_organization"], project_name,
                     github_user=configdic["github_user"],
                     github_pass=configdic["github_pass"], gitssh=gitssh)
    if issue:
        # writing an issue comment always needs github credentials
        for r in ["github_user", "github_pass"]:
            while configdic[r] == None:
                configdic = config.check_reqs([r], configdic, config_file=None, gitssh=None)
        publink = str(link_info).split("url=")[1].split(",")[0]
        issueMSG = "Public link: %s; Private link: %s; Commit message: %s" \
            % (publink, private_link, message)
        git.git_write_comment(issueMSG, config.get_github_api(configdic["github_address"]),
                              configdic["github_organization"], project_name, str(issue),
                              github_user=configdic["github_user"], github_pass=configdic["github_pass"])
# settings that must be present before any download / folder operation
downloadreqs=["owncloud_address","owncloud_upload_folder",\
"owncloud_download_folder","owncloud_user",\
"owncloud_pass","local_path"]
def ownCloud_download(gitssh=None,pick_a_date=None):
    """Download a dated drop folder from ownCloud as a zip archive.

    ``pick_a_date`` is interpreted both as a local path (to derive the
    parent folder and project name relative to the configured local
    path) and as the date-stamped folder name on the server. The archive
    is written to ``<pick_a_date>.zip`` in the current directory.
    """
    configdic=config.read_bitconfig()
    # prompt for any required setting still missing from the config
    for r in downloadreqs:
        while configdic[r] == None:
            configdic=config.check_reqs([r],configdic,config_file=None, \
gitssh=gitssh)
    local_path=os.path.abspath(configdic["local_path"])
    size_local=len(local_path.split("/"))
    # derive <parent>/<project> from the path components just below local_path
    f=os.path.abspath(str(pick_a_date))
    parent_folder=f.split("/")[size_local]
    project_name=f.split("/")[size_local+1]
    target_project=parent_folder+"/"+project_name
    base_destination=get_owncloud_base_folder(configdic,target_project,getfolder=True, pick_a_date=pick_a_date)
    # login to owncloud
    try:
        oc=owncloud.Client(configdic["owncloud_address"] )
        oc.login(configdic["owncloud_user"],configdic["owncloud_pass"])
    except:
        print("Could not login to ownCloud.\nPlease make sure you are giving \
the right address to your owncloud and using the right login credentials.")
        sys.exit(0)
    oc.get_directory_as_zip(base_destination, pick_a_date+".zip")
    oc.logout()
    print("Downloaded %s.zip" %pick_a_date)
    sys.stdout.flush()
def ownCloud_create_folder(gitssh=None,pick_a_date=None,days_to_share=None):
    """Create a dated drop folder on ownCloud and share it as public-upload.

    Mirrors ownCloud_download's path handling: ``pick_a_date`` doubles as a
    local path (to derive <parent>/<project>) and as the remote date folder.
    The folder is shared with an expiration ``days_to_share`` days from now.
    """
    configdic=config.read_bitconfig()
    # prompt for any required setting still missing from the config
    for r in downloadreqs:
        while configdic[r] == None:
            configdic=config.check_reqs([r],configdic,config_file=None, \
gitssh=gitssh)
    local_path=os.path.abspath(configdic["local_path"])
    size_local=len(local_path.split("/"))
    f=os.path.abspath(str(pick_a_date))
    parent_folder=f.split("/")[size_local]
    project_name=f.split("/")[size_local+1]
    target_project=parent_folder+"/"+project_name
    base_destination=get_owncloud_base_folder(configdic,target_project,create_folder=True, pick_a_date=pick_a_date)
    # login to owncloud
    try:
        oc=owncloud.Client(configdic["owncloud_address"] )
        oc.login(configdic["owncloud_user"],configdic["owncloud_pass"])
    except:
        print("Could not login to ownCloud.\nPlease make sure you are giving \
the right address to your owncloud and using the right login credentials.")
        sys.exit(0)
    # create each path prefix that does not exist yet
    check=base_destination.split("/")
    # NOTE(review): the two prints below look like leftover debug output — confirm
    print(check)
    for i in range(len(check)+1):
        c="/".join(check[:i])
        print(c)
        try:
            oc.file_info(c)
        except:
            oc.mkdir(c)
    # Time stamp for expiration date
    tshare = datetime.date.today()
    tshare = tshare + datetime.timedelta(days=int(days_to_share))
    tshare = time.mktime(tshare.timetuple())
    link_info = oc.share_file_with_link(base_destination,expiration=tshare,public_upload=True)
    # prints the links as a side effect; the returned value is unused here
    private_link=get_ownCloud_links(link_info,configdic["owncloud_address"])
    oc.logout()
|
mpg-age-bioinformatics/b | setup.py | from setuptools import setup
# Packaging configuration for the `bit` command-line tool.
# Installs the `bit` and `mail` console entry points.
setup(name = 'bit',
      version = '0.2.0',
      description = '[b]ermuda [i]nformation [t]riangle',
      url = 'https://github.com/mpg-age-bioinformatics/bit',
      author = 'Bioinformatics Core Facility of the Max Planck Institute for Biology of Ageing',
      author_email = '<EMAIL>',
      license = 'MIT',
      packages = [ 'bit' ],
      # pinned xlrd/openpyxl versions for spreadsheet support
      install_requires = [ "requests >= 2.0.1","six", "xlrd==1.2.0", "openpyxl==3.0.5"],
      zip_safe = False,
      entry_points = {'console_scripts': ['bit=bit.__init__:main','mail=bit.automation:main']}
      )
|
mpg-age-bioinformatics/b | bit/config.py | <gh_stars>1-10
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import sys
import getpass
from os.path import expanduser
import stat
import shutil
import bit.git as git
# ASCII sketch of the expected on-disk layout; shown to the user when the
# local projects path is requested during configuration.
structure="\n\
/file_system_a\n\
|\n\
'- data\n\
|\n\
'- projects\n\
|\n\
|- Company_A\n\
| |\n\
| |- CA_project_y\n\
| |\n\
| '- CA_project_x\n\
| |\n\
| |- results\n\
| |- models\n\
| |- scripts\n\
| |- tmp\n\
| |- slurm_logs\n\
| '- wiki\n\
|\n\
'- Company_B\n\
|\n\
'- CB_project_n\n\n\
absolute path to projects = /file_system_a/data/projects/"
# every setting a full configuration needs
requirements=["owncloud_address","owncloud_upload_folder",\
"owncloud_download_folder","owncloud_user",\
"owncloud_pass","github_address",\
"github_organization","github_user",\
"github_pass","local_path", "user_group" ]
# credentials the user may decline to persist in the config file
special_reqs=["owncloud_user","owncloud_pass",\
"github_user","github_pass"]
# minimum settings required to start a new project
start_reqs=["github_address","github_organization",\
"github_user","github_pass","local_path"]
def get_owncloud_address():
    """Prompt for the ownCloud server URL; returns None on empty input."""
    owncloud_address=str(input("Please give in your ownCloud address (eg. http://domain.tld/owncloud): ")) or None
    return owncloud_address
def get_owncloud_upload_folder():
    """Prompt for the remote delivery folder; defaults to DELIVERY_SERVICE."""
    owncloud_upload_folder=str(input("Please give in the folder in your ownCloud that will be used to deliver data to users.\nYou can share this folder with your colleagues so that everybody delivers data through the same folder. (default: DELIVERY_SERVICE):")) or "DELIVERY_SERVICE"
    return owncloud_upload_folder
def get_owncloud_download_folder():
    """Prompt for the remote drop folder; defaults to DROPBOX."""
    owncloud_download_folder=str(input("Please give in the folder in your ownCloud that will be used to retrieve data from users.\nYou can share this folder with your colleagues so that everybody retrieves data through the same folder. (default: DROPBOX):")) or "DROPBOX"
    return owncloud_download_folder
def get_owncloud_user(config_file=None):
    """Prompt for the ownCloud user; optional (None) when writing a config file."""
    if config_file:
        owncloud_user=str(input("Please give in your ownCloud user name or press Enter if you do not want to save this information on the config file: ")) or None
    else:
        owncloud_user=str(input("Please give in your ownCloud user name: ")) or None
    return owncloud_user
def get_owncloud_pass(config_file=None):
    """Prompt (no echo) for the ownCloud password; optional when writing a config file."""
    if config_file:
        owncloud_pass=str(getpass.getpass(prompt="Please give in your ownCloud password or press Enter if you do not want to save this information on the config file: ")) or None
    else:
        owncloud_pass=str(getpass.getpass(prompt="Please give in your ownCloud password: ")) or None
    return owncloud_pass
def get_github_address():
    """Prompt for the github server address; defaults to https://github.com."""
    github_address=str(input("Github server address (default: https://github.com): ") or "https://github.com")
    return github_address
def get_github_organization():
    """Prompt for the GitHub organization name; returns None on empty input."""
    github_organization=str(input("Your GitHub organization name (eg. mpg-age-bioinformatics for https://github.com/mpg-age-bioinformatics): ")) or None
    return github_organization
def get_github_user(config_file=None,gitssh=None):
    """Prompt for the github user; skipped (None) when ssh access is used."""
    if not gitssh:
        if config_file:
            github_user=str(input("Please give in your user name for your github server or press Enter if you do not want to save this information on the config file: ")) or None
        else:
            github_user=str(input("Please give in your user name for your github server: ")) or None
    else:
        github_user=None
    return github_user
def get_github_pass(config_file=None,gitssh=None):
    """Prompt (no echo) for the github password/token; skipped (None) with ssh."""
    if not gitssh:
        if config_file:
            github_pass=str(getpass.getpass(prompt="Please give in your password or access token (infos on: https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) for your github server or press Enter if you do not want to save this information on the config file: ")) or None
        else:
            github_pass=str(getpass.getpass(prompt="Please give in your password or access token for your github server: ")) or None
    else:
        github_pass=None
    return github_pass
def get_local_path(structure=structure):
    """Prompt for the absolute path that holds all project folders."""
    local_path=str(input("The bermuda information triangle works on the basis that all your projects are located in the same path and have a parent subpath in your local machine ie. %s\n Please give in the absolute path to your projects folder: " %structure ) ) or None
    return local_path
def get_user_group():
    """Prompt for a comma separated ACL user list; returns a list or None."""
    user_group=str(input("If you are using ACLs to give your group members access to this project please give in the users that will have read write access to every projects top folders. eg. userA,userB,userC -- DO NOT forger to give in your own user name: ")) or None
    if user_group:
        user_group=user_group.split(",")
    return user_group
def get_github_api(github_address):
    """Return the organizations API root for a github server.

    github.com uses the hosted API domain; anything else is assumed to be
    a GitHub Enterprise instance exposing the API under /api/v3/.
    """
    if "github.com" in github_address:
        return "https://api.github.com/orgs/"
    return github_address + "/api/v3/orgs/"
def make_bitconfig(require_func=requirements, special_reqs=special_reqs):
    """Interactively collect all settings and write them to ~/.bit_config.

    Prompts for every requirement, stores the answers as JSON readable
    only by the owner, and echoes the result (passwords masked as '*').

    Bug fix vs. previous revision: a first ``open(uhome+".bit_config","w+")``
    handle was created and leaked before the ``with open`` below; only the
    managed handle is used now.
    """
    configdic = {}
    configdic = check_reqs(require_func, configdic, config_file=True, gitssh=None)
    uhome = expanduser("~") + "/"
    with open(uhome + ".bit_config", 'w') as configfile:
        json.dump(configdic, configfile)
    # config may contain credentials: restrict to the owner
    os.chmod(uhome + ".bit_config", stat.S_IRWXU)
    print("Your bit config file has been generated:")
    for c in configdic:
        if "pass" not in c:
            print(c, configdic.get(c))
            sys.stdout.flush()
        elif configdic.get(c) == None:
            print(c, configdic.get(c))
            sys.stdout.flush()
        else:
            # never echo stored passwords
            print(c, "*")
            sys.stdout.flush()
def read_bitconfig(showit=None, bit_config=".bit_config"):
    """Load the JSON config from the user's home directory.

    With ``showit`` set, echo every setting, masking password values
    with '*'. Returns the configuration dictionary.
    """
    home = expanduser("~") + "/"
    with open(home + bit_config, 'r') as handle:
        configdic = json.load(handle)
    if showit:
        for key in configdic:
            value = configdic.get(key)
            if "pass" not in key:
                print(key, value)
            elif value == None:
                print(key, value)
            else:
                # never echo stored passwords
                print(key, "*")
            sys.stdout.flush()
    return configdic
def check_reqs(requirements, configdic, config_file=None, gitssh=None):
    """Prompt for every requested setting and store the answer in configdic.

    Only keys listed in ``requirements`` are asked for; prompts run in a
    fixed order. Returns the (mutated) configdic.
    """
    prompts = [
        ("owncloud_address", lambda: get_owncloud_address()),
        ("owncloud_upload_folder", lambda: get_owncloud_upload_folder()),
        ("owncloud_download_folder", lambda: get_owncloud_download_folder()),
        ("owncloud_user", lambda: get_owncloud_user(config_file=config_file)),
        ("owncloud_pass", lambda: get_owncloud_pass(config_file=config_file)),
        ("github_address", lambda: get_github_address()),
        ("github_organization", lambda: get_github_organization()),
        ("github_user", lambda: get_github_user(config_file=config_file, gitssh=gitssh)),
        ("github_pass", lambda: get_github_pass(config_file=config_file, gitssh=gitssh)),
        ("local_path", lambda: get_local_path()),
        ("user_group", lambda: get_user_group()),
    ]
    for key, ask in prompts:
        if key in requirements:
            configdic[key] = ask()
    return configdic
def init_user(path_to_project,github_address,github_organization,github_repo,github_user=None,github_pass=None,gitssh=None):
    """Clone the project's scripts and wiki repos into per-user folders.

    Creates ``<path>/scripts.<user>`` and ``<path>/wiki.<user>``. If the
    wiki clone fails (return code 1), the user is asked to create the
    first wiki page on the server and the clone is retried once; on a
    second failure the wiki folder is removed and wiki creation skipped.
    """
    user_name=getpass.getuser()
    if not os.path.exists(path_to_project):
        os.makedirs(path_to_project)
    response=git.git_clone(path_to_project+"/scripts."+user_name , github_address, github_organization, github_repo, github_user=github_user, github_pass=github_pass, gitssh=gitssh)
    response=git.git_clone(path_to_project+"/wiki."+user_name , github_address, github_organization, github_repo+".wiki", github_user=github_user, github_pass=github_pass, gitssh=gitssh)
    if response == 1:
        # the wiki repo only exists after the first page is saved on the server
        input("\n\n*************\n\nThe wiki for this project has not yet been created.\n\n Please go to %s/%s/%s/wiki and click on 'Create the first page' and then 'Save Page'.\n\nPress Enter once you have saved the first wiki page.\n\nOtherwise press enter to skip wiki creation.\n\n*************\n\n" %(github_address,github_organization,github_repo) )
        response=git.git_clone(path_to_project+"/wiki."+user_name ,github_address,github_organization,github_repo+".wiki",github_user=github_user,github_pass=github_pass,gitssh=gitssh)
        if response == 1:
            shutil.rmtree(path_to_project+"/wiki."+user_name, ignore_errors=True)
            print("Skipping wiki creation.")
            sys.stdout.flush()
    print("User initialized.")
    sys.stdout.flush()
mpg-age-bioinformatics/b | bit/git.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import subprocess as sb
from subprocess import Popen, PIPE, STDOUT
import requests
import json
def git_target(github_address, github_organization, github_repo, github_user=None, github_pass=None, gitssh=None, usepw=None):
    """Build the remote URL(s) for a repository on the configured server.

    Returns an ssh URL when ``gitssh`` is set, otherwise an https URL with
    embedded credentials. With ``usepw`` a second, password-free https URL
    is returned as well (as a tuple).
    """
    host = github_address.split("//")[-1]
    if gitssh:
        remote = "git@%s:%s/%s.git" % (host, github_organization, github_repo)
    else:
        remote = "https://%s:%s@%s/%s/%s.git" % (github_user, github_pass, host, github_organization, github_repo)
    if not usepw:
        return remote
    passwordless = "https://%s@%s/%s/%s.git" % (github_user, host, github_organization, github_repo)
    return remote, passwordless
def git_clone(local_name, github_address, github_organization, github_repo, github_user=None, github_pass=None, gitssh=None):
    """Clone the repository into ``local_name``; returns git's exit status."""
    # the second (password-free) URL is not needed for a plain clone
    remote, _unused = git_target(github_address, github_organization, github_repo,
                                 github_user=github_user, github_pass=github_pass,
                                 gitssh=gitssh, usepw=True)
    return sb.call(['git', 'clone', remote, local_name])
def git_fetch(github_address, github_organization, github_repo, github_user=None, github_pass=None, gitssh=None):
    """Run ``git fetch <remote>`` in the current directory and echo its output."""
    remote = git_target(github_address, github_organization, github_repo,
                        github_user=github_user, github_pass=github_pass, gitssh=gitssh)
    proc = Popen(["git", "fetch", remote], stdout=PIPE, stdin=PIPE, stderr=PIPE)
    print(proc.communicate()[0].decode('utf-8').rstrip())
    # release the pipe handles explicitly
    for stream in (proc.stdout, proc.stdin, proc.stderr):
        stream.close()
    try:
        proc.kill()
    except:
        pass
def git_merge(message):
    """Merge FETCH_HEAD into the current branch, committing with ``message``."""
    proc = Popen(["git", "merge", "FETCH_HEAD", "-m", message],
                 stdout=PIPE, stdin=PIPE, stderr=PIPE)
    print(proc.communicate()[0].decode('utf-8').rstrip())
    # release the pipe handles explicitly
    for stream in (proc.stdout, proc.stdin, proc.stderr):
        stream.close()
    try:
        proc.kill()
    except:
        pass
def git_pull(github_address, github_organization, github_repo, github_user=None, github_pass=None, gitssh=None):
    """Run ``git pull <remote>`` in the current directory and echo its output.

    Bug fix vs. previous revision: ``git_target`` was called with the
    literal placeholder ``github_pass=<PASSWORD>`` (a redaction artifact
    that does not even parse); the real ``github_pass`` parameter is
    forwarded now.
    """
    git = git_target(github_address, github_organization, github_repo,
                     github_user=github_user, github_pass=github_pass, gitssh=gitssh)
    call = ["git", "pull", git]
    out = Popen(call, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    print(out.communicate()[0].decode('utf-8').rstrip())
    out.stdout.close()
    out.stdin.close()
    out.stderr.close()
    try:
        out.kill()
    except:
        pass
def git_add(files_to_add):
    """Stage each given path with ``git add``, echoing git's output."""
    for path in files_to_add:
        proc = Popen(["git", "add", path], stdout=PIPE, stdin=PIPE, stderr=PIPE)
        print(proc.communicate()[0].decode('utf-8').rstrip())
        # release the pipe handles explicitly
        for stream in (proc.stdout, proc.stdin, proc.stderr):
            stream.close()
        try:
            proc.kill()
        except:
            pass
def git_commit(message):
    """Commit the staged changes with the given commit ``message``."""
    proc = Popen(["git", "commit", "-m", message], stdout=PIPE, stdin=PIPE, stderr=PIPE)
    print(proc.communicate()[0].decode('utf-8').rstrip())
    # release the pipe handles explicitly
    for stream in (proc.stdout, proc.stdin, proc.stderr):
        stream.close()
    try:
        proc.kill()
    except:
        pass
def git_push(github_address,github_organization,github_repo,github_user=None,github_pass=None,gitssh=None):
    """Push all branches to the remote, echoing output in ssh mode.

    In https mode the first push's output is discarded and a second plain
    ``git push`` (to the default remote) is run as well.
    """
    git=git_target(github_address,github_organization,github_repo,github_user=github_user,github_pass=github_pass,gitssh=gitssh)
    call=["git","push",git,"--all"]
    if gitssh:
        out=Popen(call, stdout=PIPE, stdin=PIPE, stderr=PIPE)
        print(out.communicate()[0].decode('utf-8').rstrip())
    else:
        # NOTE(review): FNULL is never closed and the first Popen is neither
        # waited on nor cleaned up (`out` is rebound below) — confirm intent
        FNULL = open(os.devnull, 'w')
        out=Popen(call, stdout=FNULL, stdin=PIPE ,stderr=PIPE)
        out=Popen(["git","push"],stdout=PIPE, stdin=PIPE, stderr=PIPE)
    # only the last process's pipes are closed here
    out.stdout.close()
    out.stdin.close()
    out.stderr.close()
    try:
        out.kill()
    except:
        pass
def git_sync(files_to_add,message,github_address,github_organization,github_repo,github_user=None,github_pass=None,gitssh=None):
    """Add, commit, fetch, merge and push in sequence for the given repo.

    Runs in the current working directory; the caller is expected to have
    chdir'ed into the local clone first.
    """
    git_add(files_to_add)
    git_commit(message)
    git_fetch(github_address,github_organization,github_repo,github_user=github_user,github_pass=github_pass,gitssh=gitssh)
    git_merge(message)
    git_push(github_address,github_organization,github_repo,github_user=github_user,github_pass=github_pass,gitssh=gitssh)
def git_write_comment(message,github_api,github_organization,github_repo,issue,github_user=None,github_pass=None):
    """Post ``message`` as a comment on the given issue via curl.

    ``github_api`` is the orgs API root (see get_github_api); it is
    rewritten to the repos issue-comments endpoint. The call is fire and
    forget: the response is not checked.
    """
    github_api=github_api.split("orgs/")[0]+"repos/"+github_organization+"/"+github_repo+"/issues/"+issue+"/comments"
    # NOTE(review): message is spliced into the JSON body unescaped — a
    # double quote in the message breaks the payload; confirm inputs
    create_call=["curl","-u",github_user+":"+github_pass\
,github_api,"-d",'{"body":"'+message+'"}']
    out=Popen(create_call, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    # local import kept as in the original module
    import time
    # give curl a moment to fire before tearing the process down
    time.sleep(2)
    try:
        out.stdout.close()
    except:
        pass
    try:
        out.stdin.close()
    except:
        pass
    try:
        out.stderr.close()
    except:
        pass
    try:
        out.kill()
    except:
        pass
def make_github_repo( github_api, repo_name, configdic):
    """Create a private, auto-initialized repository in the organization.

    Exits the process with status 1 when the API does not answer 201.
    Returns the ``requests`` response on success.
    """
    url=f'{github_api}{configdic["github_organization"]}/repos'
    # NOTE(review): 'true' is sent as a JSON string, not a boolean —
    # confirm the API accepts this
    repo = { "name" : repo_name , \
"private" : 'true' ,\
"auto_init": 'true' }
    response = requests.post( url, data=json.dumps(repo), auth=( configdic["github_user"], configdic["github_pass"] ))
    if response.status_code == 201:
        print('Successfully created Repository "%s"' % repo_name )
    else:
        print('Could not create Repository "%s"' % repo_name)
        print('Response:', response.content)
        sys.stdout.flush()
        sys.exit(1)
    return response
def make_github_issue(github_api, title, repo_name, configdic, assignee ):
    '''Create an issue on github.com using the given parameters.

    ``github_api`` is the orgs API root; it is rewritten to the repos
    issues endpoint. Exits with status 1 unless the API answers 201.
    Returns the ``requests`` response on success.
    '''
    # Our url to create issues via POST
    base_api=github_api.split("orgs")[0]
    url=f'{base_api}repos/{configdic["github_organization"]}/{repo_name}/issues'
    issue = {'title': title,\
'assignee': assignee}
    # Add the issue to our repository
    response = requests.post( url, data=json.dumps(issue), headers={"Accept": "application/vnd.github.v3+json"}, auth=( configdic["github_user"], configdic["github_pass"] ))
    if response.status_code == 201:
        print('Successfully created Issue "%s"' % title )
    else:
        print('Could not create Issue "%s"' % title)
        print('Response:', response.content)
        sys.stdout.flush()
        sys.exit(1)
    return response
def make_github_card(make_issue_response, github_api, configdic, column):
    '''Create a project-board card for an issue on github.com.

    ``make_issue_response`` is the response returned by make_github_issue;
    its issue id becomes the card's content. ``column`` is the numeric
    project column id. Exits with status 1 unless the API answers 201.
    '''
    # Our url to create cards via POST (projects preview API)
    base_api=github_api.split("orgs")[0]
    url=f'{base_api}projects/columns/{column}/cards'
    issue_response=json.loads(make_issue_response.text)
    issue_id=issue_response["id"]
    card = {'content_id': issue_id,\
"content_type":"Issue"}
    # Add the card to the project column
    response = requests.post( url, data=json.dumps(card), headers={"Accept": "application/vnd.github.inertia-preview+json"}, auth=( configdic["github_user"], configdic["github_pass"] ))
    if response.status_code == 201:
        print('Successfully created card.' )
    else:
        print('Could not create card.')
        print('Response:', response.content)
        sys.stdout.flush()
        sys.exit(1)
    return response
abs51295/fabric8-analytics-rudra | rudra/data_store/bigquery/base.py | <filename>rudra/data_store/bigquery/base.py
"""Implementation Bigquery builder base."""
import os
import time
import tempfile
import json
from google.cloud import bigquery
from rudra import logger
from rudra.data_store.aws import AmazonS3
_POLLING_DELAY = 1 # sec
class BigqueryBuilder:
    """BigqueryBuilder class Implementation.

    Wraps a google-cloud BigQuery client with credential handling and
    sync/async query execution. Subclasses are expected to provide the
    SQL in ``self.query`` before running — TODO confirm against the
    concrete builders.
    """
    def __init__(self, query_job_config=None, credential_path=None):
        """Initialize the BigqueryBuilder object.

        Credentials may be passed either as a path to a service-account
        JSON file or as the raw JSON content itself (in which case it is
        written to a temp file and exported via
        GOOGLE_APPLICATION_CREDENTIALS).
        """
        self.original_credential_path = os.getenv('GOOGLE_APPLICATION_CREDENTIALS') \
            or credential_path
        # decide whether we got inline JSON or a file path
        try:
            json.loads(self.original_credential_path)
            json_credentials = True
        except Exception as e:
            logger.error("Not JSON credentials, reverting to local env JSON file: {}".format(e))
            json_credentials = False
        if json_credentials:
            # persist the inline JSON so the google client can read it;
            # delete=False keeps the file alive beyond this scope
            tfile = tempfile.NamedTemporaryFile(mode='w+', delete=False)
            tfile.write(self.original_credential_path)
            tfile.flush()
            tfile.seek(0)
            self.new_credential_path = tfile.name
            os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = self.new_credential_path
        else:
            self.new_credential_path = self.original_credential_path
        if isinstance(query_job_config, bigquery.job.QueryJobConfig):
            self.query_job_config = query_job_config
        else:
            self.query_job_config = bigquery.job.QueryJobConfig()
        self.client = None
        if self.new_credential_path:
            self.client = bigquery.Client(
                default_query_job_config=self.query_job_config)
        else:
            raise ValueError("Please provide the the valid credential_path")
    def _run_query(self, job_config=None):
        # submit self.query and block (0.1s polling) until it finishes;
        # returns the job id
        if self.client and self.query:
            self.job_query_obj = self.client.query(
                self.query, job_config=job_config)
            while not self.job_query_obj.done():
                time.sleep(0.1)
            return self.job_query_obj.job_id
        else:
            raise ValueError
    def run_query_sync(self):
        """Run the bigquery synchronously."""
        return self._run_query()
    def run_query_async(self):
        """Run the bigquery asynchronously (BATCH priority)."""
        job_config = bigquery.QueryJobConfig()
        job_config.priority = bigquery.QueryPriority.BATCH
        return self._run_query(job_config=job_config)
    def get_status(self, job_id):
        """Get the job status of async query."""
        response = self.client.get_job(job_id)
        return response.state
    def get_result(self, job_id=None, job_query_obj=None):
        """Get the result of the job, yielding each row as a dict.

        With a ``job_id`` the job is polled until it leaves the PENDING
        state; otherwise the given (or last submitted) query object is
        consumed directly.
        """
        if job_id is None:
            job_query_obj = job_query_obj or self.job_query_obj
            for row in job_query_obj.result():
                yield ({k: v for k, v in row.items()})
        else:
            job_obj = self.client.get_job(job_id)
            while job_obj.state == 'PENDING':
                job_obj = self.client.get_job(job_id)
                logger.info("Job State for Job Id:{} is {}".format(
                    job_id, job_obj.state))
                time.sleep(_POLLING_DELAY)
            yield from self.get_result(job_query_obj=job_obj)
    def __iter__(self):
        """Iterate over the query result."""
        yield from self.get_result()
class DataProcessing:
    """Process the Bigquery Data."""
    def __init__(self, s3_client=None):
        """Initialize DataProcessing object.

        ``s3_client`` may be injected (e.g. a local mock); otherwise one
        is created lazily in update_s3_bucket.
        """
        self.s3_client = s3_client
    def update_s3_bucket(self, data,
                         bucket_name,
                         filename='collated.json'):
        """Merge ``data`` into the JSON object stored at ``filename`` on s3."""
        if self.s3_client is None:
            # create an s3 client on demand, using env credentials
            self.s3_client = AmazonS3(
                bucket_name=bucket_name,
                aws_access_key_id=os.getenv('AWS_S3_ACCESS_KEY_ID'),
                aws_secret_access_key=os.getenv('AWS_S3_SECRET_ACCESS_KEY')
            )
        # connect after creating or with existing s3 client
        self.s3_client.connect()
        if not self.s3_client.is_connected():
            raise ValueError("Unable to connect to s3.")
        json_data = dict()
        if self.s3_client.object_exists(filename):
            logger.info("{} exists, updating it.".format(filename))
            json_data = self.s3_client.read_json_file(filename)
            # NOTE(review): raising only when an existing file reads back
            # empty — original indentation was ambiguous; confirm placement
            if not json_data:
                raise ValueError("Unable to get the json data path:{}/{}"
                                 .format(bucket_name, filename))
        json_data.update(data)
        self.s3_client.write_json_file(filename, json_data)
        logger.info("Updated file Succefully!")
|
abs51295/fabric8-analytics-rudra | tests/data_store/bigquery/test_maven_bigquery.py | import types
import tempfile
import shutil
import json
import pathlib
import pytest
import mock
from tests.data_store.bigquery.test_base import MockBigQuery
from rudra.data_store.local_data_store import LocalDataStore
class MockS3(LocalDataStore):
    """Local-filesystem stand-in for the S3 data store used by these tests."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Tests never open a real connection; always report connected.
        self.is_connected = lambda: True
    def object_exists(self, fname):
        # Mirror S3 key existence with a file-existence check under src_dir.
        return pathlib.Path(self.src_dir).joinpath(fname).exists()
    def write_json_file(self, fname, content):
        # Serialize `content` as JSON to src_dir/fname, creating parent dirs.
        fpath = pathlib.Path(self.src_dir).joinpath(fname)
        if not fpath.parent.exists():
            fpath.parent.mkdir(parents=True, exist_ok=True)
        with open(str(fpath.absolute()), 'w') as json_fileobj:
            return json.dump(content, json_fileobj)
    def __del__(self):
        # Remove the temp directory created by the fixture on teardown.
        shutil.rmtree(self.src_dir)
    def connect(self):
        # No-op connect; result of is_connected() is intentionally discarded.
        self.is_connected()
@pytest.fixture
@mock.patch('rudra.data_store.bigquery.base.bigquery', new_callable=MockBigQuery)
def _maven_bigquery_client(_mock_bigquery_obj):
    """Fixture: MavenBigQuery client with the bigquery module mocked out."""
    from rudra.data_store.bigquery.maven_bigquery import MavenBigQuery
    _client = MavenBigQuery()
    # NOTE(review): the query filters on requirements.txt although this is the
    # maven client -- presumably copied from the pypi test; verify intent.
    _client.query = "select id, name, content from manifests where name like '%requirements.txt'"
    return _client
@pytest.fixture
@mock.patch('rudra.data_store.bigquery.base.bigquery', new_callable=MockBigQuery)
def _data_process_client(_mock_bigquery_obj):
    """Fixture: (MavenBQDataProcessing, MockS3) pair backed by a temp dir."""
    from rudra.data_store.bigquery.maven_bigquery import MavenBigQuery, MavenBQDataProcessing
    _mvn_ins = MavenBigQuery()
    s3_client = MockS3(tempfile.mkdtemp())
    _mvn_ins.query = "select id, name, content from manifests\
        where name like '%pom.xml'"
    _client = MavenBQDataProcessing(_mvn_ins, s3_client=s3_client)
    return _client, s3_client
class TestMavenBigQuery:
    """Query-execution tests for MavenBigQuery against the mocked backend."""
    def test_run_query(self, _maven_bigquery_client):
        """A direct _run_query call returns a job id."""
        job_id = _maven_bigquery_client._run_query()
        assert job_id is not None
    def test_run_query_sync(self, _maven_bigquery_client):
        """run_query_sync returns a job id."""
        job_id = _maven_bigquery_client.run_query_sync()
        assert job_id is not None
    def test_run_query_async(self, _maven_bigquery_client):
        """Async job first reports PENDING, then DONE on the next poll."""
        job_id = _maven_bigquery_client.run_query_async()
        assert job_id is not None
        assert _maven_bigquery_client.get_status(job_id) == 'PENDING'
        assert _maven_bigquery_client.get_status(job_id) == 'DONE'
    def test_get_result_sync(self, _maven_bigquery_client):
        """Sync results stream as dicts with id/name/content keys."""
        job_id = _maven_bigquery_client.run_query_sync()
        assert job_id is not None
        result = _maven_bigquery_client.get_result()
        assert isinstance(result, types.GeneratorType)
        result = list(result)
        assert len(result) > 0
        for d in result:
            assert not set(['id', 'name', 'content']).difference(d)
    def test_get_result_async(self, _maven_bigquery_client):
        """Results fetched by job id have the same row shape."""
        job_id = _maven_bigquery_client.run_query_async()
        assert job_id is not None
        result = _maven_bigquery_client.get_result(job_id=job_id)
        assert isinstance(result, types.GeneratorType)
        result = list(result)
        assert len(result) > 0
        for d in result:
            assert not set(['id', 'name', 'content']).difference(d)
class TestMavenDataProcessing:
    """End-to-end tests for MavenBQDataProcessing with MockS3 output."""
    def test_process(self, _data_process_client):
        """process() writes collated maven package pairs to the mock S3."""
        dp_client, s3_client = _data_process_client
        dp_client.process()
        data = s3_client.read_json_file(dp_client.filename)
        assert 'maven' in data
        assert len(data['maven']) > 0
        # Keys appear to be combined package identifiers from the mocked
        # manifests; each pair is expected twice (v == 2) -- TODO confirm
        # against MockBigQuery's fixture data.
        for k, v in data['maven'].items():
            assert 'org.apache.camel:camel-spring-boot-starter' in k
            assert 'org.springframework.boot:spring-boot-starter-web' in k
            assert v == 2
    def test_construct_packages(self, _data_process_client):
        """Only dependencies with group+artifact and non-test scope survive."""
        dp_client, s3_client = _data_process_client
        content = """
            <project><dependencies>
            <dependency>
                <groupId>grp1.id</groupId>
                <artifactId>art1.id</artifactId>
            </dependency>
            <dependency>
                <groupId>grp2.id</groupId>
                <artifactId>art2.id</artifactId>
                <scope>test</scope>
            </dependency>
            <dependency><groupId>gid</groupId></dependency>
            </dependencies></project>
        """
        result = dp_client.construct_packages(content)
        assert len(result) == 1
        assert 'grp1.id:art1.id' in result
|
abs51295/fabric8-analytics-rudra | rudra/deployments/emr_scripts/__init__.py | <filename>rudra/deployments/emr_scripts/__init__.py
"""EMR Deployments."""
from rudra.deployments.emr_scripts.maven_emr import MavenEMR
from rudra.deployments.emr_scripts.npm_emr import NpmEMR
from rudra.deployments.emr_scripts.pypi_emr import PyPiEMR
__all__ = ['MavenEMR', 'NpmEMR', 'PyPiEMR']
|
abs51295/fabric8-analytics-rudra | rudra/deployments/emr_scripts/emr_config.py | """Configurations for EMR instance."""
class EMRConfig:
    """Config class for EMR.

    Builds the dictionary passed to boto3's ``run_job_flow`` from the
    individual cluster / training-job parameters.
    """

    # All EMR steps run relative to the hadoop user's home directory.
    home_dir = '/home/hadoop'

    def __init__(self, name, log_uri, ecosystem, s3_bootstrap_uri, training_repo_url,
                 training_file_name='training/train.py', release_label='emr-5.10.0',
                 instance_count=1, instance_type='m3.xlarge', applications=None,
                 visible_to_all_users=True, job_flow_role='EMR_EC2_DefaultRole',
                 service_role='EMR_DefaultRole', properties=None, hyper_params='{}'):
        """Initialize the EMRConfig object.

        :param name: cluster name.
        :param log_uri: S3 URI for EMR logs.
        :param ecosystem: ecosystem label (maven/npm/pypi), used to name the
            master instance group.
        :param s3_bootstrap_uri: S3 URI of the bootstrap script.
        :param training_repo_url: git URL of the training-code repository.
        :param applications: EMR applications; defaults to MXNet.
        :param properties: extra hadoop-env export properties.
        :param hyper_params: JSON string passed to the training script.
        """
        # BUG FIX: `applications` and `properties` previously used mutable
        # default arguments ([{'Name': 'MXNet'}] / {}), which are shared
        # across all instances; use None sentinels instead.
        self.instances = {
            'KeepJobFlowAliveWhenNoSteps': False,
            'TerminationProtected': False,
            'Ec2SubnetId': 'subnet-50271f16',
            'Ec2KeyName': 'Zeppelin2Spark',
            'InstanceGroups': []
        }
        self.steps = [
            {
                'Name': 'Setup Debugging',
                'ActionOnFailure': 'TERMINATE_CLUSTER',
                'HadoopJarStep': {
                    'Jar': 'command-runner.jar',
                    'Args': ['state-pusher-script']
                }
            },
        ]
        self.bootstrap_actions = []
        self.emr_config = None
        self.name = name
        self.log_uri = log_uri
        self.hyper_params = hyper_params
        self.release_label = release_label
        self.s3_bootstrap_uri = s3_bootstrap_uri
        self.applications = ([{'Name': 'MXNet'}]
                             if applications is None else applications)
        self.visible_to_all_users = visible_to_all_users
        self.job_flow_role = job_flow_role
        self.service_role = service_role
        self.instance_type = instance_type or 'm3.xlarge'
        self.instance_count = instance_count or 1
        self.instance_group_name = '{}_master_group'.format(ecosystem)
        self.training_repo_url = training_repo_url
        self.repo_dir = "{}/{}".format(self.home_dir, 'repo')
        self.training_file_name = training_file_name
        self.instance_type_properties = {
            "LC_ALL": "en_US.UTF-8",
            "LANG": "en_US.UTF-8",
            "PYTHONPATH": "{}/repo".format(self.home_dir),
            "PYTHONUNBUFFERED": "0"
        }
        self.instance_type_properties.update(properties or {})

    def get_config(self):
        """Build (once) and return the run_job_flow config dict."""
        # BUG FIX: repeated calls previously appended duplicate steps,
        # bootstrap actions and instance groups; cache the built config.
        if self.emr_config is not None:
            return self.emr_config
        training_file = "{}/{}".format(self.repo_dir, self.training_file_name)
        download_training_code = [
            'git', 'clone', self.training_repo_url, self.repo_dir]
        # ('python3.6'.format(...) in the original was a no-op format call.)
        execute_training_code = ['python3.6', training_file, self.hyper_params]
        step2 = {
            'Name': 'setup - copy files',
            'ActionOnFailure': 'TERMINATE_CLUSTER',
            'HadoopJarStep': {
                'Jar': 'command-runner.jar',
                'Args': download_training_code
            }
        }
        step3 = {
            'Name': 'Run training job',
            'ActionOnFailure': 'TERMINATE_CLUSTER',
            'HadoopJarStep': {
                'Jar': 'command-runner.jar',
                'Args': execute_training_code
            }
        }
        self.steps = self.steps + [step2, step3]
        bootstrap_action = {
            'Name': 'Metadata setup',
            'ScriptBootstrapAction': {
                'Path': self.s3_bootstrap_uri
            }
        }
        self.bootstrap_actions.append(bootstrap_action)
        instance_group = {
            'Name': self.instance_group_name,
            'InstanceRole': 'MASTER',
            'InstanceType': self.instance_type,
            'InstanceCount': self.instance_count,
            'Configurations': [
                {
                    "Classification": "hadoop-env",
                    "Properties": {},
                    "Configurations": [
                        {
                            "Classification": "export",
                            "Configurations": [],
                            "Properties": self.instance_type_properties
                        }
                    ]
                }
            ]
        }
        self.instances['InstanceGroups'].append(instance_group)
        self.emr_config = {
            "Name": self.name,
            "LogUri": self.log_uri,
            "ReleaseLabel": self.release_label,
            "Instances": self.instances,
            "BootstrapActions": self.bootstrap_actions,
            "Steps": self.steps,
            "Applications": self.applications,
            "VisibleToAllUsers": self.visible_to_all_users,
            "JobFlowRole": self.job_flow_role,
            "ServiceRole": self.service_role
        }
        return self.emr_config
|
abs51295/fabric8-analytics-rudra | tests/utils/test_validation.py | from rudra.utils.validation import check_field_exists, check_url_alive, BQValidation
import pytest
def test_check_field_exists():
    """check_field_exists reports missing fields for lists and dicts."""
    input_data = ['a', 'b', 'c']
    missing = check_field_exists(input_data, ['a', 'd'])
    assert 'd' in missing
    missing = check_field_exists(input_data, ['a', 'c'])
    assert not missing
    input_data = {'a': 1, 'b': 2, 'c': 3}
    missing = check_field_exists(input_data, ['a', 'd'])
    assert 'd' in missing
    # Non-container input is rejected.
    with pytest.raises(ValueError):
        check_field_exists(111, ['a'])
def test_check_url_alive():
    """check_url_alive distinguishes reachable from unreachable hosts.

    NOTE(review): this test makes real network requests and will fail
    offline or if DNS happens to resolve the bogus host -- consider mocking.
    """
    url = 'https://google.com'
    assert check_url_alive(url)
    url = 'https://234j23ksadasca.com'
    assert not check_url_alive(url)
class TestBQValidation:
    """Tests for BQValidation.validate_pypi input handling."""
    @staticmethod
    def test_validate_pypi_content():
        """Known packages pass through; unknown ones are dropped; dicts raise."""
        bq_validation = BQValidation()
        content = 'flask'
        assert not set(bq_validation.validate_pypi(content)).difference([content])
        content = ['flask', 'django', 'unknownpkg']
        assert not set(['flask', 'django']).difference(bq_validation.validate_pypi(content))
        content = {'flask', 'django'}
        assert not content.difference(bq_validation.validate_pypi(content))
        content = frozenset(['flask', 'django'])
        assert not content.difference(bq_validation.validate_pypi(content))
        # Mappings are not an accepted input type.
        with pytest.raises(ValueError):
            bq_validation.validate_pypi({"name": "flask"})
|
abs51295/fabric8-analytics-rudra | tests/data_store/bigquery/test_pypi_bigquery.py | import types
import tempfile
import shutil
import json
import pathlib
import pytest
import mock
from tests.data_store.bigquery.test_base import MockBigQuery
from rudra.data_store.local_data_store import LocalDataStore
class MockS3(LocalDataStore):
    """Local-filesystem stand-in for the S3 data store used by these tests.

    NOTE(review): duplicated verbatim in the maven test module -- consider
    moving to a shared test helper.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Tests never open a real connection; always report connected.
        self.is_connected = lambda: True
    def object_exists(self, fname):
        # Mirror S3 key existence with a file-existence check under src_dir.
        return pathlib.Path(self.src_dir).joinpath(fname).exists()
    def write_json_file(self, fname, content):
        # Serialize `content` as JSON to src_dir/fname, creating parent dirs.
        fpath = pathlib.Path(self.src_dir).joinpath(fname)
        if not fpath.parent.exists():
            fpath.parent.mkdir(parents=True, exist_ok=True)
        with open(str(fpath.absolute()), 'w') as json_fileobj:
            return json.dump(content, json_fileobj)
    def __del__(self):
        # Remove the temp directory created by the fixture on teardown.
        shutil.rmtree(self.src_dir)
    def connect(self):
        # No-op connect; result of is_connected() is intentionally discarded.
        self.is_connected()
@pytest.fixture
@mock.patch('rudra.data_store.bigquery.base.bigquery', new_callable=MockBigQuery)
def _pypi_bigquery_client(_mock_bigquery_obj):
    """Fixture: PyPiBigQuery client with the bigquery module mocked out."""
    from rudra.data_store.bigquery.pypi_bigquery import PyPiBigQuery
    _client = PyPiBigQuery()
    _client.query = "select id, name, content from manifests where name like '%requirements.txt'"
    return _client
@pytest.fixture
@mock.patch('rudra.data_store.bigquery.base.bigquery', new_callable=MockBigQuery)
def _data_process_client(_mock_bigquery_obj):
    """Fixture: (PyPiBigQueryDataProcessing, MockS3) pair on a temp dir."""
    from rudra.data_store.bigquery.pypi_bigquery import PyPiBigQuery, PyPiBigQueryDataProcessing
    _pypi_ins = PyPiBigQuery()
    s3_client = MockS3(tempfile.mkdtemp())
    _pypi_ins.query = "select id, name, content from manifests\
        where name like '%requirements.txt'"
    _client = PyPiBigQueryDataProcessing(_pypi_ins, s3_client=s3_client)
    return _client, s3_client
class TestPyPiBigQuery:
    """Query-execution tests for PyPiBigQuery against the mocked backend."""
    def test_run_query(self, _pypi_bigquery_client):
        """A direct _run_query call returns a job id."""
        job_id = _pypi_bigquery_client._run_query()
        assert job_id is not None
    def test_run_query_sync(self, _pypi_bigquery_client):
        """run_query_sync returns a job id."""
        job_id = _pypi_bigquery_client.run_query_sync()
        assert job_id is not None
    def test_run_query_async(self, _pypi_bigquery_client):
        """Async job first reports PENDING, then DONE on the next poll."""
        job_id = _pypi_bigquery_client.run_query_async()
        assert job_id is not None
        assert _pypi_bigquery_client.get_status(job_id) == 'PENDING'
        assert _pypi_bigquery_client.get_status(job_id) == 'DONE'
    def test_get_result_sync(self, _pypi_bigquery_client):
        """Sync results stream as dicts with id/name/content keys."""
        job_id = _pypi_bigquery_client.run_query_sync()
        assert job_id is not None
        result = _pypi_bigquery_client.get_result()
        assert isinstance(result, types.GeneratorType)
        result = list(result)
        assert len(result) > 0
        for d in result:
            assert not set(['id', 'name', 'content']).difference(d)
    def test_get_result_async(self, _pypi_bigquery_client):
        """Results fetched by job id have the same row shape."""
        job_id = _pypi_bigquery_client.run_query_async()
        assert job_id is not None
        result = _pypi_bigquery_client.get_result(job_id=job_id)
        assert isinstance(result, types.GeneratorType)
        result = list(result)
        assert len(result) > 0
        for d in result:
            assert not set(['id', 'name', 'content']).difference(d)
class TestPyPiDataProcessing:
    """End-to-end tests for PyPiBigQueryDataProcessing with MockS3 output."""
    def test_process_with_validation(self, _data_process_client):
        """process(validate=True) drops unknown packages from the output."""
        dp_client, s3_client = _data_process_client
        dp_client.process(validate=True)
        data = s3_client.read_json_file(dp_client.filename)
        assert 'pypi' in data
        assert len(data['pypi']) > 0
        # Keys appear to be combined package groups from the mocked
        # manifests -- TODO confirm against MockBigQuery's fixture data.
        for k, v in data['pypi'].items():
            assert 'boto' in k
            assert 'chardet' in k
            assert 'flask' in k
            assert 'unknown1' not in k
            assert v == 2
    def test_process_without_validation(self, _data_process_client):
        """process(validate=False) keeps unknown packages in the output."""
        dp_client, s3_client = _data_process_client
        dp_client.process(validate=False)
        data = s3_client.read_json_file(dp_client.filename)
        assert 'pypi' in data
        assert len(data['pypi']) > 0
        for k, v in data['pypi'].items():
            assert 'boto' in k
            assert 'chardet' in k
            assert 'flask' in k
            assert 'unknown1'in k
            assert v == 2
|
ant1fact/jalapino-api | tests/conftest.py | import pytest
from api import create_app
from api.config import TestConfig
def make_auth_header(role: str) -> dict:
    """Build an ``Authorization: Bearer <token>`` header dict for *role*.

    :param role: either ``'customer'`` or ``'restaurant'``.
    :returns: a dict suitable for passing as request ``headers``.
    """
    # BUG FIX: the return annotation previously said ``-> str`` although the
    # function returns a dict.
    assert role in {'customer', 'restaurant'}
    tokens = {
        'customer': TestConfig.CUSTOMER_TOKEN,
        'restaurant': TestConfig.RESTAURANT_TOKEN
    }
    return {'Authorization': f'Bearer {tokens[role]}'}
@pytest.fixture()
def app(): # sourcery skip: inline-immediately-yielded-variable
    """Yield a Flask application configured with TestConfig."""
    app = create_app(config=TestConfig)
    # other setup can go here
    yield app
    # clean up / reset resources here
@pytest.fixture()
def client(app):
    """Flask test client bound to the test application."""
    return app.test_client()
@pytest.fixture()
def runner(app):
return app.test_cli_runner() |
ant1fact/jalapino-api | tests/test_api_rbac.py | from api.models import Category, Customer, Item, Restaurant
from conftest import create_app, make_auth_header
# Module-level app used only for `app.app_context()` in the tests below;
# HTTP requests go through the `client` fixture from conftest.
app = create_app()
### RESTAURANTS ###
def test_pass_get_restaurants(client):
    """GET /restaurants returns serialized restaurants without auth0_id."""
    response = client.get('/restaurants')
    assert response.status_code == 200
    restaurants = response.get_json()
    # isinstance is the idiomatic type check (type(x) == T rejects subclasses).
    assert isinstance(restaurants, list)
    with app.app_context():
        for restaurant in restaurants:
            assert isinstance(restaurant, dict)
            assert isinstance(restaurant['id'], int)
            assert isinstance(restaurant['name'], str)
            # auth0_id must never leak through serialization, but must exist
            # on the underlying model.
            assert 'auth0_id' not in restaurant
            assert Restaurant.query.get(restaurant['id']).auth0_id
def test_fail_get_restaurants(client):
# GET /restaurant (singular instead of plural)
response = client.get('/restaurant')
assert response.status_code == 404
assert response.get_json() is None
def test_pass_get_restaurant(client):
response = client.get('/restaurants/1')
assert response.status_code == 200
assert response.get_json() is not None
def test_fail_get_restaurant(client):
# Use incorrect ID format
response = client.get('/restaurants/somestring')
assert response.status_code == 404
def test_pass_create_restaurant(client):
new_restaurant_data = {
"name": "TEST_RESTAURANT",
"email": "<EMAIL>",
"phone": "1-234-567890",
"address": "111 Test St, Test City, RE",
}
response = client.post(
'/restaurants', json=new_restaurant_data, headers=make_auth_header('restaurant')
)
assert response.status_code == 201
def test_fail_create_restaurant(client):
# Try to verify ownership of restaurant resource using customer token
response = client.post(
'/restaurants', json={}, headers=make_auth_header('customer')
)
assert response.status_code == 403
def test_pass_update_restaurant(client):
with app.app_context():
latest_id = max(r.id for r in Restaurant.query.all())
response = client.patch(
f'/restaurants/{latest_id}',
json={'name': 'Renamed Restaurant'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 200
restaurant = Restaurant.query.get_or_404(latest_id)
assert restaurant.name == 'Renamed Restaurant'
def test_fail_update_restaurant(client):
with app.app_context():
latest_id = max(r.id for r in Restaurant.query.all())
response = client.put(
f'/restaurants/{latest_id}', json={}, headers=make_auth_header('restaurant')
)
assert response.status_code == 400
def test_pass_delete_restaurant(client):
with app.app_context():
num_restaurants_before = Restaurant.query.count()
latest_id = max(r.id for r in Restaurant.query.all())
response = client.delete(
f'/restaurants/{latest_id}', headers=make_auth_header('restaurant')
)
assert response.status_code == 200
assert num_restaurants_before > Restaurant.query.count()
def test_fail_delete_restaurant(client):
with app.app_context():
# Cannot delete all restaurants at once
response = client.delete('/restaurants')
assert response.status_code == 405
### CUSTOMERS ###
def test_pass_create_customer(client):
new_customer_data = {
"name": "TEST",
"email": "<EMAIL>",
"phone": "1-234-567890",
"address": "999 Test St, Test City, CU",
}
response = client.post(
'/customers', json=new_customer_data, headers=make_auth_header('customer')
)
assert response.status_code == 201
def test_fail_create_customer(client):
response = client.post('/customers', json={})
assert response.status_code == 401
def test_pass_get_customer(client):
with app.app_context():
latest_id = max(c.id for c in Customer.query.all())
response = client.get(
f'/customers/{latest_id}', headers=make_auth_header('customer')
)
assert response.status_code == 200
# Make sure all the data points are present in the response
assert all(response.json[key] for key in Customer.defaults())
def test_fail_get_customers(client):
response = client.get('/customers')
assert response.status_code == 405
def test_pass_update_customer(client):
with app.app_context():
latest_id = max(c.id for c in Customer.query.all())
response = client.patch(
f'/customers/{latest_id}',
json={'phone': '987-654-321'},
headers=make_auth_header('customer'),
)
assert response.status_code == 200
customer = Customer.query.get_or_404(latest_id)
assert customer.phone == '987-654-321'
def test_fail_update_customer(client):
with app.app_context():
latest_id = max(c.id for c in Customer.query.all())
response = client.patch(
f'/customers/{latest_id}',
json={'phone': None},
headers=make_auth_header('customer'),
)
assert response.status_code == 400
def test_pass_delete_customer(client):
with app.app_context():
num_customers_before = Customer.query.count()
latest_id = max(c.id for c in Customer.query.all())
response = client.delete(
f'/customers/{latest_id}', headers=make_auth_header('customer')
)
assert response.status_code == 200
assert num_customers_before > Customer.query.count()
def test_fail_delete_customer(client):
with app.app_context():
# Cannot delete all customers at once
response = client.delete('/customers')
assert response.status_code == 405
### CATEGORIES ###
def test_pass_create_category(client):
response = client.post(
'/restaurants/1/categories',
json={'name': 'Tastiest'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 201
def test_fail_create_category(client):
response = client.post(
'/restaurants/1/categories',
json={'name': 'Yummies'},
headers=make_auth_header('customer'),
)
assert response.status_code == 403
def test_pass_update_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.patch(
f'/categories/{latest_id}',
json={'name': 'Yummies'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 200
assert Category.query.get_or_404(latest_id).name == 'Yummies'
def test_fail_update_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.put(
f'/categories/{latest_id}',
json={'blame': 'Yummies'},
headers=make_auth_header('restaurant'),
)
assert response.status_code == 400
def test_pass_delete_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.delete(
f'/categories/{latest_id}', headers=make_auth_header('restaurant')
)
assert response.status_code == 200
def test_fail_delete_category(client):
with app.app_context():
latest_id = max(c.id for c in Category.query.all())
response = client.delete(
f'/categories/{latest_id}', headers=make_auth_header('customer')
)
assert response.status_code == 403
### ITEMS AND INGREDIENTS ###
def test_pass_search_items(client):
response = client.post('/items', json={'search_term': 'soup'})
assert response.status_code == 200
assert len(response.json) >= 1
def test_fail_search_items(client):
# Invalid request format
response = client.post('/items', data={'search_term': 'soup'})
assert response.status_code == 400
def test_pass_get_item(client):
response = client.get('/items/1')
assert response.status_code == 200
assert 'name' in response.json
assert 'price' in response.json
def test_fail_get_item(client):
# Singular resource in URL
response = client.get('/item/1')
assert response.status_code == 404
def test_pass_create_item(client):
    """POST /categories/1/items with valid data and restaurant token succeeds."""
    with app.app_context():
        num_items_before = Item.query.count()
    new_item_data = {
        'name': 'Bullseye',
        'description': 'Not what it sounds.',
        'price': 24.42,
        'ingredients': ['bull', 'duh', 'saffron', 'turmeric']
    }
    response = client.post('/categories/1/items', json=new_item_data, headers=make_auth_header('restaurant'))
    assert response.status_code == 201
    with app.app_context():
        # BUG FIX: this comparison was previously missing its `assert`,
        # so the item-count check never actually ran.
        assert num_items_before < Item.query.count()
def test_fail_create_item(client):
new_item_data = {
'name': 'Bullseye',
'ingredients': []
}
response = client.post('/categories/1/items', json=new_item_data)
assert response.status_code == 401
def test_pass_update_item(client):
put_item_data = {
'name': 'Catseye',
'description': "It was a bull, now it's a cat.",
'price': 42.24,
'ingredients': ['cat', 'nip', 'saffron', 'turmeric']
}
response = client.put('/items/1', json=put_item_data, headers=make_auth_header('restaurant'))
assert response.status_code == 200
with app.app_context():
assert Item.query.get_or_404(1).name == 'Catseye'
def test_fail_update_item(client):
with app.app_context():
latest_id = max(i.id for i in Item.query.all())
put_item_data = {
'name': 'Catseye',
'ingredients': ['cat', 'nip', 'saffron', 'turmeric']
}
response = client.put(f'/items/{latest_id}', json=put_item_data, headers=make_auth_header('restaurant'))
assert response.status_code == 400
def test_pass_delete_item(client):
with app.app_context():
latest_id = max(i.id for i in Item.query.all())
response = client.delete(f'/items/{latest_id}', headers=make_auth_header('restaurant'))
assert response.status_code == 200
def test_fail_delete_item(client):
response = client.delete('/items/3')
assert response.status_code == 401
def test_pass_get_items_by_ingredient(client):
for ingredient_id in {1, 23, 57}:
response = client.get(f'/ingredients/{ingredient_id}/items')
assert response.status_code == 200
assert response.json
def test_fail_get_items_by_ingredient(client):
for ingredient_name in {'turmeric', 'saffron', 'pepper'}:
response = client.get(f'/ingredients/{ingredient_name}/items')
assert response.status_code == 404
### ORDERS ###
def test_pass_create_order(client):
orders = [
{'items': [1, 2, 3, 4]},
{'items': [44, 45, 46, 47, 48]},
{'items': [87, 88, 89]},
]
for order in orders:
response = client.post(
'/customers/1/orders', json=order, headers=make_auth_header('customer')
)
assert response.status_code == 201
def test_fail_create_order(client):
# Mix up items from different restaurants per order
orders = [
{'items': [93, 2, 75, 4]},
{'items': [23, 45, 1, 6]},
{'items': [52, 2, 43]},
]
for order in orders:
response = client.post(
'/customers/2/orders', json=order, headers=make_auth_header('customer')
)
assert response.status_code == 400
def test_pass_get_customer_orders(client):
response = client.get('/customers/1/orders', headers=make_auth_header('customer'))
assert response.status_code == 200
assert len(response.json) >= 3
def test_fail_get_customer_orders(client):
# Mix up resources in the URL
response = client.get('/orders/1/customer', headers=make_auth_header('customer'))
assert response.status_code == 404
def test_pass_get_restaurant_orders(client):
response = client.get(
'/restaurants/1/orders', headers=make_auth_header('restaurant')
)
assert response.status_code == 200
assert len(response.json) >= 1
assert response.json[0]['customer_id'] == 1
def test_fail_get_restaurant_orders(client):
# Mix up resources in the URL
response = client.get(
'/orders/1/restaurant', headers=make_auth_header('restaurant')
)
assert response.status_code == 404
|
ant1fact/jalapino-api | api/config.py | <reponame>ant1fact/jalapino-api<filename>api/config.py
from os import getenv
# Config inspired by:
# https://github.com/miguelgrinberg/flasky/blob/master/config.py
class Config:
    """Base configuration, populated from environment variables."""

    SECRET_KEY = getenv('CLIENT_SECRET')
    # SQLAlchemy requires the 'postgresql' scheme while e.g. Heroku's
    # DATABASE_URL uses 'postgres'.
    # BUG FIX: getenv('DATABASE_URL').replace(...) was evaluated eagerly as
    # getenv's default argument, raising AttributeError at import time
    # whenever DATABASE_URL was unset.
    _database_url = getenv('DATABASE_URL')
    SQLALCHEMY_DATABASE_URI = getenv(
        'SQLALCHEMY_DATABASE_URI',
        _database_url.replace('postgres:', 'postgresql:') if _database_url else None
    )
    SQLALCHEMY_RECORD_QUERIES = True
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    AUTH0_AUDIENCE = getenv('AUTH0_AUDIENCE')
    AUTH0_CLIENTID = getenv('AUTH0_CLIENTID')
    AUTH0_DOMAIN = getenv('AUTH0_DOMAIN')
    # Page size used by paginated list endpoints.
    PAGINATE_RESULTS_PER_PAGE = 10
class TestConfig(Config):
    """Configuration overrides for the pytest suite."""
    TESTING = True
    PRESERVE_CONTEXT_ON_EXCEPTION = False
    # Pre-issued Auth0 JWTs consumed by tests/conftest.make_auth_header.
    CUSTOMER_TOKEN = getenv('CUSTOMER_TOKEN')
    RESTAURANT_TOKEN = getenv('RESTAURANT_TOKEN')
|
ant1fact/jalapino-api | api/__init__.py | from flask import Flask, redirect
from flask_cors import CORS
from flask_migrate import Migrate
from . import config
def create_app(config=config.Config):
    """Application factory: build and wire up the Flask app.

    :param config: config class/object passed to ``app.config.from_object``.
    :returns: the configured Flask application.
    """
    app = Flask(__name__)
    app.config.from_object(config)
    # Imports are local to avoid circular imports at module load time.
    from .models import db
    db.init_app(app)
    Migrate(app, db)
    CORS(app)
    from .api import api
    app.register_blueprint(api)
    @app.after_request
    def after_request(response):
        # Add permissive CORS headers to every response.
        response.headers.add(
            'Access-Control-Allow-Headers', 'Content-Type, Authorization'
        )
        response.headers.add('Access-Control-Allow-Origin', '*')
        response.headers.add(
            'Access-Control-Allow-Methods', 'GET, POST, PUT, PATCH, DELETE'
        )
        return response
    # @app.route('/')
    # def root():
    #     return redirect('https://jalapino.herokuapp.com', code=302)
    return app
|
ant1fact/jalapino-api | api/api.py | <reponame>ant1fact/jalapino-api
import json
from flask import (
Blueprint,
Response,
abort,
jsonify,
redirect,
render_template,
request,
)
from werkzeug.exceptions import HTTPException
from .auth import AuthError, requires_auth
api = Blueprint('api', __name__)
from .config import Config
from .models import *
def _verify_ownership(Model, id: int, auth0_id: str):
    '''Verify ownership of, and return the resource being requested.

    Aborts with 404 when the resource does not exist and 403 when it exists
    but belongs to a different Auth0 subject.  Models without an auth0_id
    column are returned without an ownership check.
    '''
    resource = Model.query.get_or_404(id)
    if 'auth0_id' not in Model.__dict__:
        # No ownership detected, return resource
        return resource
    if resource.auth0_id != auth0_id:
        abort(403, 'Failed to verify ownership of the requested resource.')
    return resource
def _get_json_or_abort():
    '''Return the request JSON body, aborting with 400 when absent.

    NOTE(review): appears unused within this module chunk -- verify callers
    before removing.
    '''
    req_json = request.get_json()
    if req_json is None:
        abort(400, 'Invalid JSON representation or no data found.')
    return req_json
def _prepare_request_data(Model):
    '''Central logic for preparing data coming from POST, PUT and PATCH requests.

    Returns a validated data dict for the given model, aborting with the
    appropriate HTTP error when the payload is missing or incomplete.
    '''
    if request.method not in {'POST', 'PUT', 'PATCH'}:
        abort(
            405,
            'Invalid method. Allowed methods for this operation are: POST, PUT, PATCH',
        )
    # BUG FIX: a missing/non-dict JSON body previously raised an unhandled
    # TypeError for PUT and PATCH (only POST had a try/except).
    req_json = request.get_json()
    if not isinstance(req_json, dict):
        abort(400, 'Invalid JSON representation or no data found.')
    if request.method == 'POST':
        # Server fills in missing data with defaults, except where
        # column.nullable=False, in which case the data must come from the
        # client.  Raise 400 if any value in the final representation is None.
        data = {**Model.defaults(), **req_json}
        if None in data.values():
            missing_data = [k for k in data if data[k] is None]
            abort(
                400,
                f'Must provide missing data for POST request: {" ,".join(missing_data)}',
            )
        return data
    if request.method == 'PUT':
        # All fields must be supplied by the client.
        # BUG FIX: missing keys were previously computed from the incoming
        # payload only, so fields absent from the request never appeared in
        # the error message.
        missing_data = [
            k for k in Model.defaults() if k not in req_json or not req_json[k]
        ]
        if missing_data:
            abort(
                400,
                f'Must provide missing data for PUT request: {" ,".join(missing_data)}',
            )
        return req_json
    # PATCH: at least one valid key:value pair must survive filtering.
    data = {
        k: v
        for k, v in req_json.items()
        if k in Model.defaults() and v is not None
    }
    if not data:
        abort(
            400,
            f'No valid data was found to perform the PATCH request. \
            Must provide at least one of the following: {" ,".join(Model.defaults())}',
        )
    return data
### API INFO ###
@api.route('/info')
def info():
    '''Return static API metadata (title, version, contact, license).'''
    return {
        'title': 'Jalapino - Final project for Udacity Fullstack Nanodegree',
        'version': 0.1,
        'description': 'Simplified food delivery platform \
where restaurants can post their items and customers can place orders.',
        'contact': {
            'name': '<NAME>',
            'email': '<EMAIL>',
            'url': 'https://github.com/ant1fact/jalapino',
        },
        'license': {'name': 'MIT', 'url': 'https://spdx.org/licenses/MIT.html'},
    }
### AUTH REDIRECTS ###
@api.route('/')
def root():
    '''Redirect to the Auth0 authorize endpoint (implicit token flow).'''
    audience = Config.AUTH0_AUDIENCE
    client_id = Config.AUTH0_CLIENTID
    domain = Config.AUTH0_DOMAIN
    redirect_uri = 'https://jalapino-api.herokuapp.com/callback'
    return redirect(
        f'https://{domain}/authorize?audience={audience}&response_type=token&client_id={client_id}&redirect_uri={redirect_uri}',
        code=302,
    )
@api.route('/callback')
def callback():
    '''Render the page that extracts the JWT from the Auth0 redirect.'''
    return render_template('jwt.html')
@api.route('/logout')
def logout():
    '''Redirect to the Auth0 logout endpoint, returning to the API root.'''
    client_id = Config.AUTH0_CLIENTID
    domain = Config.AUTH0_DOMAIN
    return_to = 'https://jalapino-api.herokuapp.com'
    return redirect(
        f'https://{domain}/v2/logout?client_id={client_id}&returnTo={return_to}',
        code=302,
    )
### RESTAURANTS ###
@api.route('/restaurants')
def get_restaurants():
    '''Return a list of all restaurants paginated.

    Accepts a ``page`` query parameter (1-based, default 1).
    '''
    page = request.args.get('page', 1, type=int)
    restaurants = Restaurant.query.paginate(
        page=page, per_page=Config.PAGINATE_RESULTS_PER_PAGE
    ).items
    return jsonify([r.serialize() for r in restaurants])
@api.route('/restaurants/<int:id>')
def get_restaurant(id: int):
    '''Return a single serialized restaurant or 404.'''
    return Restaurant.query.get_or_404(id).serialize()
@api.route('/restaurants', methods=['POST'])
@requires_auth('create:restaurant')
def create_restaurant(payload: dict):
    '''Create a restaurant owned by the authenticated Auth0 subject.'''
    new_restaurant = Restaurant()
    data = _prepare_request_data(Restaurant)
    new_restaurant.update(data)
    # Bind the resource to the JWT subject for later ownership checks.
    new_restaurant.auth0_id = payload['sub']
    return {'id': new_restaurant.save()}, 201
@api.route('/restaurants/<int:id>', methods=['PUT', 'PATCH'])
@requires_auth('update:restaurant')
def update_restaurant(payload: dict, id: int):
    '''Update an owned restaurant via PUT (full) or PATCH (partial).'''
    restaurant = _verify_ownership(Restaurant, id, auth0_id=payload['sub'])
    data = _prepare_request_data(Restaurant)
    restaurant.update(data)
    restaurant.save()
    return Response(status=200)
@api.route('/restaurants/<int:id>', methods=['DELETE'])
@requires_auth('delete:restaurant')
def delete_restaurant(payload: dict, id: int):
    '''Delete an owned restaurant.'''
    restaurant = _verify_ownership(Restaurant, id, auth0_id=payload['sub'])
    restaurant.delete()
    return Response(status=200)
### CUSTOMERS ###
@api.route('/customers/<int:id>')
@requires_auth('read:customer')
def get_customer(payload: dict, id: int):
    '''Return the authenticated customer's own serialized record.'''
    customer = _verify_ownership(Customer, id, auth0_id=payload['sub'])
    return customer.serialize()
@api.route('/customers', methods=['POST'])
@requires_auth('create:customer')
def create_customer(payload: dict):
    '''Create a customer owned by the authenticated Auth0 subject.'''
    new_customer = Customer()
    data = _prepare_request_data(Customer)
    new_customer.update(data)
    # Bind the resource to the JWT subject for later ownership checks.
    new_customer.auth0_id = payload['sub']
    return {'id': new_customer.save()}, 201
@api.route('/customers/<int:id>', methods=['PATCH'])
@requires_auth('update:customer')
def update_customer(payload: dict, id: int):
    '''Update a customer profile after verifying the caller owns it.'''
    owner = _verify_ownership(Customer, id, auth0_id=payload['sub'])
    owner.update(_prepare_request_data(Customer))
    owner.save()
    return Response(status=200)
@api.route('/customers/<int:id>', methods=['DELETE'])
@requires_auth('delete:customer')
def delete_customer(payload: dict, id: int):
    '''Delete a customer profile after verifying the caller owns it.'''
    owner = _verify_ownership(Customer, id, auth0_id=payload['sub'])
    owner.delete()
    return Response(status=200)
### CATEGORIES ###
@api.route('/restaurants/<int:id>/categories', methods=['POST'])
@requires_auth('create:category')
def create_category(payload: dict, id: int):
    '''Add a new category to a restaurant owned by the caller.'''
    owner_restaurant = _verify_ownership(Restaurant, id, auth0_id=payload['sub'])
    category = Category()
    # Fall back to a placeholder name when the body omits one
    category.name = request.get_json().get('name', 'New Category')
    category.restaurant_id = owner_restaurant.id
    return {'id': category.save()}, 201
@api.route('/categories/<int:id>', methods=['PUT', 'PATCH'])
@requires_auth('update:category')
def update_category(payload: dict, id: int):
    '''Update a category after verifying the caller owns its restaurant.'''
    category = Category.query.get_or_404(id)
    _verify_ownership(Restaurant, id=category.restaurant_id, auth0_id=payload['sub'])
    category.update(_prepare_request_data(Category))
    category.save()
    return Response(status=200)
@api.route('/categories/<int:id>', methods=['DELETE'])
@requires_auth('delete:category')
def delete_category(payload: dict, id: int):
    '''Delete a category after verifying the caller owns its restaurant.'''
    category = Category.query.get_or_404(id)
    _verify_ownership(Restaurant, id=category.restaurant_id, auth0_id=payload['sub'])
    # Detach child items before deleting the category itself
    category.items = []
    category.delete()
    return Response(status=200)
### ITEMS AND INGREDIENTS ###
def _get_or_create_ingredient(ingredient_name: str) -> Ingredient:
    '''Return the Ingredient with this name, creating and saving it if absent.'''
    existing = Ingredient.query.filter_by(name=ingredient_name).first()
    if existing is None:
        existing = Ingredient(name=ingredient_name)
        existing.save()
    return existing
def _process_ingredient_names(item: Item, ingredient_names: list) -> list:
    '''Attach Ingredient objects (created on demand) to *item* and return it.'''
    resolved = []
    for name in ingredient_names:
        # Skip empty strings and None values
        if name:
            resolved.append(_get_or_create_ingredient(name))
    item.ingredients = resolved
    return item
@api.route('/items', methods=['POST'])
def search_items_by_name():
    '''Search items by their name, return results paginated.'''
    body = request.get_json()
    if body is None:
        abort(400, 'No json data found in request body.')
    if 'search_term' not in body:
        abort(400, 'No search_term has been specified.')
    term = body['search_term']
    # An explicit empty search matches nothing
    if term == '':
        return jsonify([])
    page = request.args.get('page', 1, type=int)
    query = Item.query.filter(Item.name.ilike(f'%{term}%'))
    matches = query.paginate(
        page=page, per_page=Config.PAGINATE_RESULTS_PER_PAGE
    ).items
    return jsonify([match.serialize() for match in matches])
@api.route('/items/<int:id>')
def get_item(id: int):
    '''Return a single item by its ID, or 404 if it does not exist.'''
    item = Item.query.get_or_404(id)
    return item.serialize()
@api.route('/categories/<int:id>/items', methods=['POST'])
@requires_auth('create:item')
def create_item(payload: dict, id: int):
    '''Create a new item and its ingredients.'''
    parent_category = Category.query.get_or_404(id)
    _verify_ownership(Restaurant, id=parent_category.restaurant_id, auth0_id=payload['sub'])
    item = Item()
    # Resolve ingredient names first so they get attached to the new item
    ingredient_names = request.get_json().get('ingredients', [])
    if ingredient_names:
        item = _process_ingredient_names(item, ingredient_names)
    # Apply the remaining writable fields from the request body
    item.update(_prepare_request_data(Item))
    item.category_id = id
    return {'id': item.save()}, 201
@api.route('/items/<int:id>', methods=['PATCH', 'PUT'])
@requires_auth('update:item')
def update_item(payload: dict, id: int):
    '''Update an item (and optionally its ingredients) after an ownership check.'''
    target_item = Item.query.get_or_404(id)
    parent_category = Category.query.get_or_404(target_item.category_id)
    _verify_ownership(Restaurant, id=parent_category.restaurant_id, auth0_id=payload['sub'])
    target_item.update(_prepare_request_data(Item))
    ingredient_names = request.get_json().get('ingredients', [])
    if ingredient_names:
        target_item = _process_ingredient_names(target_item, ingredient_names)
    target_item.save()
    return Response(status=200)
@api.route('/items/<int:id>', methods=['DELETE'])
@requires_auth('delete:item')
def delete_item(payload: dict, id: int):
    '''Delete an item after verifying the caller owns its restaurant.'''
    target_item = Item.query.get_or_404(id)
    parent_category = Category.query.get_or_404(target_item.category_id)
    _verify_ownership(Restaurant, id=parent_category.restaurant_id, auth0_id=payload['sub'])
    # Detach ingredients before deleting the item itself
    target_item.ingredients = []
    target_item.delete()
    return Response(status=200)
@api.route('/ingredients/<int:id>/items')
def get_items_by_ingredient(id: int):
    '''List all items that contain the given ingredient.'''
    ingredient = Ingredient.query.get_or_404(id)
    serialized = [item.serialize() for item in ingredient.items]
    return jsonify(serialized)
### ORDERS ###
def _get_item_restaurant_id(item: Item) -> int:
    '''Return the restaurant id owning this item's category (404 if the category is gone).'''
    parent = Category.query.get_or_404(item.category_id)
    return parent.restaurant_id
def _assert_same_restaurant(items: list) -> bool:
    '''Check if all Item objects in the list are coming from the same restaurant.

    Returns False for an empty list or when any element is not an Item.
    The input list is left unmodified.
    '''
    # Return false if the list is empty, or if the contained items are not of type Item
    if not items or not all(isinstance(item, Item) for item in items):
        return False
    # Compare every item's restaurant against the first one's WITHOUT popping
    # from the caller's list: the previous items.pop(0) mutated the list built
    # in _bulk_fetch_items, silently dropping the first ordered item and
    # raising IndexError in create_order for single-item orders.
    restaurant_id = _get_item_restaurant_id(items[0])
    return all(_get_item_restaurant_id(item) == restaurant_id for item in items[1:])
def _bulk_fetch_items(item_ids: list) -> list:
    '''Convert a list of item ids to a list of Item objects.

    Aborts with 400 when an id is not an integer or the referenced
    items do not all belong to the same restaurant.
    '''
    if any(not isinstance(id, int) for id in item_ids):
        abort(
            400,
            'Invalid type found in list of item ids. \
            Make sure the list contains integer ids only.',
        )
    fetched = [Item.query.get_or_404(item_id) for item_id in item_ids]
    if not _assert_same_restaurant(fetched):
        abort(
            400,
            'Multiple restaurants referenced in the same order. \
            Make sure all ordered items come from the same restaurant.',
        )
    return fetched
@api.route('/customers/<int:id>/orders', methods=['POST'])
@requires_auth('create:order')
def create_order(payload: dict, id: int):
    '''Create new order, taking customer_id, restaurant_id and a list of items as item ids.'''
    _verify_ownership(Customer, id, auth0_id=payload['sub'])
    order_data = _prepare_request_data(Order)
    order_data['customer_id'] = id
    if not order_data.get('items', []):
        abort(400, 'No item ids have been specified.')
    order = Order()
    order.items = _bulk_fetch_items(order_data.pop('items'))
    # All items come from one restaurant (checked above), so take it from the first
    order.restaurant_id = _get_item_restaurant_id(order.items[0])
    order.update(order_data)
    return {'id': order.save()}, 201
@api.route('/customers/<int:id>/orders')
@requires_auth('read:order')
def get_customer_orders(payload: dict, id: int):
    '''Get the customer's order history.'''
    owner = _verify_ownership(Customer, id, auth0_id=payload['sub'])
    return jsonify([order.serialize() for order in owner.orders])
@api.route('/restaurants/<int:id>/orders')
@requires_auth('read:order')
def get_restaurant_orders(payload: dict, id: int):
    '''Get the restaurant's order history.'''
    owner = _verify_ownership(Restaurant, id, auth0_id=payload['sub'])
    return jsonify([order.serialize() for order in owner.orders])
### ERROR HANDLING ###
# Handles all error codes as opposed to the minimum 4 required by the project spec
# https://flask.palletsprojects.com/en/2.0.x/errorhandling/#generic-exception-handlers
@api.errorhandler(HTTPException)
def handle_exception(e):
    '''Return JSON instead of HTML for HTTP errors.'''
    # Keep status code and headers from the error's own response object
    response = e.get_response()
    error_body = {'code': e.code, 'name': e.name, 'description': e.description}
    response.data = json.dumps(error_body)
    response.content_type = 'application/json'
    return response
# https://flask.palletsprojects.com/en/1.1.x/api/#flask.Flask.errorhandler
@api.errorhandler(AuthError)
def handle_auth_error(e):
    '''Return the auth error payload together with its HTTP status code.'''
    body = e.error
    status = e.status_code
    return body, status
|
ant1fact/jalapino-api | api/models.py | from flask_sqlalchemy import Model, SQLAlchemy
from sqlalchemy import inspect
# Fallback logo shown for restaurants that do not provide their own
DEFAULT_LOGO_URI = 'https://raw.githubusercontent.com/ant1fact/jalapino/fullstack/static/images/jalapino_150x150.png'
# Columns that must never be mass-assigned through CRUDModel.update()
PROTECTED_COLUMN_NAMES = {'id', 'auth0_id'}
class CRUDModel(Model):
    '''Base model adding save / update / delete helpers plus default introspection.

    Mutating helpers roll back on failure and close the session afterwards
    (sessions use expire_on_commit=False, so returned objects stay usable).
    '''
    def save(self) -> int:
        '''Save changes to an existing record or add a new record to the database
        if it didn't exist and return its ID on successful creation, otherwise None.'''
        try:
            # Only objects not yet attached to a session need an explicit add()
            if inspect(self).transient:
                db.session.add(self)
            db.session.commit()
            return self.id
        except Exception:
            db.session.rollback()
            # Return None as documented; previously the exception object itself
            # was returned, which is truthy and misled callers checking the result.
            return None
        finally:
            db.session.close()
    def update(self, data: dict):
        '''Update row data based on a dictionary, ignoring protected/unknown keys.'''
        # Hoisted out of the loop: the writable column set is per-class,
        # not per-key, so computing it once per call is enough.
        mapper = inspect(self.__class__)
        column_names = {
            c.name for c in mapper.columns if c.name not in PROTECTED_COLUMN_NAMES
        }
        for key, value in data.items():
            if key in column_names:
                # It is important to use setattr() instead of directly setting the value
                # on the self, otherwise the changes cannot be tracked by SQLAlchemy
                setattr(self, key, value)
    def delete(self) -> int:
        '''Delete record from database and return ID if successful, otherwise None.'''
        # Capture the id before the row disappears
        deleted_id = self.id
        try:
            self.__class__.query.filter_by(id=self.id).delete()
            db.session.commit()
            # Return the ID as the docstring promises (previously nothing was
            # returned on success and the exception object on failure).
            return deleted_id
        except Exception:
            db.session.rollback()
            return None
        finally:
            db.session.close()
    @classmethod
    def defaults(cls):
        '''Return a dict of <column name>:<default value> pairs for the given Model.
        If no default value is specified, the value will be None instead.'''
        mapper = inspect(cls)
        def _get_column_default_value(column):
            # ColumnDefault stores the literal in .arg; columns without a
            # client-side default fall through to None.
            try:
                return column.default.arg
            except AttributeError:
                return None
        return {
            col.name: _get_column_default_value(col)
            for col in mapper.columns
            if col.name not in PROTECTED_COLUMN_NAMES and not col.foreign_keys
        }
# Shared SQLAlchemy instance; all models below inherit the CRUD helpers above.
# autoflush off + expire_on_commit off keep objects usable after session close.
db = SQLAlchemy(
    model_class=CRUDModel,
    session_options={'autoflush': False, 'expire_on_commit': False},
)
class Restaurant(db.Model):
    '''A restaurant profile with its menu categories and received orders.'''
    __tablename__ = 'restaurants'
    # Auth0 subject identifier of the owning account (never serialized)
    auth0_id = db.Column(db.String(50), nullable=False)
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    logo_uri = db.Column(db.String(250), default=DEFAULT_LOGO_URI)
    description = db.Column(db.String(250), default='')
    address = db.Column(db.String(250), nullable=False)
    email = db.Column(db.String(100), nullable=False)
    phone = db.Column(db.String(50), nullable=False)
    website = db.Column(db.String(250), default='')
    # Deleting a restaurant also deletes its categories (delete-orphan cascade)
    categories = db.relationship(
        'Category', backref='restaurant', cascade='all, delete-orphan', lazy=False
    )
    orders = db.relationship('Order', backref='restaurant')
    def serialize(self):
        '''Return a JSON-serializable dict of public restaurant data.'''
        # Do not return orders as they are sensitive data
        return {
            'id': self.id,
            'name': self.name,
            'phone': self.phone,
            'address': self.address,
            'email': self.email,
            'logo_uri': self.logo_uri,
            'description': self.description,
            'website': self.website,
            'categories': [c.serialize() for c in self.categories],
        }
class Customer(db.Model):
    '''A customer profile with contact details and order history.'''
    __tablename__ = 'customers'
    # Auth0 subject identifier of the owning account (never serialized)
    auth0_id = db.Column(db.String(50), nullable=False)
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    email = db.Column(db.String(100), nullable=False)
    phone = db.Column(db.String(50), nullable=False)
    address = db.Column(db.String(250), nullable=False)
    orders = db.relationship('Order', backref='customer')
    def serialize(self):
        '''Return a JSON-serializable dict including the full order history.'''
        return {
            'id': self.id,
            'name': self.name,
            'email': self.email,
            'phone': self.phone,
            'address': self.address,
            'orders': [o.serialize() for o in self.orders],
        }
class Category(db.Model):
    '''Item category for visual sorting of items in a restaurant's item list.'''
    __tablename__ = 'categories'
    id = db.Column(db.Integer, primary_key=True)
    # Deleting a category also deletes its items (delete-orphan cascade)
    items = db.relationship(
        'Item', backref='category', cascade='all, delete-orphan', lazy=False
    )
    name = db.Column(db.String(50), nullable=False)
    restaurant_id = db.Column(
        db.Integer, db.ForeignKey('restaurants.id'), nullable=False
    )
    def serialize(self):
        '''Return a JSON-serializable dict including nested items.'''
        return {
            'id': self.id,
            'name': self.name,
            'items': [item.serialize() for item in self.items],
        }
# Association table for the many-to-many relation between items and ingredients
items_ingredients = db.Table(
    'items_ingredients',
    db.Column('item_id', db.Integer, db.ForeignKey('items.id'), primary_key=True),
    db.Column(
        'ingredient_id', db.Integer, db.ForeignKey('ingredients.id'), primary_key=True
    ),
)
# Association table for the many-to-many relation between orders and items
orders_items = db.Table(
    'orders_items',
    db.Column('orders_id', db.Integer, db.ForeignKey('orders.id'), primary_key=True),
    db.Column('item_id', db.Integer, db.ForeignKey('items.id'), primary_key=True),
)
class Item(db.Model):
    '''Items that can be ordered from a restaurant, such as food and drinks.'''
    __tablename__ = 'items'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), nullable=False)
    description = db.Column(db.String(250), default='')
    # Fixed-point price: precision 5, scale 2 (values up to 999.99)
    price = db.Column(db.Numeric(5, 2), nullable=False)
    category_id = db.Column(db.Integer, db.ForeignKey('categories.id'), nullable=False)
    # Many-to-many via items_ingredients; eagerly loaded in both directions
    ingredients = db.relationship(
        'Ingredient',
        secondary=items_ingredients,
        lazy=False,
        backref=db.backref('items', lazy=False),
    )
    def serialize(self):
        '''Return a JSON-serializable dict including nested ingredients.'''
        return {
            'id': self.id,
            'name': self.name,
            'description': self.description,
            'price': self.price,
            'ingredients': [i.serialize() for i in self.ingredients],
        }
class Ingredient(db.Model):
    '''A single ingredient name, shared across items (unique per name).'''
    __tablename__ = 'ingredients'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(20), unique=True, nullable=False)
    def serialize(self):
        '''Return a JSON-serializable dict of this ingredient.'''
        return {'id': self.id, 'name': self.name}
class Order(db.Model):
    '''An order placed by a customer at a single restaurant.'''
    __tablename__ = 'orders'
    id = db.Column(db.Integer, primary_key=True)
    customer_id = db.Column(db.Integer, db.ForeignKey('customers.id'), nullable=False)
    restaurant_id = db.Column(
        db.Integer, db.ForeignKey('restaurants.id'), nullable=False
    )
    # Ordered items via the orders_items association table
    items = db.relationship(
        'Item',
        secondary=orders_items,
        lazy='subquery',
        backref=db.backref('orders', lazy=True),
    )
    def serialize(self):
        '''Return a JSON-serializable dict including the ordered items.'''
        return {
            'id': self.id,
            'customer_id': self.customer_id,
            'restaurant_id': self.restaurant_id,
            'items': [item.serialize() for item in self.items],
        }
|
0000Blaze/Smart-Attendance | server/initiallInsertsForDb.py | import csv
import insertdb
import cv2
import numpy as np
####################################department insertion##########################
# Seed data: department id -> department name
departmentDb = {
    "1": "Department of Architecture",
    "2": "Department of Civil Engineering",
    "3": "Department of Electrical Engineering",
    "4": "Department of Mechanical Engineering",
    "5": "Department of Electronics and Computer Engineering"
}
# Insert one row per department
for deptId, deptName in departmentDb.items():
    insertdb.insertDepartment(deptId, deptName)
###################################subject insertion####################################
# Seed data: subject code -> subject name for the BCT programme.
# NOTE(review): "Computr Programming" looks like a typo in the seed data —
# left unchanged because other records may reference the exact string.
subjectBCT = {
    "SH401": "Engineering Mathematics I",
    "CT402": "Computr Programming",
    "ME401": "Engineering Drawing I",
    "SH402": "Engineering Physics",
    "CE401": "Applied Mechanics",
    "EE401": "Basic Electrical Engineering",
    "SH451": "Engineering Mathematics II",
    "ME451": "Engineering Drawing II",
    "EX451": "Basic Electronics Engineering",
    "SH453": "Engineering Chemistry",
    "ME452": "Fundamental of Thermodynamics and Heat Transfer",
    "ME453": "Workshop Technology",
    "SH501": "Engineering Mathematics III",
    "CT501": "Object Oriented Programming",
    "CT502": "Theory of Computation",
    "EE501": "Electric Circuit Theory",
    "EX501": "Electronic Devices and Circuits",
    "EX502": "Digital Logic",
    "EX503": "Electromagnetics",
    "SH551": "Applied Mathematics",
    "SH553": "Numerical Methods",
    "EE552": "Instrumentation I",
    "EE554": "Electrical Machines",
    "CT551": "Discrete Structure",
    "CT552": "Data Structure and Algorithm",
    "EX551": "Microprocessor",
    "SH601": "Communication English",
    "SH602": "Probability and Statics",
    "CT601": "Software Engineering",
    "CT602": "Data Communication",
    "CT603": "Computer Organization and Architecture",
    "EX602": "Instrumentation II",
    "EX603": "Computer Graphics",
    "CE655": "Engineering Economics",
    "CT651": "Object Oriented Analysis and Design",
    "CT652": "Database Management System",
    "CT653": "Artificial Intelligence",
    "CT655": "Embedded System",
    "CT656": "Operating System",
    "CT654": "Minor Project",
    "ME708": "Organization and Management",
    "EX701": "Energy Environment and Society",
    "CT701": "Project Management",
    "CT702": "Computer Network",
    "CT703": "Distrubuted System",
    "CT704": "Digital Signal Analysis and Processing",
    "CT725": "Elective I",
    "CT707": "Project(Part A)",
    "CE752": "Professional Practice",
    "CT751": "Information Systems",
    "CT753": "Simulation and Modelling",
    "CT754": "Internet and Intranet",
    "CT765": "Elective II",
    "CT785": "Elective III",
    "CT755": "Project(Part B)"
}
# Insert one row per subject
for subjectCode, subjectName in subjectBCT.items():
    insertdb.insertSubject(subjectCode, subjectName)
##########################################class insertion##############################
# Seed data: class id -> [class name, department id, semester]
classDB = {
    "PUL074BCTAB": ["PUL074BCTAB","5",8],
    "PUL074BEXAB": ["PUL074BEXAB","5",8],
    "PUL074BELAB": ["PUL074BELAB","3",8],
    "PUL075BCTAB": ["PUL075BCTAB","5",6],
    "PUL075BCTCD": ["PUL075BCTCD","5",6],
    "PUL075BEXAB": ["PUL075BEXAB","5",6],
    "PUL075BELAB": ["PUL075BELAB","3",6],
    "PUL076BCTAB": ["PUL076BCTAB","5",4],
    "PUL076BCTCD": ["PUL076BCTCD","5",4],
    "PUL076BEXAB": ["PUL076BEXAB","5",4],
    "PUL076BELAB": ["PUL076BELAB","3",4],
    "PUL077BCTAB": ["PUL077BCTAB","5",2],
    "PUL077BCTCD": ["PUL077BCTCD","5",2],
    "PUL077BEXAB": ["PUL077BEXAB","5",2],
    "PUL077BELAB": ["PUL077BELAB","3",2]
}
# Insert one row per class, unpacking [name, department, semester]
for classId, (className, deptId, semester) in classDB.items():
    insertdb.insertClass(classId, className, deptId, semester)
#
################################# face embedding calculation ###########################################
# NOTE(review): the block below is disabled one-off code kept for reference.
# It computed face embeddings with the face_recognition package (not imported
# in this file) and dumped them to embeddingDataCD.txt, which is read below.
'''
listEmbedding =[]
for i in range(52):
    if (i+49) == 87 or (i+49) == 73 or (i+49) == 99 or (i+49) == 97 : #skip dropouts and section AB added students
        continue
    elif i == 51:
        iterator = str(i+49)
    else:
        iterator = "0"+str(i+49)
    try:
        imag = face_recognition.load_image_file("/home/rohan/Documents/minorProject2022/data/PUL075BCT"+iterator+".jpg")
    except:
        try:
            imag = face_recognition.load_image_file("/home/rohan/Documents/minorProject2022/data/PUL075BCT"+iterator+".jpeg")
        except:
            imag = face_recognition.load_image_file("/home/rohan/Documents/minorProject2022/data/PUL075BCT"+iterator+".png")
    imag = cv2.cvtColor(imag, cv2.COLOR_BGR2RGB)
    encodingsTest = face_recognition.face_encodings(imag)[0]
    listEmbedding.append(encodingsTest)
#saving data of face embeddings in a text file
#np.savetxt('embeddingDataCD.txt',listEmbedding,delimiter="\n", fmt="%s")
'''
# Load the precomputed embeddings; the context manager guarantees the file
# handle is closed (the previous open() leaked it).
with open('embeddingDataCD.txt') as file:
    listEmbedding = file.readlines()
# Sliding window [i:j]: each student owns the next 128 lines of the file
i = 0
j = 128
#################################csv file reading and insert in student db#############################
# Read the class roster and insert each student with their 128-line slice
# of the embedding list loaded above.
# NOTE(review): hard-coded absolute path — breaks on any other machine.
# opening the CSV file
with open('/home/rohan/Documents/minorProject2022/server/PUL075BCTCD.csv', mode='r') as file:
    # reading the CSV file
    csvFile = csv.DictReader(file)
    # insert the contents of the CSV file
    for lines in csvFile:
        insertdb.insertStudent(
            lines['RollNo'], lines['Name'], "PUL075BCTCD", "5", listEmbedding[i:j])
        # advance the window to the next student's 128 embedding values
        i = i+128
        j = j+128
####################### teacher insertion #############
# NOTE(review): "<NAME>" looks like a redacted placeholder — fill in the real name.
insertdb.insertTeacher("001", "<NAME>", "5")
################# insert into teaches ###########################
# Teacher 001 teaches CT652 (Database Management System) to PUL075BCTCD, semester 6
insertdb.insertIntoTeaches("001","PUL075BCTCD","CT652","6")
############### insert into admin ############################
# Default admin credentials (NOTE(review): stored in plaintext — confirm intended)
insertdb.insertAdmin("aman", "12")
|
0000Blaze/Smart-Attendance | teacherApp/teacher.py | from server import client_teacher
from numpy import spacing
from kivymd.app import MDApp
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.button import MDRaisedButton
from kivymd.uix.label import MDLabel
from kivy.uix.image import Image
from kivymd.uix.textfield import MDTextField
from kivymd.uix.datatables import MDDataTable
from kivy.metrics import dp
from kivy.uix.screenmanager import ScreenManager, Screen
class LoginWindow(Screen):
    '''Login screen; its widgets are attached in MainApp.build().'''
    pass
class AttendanceWindow(Screen):
    '''Attendance screen; its widgets are attached in MainApp.build().'''
    pass
# Shared UI palette (RGBA)
cardColor = [0.796875, 0.8984375, 0.99609375, 1]
textColor = [0, 0, 0, 1]
backgroundColor = [0.59765625, 0.8046875, 0.99609375, 1]
# Global screen manager with both app screens registered up front
sm = ScreenManager()
sm.add_widget(LoginWindow(name="login"))
sm.add_widget(AttendanceWindow(name="attendance"))
class MainApp(MDApp):
    '''Teacher-side Smart Attendance application (KivyMD).

    Flow: enter teacher + class id -> fetch subject from the server ->
    start an attendance session -> monitor a live presence table with
    refresh / manual-mark / stop controls.
    '''
    # --- session state shared across screens and callbacks ---
    teacherId = ""
    classId = ""
    className = ""
    classList = []
    subjectId = ""
    subjectname = ""
    subjectList = []
    # Server-issued code that students enter to register their presence
    attendanceId = ""
    # roll number -> [student name, "Present"/"Absent"]
    attendanceList = {}
    # Roll numbers ticked in the table, waiting for manual marking
    attendanceToBeDone = []
    attendanceListMini = []
    # Widgets rebuilt on every refresh (see widgetRemover / load_table)
    data_tables = None
    stop_btn = None
    present_btn = None
    def __init__(self, **kwargs):
        # Set the window title before MDApp initialization
        self.title = "Smart Attendance"
        super().__init__(**kwargs)
    def build(self):
        '''Assemble the login and attendance screens; return the ScreenManager.'''
        loginBg = MDBoxLayout()
        loginBg.md_bg_color = backgroundColor
        imageLayout = MDBoxLayout(size_hint=(0.15, 0.15),
                                  pos_hint={'center_x': .5, 'center_y': .9})
        imageObj = Image(source="./assets/icon.png")
        imageLayout.add_widget(imageObj)
        # Rounded card that holds the whole login form
        smallCard = MDBoxLayout()
        smallCard.md_bg_color = cardColor
        smallCard.size_hint = (0.5, 0.65)
        smallCard.radius = [40, 40, 40, 40]
        smallCard.orientation = "vertical"
        smallCard.pos_hint = {'center_x': .5, 'center_y': .5}
        teacherIDBox = MDBoxLayout()
        teacherIDBox.pos_hint = {'center_x': .5, 'center_y': .5}
        teacherIDBox.orientation = 'vertical'
        teacherIDBox.adaptive_height = True
        teacherIDBox.size_hint = (0.5, 1.0)
        self.teacherIDInp = MDTextField(hint_text="Teacher Id:")
        self.teacherIDInp.color_mode = "custom"
        self.teacherIDInp.line_color_normal = textColor
        self.teacherIDInp.line_color_focus = textColor
        self.teacherIDInp.hint_text_color = textColor
        self.teacherIDInp.pos_hint = {'center_x': .5, 'center_y': .2}
        self.classIDInp = MDTextField(hint_text="Class Id:")
        self.classIDInp.hint_text = "Class Id:"
        self.classIDInp.color_mode = "custom"
        self.classIDInp.line_color_normal = textColor
        self.classIDInp.line_color_focus = textColor
        self.classIDInp.hint_text_color = textColor
        self.classIDInp.pos_hint = {'center_x': .5, 'center_y': .2}
        teacherIDLabel = MDLabel(text="Teacher Id:")
        teacherIDLabel.size_hint = (1, 0.2)
        classIDLabel = MDLabel(text="Class Id:")
        classIDLabel.size_hint = (1, 0.2)
        teacherIDBox.add_widget(teacherIDLabel)
        teacherIDBox.add_widget(self.teacherIDInp)
        teacherIDBox.add_widget(classIDLabel)
        teacherIDBox.add_widget(self.classIDInp)
        connectButton = MDRaisedButton(text="Connect")
        connectButton.pos_hint = {'center_x': .5, 'center_y': .5}
        connectButton.bind(on_press=self.connectCallback)
        subjectNameLayout = MDBoxLayout(orientation="vertical")
        subjectNameLayout.pos_hint = {'center_x': .5, 'center_y': .5}
        subjectNameLayout.size_hint = (0.65, 0.5)
        subjectNameLayout.adaptive_height = True
        self.teacherName = MDLabel(pos_hint={'center_x': .5, 'center_y': .5})
        self.teacherName.text = ""
        self.subjectName = MDLabel(pos_hint={'center_x': .5, 'center_y': .5})
        self.subjectName.text = "No Connection"
        subjectNameLayout.add_widget(self.teacherName)
        subjectNameLayout.add_widget(self.subjectName)
        startButton = MDRaisedButton(text="Start")
        startButton.pos_hint = {'center_x': .5, 'center_y': .5}
        startButton.bind(on_release=self.startCallback)
        smallCard.add_widget(teacherIDBox)
        smallCard.add_widget(connectButton)
        smallCard.add_widget(subjectNameLayout)
        smallCard.add_widget(startButton)
        smallCard.add_widget(MDLabel(size_hint=(1, 0.2)))
        loginScreen = sm.get_screen("login")
        loginScreen.add_widget(loginBg)
        loginScreen.add_widget(imageLayout)
        loginScreen.add_widget(smallCard)
        ###########################################################################
        # Attendance screen: status/code labels plus back & refresh controls
        attendanceBg = MDBoxLayout()
        attendanceBg.md_bg_color = backgroundColor
        attendanceBox = MDBoxLayout(orientation="vertical")
        attendanceBox.pos_hint = {"center_x": .5, "center_y": .9}
        attendanceBox.size_hint = (0.9, 0.2)
        # attendanceBox.adaptive_height = True
        # attendanceBox = MDBoxLayout(spacing="40dp")
        # attendanceBox.md_bg_color = [1, 0, 0, 1]
        attendanceInnerBox1 = MDBoxLayout(orientation="horizontal")
        attendanceInnerBox1.size_hint = (1, 0.5)
        self.attendanceTextLabel = MDLabel(
            text="Attendance will time out in 10 minutes")
        backButton = MDRaisedButton(text="Back")
        backButton.bind(on_press=self.backCallback)
        attendanceInnerBox2 = MDBoxLayout(orientation="horizontal")
        attendanceInnerBox2.size_hint = (1, 0.5)
        self.attendanceCodeLabel = MDLabel(text="Attendance Code :")
        refreshButton = MDRaisedButton(text="Refresh")
        refreshButton.bind(on_press=self.refreshCallback)
        attendanceInnerBox1.add_widget(self.attendanceTextLabel)
        attendanceInnerBox1.add_widget(backButton)
        attendanceInnerBox2.add_widget(self.attendanceCodeLabel)
        attendanceInnerBox2.add_widget(refreshButton)
        attendanceBox.add_widget(attendanceInnerBox1)
        attendanceBox.add_widget(attendanceInnerBox2)
        # stopAttendanceButton = MDRaisedButton(text="Stop Attendance")
        # stopAttendanceButton.bind(on_press=self.stopAttendanceCallback)
        attendanceScreen = sm.get_screen("attendance")
        attendanceScreen.add_widget(attendanceBg)
        attendanceScreen.add_widget(attendanceBox)
        # attendanceScreen.add_widget(stopAttendanceButton)
        return sm
    def getSubject(self):
        '''Fetch the teacher's name and first subject for self.teacherId.'''
        try:
            subjectListFromServer = client_teacher.updateClassAndSubjects(
                self.teacherId)
            # # get subject list of each class taught by teacher
            # for i in subjectListFromServer["subject"]:
            #     self.subjectList.append(i)
            # print(subjectList)
            if "error" not in subjectListFromServer:
                # print(subjectListFromServer["teacher_name"])
                # NOTE(review): only the first returned subject is used — confirm intended
                self.subjectId = subjectListFromServer["subject"][0][0]
                self.subjectname = subjectListFromServer["subject"][0][1]
                self.teacherName.text = str(
                    "Welcome, ") + str(subjectListFromServer["teacher_name"])
                # print(self.subjectId)
        except Exception as e:
            print("Subject retrival error", e)
            pass
    def startAttendanceSheet(self):
        '''Open a session on the server; seed attendanceList with everyone Absent.'''
        try:
            AttendanceListFromServer = client_teacher.startAttendance(
                self.teacherId, self.classId, self.subjectId)
            print(self.teacherId, self.classId, self.subjectId)
            if "error" in AttendanceListFromServer:
                print(AttendanceListFromServer["error"])
                self.attendanceTextLabel.text = AttendanceListFromServer["error"]
            else:
                # save attendance code
                self.attendanceId = AttendanceListFromServer["acode"]
                for list in AttendanceListFromServer["student_list"]:
                    #print(list[0], list[1])
                    presence = "Absent"
                    presenceList = [list[1], presence]
                    self.attendanceList[list[0]] = presenceList
                print(AttendanceListFromServer["timeout"])
                self.attendanceTextLabel.text = AttendanceListFromServer["timeout"]
        except Exception as e:
            print("error :", e)
    def updateAttendanceSheet(self):
        '''Pull current presence from the server and rebuild the table.'''
        try:
            AttendanceListFromServer = client_teacher.getAttendance(
                self.teacherId, self.classId)
            if "error" in AttendanceListFromServer:
                print(AttendanceListFromServer["error"])
            else:
                # mark every returned roll number as present
                keys = AttendanceListFromServer["student_list"]
                # print(keys)
                for key in keys:
                    self.attendanceList[key][1] = "Present"
            self.widgetRemover()  # removes old instance of datatable, stop and present button
            attendanceScreen = sm.get_screen("attendance")
            # adds data table, stop and present button
            self.load_table(attendanceScreen)
        except Exception as e:
            print(e)
    def finalAttendanceSheet(self, *args):
        '''Close the session on the server and show the final status message.'''
        try:
            AttendanceListFromServer = client_teacher.stopAttendance(
                self.teacherId, self.classId)
            if "error" in AttendanceListFromServer:
                print(AttendanceListFromServer["error"])
                self.attendanceTextLabel.text = AttendanceListFromServer["error"]
            else:
                print(AttendanceListFromServer["success"])
                self.attendanceTextLabel.text = AttendanceListFromServer["success"]
        except Exception as e:
            print(e)
    def manualPresent(self, *args):
        '''Mark every ticked roll number as present on the server, then refresh.'''
        try:
            for text in self.attendanceToBeDone:
                print("Done presence", text)
                client_teacher.markAttendance(
                    self.teacherId, self.classId, text)
        except:
            print("some error occured during manual attendance")
        # empty selected check for presence
        while len(self.attendanceToBeDone) > 0:
            self.attendanceToBeDone.pop()
        # print("After",self.attendanceToBeDone)
        self.updateAttendanceSheet()
    def startCallback(self, *args):
        '''Start button: open a session and switch to the attendance screen.'''
        self.startAttendanceSheet()
        self.attendanceCodeLabel.text = "Attendance Code: " + \
            str(self.attendanceId)
        sm.current = "attendance"
        attendanceScreen = sm.get_screen("attendance")
        self.load_table(attendanceScreen)
    def connectCallback(self, *args):
        '''Connect button: read the ids from the form and fetch the subject.'''
        self.classIDInp.text = self.classIDInp.text.upper()
        self.teacherId = self.teacherIDInp.text
        self.classId = self.classIDInp.text
        self.getSubject()
        # print("searching subject for", self.teacherID, self.classID)
        self.subjectName.text = str(self.subjectname)
    def stopAttendanceCallback(self, *args):
        '''Handler for the commented-out Stop Attendance button (currently unused).'''
        self.finalAttendanceSheet()
        self.attendanceCodeLabel.text = "Attendance Code: "
        pass
    def backCallback(self, *args):
        '''Back button: return to the login screen.'''
        sm.current = "login"
    def refreshCallback(self, *args):
        '''Refresh button: re-fetch presence and rebuild the table.'''
        self.updateAttendanceSheet()
        self.widgetRemover()
        aScreen = sm.get_screen("attendance")
        self.load_table(aScreen)
    def stopCallback(self, *args):
        '''Stop button inside the table view: submit/close the session.'''
        self.finalAttendanceSheet()
        pass
    def presentCallback(self, *args):
        '''Mark Present button: push manual marks, then rebuild the table.'''
        self.manualPresent()
        attendanceScreen = sm.get_screen("attendance")
        self.widgetRemover()
        self.load_table(attendanceScreen)
        pass
    def load_table(self, aScreen):
        '''Build the MDDataTable plus Stop / Mark Present buttons on aScreen.'''
        # Flatten attendanceList into [roll, name, presence, roll, name, presence, ...]
        AttendListMini = []
        for key in self.attendanceList:
            AttendListMini.append(key)
            AttendListMini.append(self.attendanceList[key][0])
            AttendListMini.append(self.attendanceList[key][1])
        self.data_tables = MDDataTable(
            pos_hint={'center_y': 0.5, 'center_x': 0.5},
            size_hint=(0.7, 0.6),
            rows_num=48,
            check=True,
            # use_pagination=True,
            column_data=[
                ("Roll Number", dp(40)),
                ("Student", dp(30)),
                ("Presence", dp(30)), ],
            row_data=[
                (AttendListMini[i*3], AttendListMini[(i*3)+1],
                 AttendListMini[(i*3)+2])
                for i in range(int(len(AttendListMini)/3))
                # (f"{i + 1}", "2.23", "3.65")
                # for i in range(50)
            ],
        )
        self.data_tables.bind(on_check_press=self.check_press)
        self.stop_btn = MDRaisedButton(
            text="Stop",
            pos_hint={'center_y': 0.1, 'center_x': 0.6}
        )
        self.stop_btn.bind(on_press=self.stopCallback)
        self.present_btn = MDRaisedButton(
            text="Mark Present",
            pos_hint={'center_y': 0.1, 'center_x': 0.3}
        )
        self.present_btn.bind(on_press=self.presentCallback)
        aScreen.add_widget(self.data_tables)
        aScreen.add_widget(self.stop_btn)
        aScreen.add_widget(self.present_btn)
        # return layout
    def check_press(self, instance_table, current_row):
        '''Row checkbox handler: queue the roll number for manual marking.'''
        print(current_row)
        self.attendanceToBeDone.append(current_row[0])
    def widgetRemover(self):
        '''Remove the table and its buttons so load_table can rebuild them fresh.'''
        attendanceScreen = sm.get_screen("attendance")
        attendanceScreen.remove_widget(self.data_tables)
        attendanceScreen.remove_widget(self.stop_btn)
        attendanceScreen.remove_widget(self.present_btn)
        # attendanceScreen.clear_widgets()
        # attendanceScreen.add_widget(MDLabel(text="hi ravi"))
if __name__ == "__main__":
    # Launch the teacher app only when executed directly
    MainApp().run()
|
0000Blaze/Smart-Attendance | teacherApp/server/communication_json.py | <reponame>0000Blaze/Smart-Attendance
import struct
import json
def readall(_socket):
    '''Read one length-prefixed JSON message from the socket and return it parsed.

    The wire format is a 2-byte big-endian length followed by that many bytes
    of UTF-8 JSON (see convert2send). Raises ConnectionError if the peer
    closes the connection before a full message has arrived — previously a
    zero-byte recv() left `remaining_bytes` unchanged and looped forever.
    '''
    # recv() may return fewer bytes than requested, so loop for the header too
    header = b''
    while len(header) < 2:
        chunk = _socket.recv(2 - len(header))
        if not chunk:
            raise ConnectionError('socket closed while reading message header')
        header += chunk
    (payload_length,) = struct.unpack('>H', header)  # big endian format
    read_buffer = b''
    remaining_bytes = payload_length
    while remaining_bytes > 0:
        buff = _socket.recv(remaining_bytes)
        if not buff:
            raise ConnectionError('socket closed while reading message payload')
        read_buffer += buff
        remaining_bytes -= len(buff)
    return json.loads(read_buffer)
def convert2send(data):
    '''Serialize *data* as JSON prefixed with a 2-byte big-endian length.'''
    encoded_json = json.dumps(data)
    length_prefix = struct.pack('>H', len(encoded_json))
    return length_prefix + encoded_json.encode()
def convertAndSend(data, sock):
    '''Send *data* over *sock* as length-prefixed JSON (2-byte big-endian length).'''
    encoded_json = json.dumps(data)
    wire_message = struct.pack('>H', len(encoded_json)) + encoded_json.encode()
    sock.sendall(wire_message)
def convertSendClose(data, sock):
    '''Send *data* as length-prefixed JSON, then close the socket.'''
    encoded_json = json.dumps(data)
    wire_message = struct.pack('>H', len(encoded_json)) + encoded_json.encode()
    sock.sendall(wire_message)
    sock.close()
|
0000Blaze/Smart-Attendance | teacherApp/server/client_teacher.py | import json
import socket
from typing import final
from server import communication_json
#from communication_json import convert2send, readall
# Attendance server endpoint (localhost by default; LAN addresses kept for reference)
ATTENDANCE_SERVER = {'host': '127.0.0.1', 'port': 60001}
#ATTENDANCE_SERVER = {'host': '192.168.1.65', 'port': 60001}
#ATTENDANCE_SERVER = {'host': '192.168.152.108', 'port': 60001}
SERVER_TIMEOUT = 30  # timeout after 30 seconds if server didn't respond
def sendAttendanceData(teacher_id, class_id, subject_code, attendance_request, attendance_server, student_id=None):
    '''Send one attendance request to the server and return its JSON response.

    Only the ids that are not None are included in the payload;
    attendance_request selects the server action ('start', 'get', 'mark',
    'end', 'update'). Returns the server's response dict, or a
    {'error': ...} dict when the connection fails or times out.
    '''
    data = {}
    if teacher_id is not None:
        data['tid'] = teacher_id
    if class_id is not None:
        data['cid'] = class_id
    if subject_code is not None:
        data['scode'] = subject_code
    if student_id is not None:
        data['sid'] = student_id
    data['attendance'] = attendance_request
    # convert the data to be sent into length-prefixed json format
    datastr = communication_json.convert2send(data)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(SERVER_TIMEOUT)
        sock.connect((attendance_server['host'], attendance_server['port']))
        try:
            # send the request and wait for the server's json reply
            sock.sendall(datastr)
            response = communication_json.readall(sock)
            if "error" in response:
                print(response['error'])
            return response
        except socket.timeout:
            # Use socket.timeout, not TimeoutError: they are only aliases on
            # Python 3.10+, so the old `except TimeoutError` missed timeouts
            # on older interpreters. Re-raise for the outer handler.
            raise
        except Exception:
            return {'error': 'error sending data'}
        finally:
            sock.close()
    except socket.timeout:
        return {'error': 'Server took long to respond'}
    except Exception:
        return {'error': 'server not available'}
def startAttendance(teacher_id, class_id, subject_code, attendance_server=ATTENDANCE_SERVER):
    """Start an attendance session; the response carries the class student list
    and the attendance code.

    `attendance_server` is now an optional keyword parameter for consistency
    with the other wrappers (backward compatible: the default is unchanged).
    """
    return sendAttendanceData(
        teacher_id, class_id, subject_code, 'start', attendance_server)
def getAttendance(teacher_id, class_id, attendance_server=ATTENDANCE_SERVER):
    """Fetch the students marked present so far (no subject required)."""
    return sendAttendanceData(
        teacher_id, class_id, None, 'get', attendance_server)
def markAttendance(teacher_id, class_id, student_id, attendance_server=ATTENDANCE_SERVER):
    """Explicitly mark a single student as present for the active session."""
    return sendAttendanceData(
        teacher_id, class_id, None, 'mark', attendance_server, student_id)
def stopAttendance(teacher_id, class_id, attendance_server=ATTENDANCE_SERVER):
    """Close the active attendance session (no subject required)."""
    return sendAttendanceData(
        teacher_id, class_id, None, 'end', attendance_server)
def updateClassAndSubjects(teacher_id, attendance_server=ATTENDANCE_SERVER):
    """Fetch this teacher's current class and subject lists from the server."""
    return sendAttendanceData(
        teacher_id, None, None, 'update', attendance_server)
# if __name__ == '__main__':
# #start attendance
# startAttendance('bct12', 'bctcd', 'mp')
# #wait some time
# #get realtime attendance ststus
# getAttendance('bct12', 'bctcd')
# #stop attendance
# stopAttendance('bct12', 'bctcd')
# input()
|
0000Blaze/Smart-Attendance | studentApp/main.py | import server.client_student
import cv2
import face_recognition
import os
from kivy.core.window import Window
from kivymd.app import MDApp
from kivy.uix.camera import Camera
from kivy.uix.image import Image
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.button import MDFloatingActionButton, MDRaisedButton
from kivymd.uix.label import MDLabel
from kivymd.uix.textfield import MDTextField
from kivymd.uix.dialog import MDDialog
# from kivymd.uix.behaviors.backgroundcolor_behavior import BackgroundColorBehavior
# RGBA theme colours shared across the UI widgets
textColor = [0, 0, 0, 1]
dialogTextColor = [0, 0, 0, 1]
textInpBgColor = [0.796875, 0.8984375, 0.99609375, 1]
appBgColor = [0.30078125, 0.76171875, 0.99609375, 1]
textLineColorFocus = [0.01171875, 0.515625, 0.984375, 1]
textLineColorNormal = [0.984375, 0.53125, 0.0117, 1]
# phone-like portrait window geometry
Window.top = 50
#Window.size = (540, 960)
Window.size = (400, 700)
# userData.txt caches the last roll number entered; its absence is normal
# on first launch, so only file-access errors are tolerated here.
filePresent = True
pictureRemoved = False
try:
    fileP = open("./userData.txt", "r")
except OSError:  # narrowed from bare except: don't hide unrelated errors
    filePresent = False
class StudentApp(MDApp):
    """Student attendance client.

    Collects roll number + attendance code, captures a webcam photo,
    computes a face embedding with face_recognition and submits it to the
    attendance server via server.client_student.markAttendance.
    """
    rollNoSaved = ""
    pictureRemoved = False  # True once the captured photo file has been deleted

    def build(self):
        """Build and return the root widget tree (logo, inputs, camera, button)."""
        self.theme_cls.theme_style = "Light"
        # app logo
        imageObj = Image(source="./assets/icon.png")
        imageLayout = MDBoxLayout(size_hint=(0.35, 0.35),
                                  pos_hint={'x': 0.325, 'y': 0.2})
        imageLayout.add_widget(imageObj)
        # labels for the two input fields
        rollNoL = MDLabel(text=" Roll No:", theme_text_color="Custom")
        rollNoL.text_color = textColor
        AttendanceCodeL = MDLabel(
            text=" Attendance Code:", theme_text_color="Custom")
        AttendanceCodeL.text_color = textColor
        # text inputs
        self.rollNoT = MDTextField(hint_text="Enter Roll no :")
        self.rollNoT.color_mode = "custom"
        self.rollNoT.line_color_focus = textLineColorFocus
        self.rollNoT.line_color_normal = textLineColorNormal
        self.AttendanceCodeT = MDTextField(
            hint_text="Enter Attendance Code : ")
        self.AttendanceCodeT.color_mode = "custom"
        self.AttendanceCodeT.line_color_focus = textLineColorFocus
        self.AttendanceCodeT.line_color_normal = textLineColorNormal
        inputLayout = MDBoxLayout(orientation="vertical",
                                  pos_hint={'x': 0.1, 'y': 0.1})
        inputLayout2 = MDBoxLayout(orientation="vertical",
                                   pos_hint={'x': 0.1, 'y': 0.1})
        # pre-fill the roll number cached from a previous successful submission
        if filePresent:
            self.rollNoT.text = fileP.readline()
            fileP.close()
        inputLayout.size_hint = (0.8, 0.25)
        inputLayout.radius = [20, 7, 20, 7]
        inputLayout.md_bg_color = textInpBgColor
        spacer = MDBoxLayout(size_hint=(1, 0.1))
        inputLayout2.size_hint = (0.8, 0.25)
        inputLayout2.radius = [20, 7, 20, 7]
        inputLayout2.md_bg_color = textInpBgColor
        # blank MDLabels act as vertical padding inside the rounded boxes
        inputLayout.add_widget(MDLabel())
        inputLayout.add_widget(self.rollNoT)
        inputLayout2.add_widget(MDLabel())
        inputLayout2.add_widget(MDLabel())
        inputLayout2.add_widget(self.AttendanceCodeT)
        inputLayout2.add_widget(MDLabel())
        # feedback label + popup dialog
        self.messageL = MDLabel(
            text="Error", theme_text_color="Custom")
        self.messageL.text_color = dialogTextColor
        self.messageDialog = MDDialog(text="Dialog",
                                      size_hint=(0.8, 0.2),
                                      radius=[20, 7, 20, 7])
        messageLayout = MDBoxLayout(orientation="vertical",
                                    size_hint=(0.8, 0.4),
                                    pos_hint={'x': 0.1, 'y': 0.1})
        self.authenticateL = MDLabel(
            text="Authenticate your face:", theme_text_color="Custom")
        self.authenticateL.text_color = textColor
        messageLayout.add_widget(self.authenticateL)
        # live camera preview
        self.cameraObj = Camera()
        self.cameraObj.resolution = (640, 480)
        self.cameraObj.play = True
        cameraLayout = MDBoxLayout(size_hint=(0.8, 1),
                                   pos_hint={'x': 0.1})
        cameraLayout.add_widget(self.cameraObj)
        # capture button
        self.buttonObj = MDFloatingActionButton(icon='camera')
        self.buttonObj.pos_hint = {'x': .45, 'bottom': 0.1}
        self.buttonObj.bind(on_press=self.takeImage)
        self.buttonObj.md_bg_color = [0.796875, 0.8984375, 0.99609375, 1]
        # root layout: stack everything vertically
        myBoxLayout = MDBoxLayout(
            orientation="vertical",
            md_bg_color=appBgColor)
        myBoxLayout.add_widget(imageLayout)
        myBoxLayout.add_widget(MDLabel(text=" ", size_hint=(1, 0.2)))
        myBoxLayout.add_widget(inputLayout)
        myBoxLayout.add_widget(spacer)
        myBoxLayout.add_widget(inputLayout2)
        myBoxLayout.add_widget(messageLayout)
        myBoxLayout.add_widget(cameraLayout)
        myBoxLayout.add_widget(self.buttonObj)
        myBoxLayout.add_widget(MDLabel(text=" ", size_hint=(1, 0.1)))
        return myBoxLayout

    def takeImage(self, *args):
        """Capture a photo, extract a face embedding and submit attendance."""
        # BUG FIX: the original compared the widget object (self.rollNoT) with
        # "", so an empty roll number was never detected; compare its .text.
        if self.AttendanceCodeT.text == "" or self.rollNoT.text == "":
            self.messageDialog.text = "Enter all values"
            self.messageDialog.open()
        else:
            self.filename = str(self.rollNoT.text + "+" +
                                self.AttendanceCodeT.text+".png")
            self.rollNo = str(self.rollNoT.text)
            self.acode = self.AttendanceCodeT.text
            print(self.filename)
            try:
                self.cameraObj.export_to_png(self.filename)
                imag = face_recognition.load_image_file(self.filename)
                imag = cv2.cvtColor(imag, cv2.COLOR_BGR2RGB)
                self.encodingsData = face_recognition.face_encodings(imag)
                if len(self.encodingsData) > 0:
                    self.encodingsData = self.encodingsData[0]
                    self.encodingsData = self.encodingsData.tolist()
                    # send the embedding to the attendance server
                    print(self.rollNo)
                    dataFromServer = server.client_student.markAttendance(
                        self.rollNo, int(self.acode), self.encodingsData)
                    if "error" in dataFromServer:
                        print(dataFromServer["error"])
                        self.messageDialog.text = str(dataFromServer["error"])
                        self.messageDialog.open()
                    else:
                        print(dataFromServer["success"])
                        self.authenticateL.text = str(
                            dataFromServer["success"])
                    # the captured photo is no longer needed
                    try:
                        os.remove(self.filename)
                        self.pictureRemoved = True
                    except Exception as e:
                        print("Image file remove error: ", e)
                    # remember the roll number for the next launch
                    if filePresent:
                        try:
                            os.remove("./userData.txt")
                        except Exception as e:
                            print("Text file remove error: ", e)
                    try:
                        # `with` ensures the handle is closed (original leaked it)
                        with open("./userData.txt", "w") as fileP:
                            fileP.write(self.rollNo)
                    except Exception as e:
                        print("File error: ", e)
                else:
                    self.authenticateL.text = "No face detected. Try Again"
                    os.remove(self.filename)
            except Exception as e:
                print("Error :", e)
                if not self.pictureRemoved:
                    os.remove(self.filename)
            self.encodingsData = None
# Launch the Kivy application when run as a script.
if __name__ == "__main__":
    StudentApp().run()
|
0000Blaze/Smart-Attendance | server/createdb.py | import mysql.connector
def createdb():
    """Create the `sas` database and every table of the attendance schema.

    Individual statement failures (e.g. database or table already exists)
    are printed and execution continues, so re-running is tolerated.
    Connection settings and VARCHAR length limits are configured in the
    local variables below.
    """
    # MySQL connection settings
    host = 'localhost'
    user = 'root'
    password = ''
    port = 3306
    dbname = 'sas'
    # VARCHAR length limits shared by the table definitions below
    len_cid = 15
    len_cname = 20
    len_scode = 10
    len_subname = 50
    len_did = 10
    len_dname = 100
    len_tid = 10
    len_tname = 50
    len_sid = 15
    len_sname = 50
    len_username = 30
    len_password = 55
    db_query = "CREATE DATABASE {}".format(dbname)
    query_selectdb = "USE {}".format(dbname)
    # admin login credentials
    table_admin_query = '''CREATE TABLE admin(username VARCHAR({0}) NOT NULL,
        password VARCHAR({1}) NOT NULL
        )ENGINE = InnoDB;'''.format(len_username, len_password)
    table_department_query = '''CREATE TABLE department(dID VARCHAR({0}),
        name VARCHAR({1}) NOT NULL,
        PRIMARY KEY(dID)
        )ENGINE = InnoDB;'''.format(len_did, len_dname)
    table_classroom_query = '''CREATE TABLE class(cID VARCHAR({0}),
        name varchar({1}) NOT NULL,
        dID VARCHAR({2}),
        `sem` TINYINT UNSIGNED,
        PRIMARY KEY(cID),
        FOREIGN KEY (dID) REFERENCES department(dID)
        )ENGINE = InnoDB;'''.format(len_cid, len_cname, len_did)
    table_subject_query = '''CREATE TABLE subject(scode VARCHAR({0}),
        name VARCHAR({1}) NOT NULL,
        PRIMARY KEY(scode)
        )ENGINE = InnoDB;'''.format(len_scode, len_subname)
    table_teacher_query = '''CREATE TABLE teacher(tID VARCHAR({0}),
        name VARCHAR({1}) NOT NULL,
        dID VARCHAR({2}),
        PRIMARY KEY(tID),
        FOREIGN KEY (dID) REFERENCES department(dID)
        )ENGINE = InnoDB;'''.format(len_tid, len_tname, len_did)
    # which teacher teaches which subject to which class in which semester
    table_teaches_query = '''CREATE TABLE teaches(tID VARCHAR({0}),
        scode VARCHAR({1}),
        cID VARCHAR({2}),
        `sem` TINYINT UNSIGNED,
        PRIMARY KEY(tID, scode, cID, sem),
        FOREIGN KEY (tID) REFERENCES teacher(tID),
        FOREIGN KEY (scode) REFERENCES subject(scode),
        FOREIGN KEY (cID) REFERENCES class(cID)
        )ENGINE = InnoDB;'''.format(len_tid, len_scode, len_cid)
    table_student_query = '''CREATE TABLE student(sID VARCHAR({0}),
        name VARCHAR({1}) NOT NULL,
        cID VARCHAR({2}),
        dID VARCHAR({3}),
        PRIMARY KEY(sID),
        FOREIGN KEY (cID) REFERENCES class(cID),
        FOREIGN KEY (dID) REFERENCES department(dID)
        )ENGINE = InnoDB;'''.format(len_sid, len_sname, len_cid, len_did)
    # one row per (student, index) pair: a 128-float face embedding stored rowwise
    table_facedata_query = '''CREATE TABLE facedata(sID VARCHAR({0}),
        `index` TINYINT UNSIGNED NOT NULL,
        `embedding` FLOAT NOT NULL,
        PRIMARY KEY(sID,`index`),
        FOREIGN KEY (sID) REFERENCES student(sID)
        )ENGINE = InnoDB;'''.format(len_sid)
    # one attendance session (who, what subject, which class, when)
    table_attendance_query = '''CREATE TABLE attendance(`aID` INTEGER UNSIGNED AUTO_INCREMENT,
        time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        tID VARCHAR({}),
        scode VARCHAR({}),
        cID VARCHAR({}),
        PRIMARY KEY(aID),
        UNIQUE (time, cID),
        FOREIGN KEY (tID) REFERENCES teacher(tID),
        FOREIGN KEY (scode) REFERENCES subject(scode),
        FOREIGN KEY (cID) REFERENCES class(cID)
        )ENGINE = InnoDB;'''.format(len_tid, len_scode, len_cid)
    # per-student presence flag for each attendance session
    table_record_query = '''CREATE TABLE record(`aID` INTEGER UNSIGNED,
        sID VARCHAR({0}),
        presence BOOLEAN DEFAULT False,
        PRIMARY KEY(aID, sID),
        FOREIGN KEY (aID) REFERENCES attendance(aID),
        FOREIGN KEY (sID) REFERENCES student(sID)
        )ENGINE = InnoDB;'''.format(len_sid)
    # creation order matters: referenced tables must exist before referencing ones
    table_queries = [table_admin_query,
                     table_department_query,
                     table_classroom_query,
                     table_subject_query,
                     table_teacher_query,
                     table_teaches_query,
                     table_student_query,
                     table_facedata_query,
                     table_attendance_query,
                     table_record_query]
    try:
        mysqlconn = mysql.connector.connect(
            host=host,
            user=user,
            password=password,
            port=port)
        mysqlexecuter = mysqlconn.cursor()
        try:
            print('Executing... ', db_query)
            mysqlexecuter.execute(db_query)
        except mysql.connector.Error as e:
            print(e)
        try:
            print('Executing... ', query_selectdb)
            mysqlexecuter.execute(query_selectdb)
            for query in table_queries:
                try:
                    print('Executing... ', query)
                    mysqlexecuter.execute(query)
                except mysql.connector.Error as e:
                    print(e)
        except mysql.connector.Error as e:
            print(e)
    except mysql.connector.Error as e:
        print(e)
# Build the schema when run directly: python createdb.py
if __name__ == '__main__':
    createdb()
|
0000Blaze/Smart-Attendance | server/insertdb.py | <gh_stars>0
import mysql.connector
# Connection settings for the local `sas` MySQL database (see createdb.py).
dbinfo = {'host': 'localhost',
          'user': 'root',
          'password': '',
          'port': 3306,
          'database': 'sas'}
def connect2db(_dbinfo=dbinfo):
    '''returns cursor to the mysql database mentioned in dbinfo dictionary'''
    connection = mysql.connector.connect(
        host=_dbinfo['host'],
        user=_dbinfo['user'],
        password=_dbinfo['password'],
        port=_dbinfo['port'],
        database=_dbinfo['database'],
    )
    return connection, connection.cursor()
def insertAdmin(username, password):
    """Insert one admin credential row; returns True on success, False on error.

    Uses a parameterized query instead of string formatting so values
    containing quotes cannot break (or inject into) the SQL statement.
    """
    query = 'INSERT INTO admin(username,password) VALUES(%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (username, password))
            mysqlconn.commit()
            print(f'Added ({username}) to admin table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertClass(classid, name, depID, sem):
    """Insert one class row; returns True on success, False on error.

    Parameterized query: quoting/injection-safe, unlike the former
    str.format interpolation.
    """
    query = 'INSERT INTO class(cID, name, dID,`sem`) VALUES(%s,%s,%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (classid, name, depID, int(sem)))
            mysqlconn.commit()
            print(f'Added ({classid}, {name}) to class table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertSubject(scode, name):
    """Insert one subject row; returns True on success, False on error.

    Parameterized query for quoting/injection safety.
    """
    query = 'INSERT INTO subject(scode, name) VALUES(%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (scode, name))
            mysqlconn.commit()
            print(f'Added ({scode}, {name}) to subject table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertDepartment(depid, name):
    """Insert one department row; returns True on success, False on error.

    Parameterized query for quoting/injection safety.
    """
    query = 'INSERT INTO department(dID, name) VALUES(%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (depid, name))
            mysqlconn.commit()
            print(f'Added ({depid}, {name}) to department table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertTeacher(tid, name, depid):
    """Insert one teacher row; returns True on success, False on error.

    Parameterized query for quoting/injection safety (names such as
    O'Brien previously broke the interpolated SQL).
    """
    query = 'INSERT INTO teacher(tID, name, dID) VALUES(%s,%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (tid, name, depid))
            mysqlconn.commit()
            print(f'Added ({tid},{name},{depid}) to teacher table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertIntoTeaches(tid, classid, subcode, sem):
    """Insert teaching assignment(s); *subcode* may be a single code or a list.

    Uses executemany with a parameterized statement instead of hand-built
    multi-row VALUES strings. Returns True on success, False on error.
    """
    subcodes = subcode if isinstance(subcode, list) else [subcode]
    query = 'INSERT INTO teaches(tID, scode, cID, `sem`) VALUES(%s,%s,%s,%s)'
    rows = [(tid, sc, classid, int(sem)) for sc in subcodes]
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.executemany(query, rows)
            mysqlconn.commit()
            print(f'Added ({tid},{classid},{subcode},{sem}) to teaches table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertStudent(stuid, name, classid, depid, face_embd):
    """Insert a student row plus their 128-float face embedding.

    The student row and the facedata rows share one commit so they are
    stored atomically. Parameterized queries replace the former string
    interpolation. Returns True on success, False on error.
    """
    if len(face_embd) != 128:
        print('face embeddings size not equal to 128')
        return False
    student_query = 'INSERT INTO student(sID, name, cID, dID) VALUES(%s,%s,%s,%s)'
    face_query = 'INSERT INTO facedata(sID, `index`, `embedding`) VALUES (%s,%s,%s)'
    # one row per embedding component, keyed by its index
    face_rows = [(stuid, i, val) for i, val in enumerate(face_embd)]
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(student_query, (stuid, name, classid, depid))
            print(f'Added ({stuid}, {name}, {classid}) to student table')
            mycursor.executemany(face_query, face_rows)
            mysqlconn.commit()  # single commit keeps student + facedata atomic
            print(f'Added facedata of {name}')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertAttendance(tid, scode, cid):
    '''inserts new attendance details and returns its aID if successfull'''
    # parameterized query for quoting/injection safety
    query = 'INSERT INTO attendance(tID, scode, cID) VALUES(%s,%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (tid, scode, cid))
            # auto-incremented aID of the newly inserted attendance row
            aid = mycursor.lastrowid
            mysqlconn.commit()
            print(
                f'Added ({aid},current time,{tid},{scode},{cid}) to attendance table')
            return aid
        except mysql.connector.Error as e:
            print(e)
            return None
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return None
def insertRecord(aid, stuid, presence):
    """Insert one attendance record row; returns True on success, False on error.

    Parameterized query for quoting/injection safety.
    """
    query = 'INSERT INTO record(aID, sID, presence) VALUES(%s,%s,%s)'
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(query, (aid, stuid, presence))
            print(f'Added ({aid},{stuid},{presence}) to record table')
            mysqlconn.commit()
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
def insertRecords(aid, stuids, presences=None):
    """Insert attendance records for many students in one statement.

    *presences* defaults to all-absent (False). Returns True on success.
    Fixes: `is None` instead of `== None`; an empty *stuids* list no longer
    raises IndexError; executemany replaces the hand-built VALUES string.
    """
    if not stuids:
        print('No students supplied')
        return False
    if presences is None:
        presences = [False] * len(stuids)
    elif len(stuids) != len(presences):
        print('Size of student and presence list not equal')
        return False
    query = 'INSERT INTO record(aID, sID, presence) VALUES(%s,%s,%s)'
    rows = [(aid, sid, pres) for sid, pres in zip(stuids, presences)]
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.executemany(query, rows)
            mysqlconn.commit()
            print('Added attendance records to record table')
            return True
        except mysql.connector.Error as e:
            print(e)
            return False
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        return False
|
0000Blaze/Smart-Attendance | studentApp/server/client_student.py | <filename>studentApp/server/client_student.py
import json
import socket
import server.communication_json as communication_json
# Address of the student-facing attendance server (alternate LAN hosts kept below).
# attendance_server = {'host': '127.0.0.1', 'port': 60000}
attendance_server = {'host': '192.168.152.108', 'port': 60000}
#attendance_server = {'host': '192.168.1.65', 'port': 60000}
SERVER_TIMEOUT = 30  # socket timeout in seconds if the server doesn't respond
def markAttendance(student_id, acode, face_embd, _attendance_server=attendance_server):
    """Submit the student's face embedding to the attendance server.

    Returns the server's response dict ({'success': ...} or {'error': ...});
    network failures are mapped to {'error': ...}. Returns None when the
    server replies without either key (original behavior preserved).
    """
    data = {}
    data['sid'] = student_id
    data['acode'] = int(acode)
    data['face'] = [float(x) for x in face_embd]
    # serialize to length-prefixed JSON
    datastr = communication_json.convert2send(data)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.settimeout(SERVER_TIMEOUT)
        # BUG FIX: the original connected to the module-level
        # `attendance_server`, silently ignoring the `_attendance_server`
        # parameter callers passed in.
        sock.connect((_attendance_server['host'], _attendance_server['port']))
        try:
            # send the attendance data and wait for the verdict
            sock.sendall(datastr)
            response = communication_json.readall(sock)
            if "error" in response or "success" in response:
                return response
        except TimeoutError:
            raise
        except Exception:  # narrowed from bare except
            return {'error': 'error sending/receiving data'}
        finally:
            sock.close()
    except TimeoutError:
        return {'error': 'Server took long to respond'}
    except Exception:
        return {'error': 'server not available'}
# Manual test entry point: uncomment the markAttendance call to exercise the
# client against a running server; input() keeps the console window open.
if __name__ == '__main__':
    ## markAttendance('075bct052', '88',
    ##                [0.258, 0.444447, 0.1258, 0.36697, 0.125887, 0.11245588])
    input()
|
0000Blaze/Smart-Attendance | server/server.py | <gh_stars>0
import socket
import threading
import random
import sched
import time
import sys
import numpy
import face_recognition
import mysql.connector
import communication_json
import insertdb
import utility
MAX_ACODE = 1000  # attendance codes are random integers in [0, MAX_ACODE]
# server info
server_ip = '192.168.152.108'
student_port = 60000
teacher_port = 60001
#updater_port = 60002
# database info
dbinfo = {'host': 'localhost',
          'user': 'root',
          'password': '',
          'port': 3306,
          'database': 'sas'}
# attendance closes automatically after 10 minutes if teacher doesn't close it
ATTENDANCE_TIMEOUT = 10 * 60
ATTENDANCE_TIMEOUT_CHECK = 10  # checks every 10 seconds for timeout of attendance
attendance_scheduler = sched.scheduler(time.time, time.sleep)
# class Attendance:
#     def __init__(self, tid, acode, aid):
#         self.tid = tid
#         self.acode = acode
#         self.aid = aid
# active session registry: { classid: (teacherid, acode, aID) }
active_attendance = {}
# { classid: [studentids] } students already marked present for each active class
students_present = {}
def connect2db(_dbinfo=dbinfo):
    '''returns cursor to the mysql database mentioned in dbinfo dictionary'''
    # The original wrapped this in try/except that only re-raised; a bare
    # connect call propagates mysql.connector.Error to callers identically.
    mysqlconn = mysql.connector.connect(host=_dbinfo['host'], user=_dbinfo['user'], password=_dbinfo['password'],
                                        port=_dbinfo['port'], database=_dbinfo['database'])
    mycursor = mysqlconn.cursor()
    return mysqlconn, mycursor
def sendSQLserverError(conn):
    """Report a database-connectivity failure to the client and close its socket."""
    message = {
        'error': 'Couldn\'t connect to attendance server. please try again after a moment'}
    communication_json.convertSendClose(message, conn)
def studentHandler(conn):
    """Handle one student connection: verify the submitted face embedding and
    mark the student present for the currently active attendance session.

    Expects a JSON message {'sid': ..., 'acode': ..., 'face': [128 floats]}
    and always replies with {'success': ...} or {'error': ...} before
    closing the connection.
    """
    data = communication_json.readall(conn)
    print(data['sid'])
    response = {}
    # find the class of the student
    class_query = 'SELECT cID FROM student WHERE sID = "{}"'.format(
        data['sid'])
    if data['face'] is None or len(data['face']) != 128:
        response['error'] = 'Face data not supplied'
        communication_json.convertSendClose(response, conn)
        return
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(class_query)
            res = mycursor.fetchone()
            if res is None:
                response['error'] = 'You are not registered for any class'
                communication_json.convertSendClose(response, conn)
                return
            data['cid'] = res[0]
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        print(e)
        sendSQLserverError(conn)
        return
    # validate the session state before touching face data
    if data['cid'] not in active_attendance:
        response['error'] = 'Class is not taking attendance at the moment'
        communication_json.convertSendClose(response, conn)
        return
    if active_attendance[data['cid']][1] != data['acode']:
        response['error'] = 'Attendance code wrong'
        communication_json.convertSendClose(response, conn)
        return
    if data['sid'] in students_present[data['cid']]:
        response['error'] = 'Attendance already marked'
        communication_json.convertSendClose(response, conn)
        return
    try:
        mysqlconn, mycursor = connect2db()
        try:
            # fetch the stored 128-component embedding, ordered by component index
            student_facedata_query = 'SELECT embedding FROM facedata WHERE sID = "{}" ORDER BY `index`'.format(
                data['sid'])
            mycursor.execute(student_facedata_query)
            result = mycursor.fetchall()
            if len(result) == 0:
                # BUG FIX: the original set this error but never sent it,
                # leaving the client waiting until its socket timeout.
                response['error'] = 'Your face is not registered. Contact the administrator'
                communication_json.convertSendClose(response, conn)
                return
            if len(result) != 128:
                print('Face data insufficient in database')
                sendSQLserverError(conn)
                return
            facedata = [row[0] for row in result]
            # compare the stored embedding with the submitted one
            match = face_recognition.compare_faces(
                [numpy.array(facedata)], numpy.array(data['face']))
            if match[0]:
                # face matched: persist presence and remember it in memory
                mark_attendance_query = 'UPDATE record SET presence = true WHERE aID = {0} AND sID = "{1}"'.format(
                    active_attendance[data['cid']][2], data['sid'])
                mycursor.execute(mark_attendance_query)
                mysqlconn.commit()
                students_present[data['cid']].append(data['sid'])
                response['success'] = 'Attendance marked'
            else:
                response['error'] = 'Face didn\'t match. Please try again'
            communication_json.convertSendClose(response, conn)
            return
        except mysql.connector.Error as e:
            sendSQLserverError(conn)
            return
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        sendSQLserverError(conn)
        return
def studentConnectionListen():
    """Accept student connections forever, one handler thread per connection."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sl:
        # BUG FIX: SO_REUSEADDR must be set *before* bind() to take effect.
        sl.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sl.bind((server_ip, student_port))
        sl.listen()  # listen once; only accept() belongs inside the loop
        while True:
            conn, addr = sl.accept()
            # handle each student in its own thread
            t = threading.Thread(target=studentHandler, args=(conn,))
            t.start()
            print("Connected to student at: ", addr)
def removeClassFromAttendance(class_id):
    """Drop *class_id*'s active session state; no-op when no session is active."""
    if class_id not in active_attendance:
        return
    del active_attendance[class_id]
    del students_present[class_id]  # drop the present-students list as well
def attendanceTimeout():
    """Background loop running scheduled auto-close events for stale sessions.

    Polls the scheduler; when nothing is queued it sleeps for
    ATTENDANCE_TIMEOUT_CHECK seconds before looking again.
    """
    while True:
        if not attendance_scheduler.empty():
            attendance_scheduler.run()
        else:
            time.sleep(ATTENDANCE_TIMEOUT_CHECK)
# def teacherAttendanceLogFeedback(conn, class_id):
# if student_present[] is not empty send the student_ids and remove from the list
# close this thread if corresponding class_id has been removed from active attendance list
# return
##
# def getStudentList_json(class_id):
# get student list and their id from database for class_id
# save it in proper python structure
# return the json format data
# return
##
# def teacherExists(db_cursor):
# return
def getNewAttendanceCode(_ACODE=MAX_ACODE):
    """Return a random attendance code in [0, _ACODE] not used by any active session."""
    while True:
        candidate = random.randint(0, _ACODE)
        # re-scan active sessions on every attempt, as the original did
        taken = any(session[1] == candidate
                    for session in active_attendance.values())
        if not taken:
            return candidate
def teacherHandler(conn):
    """Handle one teacher connection.

    The request's 'attendance' field selects the action:
      'start'  - begin a session; reply with student list + attendance code
      'end'    - close the session (only by the teacher who started it)
      'get'    - list the students marked present so far
      'mark'   - manually mark one student present
      'update' - send the teacher's class/subject lists
    Every handled path replies with JSON and closes the connection.
    """
    data = communication_json.readall(conn)
    response = {}
    if data['attendance'] == 'end':
        # stopping attendance: only the teacher who started it may close it
        if data['cid'] in active_attendance:
            if active_attendance[data['cid']][0] == data['tid']:
                del active_attendance[data['cid']]
                del students_present[data['cid']]  # remove students list
                response['success'] = 'Attendance stopped'
                communication_json.convertSendClose(response, conn)
                return
            else:
                response['error'] = 'Another teacher started attendance for this class'
                communication_json.convertSendClose(response, conn)
                return
        else:
            response['error'] = 'No attendance in progress for the class'
            communication_json.convertSendClose(response, conn)
            return
    elif data['attendance'] == 'start':
        if data['cid'] in active_attendance:
            response['error'] = 'Attendance already started'
            communication_json.convertSendClose(response, conn)
            return
        # validate teacher, class and subject against the database
        try:
            mysqlconn, mycursor = connect2db()
            try:
                teacher_exists_query = 'SELECT tID FROM sas.teacher WHERE tID = "{}"'.format(
                    data['tid'])
                mycursor.execute(teacher_exists_query)
                if mycursor.fetchone() is None:
                    response['error'] = 'You are not registered as teacher'
                    communication_json.convertSendClose(response, conn)
                    return
                class_exists_query = 'SELECT cID FROM sas.class WHERE cID = "{}"'.format(
                    data['cid'])
                mycursor.execute(class_exists_query)
                if mycursor.fetchone() is None:
                    response['error'] = 'Couldn\'t find class'
                    communication_json.convertSendClose(response, conn)
                    return
                subject_exists_query = 'SELECT scode FROM sas.subject WHERE scode = "{}"'.format(
                    data['scode'])
                # BUG FIX: the original executed class_exists_query here, so
                # the subject was never actually validated.
                mycursor.execute(subject_exists_query)
                if mycursor.fetchone() is None:
                    response['error'] = 'Couldn\'t find subject'
                    communication_json.convertSendClose(response, conn)
                    return
                # a unique attendance identifier for current session
                acode = getNewAttendanceCode()
                # send student list with studentid, name and attendance code
                classlist_query = 'SELECT sID, name FROM student WHERE cID = "{}"'.format(
                    data['cid'])
                mycursor.execute(classlist_query)
                result = mycursor.fetchall()
                response['student_list'] = result
                response['acode'] = acode
                response['timeout'] = 'The attendance will close automatically in {} minutes if not explicitly closed'.format(
                    ATTENDANCE_TIMEOUT/60)
                communication_json.convertSendClose(response, conn)
                sidlist = [r[0] for r in result]
                # make new attendance record in database
                attendanceid = insertdb.insertAttendance(
                    data['tid'], data['scode'], data['cid'])
                # default-absent records for every student of the class
                insertdb.insertRecords(attendanceid, sidlist)
                # register the active session: cid -> (tid, acode, aID)
                active_attendance[data['cid']] = (
                    data['tid'], acode, attendanceid)
                students_present[data['cid']] = []  # initially no student present
                # auto-close after the timeout if the teacher never closes it
                attendance_scheduler.enter(
                    ATTENDANCE_TIMEOUT, 0, removeClassFromAttendance, argument=(data['cid'],))
            except mysql.connector.Error as e:
                sendSQLserverError(conn)
                return
            finally:
                mycursor.close()
        except mysql.connector.Error as e:
            sendSQLserverError(conn)
            return
    elif data['attendance'] == 'get':
        # send list of students whose attendance has been marked
        if data['cid'] in active_attendance:
            if active_attendance[data['cid']][0] == data['tid']:
                # only the session-owning teacher may see realtime data
                response['student_list'] = students_present[data['cid']]
                communication_json.convertSendClose(response, conn)
                return
            else:
                response['error'] = 'Another teacher started attendance for this class'
                communication_json.convertSendClose(response, conn)
                return
        else:
            response['error'] = 'No attendance in progress for the class'
            communication_json.convertSendClose(response, conn)
            return
    elif data['attendance'] == 'mark':
        # manually mark one student as present
        if data['cid'] in active_attendance:
            if active_attendance[data['cid']][0] == data['tid']:
                if not data['sid'] in students_present[data['cid']]:
                    try:
                        mysqlconn, mycursor = connect2db()
                        mark_attendance_query = 'UPDATE record SET presence = true WHERE aID = {0} AND sID = "{1}"'.format(
                            active_attendance[data['cid']][2], data['sid'])
                        try:
                            mycursor.execute(mark_attendance_query)
                            mysqlconn.commit()
                            students_present[data['cid']].append(data['sid'])
                            response['success'] = f'Attendance marked for {data["sid"]}'
                            communication_json.convertSendClose(response, conn)
                            return
                        except mysql.connector.Error as e:
                            response['error'] = 'Student ID wrong'
                            communication_json.convertSendClose(response, conn)
                            return
                        finally:
                            mycursor.close()
                    except mysql.connector.Error as e:
                        sendSQLserverError(conn)
                        return
                else:
                    response['error'] = f'Attendance already marked for {data["sid"]}'
                    communication_json.convertSendClose(response, conn)
                    return
            else:
                response['error'] = 'Another teacher started attendance for this class'
                communication_json.convertSendClose(response, conn)
                return
        else:
            response['error'] = 'No attendance in progress for the class'
            communication_json.convertSendClose(response, conn)
            return
    elif data['attendance'] == 'update':
        # though the key is 'attendance' this just sends the teacher their
        # updated list of classes and subjects
        classSubjectUpdater(conn, data['tid'])
        return
def teacherConnectionListen():
    """Accept teacher clients forever; spawn one handler thread per connection.

    Runs until the process exits (started as a daemon thread from __main__).
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        # SO_REUSEADDR must be set BEFORE bind() to have any effect;
        # the original set it after binding, so restarts could hit EADDRINUSE
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind((server_ip, teacher_port))
        # listen() once; only accept() belongs inside the loop
        s.listen()
        while True:
            conn, addr = s.accept()
            # start new thread per teacher client
            t = threading.Thread(target=teacherHandler, args=(conn,))
            t.start()
            print("Connected to teacher at: ", addr)
# class and subject data updater for teacher
def classSubjectUpdater(conn, tid):
    """Send the teacher's name plus current class/subject lists over *conn*.

    Looks up *tid* in sas.teacher, then fetches the classes and subjects the
    teacher currently teaches (sem != 0) and ships everything as one JSON
    payload via communication_json.convertSendClose.  On any failure an
    'error' payload is sent instead.  *conn* is always closed by the helper.
    """
    response = {}
    if tid == '':
        response['error'] = "Please supply proper teacher id"
        communication_json.convertSendClose(response, conn)
        return
    mysqlconn, mycursor = connect2db()
    try:
        # tid arrives from the network: use a parameterized query, never
        # string interpolation (the original format() calls were injectable)
        mycursor.execute('SELECT tID,name FROM sas.teacher WHERE tID = %s', (tid,))
        res = mycursor.fetchone()
        if res is None:
            response['error'] = 'You are not registered as teacher'
            communication_json.convertSendClose(response, conn)
            return
        response['teacher_name'] = res[1]
    except mysql.connector.Error as e:
        print(e)
        sendSQLserverError(conn)
        return
    finally:
        # single close point (the original closed the cursor twice on the
        # unknown-teacher path)
        mycursor.close()
    try:
        mysqlconn, mycursor = connect2db()
        try:
            mycursor.execute(
                'SELECT cID, name FROM class INNER JOIN teaches USING (cID) '
                'WHERE tID = %s AND teaches.`sem` != 0', (tid,))
            response['class'] = mycursor.fetchall()
            mycursor.execute(
                'SELECT scode, name FROM subject INNER JOIN teaches USING (scode) '
                'WHERE tID = %s AND teaches.`sem` != 0', (tid,))
            response['subject'] = mycursor.fetchall()
            communication_json.convertSendClose(response, conn)
        except mysql.connector.Error as e:
            print(e)
            raise
        finally:
            mycursor.close()
    except mysql.connector.Error as e:
        sendSQLserverError(conn)
        return
# def UpdateConnectionListen():
# with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
## s.bind((server_ip, teacher_port))
# while(True):
# s.listen()
## conn, addr = s.accept()
# start new thread;
## t = threading.Thread(target= classSubjectUpdater, args = (conn,))
# t.start()
## print("Connected to teacher at: ", addr)
if __name__ == '__main__':
    # one daemon thread per concern: teacher sockets, student sockets, and
    # the watchdog that ends attendance sessions left open past the timeout
    teacherlistener = threading.Thread(target=teacherConnectionListen)
    studentlistener = threading.Thread(target=studentConnectionListen)
    attendancetimer = threading.Thread(target=attendanceTimeout)
    # daemon=True so the process can exit via sys.exit() below without joins
    teacherlistener.daemon = True
    studentlistener.daemon = True
    attendancetimer.daemon = True
    # --test--
    # attendanceid = insertdb.insertAttendance(
    #     "001", "CT652", "075bctCd")
    # active_attendance['075bctCd'] = ('001', 1, attendanceid)
    # students_present['075bctCd'] = []
    teacherlistener.start()  # listen and handle teacher clients
    studentlistener.start()  # listen and handle student clients
    # stop any attendance that has not been stopped explicitly by teacher within timeout period
    attendancetimer.start()
    # main thread blocks on stdin; 'q'/'Q' shuts the whole server down
    while True:
        endServer = input()
        if endServer == "q" or endServer == "Q":
            sys.exit()
    # wait till all threads have returned
    # teacherlistener.join()
    # studentlistener.join()
    # attendancetimer.join()
|
0000Blaze/Smart-Attendance | scraper/getStudentList.py | <filename>scraper/getStudentList.py
"""Scrape the DoECE roster page and save the PUL075 batch as PUL075BCT.csv."""
from pydoc import source_synopsis   # NOTE(review): unused -- looks auto-added; confirm before removing
from markupsafe import string       # NOTE(review): unused -- looks auto-added; confirm before removing
import requests
from requests import get
from bs4 import BeautifulSoup
import pandas as pd
import numpy as np
from recordHandler import splitClasses
from recordHandler import addNewStudents
from recordHandler import removeDropouts

url = "http://doece.pcampus.edu.np/index.php/students-bachelor-in-computer-engineering/"
headers = {"Accept-Language": "en-US,en;q=0.5"}
results = requests.get(url, headers=headers)
soup = BeautifulSoup(results.text, "html.parser")

# initialise data storage
rollNo = []
name = []
temp = []

# find the specific batch roll number and name: leaf tags whose text mentions the batch
matched_tags = soup.find_all(lambda tag: len(tag.find_all()) == 0 and "PUL075" in tag.text)  # change year as needed batch

# -------- extract the name and roll number into separate lists --------
# convert bs4.element.Tag objects into plain strings
for matched_tag in matched_tags:
    temp.append(str(matched_tag))

# temporary lists for removing the <p> and </p> tags
z1 = []
z2 = []
# split the roll number and name on the first space
for x in temp:
    y = x.split(" ", 1)
    z1.append(y[0])
    z2.append(y[1])
# name list: text before the closing tag
for x in z2:
    x1 = x.split("<", 1)
    name.append(x1[0])
# roll number list: text after the opening tag
for x in z1:
    x1 = x.split(">", 1)
    rollNo.append(x1[1])
# -------- END extracting name and roll number --------

# replace the non-breaking-space character, then strip spaces from roll numbers
for index, element in enumerate(rollNo):
    rollNo[index] = element.replace('\xa0', ' ')
for index, element in enumerate(name):
    name[index] = element.replace('\xa0', ' ')
for index in range(len(rollNo)):
    rollNo[index] = rollNo[index].replace(" ", "")

# pandas dataframe -> csv file
records = pd.DataFrame({
    'RollNo': rollNo,
    'Name': name,
})
records.to_csv('PUL075BCT.csv')
print("Records retrived and save as PUL075BCT.csv")

try:
    removeDropouts()
    splitClasses()
    addNewStudents()
except Exception as e:
    # was a bare `except:` -- that also swallowed KeyboardInterrupt/SystemExit
    print("\n SOME ERROR OCCURED\n")
0000Blaze/Smart-Attendance | scraper/recordHandler.py | <reponame>0000Blaze/Smart-Attendance
## ONLY MODULES HERE IMPORTED IN getStudentList.py
#made to handle the csv file
#1. split classes AB and CD studnets
#2. add the new transfer students
#3. removed dropouts
from itertools import count
from operator import index
from statistics import variance
import pandas as pd
from csv import writer
def splitClasses(source_csv='PUL075BCT.csv', chunk_size=46):
    """Split the batch roll CSV into two section files (...AB.csv / ...CD.csv).

    The first *chunk_size* rows become section AB, every later chunk is
    written to section CD.  Defaults keep the original hard-coded behavior,
    but the source path and split point are now parameters.
    """
    # derive the output prefix from the source file name
    prefix = source_csv[:-4] if source_csv.endswith('.csv') else source_csv
    section = "AB"
    for batch_no, chunk in enumerate(pd.read_csv(source_csv, chunksize=chunk_size), start=1):
        if batch_no > 1:
            section = "CD"
        chunk.to_csv(prefix + section + '.csv', index=False)
    print("Succesful split into two records")
def addNewStudents():
    """Append the late-transfer students to both section CSVs and the master roll.

    Rows are hard-coded; files are opened in append mode (created if absent).
    """
    # rows destined for section AB
    dataAB = [
        ['PUL075BCT097', 'BIBEK BASHYAL'],
        ['PUL075BCT099', 'SAUGAT KAFLE']
    ]
    # rows destined for section CD
    dataCD = [
        ['PUL075BCT098', '<NAME>'],
        ['PUL075BCT100', 'SIJAL BARAL']
    ]
    # all new rows, for the combined batch file
    data = [
        ['PUL075BCT097', 'BIBEK BASHYAL'],
        ['PUL075BCT098', '<NAME>'],
        ['PUL075BCT099', 'SAUGAT KAFLE'],
        ['PUL075BCT100', 'SIJAL BARAL']
    ]
    # `with` closes the files -- the explicit close() calls were redundant
    with open('PUL075BCTAB.csv', 'a', newline='') as f_object:
        writer(f_object).writerows(dataAB)
    with open('PUL075BCTCD.csv', 'a', newline='') as f_object:
        writer(f_object).writerows(dataCD)
    print("Succesful added new students in AB and CD")
    # add students to PUL075BCT
    with open('PUL075BCT.csv', 'a', newline='') as f_object:
        writer(f_object).writerows(data)
    print("Succesful added new students in PUL075BCT\n")
# 3. remove drop-out students
def removeDropouts():
    """Drop the students who left the batch from PUL075BCT.csv (in place)."""
    roll = pd.read_csv('PUL075BCT.csv', index_col="RollNo")
    # students no longer in the batch
    roll = roll.drop(["PUL075BCT017", "PUL075BCT036", "PUL075BCT073", "PUL075BCT087"])
    # stray index columns left over from earlier to_csv round-trips
    junk_cols = roll.columns[roll.columns.str.contains('unnamed', case=False)]
    roll = roll.drop(junk_cols, axis=1)
    print("Removed dropouts")
    roll.to_csv('PUL075BCT.csv')
|
0000Blaze/Smart-Attendance | server/utility.py | <filename>server/utility.py
def getKey(dictionary, svalue):
    """Reverse lookup: first key whose value equals *svalue*, else None."""
    return next((k for k, v in dictionary.items() if v == svalue), None)
# def hasKey(dictionary, key):
# if key in dictionary:
# return True
# return False
|
0000Blaze/Smart-Attendance | data/getStudentImages.py | import wget
def imageDownloader(fixed_url, varying_url):
    """Download <fixed_url><varying_url> trying .png, then .jpg, then .jpeg.

    Raises the last download error if all three extensions fail.  The nested
    bare `except:` clauses were narrowed so Ctrl-C is no longer swallowed.
    """
    last_err = None
    for ext in (".png", ".jpg", ".jpeg"):
        try:
            file_name = wget.download(fixed_url + varying_url + ext)
            break
        except Exception as e:
            last_err = e
    else:
        # every extension failed -- surface the final error like the original did
        raise last_err
    print('Image Successfully Downloaded: ', file_name)
# fixed url part for all PUL075BCT students
fixed_url = "https://exam.ioe.edu.np/Images/StudentCurrentImage/3036/PUL075BCT"
# Total number of students in batch
i = 96
# walk roll numbers 096 down to 001, zero-padding to three digits
while i > 0:
    if i < 10:
        varying_url = "00" + str(i)
    elif i == 87 or i == 73 or i == 36 or i == 17:  # dropouts 87 73 36 17
        i = i-1
        continue
    else:
        varying_url = "0" + str(i)
    imageDownloader(fixed_url, varying_url)
    i = i-1

# download images of new added students (rolls 097..100, counting down)
varying_url = "100"
imageDownloader(fixed_url, varying_url)
x = int(varying_url) - 1
while x > 96:
    varying_url = "0" + str(x)
    imageDownloader(fixed_url, varying_url)
    x = x-1
andrewzweb/TiShop | base_app/functional_tests/test_product_delete.py | <filename>base_app/functional_tests/test_product_delete.py<gh_stars>0
''' functional test to delete product '''
from django.urls import reverse
from mixer.backend.django import mixer
import pytest
from .base import FunctionalTest
pytestmark = pytest.mark.django_db
class ProductDeleteTest(FunctionalTest):
    """ testcase product delete """

    @pytest.mark.integration
    def test_delete_product(self):
        ''' Rob want delete product '''
        # create product in db
        product = mixer.blend('product.Product')
        # get page where you Rob can delete
        self.browser.get(self.live_server_url +
                         reverse('product:delete', kwargs={'product_slug': product.slug}))
        # Rob find botton to delete item
        # NOTE(review): wait_for receives the already-evaluated call result,
        # so it cannot actually retry -- confirm FunctionalTest.wait_for intent
        self.wait_for(self.browser.find_element_by_id('delete-button'))
        # Ro click to botton
        self.wait_for(self.browser.find_element_by_id('delete-button').click())
|
andrewzweb/TiShop | base_app/product/migrations/0003_productimage_description.py | # Generated by Django 3.1.7 on 2021-03-23 17:00
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add optional ProductImage.description (100 chars)."""

    dependencies = [
        ('product', '0002_productimage'),
    ]

    operations = [
        migrations.AddField(
            model_name='productimage',
            name='description',
            field=models.CharField(blank=True, max_length=100),
        ),
    ]
|
andrewzweb/TiShop | base_app/product/tests/test_models.py | <reponame>andrewzweb/TiShop<filename>base_app/product/tests/test_models.py
''' Test product models '''
import decimal
import pytest
from django.utils.text import slugify
from mixer.backend.django import mixer
pytestmark = pytest.mark.django_db
from ..models import ProductImage
class TestProduct:
    ''' test product: field defaults, slug generation, price bounds, __str__ '''

    def setup(self):
        '''set up'''
        self.product = mixer.blend('product.Product')

    def test_product_model_exist(self):
        ''' test model exist '''
        # assumes a fresh DB per test so the autoincrement id restarts at 1
        assert self.product.id == 1, 'Should create a Product instance with id 1'

    def test_product_have_title(self):
        ''' test product have title '''
        product = mixer.blend('product.Product', title='product title')
        assert product.title == 'product title'

    def test_product_have_slug(self):
        '''test product have slug'''
        assert self.product.slug

    def test_product_slug_its_title(self):
        '''test product slug its title'''
        # Product.save() regenerates slug from title on every save
        assert self.product.slug == slugify(self.product.title)

    def test_product_have_description(self):
        '''test product have description'''
        product = mixer.blend('product.Product', description='product description')
        assert product.description == 'product description'

    def test_product_have_price(self):
        '''test product have price'''
        product = mixer.blend('product.Product', price=10.00)
        assert product.price == 10.00

    def test_product_price_default_is_zero(self):
        '''test product price default is zero'''
        product = mixer.blend('product.Product')
        assert product.price == 0

    def test_product_price_max_number_max_is_ten_digits(self):
        '''test product price max number max is ten digits'''
        # DecimalField(max_digits=10): nine integer digits + two decimals overflows
        with pytest.raises(decimal.InvalidOperation):
            mixer.blend('product.Product', price=100000000.00)

    def test_product_price_have_decimal_places(self):
        '''test product price have decimal places'''
        product = mixer.blend('product.Product', price=1)
        assert product.price == decimal.Decimal(1.00)

    def test_product_str_return_title(self):
        '''test product str return title'''
        product = mixer.blend('product.Product')
        assert product.title in str(product)
class TestProductImage:
    ''' testcase product image: fields, product FK, __str__, admin preview '''

    def setup(self):
        ''' setup '''
        self.product = mixer.blend('product.Product')
        self.product_image = mixer.blend('product.ProductImage', product=self.product)

    def test_exist_product_image(self):
        '''test exist product image'''
        assert ProductImage.objects.count() == 1

    def test_obj_have_description(self):
        '''test obj have description'''
        assert self.product_image.description

    def test_obj_have_image(self):
        '''test obj have image'''
        assert self.product_image.image

    def test_obj_have_ralation_with_product(self):
        '''test obj have ralation with product'''
        assert self.product_image.product == self.product

    def test_obj_str_return_title(self):
        '''test obj str return title'''
        # ProductImage.__str__ returns the description field
        assert str(self.product_image) == self.product_image.description

    def test_obj_small_image(self):
        '''test obj small image'''
        # small_image() is the admin thumbnail helper embedding the image url
        assert self.product_image.image.url in self.product_image.small_image()
class TestCategory:
    ''' testcase category: fields, __str__, slug kept in sync on save '''

    def setup(self):
        self.product = mixer.blend('product.Product')
        self.category = mixer.blend('product.Category', product=self.product)

    def test_category_exist(self):
        ''' test category '''
        assert self.category

    def test_category_have_title(self):
        '''test category have title'''
        assert self.category.title

    def test_category_have_slug(self):
        '''test category have slug'''
        assert self.category.slug

    def test_category_method_str(self):
        '''test category have slug'''
        assert self.category.title in str(self.category)

    def test_method_save_autochange_slug(self):
        '''test save() regenerates the slug from the (renamed) title'''
        # get title
        title = self.category.title
        # check slug it ccover title
        assert str(slugify(title)) == self.category.slug
        # set new title
        new_title = 'New title'
        self.category.title = new_title
        # check new title not even slug
        assert str(slugify(new_title)) != str(self.category.slug)
        # action save instance
        self.category.save()
        # check slug must be same with title
        assert str(slugify(new_title)) == str(self.category.slug)
|
andrewzweb/TiShop | base_app/product/migrations/0006_auto_20210323_1711.py | # Generated by Django 3.1.7 on 2021-03-23 17:11
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: drop the upload_to prefix on ProductImage.image."""

    dependencies = [
        ('product', '0005_auto_20210323_1707'),
    ]

    operations = [
        migrations.AlterField(
            model_name='productimage',
            name='image',
            field=models.ImageField(upload_to=''),
        ),
    ]
|
andrewzweb/TiShop | base_app/product/models.py | <reponame>andrewzweb/TiShop
''' product models '''
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.text import slugify
class Product(models.Model):
    ''' model product: title/description/price plus an auto-maintained slug '''
    title = models.CharField(max_length=250)
    # regenerated from title on every save() -- manually set slugs are overwritten
    slug = models.SlugField(blank=True, null=True)
    description = models.TextField(blank=True)
    price = models.DecimalField(
        default=0,
        max_digits=10,
        decimal_places=2,
        blank=True
    )
    # PROTECT: a category with products cannot be deleted
    category = models.ForeignKey(
        'Category',
        on_delete=models.PROTECT,
        blank=True,
        null=True
    )
    updated = models.DateTimeField(auto_now=True)
    created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return '{}'.format(self.title)

    def save(self, *args, **kwargs):
        # keep the slug in sync with the current title
        self.slug = slugify(self.title)
        super(Product, self).save(*args, **kwargs)
class ProductImage(models.Model):
    ''' product image attached to a product; description doubles as caption '''
    # optional caption; __str__ and the admin rely on it
    description = models.CharField(blank=True, max_length=100)
    image = models.ImageField()
    # CASCADE: deleting the product removes its images too
    product = models.ForeignKey(
        'Product',
        on_delete=models.CASCADE,
        blank=True,
        null=True,
    )

    def __str__(self):
        return '{}'.format(self.description)

    def small_image(self):
        # 100px-wide thumbnail markup used by the admin changelist/inline
        return mark_safe(u'<img src="%s" width="100"/>' % self.image.url)
    small_image.short_description = 'Picture'
    small_image.allow_tags = True
class Category(models.Model):
    """Product grouping; the slug is regenerated from the title on every save."""
    title = models.CharField(blank=False, max_length=100)
    slug = models.SlugField(null=True, blank=True)

    def __str__(self):
        return self.title

    def save(self, *args, **kwargs):
        # keep the slug in sync with the (possibly renamed) title
        self.slug = slugify(self.title)
        super().save(*args, **kwargs)
|
andrewzweb/TiShop | base_app/functional_tests/test_product_list.py | ''' functional test product detail '''
import pytest
from django.urls import reverse
from .base import FunctionalTest
from mixer.backend.django import mixer
pytestmark = pytest.mark.django_db
import time
class ProductListTest(FunctionalTest):
    """ testcase list product: the list page shows every product title """

    def setUp(self):
        ''' new setUp: browser from the base class plus five blended products '''
        super(ProductListTest, self).setUp()
        self.products = mixer.cycle(5).blend('product.Product')

    @pytest.mark.integration
    def test_get_list_product_page(self):
        ''' Rob want see product page '''
        # Rob get product
        self.browser.get(self.live_server_url +
                         reverse('product:all'))
        # Rob see 5 products in page
        # check 5 product-title
        for product in self.products:
            assert product.title in self.browser.page_source
|
andrewzweb/TiShop | base_app/functional_tests/test_home_mock.py | from .base import FunctionalTest
from django.urls import reverse
import pytest
class HomePageTest(FunctionalTest):
    """ testcase home view: the home page loads and is titled 'Home' """

    @pytest.mark.integration
    def test_get_home_page(self):
        ''' test get home page '''
        self.browser.get(
            self.live_server_url + reverse('product:home'))
        assert 'Home' in self.browser.title
|
andrewzweb/TiShop | base_app/product/urls.py | from django.urls import path
from . import views
app_name = 'product'

# order matters: 'home/' and 'add/' must precede the bare slug routes so they
# are not captured as a product slug
urlpatterns = [
    path('home/', views.home, name='home'),
    path('add/', views.product_add, name='add'),
    path('', views.product_all, name='all'),
    path('<slug:product_slug>', views.product_detail, name='detail'),
    path('<slug:product_slug>/update', views.product_update, name='update'),
    path('<slug:product_slug>/delete', views.product_delete, name='delete'),
    path('category/<slug:category_slug>', views.category_filter, name='category_filter'),
]
|
andrewzweb/TiShop | base_app/product/views.py | from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from django.forms import modelformset_factory
from .models import Product, ProductImage, Category
from . import forms
def home(request):
    ''' home: render the landing page with a greeting '''
    # locals() passes `request` and `greatings` into the template context
    greatings = 'Hello you are at home'
    return render(request, 'product/home.html', locals())
def product_all(request):
    ''' product all: list every product, with categories for the sidebar/nav '''
    categories = Category.objects.all()
    products = Product.objects.all()
    # locals() hands categories + products to the template
    return render(request, 'product/list.html', locals())
def product_detail(request, product_slug):
    '''product detail: show one product looked up by slug (404 if unknown) '''
    categories = Category.objects.all()
    product = get_object_or_404(Product, slug=product_slug)
    return render(request, 'product/detail.html', locals())
def product_add(request):
    """Create a product together with its first picture.

    GET renders empty prefixed forms; a valid POST saves the product, links
    the picture to it, and redirects (302) to the product detail page.  An
    invalid POST re-renders the bound forms.
    """
    categories = Category.objects.all()
    if request.method == "POST":
        product_form = forms.ProductForm(prefix="product", data=request.POST)
        picture_form = forms.ProductImageForm(prefix="picture", data=request.POST, files=request.FILES)
        if product_form.is_valid() and picture_form.is_valid():
            product = product_form.save()
            picture = picture_form.save(commit=False)
            picture.product = product
            picture.save()
            # the original passed status=201 here, but redirect() silently
            # ignores extra kwargs for a resolved URL -- the response was
            # always a 302, so the misleading argument is dropped
            return redirect(reverse('product:detail', kwargs={'product_slug': product.slug}))
    else:
        product_form = forms.ProductForm(prefix="product")
        picture_form = forms.ProductImageForm(prefix="picture")
    return render(request, 'product/create.html', locals())
def product_update(request, product_slug):
    """Edit a product and manage its picture set via a model formset.

    POST: save the product form, then walk the picture formset -- create new
    pictures, delete DELETE-flagged ones, save edited ones -- and redirect
    back here.  GET: render the bound forms.
    """
    categories = Category.objects.all()
    product = get_object_or_404(Product, slug=product_slug)
    picture_queryset = ProductImage.objects.filter(product=product)
    picture_formset = modelformset_factory(
        ProductImage,
        form=forms.ProductImageForm,
        extra=1,
        can_delete=True
    )
    if request.method == "POST":
        product_form = forms.ProductForm(prefix="product", data=request.POST, instance=product)
        picture_formset = picture_formset(request.POST, request.FILES)
        if product_form.is_valid() and picture_formset.is_valid():
            product = product_form.save()
            for form in picture_formset:
                data = form.cleaned_data
                data_description = data.get('description')
                data_image = data.get('image')
                # 'id' is a ModelChoiceField: the existing ProductImage
                # instance, or None for a brand-new row
                data_id = data.get('id')
                data_delete = data.get('DELETE')
                if data_id is None and data_description is not None and data_image is not None:
                    # new picture row
                    ProductImage.objects.create(
                        description=data_description,
                        image=data_image,
                        product=product
                    )
                if data_delete and data_id is not None:
                    print('delete', data_id)
                    # delete the flagged instance directly; the old lookup
                    # `objects.get(description=data_id)` only worked by
                    # accident (str(instance) happens to be the description)
                    # and crashed on duplicate descriptions
                    data_id.delete()
                if data_id is not None and data_description is not None and data_image is not None:
                    form.save()
            return redirect(reverse('product:update', kwargs={'product_slug': product.slug,}))
    else:
        product_form = forms.ProductForm(prefix="product", instance=product)
        # NOTE(review): misspelled name kept on purpose -- the template appears
        # to pick this variable up via locals(); confirm before renaming
        picure_formset = picture_formset(queryset=picture_queryset)
    return render(request, 'product/update.html', locals())
def product_delete(request, product_slug):
    """Confirm (GET) or perform (POST) deletion of the product with *product_slug*."""
    product = get_object_or_404(Product, slug=product_slug)
    if request.method == 'POST':
        product.delete()
        return redirect(reverse('product:all'))
    # GET: render the confirmation page
    return render(request, 'product/delete.html', locals())
def category_filter(request, category_slug):
    """List the products belonging to one category.

    Uses get_object_or_404 like the sibling views, so an unknown category
    slug yields a 404 instead of an uncaught DoesNotExist (HTTP 500).
    """
    categories = Category.objects.all()
    target_category = get_object_or_404(Category, slug=category_slug)
    products = Product.objects.filter(category=target_category)
    return render(request, 'product/category.html', locals())
|
andrewzweb/TiShop | base_app/product/admin.py | <gh_stars>0
''' admin product '''
from django.contrib import admin
from . import models
class ProductImagesInstanceInline(admin.TabularInline):
    """Tabular inline editing of a product's images, with a read-only thumbnail."""
    model = models.ProductImage
    fields = ('small_image', 'description', 'image', 'product')
    # small_image is a rendering helper on the model, not an editable field
    readonly_fields = ('small_image',)
    extra = 1
@admin.register(models.Product)
class ProductAdmin(admin.ModelAdmin):
    ''' product admin: changelist with inline images and inline editing '''
    inlines = (ProductImagesInstanceInline,)
    list_display = (
        'view_name_and_price',
        'title',
        'price',
        'category',
        'updated',
        'created')
    # title/price editable straight from the changelist (must not be the first
    # list_display column, which is the link column)
    list_editable = ('title', 'price')
    search_fields = ('title', 'description')
    list_filter = ('category', 'updated', 'created')

    def view_name_and_price(self, obj):
        ''' view name and price in one field '''
        return "Item: {} Price: {} usd".format(obj.title, obj.price)
@admin.register(models.ProductImage)
class ProductImageAdmin(admin.ModelAdmin):
    ''' product image admin: flat changelist of all images '''
    list_display = ('product', 'description', 'image')
@admin.register(models.Category)
class CategoryAdmin(admin.ModelAdmin):
    ''' category admin '''
    list_display = ('title',)
|
andrewzweb/TiShop | base_app/functional_tests/base.py | <filename>base_app/functional_tests/base.py
''' base module which use in functional tests'''
from selenium import webdriver
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
import time
import os
MAX_WAIT = 10
class FunctionalTest(StaticLiveServerTestCase):
    """Base class for selenium functional tests: browser lifecycle + retry helper."""

    def setUp(self):
        self.browser = webdriver.Firefox()
        # allow pointing the suite at a deployed server instead of the test one
        staging_server = os.environ.get('STAGING_SERVER')
        if staging_server:
            self.live_server_url = 'http://' + staging_server

    def tearDown(self):
        self.browser.quit()

    def wait(fn):
        ''' decorator: retry *fn* until it stops raising or MAX_WAIT elapses '''
        def modified_fn(*args, **kwargs):
            # the module top only imports `webdriver`, so referencing
            # WebDriverException in the except clause raised NameError the
            # moment a retry was actually needed -- import it here
            from selenium.common.exceptions import WebDriverException
            start_time = time.time()
            while True:
                try:
                    return fn(*args, **kwargs)
                except (AssertionError, WebDriverException) as e:
                    if time.time() - start_time > MAX_WAIT:
                        raise e
                    time.sleep(0.5)
        return modified_fn

    @wait
    def wait_for(self, fn):
        # NOTE(review): callers pass an already-evaluated expression, so the
        # retry loop cannot re-run it; true polling would require callers to
        # pass a callable -- confirm intent before changing the call sites
        return fn
|
andrewzweb/TiShop | base_app/base_app/test_settings.py | from .settings import *
DATABASE = {
"default":{
"ENGINE": 'django.db.backend.sqlite3',
"NAME": 'memory',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
|
andrewzweb/TiShop | base_app/product/migrations/0008_auto_20210323_1838.py | # Generated by Django 3.1.7 on 2021-03-23 18:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: create Category and link Product.category (PROTECT)."""

    dependencies = [
        ('product', '0007_productimage_product'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('slug', models.SlugField(blank=True, null=True)),
            ],
        ),
        migrations.AddField(
            model_name='product',
            name='category',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='product.category'),
        ),
    ]
|
andrewzweb/TiShop | base_app/functional_tests/test_product_update.py | ''' functional test to update product '''
import os
import time
import pytest
from django.urls import reverse
from .base import FunctionalTest
from selenium.webdriver.support.ui import Select
from product import models
class ProductUpdateTest(FunctionalTest):
    """ testcase update product: adding the first picture through the formset """

    def setUp(self):
        ''' new setUp: base browser setup plus one product to edit '''
        super(ProductUpdateTest, self).setUp()
        self.product_item = models.Product.objects.create(title='product')

    @pytest.mark.integration
    def test_update_product_add_first_product_picture(self):
        ''' Rob want add picture to product '''
        # get page where you Rob can update product
        self.browser.get(self.live_server_url +
                         reverse('product:update', kwargs={'product_slug': self.product_item.slug}))
        # write description for file
        picture_description = 'first picture'
        self.wait_for(
            self.browser.find_element_by_id('inputPictureDescription-0').send_keys(picture_description))
        # input file (fixture image shipped next to the tests)
        self.browser.find_element_by_id("inputPictureFile-0").send_keys(
            os.getcwd() + "/functional_tests/image.jpg")
        # click to button for update product
        self.browser.find_element_by_id('buttonUpdateProduct').click()
        # in db save Picture
        assert models.ProductImage.objects.count() == 1
        # in db first Picture have description which text Rob
        assert models.ProductImage.objects.first().description == picture_description
        # Rob see two forms for picture (the saved one + the extra blank form)
        picture_description_on_page = self.wait_for(
            self.browser.find_elements_by_class_name('picture-description'))
        assert len(picture_description_on_page) == 2
        # And one of them have texted description which write Rob
        description_in_first_form = self.wait_for(
            self.browser.find_element_by_id('inputPictureDescription-0').get_attribute('value'))
        assert picture_description in description_in_first_form
|
andrewzweb/TiShop | base_app/product/migrations/0004_productimage_image.py | <gh_stars>0
# Generated by Django 3.1.7 on 2021-03-23 17:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add required ProductImage.image (upload_to='product')."""

    dependencies = [
        ('product', '0003_productimage_description'),
    ]

    operations = [
        migrations.AddField(
            model_name='productimage',
            name='image',
            field=models.ImageField(default=None, upload_to='product'),
            preserve_default=False,
        ),
    ]
|
andrewzweb/TiShop | base_app/product/tests/test_views.py | ''' test views '''
from faker import Faker
import random
import pytest
from django.core.files.uploadedfile import SimpleUploadedFile
from django.urls import reverse
from django.test import RequestFactory
from mixer.backend.django import mixer
pytestmark = pytest.mark.django_db
faker = Faker()
from .. import views
from ..models import Product, ProductImage, Category
class TestHome:
    ''' test home view: status code and greeting text '''

    def setup(self):
        ''' set up '''
        self.req = RequestFactory()

    def test_get_home_view(self):
        ''' test get home view '''
        # convention in this module: the factory instance is passed straight
        # to the view (the view never reads request attributes)
        resp = views.home(self.req)
        assert resp.status_code == 200

    def test_show_greatings_in_view(self):
        ''' test show greatings in view '''
        resp = views.home(self.req)
        greatings = 'Hello you are at home'
        assert greatings in str(resp.content)
class TestProductListView:
    ''' testcase product list: every product shows title, slug and price '''

    def setup(self):
        ''' set up '''
        self.req = RequestFactory()
        self.products = mixer.cycle(5).blend('product.Product')

    def test_get_product_list(self):
        ''' test get product list '''
        resp = views.product_all(self.req)
        assert resp.status_code == 200

    def test_view_show_all_product(self):
        ''' test view show all product '''
        resp = views.product_all(self.req)
        for product in self.products:
            assert product.title in str(resp.content)
            assert product.slug in str(resp.content)
            assert str(product.price) in str(resp.content)
class TestProductDetailView:
    ''' test product detail view: a random product renders title and price '''

    def setup(self):
        ''' set up '''
        # fix: the original stored the RequestFactory CLASS (missing
        # parentheses); instantiate it like the sibling test classes do
        self.req = RequestFactory()
        self.products = mixer.cycle(5).blend('product.Product')

    def test_get_product_detail(self):
        ''' test view show all product '''
        product = random.choice(self.products)
        resp = views.product_detail(self.req, product_slug=product.slug)
        assert product.title in str(resp.content)
        assert str(product.price) in str(resp.content)
class TestProductDeleteView:
    ''' test product delete view: GET confirms, POST deletes and redirects '''

    def setup(self):
        ''' set up '''
        self.req = RequestFactory()
        self.products = mixer.cycle(5).blend('product.Product')
        assert Product.objects.count() == 5

    def test_get_page_delete_product(self):
        ''' test delete product if method show all product '''
        product = random.choice(self.products)
        req = self.req.get(reverse('product:delete', kwargs={'product_slug': product.slug}))
        resp = views.product_delete(req, product_slug=product.slug)
        assert resp.status_code == 200
        assert product.title in str(resp.content)
        assert 'Delete' in str(resp.content)
        # GET must not delete anything
        assert Product.objects.count() == 5

    def test_send_post_to_page_delete_and_delete_product(self):
        ''' test view show all product '''
        product = random.choice(self.products)
        req = self.req.post(reverse('product:delete', kwargs={'product_slug': product.slug}))
        resp = views.product_delete(req, product_slug=product.slug)
        # redirect back to the product list after deletion
        assert resp.status_code == 302
        assert Product.objects.count() == 4
class TestProductCreateView:
    ''' test product create view: GET renders, valid POST creates + redirects '''

    def setup(self):
        ''' set up '''
        self.req = RequestFactory()

    def test_create_product_view_return_page(self):
        ''' test create product view return page '''
        req = self.req.get(reverse('product:add'))
        resp = views.product_add(req)
        assert resp.status_code == 200

    def test_create_product_view_send_post_request(self):
        ''' test view show all product '''
        # minimal valid 1x1 GIF so the ImageField validates
        small_gif = (
            b'\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x00\x00\x00\x21\xf9\x04'
            b'\x01\x0a\x00\x01\x00\x2c\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02'
            b'\x02\x4c\x01\x00\x3b'
        )
        image = SimpleUploadedFile('small.gif', small_gif, content_type='image/gif')
        # field names carry the 'product-'/'picture-' form prefixes
        req = self.req.post(
            reverse('product:add'),
            data={
                'product-title': 'Product Title',
                'product-description': 'Product Description',
                'picture-description': 'Picture Description',
                'picture-image': image,
            }
        )
        resp = views.product_add(req)
        assert resp.status_code == 302
        assert Product.objects.count() == 1
class TestProductUpdateView:
    ''' test product update view: GET renders formset, POST saves product + picture '''

    def setup(self):
        ''' set up '''
        self.req = RequestFactory()
        self.product = mixer.blend('product.Product')
        # minimal valid 1x1 GIF for ImageField uploads
        small_gif = (
            b'\x47\x49\x46\x38\x39\x61\x01\x00\x01\x00\x00\x00\x00\x21\xf9\x04'
            b'\x01\x0a\x00\x01\x00\x2c\x00\x00\x00\x00\x01\x00\x01\x00\x00\x02'
            b'\x02\x4c\x01\x00\x3b'
        )
        self.image = SimpleUploadedFile('small.gif', small_gif, content_type='image/gif')

    def test_get_page(self):
        ''' test create product view return page '''
        req = self.req.get(reverse('product:update', kwargs={'product_slug': self.product.slug}))
        resp = views.product_update(req, self.product.slug)
        assert resp.status_code == 200

    def test_render_form_data(self):
        '''test render form data: every existing picture appears in the formset'''
        pic1 = ProductImage.objects.create(description='pic1', image=self.image, product=self.product)
        pic2 = ProductImage.objects.create(description='pic2', image=self.image, product=self.product)
        pic3 = ProductImage.objects.create(description='pic3', image=self.image, product=self.product)
        pic4 = ProductImage.objects.create(description='pic4', image=self.image, product=self.product)
        pic5 = ProductImage.objects.create(description='pic5', image=self.image, product=self.product)
        pictures = [pic1, pic2, pic3, pic4, pic5]
        assert ProductImage.objects.count() == 5
        assert pic1.product == self.product
        req = self.req.get(
            reverse('product:update', kwargs={'product_slug': self.product.slug}))
        resp = views.product_update(req, product_slug=self.product.slug)
        for picture in pictures:
            assert picture.description in str(resp.content)

    def test_update_product_view_send_post_request(self):
        ''' test view show all product '''
        new_product_title = 'New Product Title'
        new_product_description = 'Product Description'
        picture_description = 'Picture Description'
        picture_image = self.image
        # formset management data (form-TOTAL_FORMS etc.) is required by Django
        req = self.req.post(
            reverse('product:update', kwargs={'product_slug': self.product.slug}),
            data={
                'product-title': new_product_title,
                'product-description': new_product_description,
                'form-TOTAL_FORMS': 1,
                'form-INITIAL_FORMS': 0,
                'form-0-id': '',
                'form-0-description': picture_description,
                'form-0-image': picture_image,
                'form-0-DELETE': '',
            }
        )
        resp = views.product_update(req, product_slug=self.product.slug)
        assert resp.status_code == 302, 'views redirect in succses update'
        assert Product.objects.count() == 1
        assert ProductImage.objects.count() == 1
        assert Product.objects.first().title == new_product_title
        assert ProductImage.objects.first().description == picture_description
        assert ProductImage.objects.first().product == Product.objects.first()
|
andrewzweb/TiShop | base_app/functional_tests/test_product_detail.py | <gh_stars>0
''' functional test product detail '''
import pytest
from django.urls import reverse
from .base import FunctionalTest
from mixer.backend.django import mixer
pytestmark = pytest.mark.django_db
class ProductDetailTest(FunctionalTest):
    """Selenium functional test for the product detail page."""

    def setUp(self):
        """Create one product with a known title, description and price."""
        super(ProductDetailTest, self).setUp()
        self.product_title = 'Product Title'
        self.product_description = 'description'
        self.product_price = 120
        self.product = mixer.blend('product.Product',
                                   title=self.product_title,
                                   description=self.product_description,
                                   price=self.product_price
                                   )

    @pytest.mark.integration
    def test_get_detail_page(self):
        """The detail page shows the product's title, price and description."""
        # Rob get product
        self.browser.get(self.live_server_url +
                         reverse('product:detail',
                                 kwargs={'product_slug': self.product.slug}))
        # Rob see product title
        # NOTE(review): wait_for() is handed the *already evaluated* element
        # text, not a callable -- if base.FunctionalTest.wait_for expects a
        # lambda to retry, these lookups never actually wait. Confirm against
        # the base class.
        created_product_title = self.wait_for(
            self.browser.find_element_by_id('product-title').text)
        # it's the same product-title
        assert created_product_title == self.product_title
        # Rob check prise
        created_product_price = self.wait_for(
            self.browser.find_element_by_id('product-price').text)
        assert str(self.product_price) in created_product_price
        # Rob check description
        created_product_description = self.wait_for(
            self.browser.find_element_by_id('product-description').text)
        assert self.product_description in created_product_description
|
brulato/python_conferearquivos | arquivos.py | from pathlib import Path
import os.path

# Pasta onde o script roda; o log e a lista de relatorios ficam aqui.
# (Path comes from the ``pathlib`` import at the top of the file.)
mypath = Path().absolute()
caminho = format(mypath)

# Log file recording every missing report.
NomeLog = 'log.txt'
# os.path.join is portable; the original hard-coded the Windows '\\' separator.
CaminhoLog = os.path.join(caminho, NomeLog)

# File listing the report numbers to check, one per line.
arquivo = os.path.join(caminho, 'pasta.txt')

# Open the log ONCE in write mode (truncates any previous run) and keep a
# single handle. The original reopened the log in append mode for every
# missing report -- leaking one file handle per miss -- and mixed encodings
# (utf-8 on create, locale default on append).
with open(CaminhoLog, 'w', encoding='utf-8') as log, \
        open(arquivo, 'r') as linhas:
    for line in linhas:
        texto = line.strip() + '.pdf'
        # NOTE: isfile() is resolved relative to the current working
        # directory, as in the original -- assumes the PDFs live there.
        if not os.path.isfile(texto):
            log.write("Arquivo não encontrado: " + texto + '\n')
jgrundstad/KLC_tracker | klc/tracker/forms.py | from django import forms
from django.forms import ModelForm
from tracker.models import Item
class ItemForm(ModelForm):
    """Form for creating/editing a tracker Item (used by the new_item views)."""

    class Meta:
        model = Item
        # 'date' is auto_now_add on the model, so it is not user-editable here.
        fields = ['proceeding', 'contacts', 'name', 'notes']
|
jgrundstad/KLC_tracker | klc/tracker/migrations/0005_auto_20150130_0136.py | <filename>klc/tracker/migrations/0005_auto_20150130_0136.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename Item.time_spent to minutes_spent and loosen two Client fields.

    Auto-generated by Django; applied migrations should not be edited by hand.
    """

    dependencies = [
        ('tracker', '0004_auto_20150129_1836'),
    ]

    operations = [
        migrations.RenameField(
            model_name='item',
            old_name='time_spent',
            new_name='minutes_spent',
        ),
        migrations.AlterField(
            model_name='client',
            name='email',
            field=models.EmailField(max_length=128, blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='client',
            name='last_name',
            # NOTE(review): help_text=b'doing' looks like a stray test value
            # that was in the model when this migration was generated.
            field=models.CharField(help_text=b'doing', max_length=128),
            preserve_default=True,
        ),
    ]
|
jgrundstad/KLC_tracker | klc/tracker/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema: Client, Item and Proceeding tables.

    Auto-generated by Django; applied migrations should not be edited by hand.
    """

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Client',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('first_name', models.CharField(max_length=128)),
                ('last_name', models.CharField(max_length=128)),
                ('company', models.CharField(max_length=128, blank=True)),
                ('address1', models.CharField(max_length=128, blank=True)),
                ('address2', models.CharField(max_length=128, blank=True)),
                ('city', models.CharField(max_length=64, blank=True)),
                ('state', models.CharField(max_length=16, blank=True)),
                ('phone1', models.CharField(max_length=24, blank=True)),
                ('phone2', models.CharField(max_length=24, blank=True)),
                ('fax', models.CharField(max_length=24, blank=True)),
                ('email', models.CharField(max_length=128, blank=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=256)),
                ('date', models.DateTimeField()),
                ('time_spent', models.IntegerField(blank=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Proceeding',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # NOTE(review): max_length=1 for a proceeding *name* is odd;
                # later migrations/models use 128.
                ('name', models.CharField(max_length=1)),
                ('start_date', models.DateTimeField()),
                ('archive', models.CharField(default=b'N', max_length=1, choices=[(b'Y', b'Y'), (b'N', b'N')])),
                ('client', models.ForeignKey(to='tracker.Client')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='item',
            name='proceeding',
            field=models.ForeignKey(to='tracker.Proceeding'),
            preserve_default=True,
        ),
    ]
|
jgrundstad/KLC_tracker | klc/tracker/admin.py | from django.contrib import admin
from tracker.models import Proceeding, Contact, Item, Code
class ProceedingAdmin(admin.ModelAdmin):
    """Admin changelist for proceedings: name, owning contact and start date."""
    list_display = ('name', 'contact', 'start_date')
class ItemAdmin(admin.ModelAdmin):
    """Admin changelist for items with a truncated preview of the notes."""

    list_display = ('proceeding', 'date', 'name', 'snippet')

    def snippet(self, item):
        """Show the first 24 characters of the item's notes."""
        preview = item.notes[0:24]
        return "%s..." % preview

    snippet.short_description = 'notes snippet'
class ContactAdmin(admin.ModelAdmin):
    """Admin changelist for contacts with a combined "Last, First" column."""

    list_display = ('full_name', 'short_name', 'role', 'phone1', 'email')

    def full_name(self, contact):
        """Render the contact as "last, first" for the list column."""
        name_parts = (contact.last_name, contact.first_name)
        return "%s, %s" % name_parts

    full_name.short_description = 'full name'
class CodeAdmin(admin.ModelAdmin):
    """Admin changelist for shorthand codes with a preview of the expansion."""

    list_display = ('code', 'snippet')

    def snippet(self, code_obj):
        """Show the first 30 characters of the code's expansion text."""
        preview = code_obj.value[0:30]
        return "%s..." % preview

    snippet.short_description = 'code snippet'
# Register your models here.
# Attach each model to the admin site with its customised ModelAdmin above.
admin.site.register(Proceeding, ProceedingAdmin)
admin.site.register(Contact, ContactAdmin)
admin.site.register(Item, ItemAdmin)
admin.site.register(Code, CodeAdmin)
|
jgrundstad/KLC_tracker | klc/tracker/views.py | <gh_stars>0
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from forms import ItemForm
from tracker.models import Proceeding, Contact, Item, Code
# Create your views here.
def index(request):
    """Landing page: list all proceedings ordered by start date (oldest first)."""
    context = {
        'proceeding_list': Proceeding.objects.order_by('start_date'),
    }
    return render(request, 'tracker/index.html', context)
def new_item(request, proceeding_id):
    """Render the add-new-item form for a proceeding.

    The actual save happens in ``create_new_item``; this view only shows
    the form.
    """
    # ``request.POST or None`` keeps the form unbound on GET. The original
    # always bound the (empty) POST data, so a plain page load rendered a
    # bound form complete with "this field is required" errors.
    form = ItemForm(request.POST or None)
    context = {'form': form, 'proceeding_id': proceeding_id}
    return render(request, 'tracker/new_item.html', context)
def create_new_item(request, proceeding_id):
    """Validate and save a new Item for a proceeding.

    On a valid POST the item is saved and the browser is redirected to the
    proceeding's item list; otherwise the form (with any validation errors)
    is re-rendered.
    """
    context = {'proceeding_id': proceeding_id}
    if request.method == 'POST':
        form = ItemForm(request.POST)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect(reverse(
                'items', kwargs={'proceeding_id': proceeding_id}))
    else:
        form = ItemForm()
    # Fixes two defects in the original: it called the undefined
    # ``render_to_response`` (NameError at runtime) and never put an invalid
    # bound form back into the context, so validation errors were lost.
    # The template path now matches the ``new_item`` view's convention.
    context['form'] = form
    return render(request, 'tracker/new_item.html', context)
def item(request, item_id):
    """Placeholder detail page for a single item."""
    message = "Inspecting item %s." % item_id
    return HttpResponse(message)
def contact(request, contact_id):
    """Placeholder detail page for a single contact."""
    message = "Inspecting contact %s." % contact_id
    return HttpResponse(message)
def items(request, proceeding_id):
    """Render all items for a proceeding with expanded notes and contact info.

    Builds parallel lists (notes with shorthand codes expanded, textarea line
    counts, (short_name, email-card) pairs per contact) and zips them with
    the items for the template.
    """
    items = Item.objects.filter(proceeding=proceeding_id).order_by('date')
    line_count_list = list()
    notes_list = list()
    contact_list = list()
    # get codes: every stored shorthand, expanded into the notes below
    codes = Code.objects.all()
    for item in items:
        # replace code: substitute each code with its full text in the notes
        n = item.notes
        for c in codes:
            n = n.replace(c.code, c.value)
        notes_list.append(n)
        # get line counts for sizing textareas (+3 rows of padding)
        count = str((item.notes).count('\n') + 3)
        line_count_list.append(count)
        # get list of contacts: short names plus a "Last, First / email /
        # phone" card shown alongside
        contacts = list()
        emails = list()
        for c in item.contacts.all():
            #contacts.append(c.short_name + '\n' + c.email)
            contacts.append(c.short_name)
            emails.append("%s, %s\n%s\n%s" % (c.last_name, c.first_name,
                                              c.email, c.phone1))
        contact_list.append(zip(contacts, emails))
    # NOTE(review): on Python 3 zip() returns a one-shot iterator, so the
    # template could iterate item_list only once -- confirm target Python
    # version (the old-style imports suggest Python 2).
    item_list = zip(items, notes_list, line_count_list, contact_list)
    # get the proceeding name from the id to add to the heading
    p_name = Proceeding.objects.filter(id=proceeding_id)[0].name
    context = {'item_list': item_list, 'proceeding': p_name, 'proceeding_id': proceeding_id}
    return render(request, 'tracker/items.html', context)
|
jgrundstad/KLC_tracker | klc/tracker/migrations/0013_auto_20150218_1949.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Rename Item.contact (M2M) to Item.contacts to reflect plurality.

    Auto-generated by Django; applied migrations should not be edited by hand.
    """

    dependencies = [
        ('tracker', '0012_auto_20150218_1939'),
    ]

    operations = [
        migrations.RenameField(
            model_name='item',
            old_name='contact',
            new_name='contacts',
        ),
    ]
|
jgrundstad/KLC_tracker | klc/tracker/migrations/0012_auto_20150218_1939.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Replace the Client model with a richer Contact model.

    Creates Contact, drops Client (and its FK from Proceeding), links items
    to contacts (M2M) and gives each proceeding an owning contact.
    Auto-generated by Django; applied migrations should not be edited by hand.
    """

    dependencies = [
        ('tracker', '0011_auto_20150130_0207'),
    ]

    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('first_name', models.CharField(max_length=128, blank=True)),
                ('last_name', models.CharField(max_length=128, blank=True)),
                ('short_name', models.CharField(max_length=32, blank=True)),
                ('role', models.CharField(max_length=128, blank=True)),
                ('company', models.CharField(max_length=128, blank=True)),
                ('address1', models.CharField(max_length=128, blank=True)),
                ('address2', models.CharField(max_length=128, blank=True)),
                ('city', models.CharField(max_length=64, blank=True)),
                ('state', models.CharField(max_length=16, blank=True)),
                ('zipcode', models.CharField(max_length=12, blank=True)),
                ('phone1', models.CharField(max_length=24, blank=True)),
                ('phone2', models.CharField(max_length=24, blank=True)),
                ('fax', models.CharField(max_length=24, blank=True)),
                ('email', models.EmailField(max_length=128, blank=True)),
                # NOTE(review): blank=b'True' is a (truthy) bytestring, not
                # the boolean True -- mirrors the bug in models.Contact.
                ('comment', models.TextField(blank=b'True')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.RemoveField(
            model_name='proceeding',
            name='client',
        ),
        migrations.DeleteModel(
            name='Client',
        ),
        migrations.AddField(
            model_name='item',
            name='contact',
            field=models.ManyToManyField(to='tracker.Contact'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='proceeding',
            name='contact',
            # default=1 assumes a Contact with pk=1 exists when migrating.
            field=models.ForeignKey(default=1, to='tracker.Contact'),
            preserve_default=False,
        ),
    ]
|
jgrundstad/KLC_tracker | klc/tracker/models.py | from django.db import models
from django.forms import ModelForm
# Create your models here.
class Contact(models.Model):
    """A person involved in a proceeding (client, counsel, etc.)."""
    first_name = models.CharField(max_length=128, blank=True)
    last_name = models.CharField(max_length=128, blank=True)
    short_name = models.CharField(max_length=32, blank=True)
    role = models.CharField(max_length=128, blank=True)
    company = models.CharField(max_length=128, blank=True)
    address1 = models.CharField(max_length=128, blank=True)
    address2 = models.CharField(max_length=128, blank=True)
    city = models.CharField(max_length=64, blank=True)
    state = models.CharField(max_length=16, blank=True)
    zipcode = models.CharField(max_length=12, blank=True)
    phone1 = models.CharField(max_length=24, blank=True)
    phone2 = models.CharField(max_length=24, blank=True)
    fax = models.CharField(max_length=24, blank=True)
    email = models.EmailField(max_length=128, blank=True)
    # Fix: ``blank`` must be the boolean True, not the string 'True' (any
    # non-empty string is truthy, so the old code only worked by accident).
    # makemigrations may emit a cosmetic migration for this change.
    comment = models.TextField(blank=True)

    def __str__(self):
        # Shown as "Last, First" throughout the admin and views.
        return "%s, %s" % (self.last_name, self.first_name)
class Proceeding(models.Model):
    """A proceeding owned by a primary contact, with an archive flag."""
    name = models.CharField(max_length=128)
    contact = models.ForeignKey(Contact)
    start_date = models.DateTimeField()
    # Y/N flag stored as a single char (kept as-is for migration
    # compatibility; a BooleanField would be the modern choice).
    CHOICES = (
        ('Y', 'Y'),
        ('N', 'N'),
    )
    archive = models.CharField(max_length=1, choices=CHOICES, default='N')

    def __str__(self):
        return self.name
class Item(models.Model):
    """A dated work item within a proceeding, linked to involved contacts."""
    proceeding = models.ForeignKey(Proceeding)
    name = models.CharField(max_length=256)
    notes = models.TextField(blank=False, default='')
    # auto_now_add: stamped once at creation; not editable via forms.
    date = models.DateTimeField(auto_now_add=True)
    contacts = models.ManyToManyField(Contact)

    def __str__(self):
        return self.name
class Code(models.Model):
    """A shorthand code expanded to its full text in item notes (see views.items)."""
    code = models.CharField(max_length=32)
    # NOTE(review): max_length on a TextField is enforced by form widgets
    # only, not at the database level -- confirm that is intended.
    value = models.TextField(max_length=512)
|
jgrundstad/KLC_tracker | klc/tracker/urls.py | <gh_stars>0
from django.conf.urls import patterns, url
from tracker import views
# URL routes for the tracker app; names here are used by reverse() in views.
# NOTE(review): django.conf.urls.patterns() is a pre-1.8 idiom (removed in
# Django 1.10) -- this file targets an old Django release.
urlpatterns = patterns('',
    url(r'^$', views.index, name='index'),
    url(r'^(?P<item_id>\d+)/item/$', views.item, name='item'),
    url(r'^(?P<contact_id>\d+)/contact/$', views.contact, name='contact'),
    url(r'^(?P<proceeding_id>\d+)/items/$', views.items, name='items'),
    url(r'^(?P<proceeding_id>\d+)/new_item/$', views.new_item, name='new_item'),
    url(r'^(?P<proceeding_id>\d+)/create_new_item/$', views.create_new_item, name='create_new_item'),
)
|
hillarykhan/DS-Unit-3-Sprint-2-SQL-and-Databases | module1-introduction-to-sql/test_queries.py | <reponame>hillarykhan/DS-Unit-3-Sprint-2-SQL-and-Databases
import sqlite3
# STEP 1: Import library
import sqlite3
from queries import TOTAL_SUBCLASS
# STEP 2: Create a function to create the connection
# STEP 2: Create a function to create the connection
def connect_to_db(db_name="rpg_db.sqlite3"):
    """Open and return a SQLite connection to *db_name*."""
    connection = sqlite3.connect(db_name)
    return connection
def execute_query(cursor, query):
    """Run *query* on *cursor* and return all result rows."""
    # sqlite3's Cursor.execute returns the cursor itself, so the calls chain.
    return cursor.execute(query).fetchall()
# STEP 3: Execute and return query results
# STEP 3: Execute and return query results
if __name__ == "__main__":
    # Connect to DB (creates/opens rpg_db.sqlite3 in the working directory)
    conn = connect_to_db()
    # Create Cursor
    curs = conn.cursor()
    # Execute query and print the raw result rows
    results = execute_query(curs, TOTAL_SUBCLASS)
    print(results)
|
hillarykhan/DS-Unit-3-Sprint-2-SQL-and-Databases | module1-introduction-to-sql/rpg_db_example.py | <filename>module1-introduction-to-sql/rpg_db_example.py<gh_stars>0
"""SQL WORKFLOW EXAMPLE"""
# STEP 1: Import library
import sqlite3
# STEP 2: Create a function to create the connection
# STEP 2: Create a function to create the connection
def connect_to_db(db_name="rpg_db.sqlite3"):
    """Return a connection to the given SQLite database file."""
    db_connection = sqlite3.connect(db_name)
    return db_connection
def execute_query(cursor, query):
    """Execute *query* via *cursor* and return every row of the result."""
    cursor.execute(query)
    rows = cursor.fetchall()
    return rows
# STEP 3: Create Queries
# TOTAL_CHARACTERS: How many total Characters are there?
TOTAL_CHARACTERS = """
SELECT COUNT(*)
FROM charactercreator_character;
"""

# TOTAL_SUBCLASS: How many of each specific subclass?
# Bug fix: the thief join condition was
#   ON thief.character_ptr_id = thief.character_ptr_id
# which is always true for every thief row, so EVERY character matched a
# thief row and subclass-less characters were mislabelled as "thief".
# It now joins on the character's id, like the other subclass joins.
TOTAL_SUBCLASS = """
WITH character_class AS (
SELECT
DISTINCT
character.character_id,
CASE
WHEN cleric.character_ptr_id IS NOT NULL THEN "cleric"
WHEN fighter.character_ptr_id IS NOT NULL THEN "fighter"
WHEN mage.character_ptr_id IS NOT NULL AND necromancer.mage_ptr_id IS NULL THEN "mage"
WHEN necromancer.mage_ptr_id IS NOT NULL THEN "necromancer"
WHEN thief.character_ptr_id IS NOT NULL THEN "thief"
END AS subclass
FROM charactercreator_character character
LEFT OUTER JOIN charactercreator_cleric cleric
ON character.character_id = cleric.character_ptr_id
LEFT OUTER JOIN charactercreator_fighter fighter
ON character.character_id = fighter.character_ptr_id
LEFT OUTER JOIN charactercreator_mage mage
ON character.character_id = mage.character_ptr_id
LEFT OUTER JOIN charactercreator_necromancer necromancer
ON mage.character_ptr_id = necromancer.mage_ptr_id
LEFT OUTER JOIN charactercreator_thief thief
ON character.character_id = thief.character_ptr_id
)
SELECT
subclass,
COUNT(*) AS subclass_count
FROM character_class
GROUP BY subclass;
"""
# TOTAL_ITEMS: How many total Items?
TOTAL_ITEMS = """
SELECT COUNT(*) AS item_count
FROM armory_item;
"""

# WEAPONS: How many of the Items are weapons?
WEAPONS = """
SELECT COUNT(*) AS weapon_count
FROM armory_weapon;
"""

# NON_WEAPONS: How many of the items are not weapons?
# (anti-join: items with no matching weapon row)
NON_WEAPONS = """
SELECT COUNT(*)
FROM armory_item item
LEFT JOIN armory_weapon weapon
ON item.item_id = weapon.item_ptr_id
WHERE weapon.item_ptr_id IS NULL;
"""

# CHARACTER_ITEMS: How many Items does each character have?
# (Return first 20 rows)
CHARACTER_ITEMS = """
SELECT
charactercreator_character_inventory.character_id,
COUNT(*) AS item_count
FROM charactercreator_character_inventory
GROUP BY character_id
LIMIT 20;
"""

# CHARACTER_WEAPONS: How many Weapons does each character have?
# (Return first 20 rows; characters with no weapons are omitted by the
# INNER JOIN)
CHARACTER_WEAPONS = """
SELECT
inventory.character_id,
COUNT(*) AS weapon_count
FROM charactercreator_character_inventory inventory
INNER JOIN armory_weapon weapon
ON inventory.item_id = weapon.item_ptr_id
GROUP BY inventory.character_id
LIMIT 20;
"""

# AVG_CHARACTER_ITEMS: On average, how many Items does each Character have?
AVG_CHARACTER_ITEMS = """
WITH character_items AS (
SELECT
charactercreator_character_inventory.character_id,
COUNT(*) AS item_count
FROM charactercreator_character_inventory
GROUP BY character_id
)
SELECT AVG(item_count) AS items_avg
FROM character_items;
"""

# AVG_CHARACTER_WEAPONS: On average, how many Weapons does each character have?
# (CAST to FLOAT forces real division; characters with no inventory still
# count in the denominator thanks to the LEFT OUTER JOINs)
AVG_CHARACTER_WEAPONS = """
SELECT
CAST(SUM(CASE WHEN weapons.item_ptr_id IS NULL THEN 0 ELSE 1 END) AS FLOAT)/COUNT(DISTINCT(characters.character_id)) AS weapon_avg
FROM charactercreator_character characters
LEFT OUTER JOIN charactercreator_character_inventory inventory
ON characters.character_id = inventory.character_id
LEFT OUTER JOIN armory_weapon weapons
ON inventory.item_id = weapons.item_ptr_id;
"""
# STEP 4: Execute and return query results
# Map of human-readable labels to the query strings above, so the main
# block can run and print every query in one loop.
queries_dict = {'TOTAL_CHARACTERS': TOTAL_CHARACTERS,
                'TOTAL_SUBCLASS': TOTAL_SUBCLASS,
                'TOTAL_ITEMS': TOTAL_ITEMS,
                'WEAPONS' : WEAPONS,
                'NON_WEAPONS' : NON_WEAPONS,
                'CHARACTER_ITEMS' : CHARACTER_ITEMS,
                'CHARACTER_WEAPONS' : CHARACTER_WEAPONS,
                'AVG_CHARACTER_ITEMS' : AVG_CHARACTER_ITEMS,
                'AVG_CHARACTER_WEAPONS' : AVG_CHARACTER_WEAPONS
                }
if __name__ == "__main__":
    # Connect to DB (creates/opens rpg_db.sqlite3 in the working directory)
    conn = connect_to_db()
    # Create Cursor
    curs = conn.cursor()
    # Execute every query above and print its label with the raw rows
    for key in queries_dict.keys():
        print(key, execute_query(curs, queries_dict[key]))
|
hillarykhan/DS-Unit-3-Sprint-2-SQL-and-Databases | module1-introduction-to-sql/buddymove_holidayiq.py | <reponame>hillarykhan/DS-Unit-3-Sprint-2-SQL-and-Databases
import sqlite3
import pandas as pd
# Load the review-count CSV into a DataFrame (one row per user).
buddymovie = pd.read_csv("./buddymove_holidayiq.csv")
print(buddymovie.head())
# Open a connection to a new (blank) database file buddymove_holidayiq.sqlite3
conn = sqlite3.connect("buddymove_holidayiq.sqlite3")
curs = conn.cursor()
# Use df.to_sql (documentation) to insert the data into a new table
# Review in the SQLite3 database
# NOTE(review): to_sql defaults to if_exists='fail', so re-running this
# script against an existing table raises ValueError.
buddymovie.to_sql("buddymove_holidayiq", con=conn)
# Count how many rows you have - it should be 249!
query1 = """
SELECT COUNT(*)
FROM buddymove_holidayiq;
"""
curs.execute(query1)
print(curs.fetchall())
# How many users who reviewed at least 100 Nature in the category also
# reviewed at least 100 in the Shopping category?
# (backticks quote the column name containing a space)
query2 = """
SELECT COUNT(DISTINCT(`User ID`))
FROM buddymove_holidayiq
WHERE Nature >= 100 AND Shopping >= 100;
"""
curs.execute(query2)
print(curs.fetchall())
# (Stretch) What are the average number of reviews for each category?
query3 = """
SELECT
AVG(Sports),
AVG(Religious),
AVG(Nature),
AVG(Theatre),
AVG(Shopping),
AVG(Picnic)
FROM buddymove_holidayiq;
"""
curs.execute(query3)
print(curs.fetchall())
|
hillarykhan/DS-Unit-3-Sprint-2-SQL-and-Databases | module2-sql-for-analysis/lecture_notes.py | <gh_stars>0
import psycopg2
from psycopg2.extras import execute_values
import json
# Connect to ElephantSQL-hosted PostgresSQL
# IMPORTANT: DO NOT COMMIT with credentials hard-coded
# NEED TO SPECIFY CREDENTIALS WITH ENVIRONMENT VARIABLES
# Example of what not to do in your .py:
# DB_NAME = 'kvpmgnub'
# DB_USER = 'kvpmgnub'
# DB_PASSWORD = '<PASSWORD>'
# DB_HOST = 'queenie.db.elephantsql.com'
import os
from dotenv import load_dotenv
# Loads contents of the .env file into the script's environment
load_dotenv()
DB_NAME = os.getenv("DB_NAME")
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")
# Helpful to check that your system knows what the variables are first
# before running additional commands - can be accomplished here:
# print(DB_NAME, DB_USER, DB_PASSWORD, DB_HOST)
# exit()
conn = psycopg2.connect(dbname=DB_NAME, user=DB_USER,
                        password=DB_PASSWORD, host=DB_HOST)
print("CONNECTION", conn)
# A "cursor", a structure to iterate over db records to perform queries
cur = conn.cursor()
print("CURSOR", cur)
# An example query
cur.execute('SELECT * from test_table;')
# Note: nothing happened yet! We need to *fetch* from cursor
# FETCH DATA
result = cur.fetchall()
print("RESULT: ", type(result))
print(result)
### If you see something like (1, 'A row name', None), congrats!
### You've interacted with your database from Python
### FYI: in sqlite: result = cur.execute('SELECT * from test_table;').fetchall()
### chaining commands like this doesn't work with psycopg
# INSERT DATA
# insertion_sql = """
# INSERT INTO test_table (name, data) VALUES -- filling values for the name and data columns (id auto generates)
# ('A row name', null), -- add the data as tuples (first value corresponds to order above (name) second to data)
# ('Another row, with JSON', '{"a": 1, "b": ["dog", "cat", 42], "c": true}'::JSONB -- :: converts/casts data types - in this case from a string to a JSON blob
# );
# """
# cur.execute(insertion_sql)
# Another method to inserting data without hardcoding python dictionaries, etc.
# (object-oriented approach)
my_dict = {"a": 1, "b": ["dog", "cat", 42], "c": True}
# insertion_query = "INSERT INTO test_table (name, data) VALUES (%s, %s)"
# cur.execute(insertion_query, ('A row name', 'null')) # functional applied to object hence object-oriented
# cur.execute(insertion_query, ('Another row, with JSONNN', json.dumps(my_dict))
# Problem is that the above approach does one row at a time
# Passing multiple rows into a table (functional approach):
insertion_query = f"INSERT INTO test_table (name, data) VALUES %s"
execute_values(cur, insertion_query, [  # from psycopg2.extras import execute_values
    ('A rowwwww', 'null'),
    ('Another row, with JSONNNNNN', json.dumps(my_dict)),  # import json
    ('Third row', '3')
])  # data must be in a list of tuples!!!
# FYI: When we modify the contents of a table with a CREATE, DROP, INSERT, etc.
# statement, we need an additional step: commit/save results
# Otherwise, a primary key will be allocated to the rows but they won't actually
# appear in the table.
# Commit(Save) table modifications (in this case, inserting rows):
conn.commit()
# Good practice to all close the cursor and connection
cur.close()
conn.close()
|
hydroshare/dockerspawner | dockerspawner/volumenamingstrategy.py | def default_format_volume_name(template, spawner):
return template.format(username=spawner.user.name)
def escaped_format_volume_name(label_template, spawner):
    """Render *label_template* with the spawner's escaped username.

    Use this strategy if your usernames include illegal characters
    for volume names and you do not use absolute paths in your volume
    label template.
    """
    safe_name = spawner.escaped_name
    return label_template.format(username=safe_name)
|
hydroshare/dockerspawner | dockerspawner/__init__.py | <filename>dockerspawner/__init__.py<gh_stars>1-10
from ._version import __version__
from .dockerspawner import DockerSpawner
from .systemuserspawner import SystemUserSpawner
__all__ = ['__version__', 'DockerSpawner', 'SystemUserSpawner']
|
hydroshare/dockerspawner | tests/volumes_test.py | """Test volume manipulation logic
"""
from __future__ import absolute_import, division, print_function
import types
import pytest
from traitlets.config import LoggingConfigurable
def test_binds(monkeypatch):
    """volume_binds normalises plain-string values to {'bind', 'mode': 'rw'}
    dicts, keeps explicit dicts as-is, and expands {username} templates."""
    import jupyterhub
    # Stub the base Spawner so DockerSpawner can be instantiated without a hub.
    monkeypatch.setattr("jupyterhub.spawner.Spawner", _MockSpawner)
    from dockerspawner.dockerspawner import DockerSpawner

    d = DockerSpawner()
    d.user = types.SimpleNamespace(name='xyz')
    d.volumes = {
        'a': 'b',
        'c': {'bind': 'd', 'mode': 'Z'},
    }
    # Plain string values default to read-write mode.
    assert d.volume_binds == {
        'a': {'bind': 'b', 'mode': 'rw'},
        'c': {'bind': 'd', 'mode': 'Z'},
    }
    d.volumes = {'a': 'b', 'c': 'd', 'e': 'f'}
    assert d.volume_mount_points == ['b', 'd', 'f']
    # {username} placeholders are filled from the spawner's user name.
    d.volumes = {'/nfs/{username}': {'bind': '/home/{username}', 'mode': 'z'}}
    assert d.volume_binds == {'/nfs/xyz': {'bind': '/home/xyz', 'mode': 'z'}}
    assert d.volume_mount_points == ['/home/xyz']
def test_volume_naming_configuration(monkeypatch):
    """A custom format_volume_name callable replaces both host and bind names."""
    from dockerspawner.dockerspawner import DockerSpawner
    d = DockerSpawner()
    d.user = types.SimpleNamespace(name='joe')
    d.volumes = {'data/{username}': {'bind': '/home/{username}', 'mode': 'z'}}

    # Ignores the template entirely -- proves the hook is actually called.
    def test_format(label_template, spawner):
        return "THIS IS A TEST"

    d.format_volume_name = test_format
    assert d.volume_binds == {'THIS IS A TEST':{'bind': 'THIS IS A TEST', 'mode': 'z'}}
    assert d.volume_mount_points == ['THIS IS A TEST']
def test_default_format_volume_name(monkeypatch):
    """Default naming keeps the raw username (including '@' and '.') verbatim."""
    from dockerspawner.dockerspawner import DockerSpawner
    d = DockerSpawner()
    # NOTE(review): the '<EMAIL>' tokens are anonymisation placeholders that
    # replaced the original email-address literals; as written the assertions
    # are internally inconsistent and cannot pass. Restore the original
    # literals (e.g. 'user@email.com') from upstream history.
    d.user = types.SimpleNamespace(name='<EMAIL>')
    d.volumes = {'data/{username}': {'bind': '/home/{username}', 'mode': 'z'}}
    assert d.volume_binds == {'data/user@<EMAIL>.<EMAIL>':{'bind': '/home/<EMAIL>', 'mode': 'z'}}
    assert d.volume_mount_points == ['/home/<EMAIL>']
def test_escaped_format_volume_name(monkeypatch):
    """The escaped strategy substitutes hex-escaped usernames ('@'->_40, '.'->_2E)."""
    import dockerspawner
    from dockerspawner import DockerSpawner
    d = DockerSpawner()
    # NOTE(review): '<EMAIL>' is an anonymisation placeholder; the expected
    # values below imply the original was 'user@email.com'.
    d.user = types.SimpleNamespace(name='<EMAIL>')
    d.volumes = {'data/{username}': {'bind': '/home/{username}', 'mode': 'z'}}
    d.format_volume_name = dockerspawner.volumenamingstrategy.escaped_format_volume_name
    assert d.volume_binds == {'data/user_40email_2Ecom':{'bind': '/home/user_40email_2Ecom', 'mode': 'z'}}
    assert d.volume_mount_points == ['/home/user_40email_2Ecom']
class _MockSpawner(LoggingConfigurable):
    """Minimal stand-in monkeypatched over jupyterhub.spawner.Spawner in tests."""
    pass
|
outini/python-kairosdb | kairosdb/client.py | <reponame>outini/python-kairosdb
# coding: utf-8
#
# KairosDB REST API python client and interface (python-kairosdb)
#
# Copyright (C) 2017 <NAME> (jawa) <<EMAIL>>
#
# This file is part of python-kairosdb
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the MIT License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
#
# You should have received a copy of the MIT License along with this
# program; if not, see <https://opensource.org/licenses/MIT>.
import json
import requests
import logging
from functools import partial
logger = logging.getLogger(__name__)
class KairosDBAPIClient(object):
    """KairosDB API client
    It implements common HTTP methods GET, POST, PUT and DELETE
    This client is using :mod:`requests` package. Please see
    http://docs.python-requests.org/ for more information.
    :param str api_endpoint: KairosDB API endpoint
    :param bool verify: Control SSL certificate validation
    :param int timeout: Request timeout in seconds
    .. method:: get(self, path, data=None, **kwargs)
        Partial method invoking :meth:`~KairosDBAPIClient.request` with
        http method *GET*.
    .. method:: post(self, path, data=None, **kwargs)
        Partial method invoking :meth:`~KairosDBAPIClient.request` with
        http method *POST*.
    .. method:: put(self, path, data=None, **kwargs)
        Partial method invoking :meth:`~KairosDBAPIClient.request` with
        http method *PUT*.
    .. method:: delete(self, path, data=None, **kwargs)
        Partial method invoking :meth:`~KairosDBAPIClient.request` with
        http method *DELETE*.
    """

    def __init__(self, api_endpoint, verify=None, timeout=None):
        """Initialization method"""
        self.verify = verify
        self.timeout = timeout
        self.api_endpoint = api_endpoint
        # Sent with every request; the API speaks JSON exclusively.
        self.request_headers = {
            'User-Agent': 'python-kairosdb',
            'Accept': 'application/json',
            'Content-Type': 'application/json'
        }
        # One session for connection pooling across calls.
        self.r_session = requests.Session()
        # Directly expose common HTTP methods
        self.get = partial(self.request, method='GET')
        self.post = partial(self.request, method='POST')
        self.put = partial(self.request, method='PUT')
        self.delete = partial(self.request, method='DELETE')

    def request(self, path, method, data=None, **kwargs):
        """Handle requests to API
        :param str path: API endpoint's path to request (relative paths are
            joined to :attr:`api_endpoint`; absolute URLs are used as-is)
        :param str method: HTTP method to use
        :param dict data: Data to send (optional)
        :return: Parsed json response as :class:`dict`, always including a
            ``return_code`` key; non-JSON bodies are returned raw under
            ``response``
        Additional named argument may be passed and are directly transmitted
        to :meth:`request` method of :class:`requests.Session` object.
        """
        if not path.startswith('http://') and not path.startswith('https://'):
            url = "%s/%s" % (self.api_endpoint, path)
        else:
            url = path
        if data is None:
            data = {}
        # NOTE(review): the (possibly empty) payload is JSON-serialised for
        # every method, including GET/DELETE -- confirm KairosDB tolerates
        # bodies on those verbs.
        response = self.r_session.request(method, url,
                                          data=json.dumps(data),
                                          headers=self.request_headers,
                                          timeout=self.timeout,
                                          verify=self.verify,
                                          **kwargs)
        # 204 No Content carries no body: synthesise a success dict.
        if response.status_code == 204:
            return {
                'return_code': response.status_code,
                'status': 'success'
            }
        try:
            response_data = {'return_code': response.status_code}
            response_data.update(response.json())
            return response_data
        except ValueError:
            # Body was not valid JSON; hand it back verbatim.
            return {
                'return_code': response.status_code,
                'response': response.text
            }
class KairosDBAPIEndPoint(object):
    """KairosDB API endpoint
    This class do not provide convenience methods :meth:`get`, :meth:`post`,
    :meth:`put` and :meth:`delete`. Those methods should be implemented by
    subclasses.
    :param KairosDBAPIClient api_client: KairosDB API client instance
    .. attribute:: api_client
        :class:`~client.KairosDBAPIClient` instance passed at instantiation.
    .. attribute:: _get
        Alias to :meth:`~KairosDBAPIClient.get` method of :attr:`api_client`
        instance.
    .. attribute:: _post
        Alias to :meth:`~KairosDBAPIClient.post` method of :attr:`api_client`
        instance.
    .. attribute:: _put
        Alias to :meth:`~KairosDBAPIClient.put` method of :attr:`api_client`
        instance.
    .. attribute:: _delete
        Alias to :meth:`~KairosDBAPIClient.delete` method of :attr:`api_client`
        instance.
    """
    # (Docstring fixed: it previously referenced CachetAPIClient, a
    # copy-paste leftover from another project.)

    def __init__(self, api_client):
        """Initialization method"""
        self.api_client = api_client
        self._get = api_client.get
        self._post = api_client.post
        self._put = api_client.put
        self._delete = api_client.delete
|
outini/python-kairosdb | kairosdb/__init__.py | <reponame>outini/python-kairosdb<gh_stars>1-10
# coding: utf-8
#
# KairosDB REST API python client and interface (python-kairosdb)
#
# Copyright (C) 2017 <NAME> (jawa) <<EMAIL>>
#
# This file is part of python-kairosdb
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the MIT License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
#
# You should have received a copy of the MIT License along with this
# program; if not, see <https://opensource.org/licenses/MIT>.
import logging
import fnmatch
from logging.handlers import SysLogHandler
from kairosdb import client
#: Current version of the package as :class:`str`.
VERSION = "0.2.1"
#: Basic logger for KairosDB interface module; populated lazily by
#: :func:`basic_logger` on first call.
LOG = None
def basic_logger():
    """Configure (once) and return the module-wide 'kairosdb' logger.

    First call attaches a DEBUG stream handler and an INFO syslog handler;
    subsequent calls return the cached logger unchanged.

    :return: :class:`logging.Logger` object
    """
    # Fix: ``global LOG`` must appear before LOG is referenced in this scope.
    # The original declared it at the bottom of the function, which raises
    # "SyntaxError: name 'LOG' is used prior to global declaration" on
    # Python 3.6+ and made the whole module unimportable.
    global LOG
    if not LOG:
        logger = logging.getLogger('kairosdb')
        logger.setLevel(logging.DEBUG)
        fmt_syslog = logging.Formatter(
            '%(module)s %(processName)s %(levelname)s: %(message)s')
        fmt_stream = logging.Formatter(
            '%(processName)s %(levelname)s: %(message)s')
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(fmt_stream)
        logger.addHandler(stream_handler)
        # NOTE(review): '/dev/log' is Linux-specific; this raises on
        # platforms without that socket -- confirm deployment targets.
        syslog_handler = SysLogHandler(address='/dev/log')
        syslog_handler.setFormatter(fmt_syslog)
        syslog_handler.setLevel(logging.INFO)
        logger.addHandler(syslog_handler)
        LOG = logger
    return LOG
class KairosDBAPI(client.KairosDBAPIEndPoint):
    """KairosDB API interface

    Read-only metadata (metric names, tag names, tag values) is fetched
    lazily and cached on first access.

    .. attribute:: version
        KairosDB version from API.
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/Version.html
    .. attribute:: health_status
        KairosDB health status from API.
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/Health.html
    .. attribute:: health_check
        KairosDB health check from API.
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/Health.html
    .. attribute:: metricnames
        KairosDB metric names from API.
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/ListMetricNames.html
    .. attribute:: tagnames
        KairosDB tag names from API.
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/ListTagNames.html
    .. attribute:: tagvalues
        KairosDB tag values from API.
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/ListTagValues.html
    """

    def __init__(self, *args, **kwargs):
        """Initialization method"""
        super(KairosDBAPI, self).__init__(*args, **kwargs)
        # Lazy caches for the metadata properties below; populated on
        # first access and reused afterwards.
        self._metricnames = None
        self._tagnames = None
        self._tagvalues = None

    @property
    def version(self):
        """KairosDB version"""
        return self._get('version').get('version')

    @property
    def health_status(self):
        """KairosDB health status"""
        return self._get('health/status')

    @property
    def health_check(self):
        """KairosDB health check"""
        return self._get('health/check')

    @property
    def metricnames(self):
        """Metric names (cached after the first API call)"""
        if not self._metricnames:
            self._metricnames = self._get('metricnames').get('results')
        return self._metricnames

    @property
    def tagnames(self):
        """Tag names (cached after the first API call)"""
        if not self._tagnames:
            self._tagnames = self._get('tagnames').get('results')
        return self._tagnames

    @property
    def tagvalues(self):
        """Tag values (cached after the first API call)"""
        if not self._tagvalues:
            self._tagvalues = self._get('tagvalues').get('results')
        return self._tagvalues

    def search_metrics(self, matches, exclude_matches=None):
        """Search KairosDB metrics using glob matches

        :param list matches: List of glob matches
        :param list exclude_matches: List of glob matches for exclusions
        :return: Matched metric names as :func:`list`
        """
        # Build the exclusion set with an explicit loop; the original
        # used a list comprehension purely for its side effect.
        x_metrics = set()
        if exclude_matches:
            for match in exclude_matches:
                x_metrics.update(fnmatch.filter(self.metricnames, match))
        matched_metrics = []
        for match in matches:
            for metric in fnmatch.filter(self.metricnames, match):
                if metric not in x_metrics:
                    matched_metrics.append(metric)
        return matched_metrics

    def query_metrics(self, data):
        """Get metrics data points

        :param dict data: Data to post for query
        :return: Metric data points as :class:`dict`
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/QueryMetrics.html
        """
        return self._post('datapoints/query', data=data)

    def delete_metric(self, metric_name):
        """Delete a metric and all data points associated with the metric

        :param str metric_name: Name of the metric to delete
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/DeleteMetric.html
        """
        return self._delete('metric/%s' % metric_name)

    def delete_datapoints(self, data):
        """Delete metric data points

        :param dict data: Data to post for query
        .. seealso:: \
        https://kairosdb.github.io/docs/build/html/restapi/DeleteDataPoints.html
        """
        return self._post('datapoints/delete', data=data)
|
outini/python-kairosdb | setup.py | #
# KairosDB REST API python client and interface (python-kairosdb)
#
# Copyright (C) 2017 <NAME> (jawa) <<EMAIL>>
#
# This file is part of python-kairosdb
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the MIT License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
#
# You should have received a copy of the MIT License along with this
# program; if not, see <https://opensource.org/licenses/MIT>.
import os
from distutils.core import setup
if __name__ == '__main__':
    readme_file = os.path.join(os.path.dirname(__file__), 'README.rst')
    release = "0.2.1"
    # Read the long description up front with a context manager; the
    # original leaked the file handle via an inline open(...).read().
    with open(readme_file) as readme:
        long_description = readme.read()
    # NOTE(review): distutils is deprecated (removed in Python 3.12);
    # consider migrating to setuptools.
    setup(
        name="python-kairosdb",
        version=release,
        url="https://github.com/outini/python-kairosdb",
        author="<NAME> (jawa)",
        author_email="<EMAIL>",
        maintainer="<NAME> (jawa)",
        maintainer_email="<EMAIL>",
        description="KairosDB REST API python client and interface",
        long_description=long_description,
        license="MIT",
        platforms=['UNIX'],
        scripts=[],
        packages=['kairosdb'],
        package_dir={'kairosdb': 'kairosdb'},
        data_files=[('share/doc/python-kairosdb', ['README.rst', 'LICENSE'])],
        keywords=['api', 'metrics', 'timeseries', 'python', 'kairosdb'],
        classifiers=[
            'Development Status :: 4 - Beta',
            'Operating System :: POSIX :: BSD',
            'Operating System :: POSIX :: Linux',
            'License :: OSI Approved :: MIT License',
            'Programming Language :: Python :: 3',
            'Intended Audience :: System Administrators',
            'Intended Audience :: Developers',
            'Topic :: Utilities',
            'Topic :: System :: Monitoring'
        ]
    )
|
dmgav/dpcmaps | dpcmaps/dpc_gui.py | #!/usr/bin/env python
"""
Created on May 23, 2013
@author: <NAME> (<EMAIL>)
Computer Science Group, Computational Science Center
Brookhaven National Laboratory
This code is for Differential Phase Contrast (DPC) imaging based on
Fourier-shift fitting implementation.
Reference: Yan, H. et al. Quantitative x-ray phase imaging at the nanoscale by
multilayer Laue lenses. Sci. Rep. 3, 1307; DOI:10.1038/srep01307
(2013).
Test data is available at:
https://docs.google.com/file/d/0B3v6W1bQwN_AdjZwWmE3WTNqVnc/edit?usp=sharing
"""
from __future__ import print_function, division
import os
import sys
import csv
import time
import logging
from datetime import datetime
from functools import wraps
import multiprocessing as mp
import subprocess
from PyQt5 import QtCore
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (
QApplication,
QMainWindow,
QInputDialog,
QFileDialog,
QLabel,
QLineEdit,
QDoubleSpinBox,
QSpinBox,
QComboBox,
QPushButton,
QSizePolicy,
QGroupBox,
QGridLayout,
QListWidget,
QWidget,
QCheckBox,
QTextEdit,
QFrame,
QHBoxLayout,
QSlider,
QAction,
QStyleFactory,
QRubberBand,
QMessageBox,
QMenu,
)
from PyQt5.QtGui import QPalette, QPixmap, QIcon, QPen, QPainter, QTextCursor
import matplotlib.cm as cm
from PIL import Image
import PIL
from skimage import exposure
import numpy as np
import matplotlib as mpl
# This GUI is built on PyQt5, so the Qt5Agg backend must be used; the
# qt4agg backend does not match PyQt5 and was removed from matplotlib.
from matplotlib.backends.backend_qt5agg import (
    FigureCanvasQTAgg as FigureCanvas,
    NavigationToolbar2QT as NavigationToolbar,
)
from matplotlib.figure import Figure
from matplotlib.patches import Rectangle
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import psutil
try:
from tifffile import imsave
havetiff = True
except ImportError as ex:
print("[!] Import error - tifffile not available. Tif files will not be saved")
print("[!] (import error: {})".format(ex))
havetiff = False
import dpcmaps.load_timepix as load_timepix
import h5py
import dpcmaps.dpc_kernel as dpc
import dpcmaps.pyspecfile as pyspecfile
from dpcmaps.db_config.db_config import db
from dpcmaps import __version__
try:
import hxntools
from hxntools.scan_info import ScanInfo
from hxntools.scan_monitor import HxnScanMonitor
# from databroker import DataBroker
except ImportError as ex:
print("[!] Unable to import hxntools-related packages some features will " "be unavailable")
print("[!] (import error: {})".format(ex))
hxntools = None
logger = logging.getLogger(__name__)
get_save_filename = QFileDialog.getSaveFileName
get_open_filename = QFileDialog.getOpenFileName
SOLVERS = [
"Nelder-Mead",
"Powell",
"CG",
"BFGS",
"Newton-CG",
"Anneal",
"L-BFGS-B",
"TNC",
"COBYLA",
"SLS-QP",
"dogleg",
"trust-ncg",
]
TYPES = [
"TIFF",
"Timepix TIFF",
"ASCII",
"HDF5",
"FileStore",
]
roi_x1 = 0
roi_x2 = 0
roi_y1 = 0
roi_y2 = 0
a = None
gx = None
gy = None
phi = None
rx = None
ry = None
CMAP_PREVIEW_PATH = os.path.join(os.path.dirname(__file__), "cmap_previews")
def load_image_pil(path):
    """Read a single image from *path* with the PIL library and return
    it as a 2-D numpy array (rows, cols)."""
    img = Image.open(str(path))  # legacy mode hint: 'I;16B'
    # PIL reports size as (width, height); numpy wants (height, width).
    shape = img.size[::-1]
    return np.array(img.getdata()).reshape(shape)
def load_data_hdf5(file_path):
    """Read the detector data array from an HDF5 file using h5py.

    Expects the NeXus-style layout ``/entry/instrument/detector/data``.

    :param file_path: path to the HDF5 file
    :return: detector data as a numpy array
    """
    # Context manager guarantees the file is closed even if the
    # expected group layout is missing and a KeyError is raised
    # (the original left the handle open on that path).
    with h5py.File(str(file_path), "r") as f:
        data = f["entry"]["instrument"]["detector"]["data"][...]
    return np.array(data)
def load_image_hdf5(file_path):
    """Return the first frame of the detector data stored in *file_path*."""
    frames = load_data_hdf5(file_path)
    return frames[0, :, :]
def load_image_ascii(path):
    """
    Read ASCII images using the csv lib.

    Rows are tab-delimited; the empty trailing field produced by the
    trailing delimiter is dropped (``row[:-1]``).

    :param path: path to the ASCII image file
    :return: image as a 2-D numpy array of float64
    """
    delimiter = "\t"
    data = []
    # Use a context manager so the handle is closed promptly; the
    # original passed open(path) inline and leaked the file object.
    with open(path, newline="") as ascii_file:
        for row in csv.reader(ascii_file, delimiter=delimiter):
            data.append(row[:-1])
    return np.array(data).astype(np.double)
def brush_to_color_tuple(brush):
    """Return the (r, g, b) float components of a QBrush color,
    discarding the alpha channel."""
    red, green, blue, _alpha = brush.color().getRgbF()
    return (red, green, blue)
class MyStream(QtCore.QObject):
    """File-like Qt object used to redirect sys.stdout/sys.stderr text
    into the GUI console through a signal."""
    # Emitted with each chunk of text written to the stream.
    message = QtCore.pyqtSignal(str)
    def __init__(self, parent=None):
        super(MyStream, self).__init__(parent)
    def write(self, message):
        # File-like API: forward written text to any connected slot.
        self.message.emit(str(message))
    def flush(self):
        # File-like API: nothing is buffered, so flushing is a no-op.
        pass
class DPCThread(QtCore.QThread):
    """Qt worker thread that runs the DPC reconstruction (dpc.main)
    without blocking the GUI event loop."""
    def __init__(self, canvas, pool=None, parent=None):
        QtCore.QThread.__init__(self, parent)
        self.canvas = canvas  # canvas the display callback draws on
        self.pool = pool      # optional multiprocessing pool for dpc.main
    # Emitted with (a, gx, gy, phi, rx, ry) so the GUI thread can
    # update plots safely from worker progress/results.
    update_signal = QtCore.pyqtSignal(object, object, object, object, object, object)
    def run(self):
        print("DPC thread started")
        main = DPCWindow.instance
        try:
            # NOTE(review): self.dpc_settings is not set in __init__;
            # it is presumably assigned by the caller before start() --
            # confirm against the start() code path.
            ret = dpc.main(
                pool=self.pool,
                display_fcn=self.update_signal.emit,
                load_image=main.load_image,
                **self.dpc_settings,
            )
            print("DPC finished")
            # Results are published both as module-level globals and as
            # attributes of the main window instance.
            global a
            global gx
            global gy
            global phi
            global rx
            global ry
            a, gx, gy, phi, rx, ry = ret
            main.a, main.gx, main.gy, main.phi, main.rx, main.ry = a, gx, gy, phi, rx, ry
            # Re-enable the result-dependent UI actions now that data exists.
            main.line_btn.setEnabled(True)
            main.reverse_x.setEnabled(True)
            main.reverse_y.setEnabled(True)
            main.swap_xy.setEnabled(True)
            main.save_result_tiff.setEnabled(True)
            main.save_result_txt.setEnabled(True)
            main.hanging_opt.setEnabled(True)
            main.random_processing_opt.setEnabled(True)
            main.pyramid_scan.setEnabled(True)
            main.pad_recon.setEnabled(True)
            # main.direction_btn.setEnabled(True)
            # main.removal_btn.setEnabled(True)
            # main.confirm_btn.setEnabled(True)
        finally:
            # Always clear the running flag, even if dpc.main raised.
            main.set_running(False)
class MplCanvas(FigureCanvas):
    """
    Canvas which embeds a matplotlib figure in a Qt widget.
    """
    def __init__(self, fig=None, parent=None, width=5, height=4, dpi=100):
        """Initialization method

        :param fig: optional existing matplotlib Figure.  When None
            (the default) a new Figure of the given size/dpi is built.
            The original accepted this parameter but silently discarded
            it and always created a fresh Figure.
        :param parent: parent Qt widget
        :param width: figure width in inches (used only when fig is None)
        :param height: figure height in inches (used only when fig is None)
        :param dpi: figure resolution (used only when fig is None)
        """
        if fig is None:
            fig = Figure(figsize=(width, height), dpi=dpi)
        # We want the axes cleared every time plot() is called
        self.axes = fig.add_subplot(1, 1, 1)
        # self.axes.hold(False)
        self.axes.cla()
        FigureCanvas.__init__(self, fig)
        # self.figure
        self.setParent(parent)
        FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding)
        FigureCanvas.updateGeometry(self)
        self._title = ""
        self.title_font = {"family": "serif", "fontsize": 10}
        self._title_size = 0
        self.figure.subplots_adjust(top=0.95, bottom=0.15)
        # Match the figure background to the surrounding Qt window color.
        window_brush = self.window().palette().window()
        fig.set_facecolor(brush_to_color_tuple(window_brush))
        fig.set_edgecolor(brush_to_color_tuple(window_brush))
        self._active = False
    def _get_title(self):
        return self._title
    def _set_title(self, title):
        # Setting the title re-renders it on the axes with the canvas font.
        self._title = title
        if self.axes:
            self.axes.set_title(title, fontdict=self.title_font)
        # bbox = t.get_window_extent()
        # bbox = bbox.inverse_transformed(self.figure.transFigure)
        # self._title_size = bbox.height
        # self.figure.subplots_adjust(top=1.0 - self._title_size)
    title = property(_get_title, _set_title)
class Label(QLabel):
    """QLabel that supports rubber-band ROI selection and single-click
    bad-pixel picking on the reference image.

    Drag positions are published through the module-level globals
    roi_x1/roi_y1/roi_x2/roi_y2 and pushed into the main window's ROI
    spin boxes on release.
    """
    def __init__(self, parent=None):
        super(Label, self).__init__(parent)
        self.rubberBand = QRubberBand(QRubberBand.Rectangle, self)
        self.origin = QtCore.QPoint()
    def mousePressEvent(self, event):
        # Left press starts a rubber-band selection and records the
        # press position in the module-level ROI globals.
        global roi_x1
        global roi_y1
        self.rubberBand.hide()
        if event.button() == Qt.LeftButton:
            self.origin = QtCore.QPoint(event.pos())
            self.rubberBand.setGeometry(QtCore.QRect(self.origin, QtCore.QSize()))
            self.rubberBand.show()
            roi_x1 = event.pos().x()
            roi_y1 = event.pos().y()
    def mouseMoveEvent(self, event):
        # if event.buttons() == QtCore.Qt.NoButton:
        #     pos = event.pos()
        # Grow the rubber band to follow the cursor while dragging.
        if not self.origin.isNull():
            self.rubberBand.setGeometry(QtCore.QRect(self.origin, event.pos()).normalized())
    def mouseReleaseEvent(self, event):
        # A drag (press != release) updates the ROI spin boxes; a plain
        # click marks a bad pixel when picking is active (bad_flag != 0).
        global roi_x2
        global roi_y2
        roi_x2 = event.pos().x()
        roi_y2 = event.pos().y()
        main = DPCWindow.instance
        if (roi_x1, roi_y1) != (roi_x2, roi_y2):
            main.roi_x1_widget.setValue(roi_x1)
            main.roi_y1_widget.setValue(roi_y1)
            main.roi_x2_widget.setValue(roi_x2)
            main.roi_y2_widget.setValue(roi_y2)
        else:
            if main.bad_flag != 0:
                main.bad_pixels_widget.addItem("%d, %d" % (event.pos().x(), event.pos().y()))
                # Zero out every recorded bad pixel (stored as (x, y))
                # in the masked ROI image, then redraw the reference.
                for i in range(len(main.bad_pixels)):
                    main.roi_img_masked[main.bad_pixels[i][1], main.bad_pixels[i][0]] = 0
                main.ax.imshow(
                    main.roi_img_masked, interpolation="nearest", origin="upper", cmap=main._ref_color_map
                )
                main.ref_canvas.draw()
        self.rubberBand.show()
class paintLabel(QLabel):
    """QLabel that paints red center cross-hairs and a dashed black
    border on top of its contents."""
    def __init__(self, parent=None):
        super(paintLabel, self).__init__(parent)
    def paintEvent(self, event):
        super(paintLabel, self).paintEvent(event)
        qp = QPainter()
        qp.begin(self)
        self.drawLine(event, qp)
        qp.end()
    def drawLine(self, event, qp):
        """Draw the overlay: a two-pixel-wide red cross through the
        center and a dashed black rectangle along the border."""
        size = self.size()
        pen = QPen(QtCore.Qt.red)
        qp.setPen(pen)
        # Use integer division: QPainter.drawLine requires int
        # coordinates, and '/' yields a float under Python 3, which
        # raises TypeError in PyQt5.
        qp.drawLine(size.width() // 2, 0, size.width() // 2, size.height() - 1)
        qp.drawLine(size.width() // 2 - 1, 0, size.width() // 2 - 1, size.height() - 1)
        qp.drawLine(0, size.height() // 2, size.width() - 1, size.height() // 2)
        qp.drawLine(0, size.height() // 2 - 1, size.width() - 1, size.height() // 2 - 1)
        # Dashed black frame around the widget edge.
        pen.setStyle(QtCore.Qt.DashLine)
        pen.setColor(QtCore.Qt.black)
        qp.setPen(pen)
        qp.drawLine(0, 0, size.width() - 1, 0)
        qp.drawLine(0, size.height() - 1, size.width() - 1, size.height() - 1)
        qp.drawLine(0, 0, 0, size.height() - 1)
        qp.drawLine(size.width() - 1, 0, size.width() - 1, size.height() - 1)
class DPCWindow(QMainWindow):
CM_DEFAULT = "gray"
def __init__(self, parent=None):
    """Build the main DPC window: internal state, parameter widgets,
    reference-image window, layouts, and menus."""
    QMainWindow.__init__(self, parent)
    # Singleton-style handle used by Label/DPCThread to reach the window.
    DPCWindow.instance = self
    # --- reconstruction / UI state ------------------------------------
    self.bin_num = 2 ** 16
    self._thread = None
    self.ion_data = None
    self.bad_flag = 0
    self.direction = -1  # 1 for horizontal and -1 for vertical
    self.crop_x0 = None
    self.crop_x1 = None
    self.crop_y0 = None
    self.crop_y1 = None
    self.set_roi_enabled = False
    self.his_enabled = False
    self.scan = None
    self.contrastval = 0
    self.histequalization = False
    self.showresiduals = False
    self.running = False
    self.gx, self.gy, self.phi, self.a, self.rx, self.ry = None, None, None, None, None, None
    # --- parameter entry widgets --------------------------------------
    self.file_widget = QLineEdit("Chromosome_9_%05d.tif")
    self.file_widget.setFixedWidth(350)
    self.save_path_widget = QLineEdit("/home")
    self.focus_widget = QDoubleSpinBox()
    self.dx_widget = QDoubleSpinBox()
    self.dy_widget = QDoubleSpinBox()
    self.pixel_widget = QDoubleSpinBox()
    self.energy_widget = QDoubleSpinBox()
    self.rows_widget = QSpinBox()
    self.cols_widget = QSpinBox()
    self.mosaic_x_widget = QSpinBox()
    self.mosaic_y_widget = QSpinBox()
    self.roi_x1_widget = QSpinBox()
    self.roi_x2_widget = QSpinBox()
    self.roi_y1_widget = QSpinBox()
    self.roi_y2_widget = QSpinBox()
    self.strap_start = QSpinBox()
    self.strap_end = QSpinBox()
    self.first_widget = QSpinBox()
    self.first_widget.valueChanged.connect(self._first_changed)
    # Worker-process count defaults to (and is capped at) the CPU count.
    self.processes_widget = QSpinBox()
    self.processes_widget.setMinimum(1)
    self.processes_widget.setValue(psutil.cpu_count())
    self.processes_widget.setMaximum(psutil.cpu_count())
    self.solver_widget = QComboBox()
    for solver in SOLVERS:
        self.solver_widget.addItem(solver)
    self.start_widget = QPushButton("Start")
    self.stop_widget = QPushButton("Stop")
    self.save_widget = QPushButton("Save")
    self.scan_button = QPushButton("Load")
    self.color_map = QComboBox()
    self.update_color_maps()
    self.color_map.currentIndexChanged.connect(self._set_color_map)
    self._color_map = mpl.cm.get_cmap(self.CM_DEFAULT)
    self.ref_color_map = QComboBox()
    self.update_ref_color_maps()
    self.ref_color_map.currentIndexChanged.connect(self._set_ref_color_map)
    self._ref_color_map = mpl.cm.get_cmap(self.CM_DEFAULT)
    self.start_widget.clicked.connect(self.start)
    self.stop_widget.clicked.connect(self.stop)
    self.save_widget.clicked.connect(self.save)
    self.scan_button.clicked.connect(self.load_from_spec_scan)
    self.load_image = load_timepix.load

    def format_coord(x, y):
        # Status-bar readout for the reference axes: include the pixel
        # value when the cursor is inside the image bounds.
        col = int(x + 0.5)
        row = int(y + 0.5)
        if row >= 0 and row < self.roi_img.shape[0] and col >= 0 and col < self.roi_img.shape[1]:
            z = self.roi_img[row, col]
            return "x=%1.4f y=%1.4f v=%1.4f" % (x, y, z)
        else:
            return "x=%1.4f y=%1.4f" % (x, y)

    # --- reference-image figure with draggable ROI rectangle ----------
    self.rect = Rectangle((0, 0), 0, 0, alpha=0.3, facecolor="gray", edgecolor="red", linewidth=2)
    self.ref_fig = plt.figure()
    # self.ref_canvas = MplCanvas(self.ref_fig, width=8, height=10, dpi=50)
    self.ref_canvas = FigureCanvas(self.ref_fig)
    self.ref_fig.subplots_adjust(top=0.99, left=0.01, right=0.99, bottom=0.04)
    self.ref_fig.canvas.mpl_connect("button_press_event", self.on_press)
    self.ref_fig.canvas.mpl_connect("button_release_event", self.on_release)
    self.ref_fig.canvas.mpl_connect("motion_notify_event", self.on_motion)
    self.ax = self.ref_fig.add_subplot(111)
    self.ax.format_coord = format_coord
    self.ax.add_patch(self.rect)
    self.ax.figure.canvas.draw()
    self.ref_toolbar = NavigationToolbar(self.ref_canvas, self)
    # --- reference-window buttons -------------------------------------
    self.his_btn = QPushButton("Equalize")
    self.his_btn.setCheckable(True)
    self.his_btn.clicked[bool].connect(self.histgramEqua)
    self.roi_btn = QPushButton("Set ROI")
    self.roi_btn.setCheckable(True)
    self.roi_btn.clicked[bool].connect(self.set_roi_enable)
    self.bri_btn = QPushButton("Brightest")
    self.bri_btn.clicked.connect(self.select_bri_pixels)
    self.bad_btn = QPushButton("Pick")
    self.bad_btn.setCheckable(True)
    self.bad_btn.clicked[bool].connect(self.bad_enable)
    self.line_btn = QPushButton("Add")
    self.line_btn.setEnabled(False)
    self.line_btn.clicked.connect(self.add_strap)
    direction_text = "\N{CLOCKWISE OPEN CIRCLE ARROW} 90\N{DEGREE SIGN}"
    self.direction_btn = QPushButton(direction_text)
    self.direction_btn.clicked.connect(self.change_direction)
    self.direction_btn.setEnabled(False)
    self.removal_btn = QPushButton("Remove")
    self.removal_btn.clicked.connect(self.remove_background)
    self.removal_btn.setEnabled(False)
    self.confirm_btn = QPushButton("Apply")
    self.confirm_btn.clicked.connect(self.confirm)
    self.confirm_btn.setEnabled(False)
    self.hide_btn = QPushButton("View && set")
    self.hide_btn.setCheckable(True)
    self.hide_btn.clicked.connect(self.hide_ref)
    self.ok_btn = QPushButton("OK")
    self.ok_btn.clicked.connect(self.crop_ok)
    self.cancel_btn = QPushButton("Cancel")
    self.cancel_btn.clicked.connect(self.crop_cancel)
    # Setting widget (QGridLayout) in the bottom of reference image
    self.min_lbl = QLabel("Min")
    self.max_lbl = QLabel("Max")
    self.min_box = QSpinBox()
    self.max_box = QSpinBox()
    self.min_box.setMaximum(self.bin_num)
    self.min_box.setMinimum(0)
    self.max_box.setMaximum(self.bin_num)
    self.max_box.setMinimum(0)
    self.rescale_intensity_btn = QPushButton("Apply")
    self.rescale_intensity_btn.clicked.connect(self.rescale_intensity)
    self.badPixelGbox = QGroupBox("Bad pixels")
    self.badPixelGridLayout = QGridLayout()
    self.badPixelGbox.setLayout(self.badPixelGridLayout)
    bpw = self.bad_pixels_widget = QListWidget()
    # Set the minimum height of the qlistwidget as 1 so that the
    # qlistwidget is always as high as its two side buttons
    bpw.setMinimumHeight(1)
    bpw.setContextMenuPolicy(Qt.CustomContextMenu)
    bpw.customContextMenuRequested.connect(self._bad_pixels_menu)
    self.badPixelGridLayout.addWidget(self.bri_btn, 0, 0)
    self.badPixelGridLayout.addWidget(self.bad_btn, 1, 0)
    self.badPixelGridLayout.addWidget(self.bad_pixels_widget, 0, 1, 2, 1)

    def ref_close(event):
        # Keep the "View && set" toggle in sync when the reference
        # window is closed from its title bar.
        self.hide_btn.setChecked(False)

    self.ref_grid = QGridLayout()
    self.ref_widget = QWidget()
    self.ref_widget.closeEvent = ref_close
    self.ref_widget.setLayout(self.ref_grid)
    self.ref_grid.addWidget(self.ref_canvas, 0, 0, 1, 6)
    self.ref_grid.addWidget(self.ref_toolbar, 1, 0, 1, 6)
    self.ref_grid.addWidget(self.badPixelGbox, 2, 0, 3, 1)
    self.ref_grid.addWidget(self.ref_color_map, 2, 1, 1, 5)
    self.ref_grid.addWidget(self.his_btn, 3, 1, 1, 2)
    self.ref_grid.addWidget(self.roi_btn, 3, 3, 1, 2)
    self.ref_grid.addWidget(self.min_lbl, 4, 1)
    self.ref_grid.addWidget(self.min_box, 4, 2)
    self.ref_grid.addWidget(self.max_lbl, 4, 3)
    self.ref_grid.addWidget(self.max_box, 4, 4)
    self.ref_grid.addWidget(self.rescale_intensity_btn, 4, 5)
    self.file_format_btn = QPushButton("Select")
    # self.file_format_btn.setStyle(WinLayout)
    self.file_format_btn.clicked.connect(self.select_path)
    """
    QGroupBox implementation for image settings
    """
    self.imageSettingGbox = QGroupBox("Image settings")
    self.imageSettingGridLayout = QGridLayout()
    self.imageSettingGbox.setLayout(self.imageSettingGridLayout)
    self.scan_number_lbl = QLabel("Scan number")
    self.roi_x1_lbl = QLabel("ROI X1")
    self.roi_x2_lbl = QLabel("ROI X2")
    self.roi_y1_lbl = QLabel("ROI Y1")
    self.roi_y2_lbl = QLabel("ROI Y2")
    self.img_type_lbl = QLabel("Image type")
    self.pixel_size_lbl = QLabel("Pixel size (um)")
    self.file_name_lbl = QLabel("File name")
    self.first_img_num_lbl = QLabel("First image number")
    self.scan_info_lbl = QLabel("")
    self.scan_info_lbl.setWordWrap(True)
    self.select_ref_btn = QPushButton("Select the reference")
    self.select_ref_btn.clicked.connect(self.select_ref_img)
    self.img_type_combobox = itc = QComboBox()
    for types in TYPES:
        itc.addItem(types)
    itc.currentIndexChanged.connect(self.load_img_method)
    self.first_ref_cbox = QCheckBox("Use as the reference image")
    self.first_ref_cbox.stateChanged.connect(self.first_equal_ref)
    self.use_scan_number_cb = QCheckBox("Read from metadatastore")
    self.use_scan_number_cb.toggled.connect(self._use_scan_number_clicked)
    fs_key_cbox = self.fs_key_cbox = QComboBox()
    fs_key_cbox.currentIndexChanged.connect(self._filestore_key_changed)
    self.load_scan_btn = QPushButton("Load")
    self.load_scan_btn.clicked.connect(self.load_scan_from_mds)
    self.ref_image_path_QLineEdit = QLineEdit("reference image")
    self.ref_image_path_QLineEdit.setFixedWidth(350)
    self.scan_number_text = QLineEdit("3449")
    row = 0
    layout = self.imageSettingGridLayout
    # Metadatastore-based loading rows are only offered when hxntools
    # imported successfully.
    if hxntools is not None:
        layout.addWidget(self.scan_number_lbl, row, 0)
        layout.addWidget(self.scan_number_text, row, 1)
        layout.addWidget(self.load_scan_btn, row, 2)
        layout.addWidget(self.use_scan_number_cb, row, 3)
        row += 1
        layout.addWidget(self.fs_key_cbox, row, 0)
        layout.addWidget(self.scan_info_lbl, row, 1, 1, 3)
        row += 1
    layout.addWidget(self.img_type_lbl, row, 0)
    layout.addWidget(self.img_type_combobox, row, 1)
    layout.addWidget(self.pixel_size_lbl, row, 2)
    layout.addWidget(self.pixel_widget, row, 3)
    row += 1
    layout.addWidget(self.file_name_lbl, row, 0)
    layout.addWidget(self.file_widget, row, 1, 1, 3)
    layout.addWidget(self.file_format_btn, row, 4)
    row += 1
    layout.addWidget(self.first_img_num_lbl, row, 0)
    layout.addWidget(self.first_widget, row, 1)
    layout.addWidget(self.first_ref_cbox, row, 2, 1, 2)
    row += 1
    layout.addWidget(self.select_ref_btn, row, 0)
    layout.addWidget(self.ref_image_path_QLineEdit, row, 1, 1, 3)
    row += 1
    layout.addWidget(self.roi_x1_lbl, row, 0)
    layout.addWidget(self.roi_x1_widget, row, 1)
    layout.addWidget(self.roi_x2_lbl, row, 2)
    layout.addWidget(self.roi_x2_widget, row, 3)
    layout.addWidget(self.hide_btn, row, 4)
    row += 1
    layout.addWidget(self.roi_y1_lbl, row, 0)
    layout.addWidget(self.roi_y1_widget, row, 1)
    layout.addWidget(self.roi_y2_lbl, row, 2)
    layout.addWidget(self.roi_y2_widget, row, 3)
    # QGroupBox implementation for experiment parameters
    self.experimentParaGbox = QGroupBox("Experiment parameters")
    self.experimentParaGridLayout = QGridLayout()
    self.experimentParaGbox.setLayout(self.experimentParaGridLayout)
    self.energy_lbl = QLabel("Energy (keV)")
    self.detector_sample_lbl = QLabel("Detector-sample distance (m)")
    self.x_step_size_lbl = QLabel("X step size (um)")
    self.y_step_size_lbl = QLabel("Y step size (um)")
    self.x_steps_number_lbl = QLabel("Columns (x)")
    self.y_steps_number_lbl = QLabel("Rows (y)")
    self.mosaic_x_size_lbl = QLabel("Mosaic column number")
    self.mosaic_y_size_lbl = QLabel("Mosaic row number")
    self.experimentParaGridLayout.addWidget(self.energy_lbl, 0, 0)
    self.experimentParaGridLayout.addWidget(self.energy_widget, 0, 1)
    self.experimentParaGridLayout.addWidget(self.detector_sample_lbl, 0, 2)
    self.experimentParaGridLayout.addWidget(self.focus_widget, 0, 3)
    self.experimentParaGridLayout.addWidget(self.x_step_size_lbl, 1, 0)
    self.experimentParaGridLayout.addWidget(self.dx_widget, 1, 1)
    self.experimentParaGridLayout.addWidget(self.y_step_size_lbl, 1, 2)
    self.experimentParaGridLayout.addWidget(self.dy_widget, 1, 3)
    self.experimentParaGridLayout.addWidget(self.x_steps_number_lbl, 2, 0)
    self.experimentParaGridLayout.addWidget(self.cols_widget, 2, 1)
    self.experimentParaGridLayout.addWidget(self.y_steps_number_lbl, 2, 2)
    self.experimentParaGridLayout.addWidget(self.rows_widget, 2, 3)
    self.experimentParaGridLayout.addWidget(self.mosaic_x_size_lbl, 3, 0)
    self.experimentParaGridLayout.addWidget(self.mosaic_x_widget, 3, 1)
    self.experimentParaGridLayout.addWidget(self.mosaic_y_size_lbl, 3, 2)
    self.experimentParaGridLayout.addWidget(self.mosaic_y_widget, 3, 3)
    """
    QGroupBox implementation for computation parameters
    """
    self.computationParaGbox = QGroupBox("Computation parameters")
    self.computationParaGridLayout = QGridLayout()
    self.computationParaGbox.setLayout(self.computationParaGridLayout)
    self.solver_method_lbl = QLabel("Solver method")
    self.processes_lbl = QLabel("Processes")
    self.random_processing_checkbox = QCheckBox("Random mode")
    self.hanging_checkbox = QCheckBox("Hanging mode")
    layout = self.computationParaGridLayout
    layout.addWidget(self.solver_method_lbl, 0, 0)
    layout.addWidget(self.solver_widget, 0, 1)
    layout.addWidget(self.processes_lbl, 0, 2)
    layout.addWidget(self.processes_widget, 0, 3)
    # layout.addWidget(self.random_processing_checkbox, 1, 0)
    # layout.addWidget(self.hanging_checkbox, 1, 1)
    layout.addWidget(self.start_widget, 0, 4)
    layout.addWidget(self.stop_widget, 0, 5)
    """
    QGroupBox implementation for console information
    """
    self.consoleInfoGbox = QGroupBox("Console information")
    self.consoleInfoGridLayout = QGridLayout()
    self.consoleInfoGbox.setLayout(self.consoleInfoGridLayout)
    self.console_info = QTextEdit(self)
    self.console_info.setReadOnly(True)
    self.consoleInfoGridLayout.addWidget(self.console_info)
    # --- background-strap removal controls ----------------------------
    self.background_remove_qbox = QGroupBox("Remove background")
    self.background_remove_layout = QGridLayout()
    self.background_remove_qbox.setLayout(self.background_remove_layout)
    self.strap_start_label = QLabel("Start")
    self.strap_end_label = QLabel("End")
    self.background_remove_layout.addWidget(self.strap_start_label, 0, 0)
    self.background_remove_layout.addWidget(self.strap_start, 0, 1)
    self.background_remove_layout.addWidget(self.strap_end_label, 0, 2)
    self.background_remove_layout.addWidget(self.strap_end, 0, 3)
    self.background_remove_layout.addWidget(self.line_btn, 0, 4)
    self.background_remove_layout.addWidget(self.direction_btn, 0, 5)
    self.background_remove_layout.addWidget(self.removal_btn, 0, 6)
    self.background_remove_layout.addWidget(self.confirm_btn, 0, 7)
    # --- result canvas and visualization controls ---------------------
    self.canvas = MplCanvas(width=10, height=12, dpi=50)
    self.toolbar = NavigationToolbar(self.canvas, self)
    self.image_vis_qbox = QGroupBox("Image visualization")
    self.image_vis_layout = QGridLayout()
    self.image_vis_qbox.setLayout(self.image_vis_layout)
    self.image_vis_layout.addWidget(self.toolbar, 0, 0)
    self.image_vis_layout.addWidget(self.color_map, 0, 1)
    line = QFrame()
    line.setFrameShape(QFrame.HLine)
    line.setFrameShadow(QFrame.Sunken)
    self.image_vis_layout.addWidget(line, 1, 0)
    line = QFrame()
    line.setFrameShape(QFrame.HLine)
    line.setFrameShadow(QFrame.Sunken)
    self.image_vis_layout.addWidget(line, 1, 1)
    hboxcb = QHBoxLayout()
    self.cb_histeq = QCheckBox("Histogram Equalization", self)
    self.cb_histeq.setChecked(False)
    self.cb_histeq.stateChanged.connect(self.OnCBHistEqualization)
    hboxcb.addWidget(self.cb_histeq)
    self.cb_resid = QCheckBox("Show Residuals", self)
    self.cb_resid.setChecked(False)
    self.cb_resid.stateChanged.connect(self.OnCBShowResiduals)
    hboxcb.addWidget(self.cb_resid)
    self.image_vis_layout.addLayout(hboxcb, 2, 1)
    hboxslider = QHBoxLayout()
    hboxslider.addWidget(QLabel("Contrast"))
    self.slider_constrast = QSlider(QtCore.Qt.Horizontal)
    self.slider_constrast.setFocusPolicy(QtCore.Qt.StrongFocus)
    self.slider_constrast.setRange(0, 25)
    self.slider_constrast.setValue(0)
    self.slider_constrast.setTickPosition(QSlider.TicksBelow)
    self.slider_constrast.setTickInterval(5)
    hboxslider.addWidget(self.slider_constrast)
    self.slider_constrast.valueChanged[int].connect(self.OnContrastSlider)
    self.image_vis_layout.addLayout(hboxslider, 2, 0)
    self.canvas_QGridLayout = QGridLayout()
    self.canvas_widget = QWidget()
    self.canvas_widget.setLayout(self.canvas_QGridLayout)
    self.canvas_QGridLayout.addWidget(self.canvas, 0, 0, 1, 2)
    self.canvas_QGridLayout.addWidget(self.image_vis_qbox, 1, 0)
    self.canvas_QGridLayout.addWidget(self.background_remove_qbox, 1, 1)
    # --- crop dialog ---------------------------------------------------
    self.crop_widget = QWidget()
    self.crop_layout = QGridLayout()
    self.crop_widget.setLayout(self.crop_layout)
    self.crop_canvas = MplCanvas(width=8, height=8, dpi=50)
    self.crop_fig = self.crop_canvas.figure
    self.crop_fig.subplots_adjust(top=0.95, left=0.05, right=0.95, bottom=0.05)
    self.crop_ax = self.crop_fig.add_subplot(111)
    # self.crop_ax.hold(False)
    self.crop_ax.cla()
    self.crop_layout.addWidget(self.crop_canvas, 0, 0, 1, 2)
    self.crop_layout.addWidget(self.ok_btn, 1, 0)
    self.crop_layout.addWidget(self.cancel_btn, 1, 1)
    self.last_path = ""
    # --- main layout ---------------------------------------------------
    self.main_grid = QGridLayout()
    self.main_widget = QWidget()
    self.main_widget.setLayout(self.main_grid)
    self.main_grid.addWidget(self.imageSettingGbox, 0, 0)
    self.main_grid.addWidget(self.experimentParaGbox, 1, 0)
    self.main_grid.addWidget(self.computationParaGbox, 2, 0)
    self.main_grid.addWidget(self.consoleInfoGbox, 3, 0)
    # Add menu
    self.menu = self.menuBar()
    self.save_result_tiff = QAction("Export to .tiff", self)
    self.save_result_tiff.setEnabled(False)
    self.save_result_tiff.triggered.connect(self.save_file_tiff)
    self.save_result_txt = QAction("Export to .txt", self)
    self.save_result_txt.setEnabled(False)
    self.save_result_txt.triggered.connect(self.save_file_txt)
    self.save_scan_params = QAction("Save scan parameters", self)
    self.save_scan_params.triggered.connect(self.save_params_to_file)
    self.load_scan_params = QAction("Load scan parameters", self)
    self.load_scan_params.triggered.connect(self.load_params_from_file)
    self.start_batch_gui = QAction("Launch DPC Batch GUI", self)
    self.start_batch_gui.triggered.connect(self.launch_batch_gui)
    self.reverse_x = QAction("Reverse gx", self, checkable=True)
    self.reverse_x.triggered.connect(self.reverse_gx)
    self.reverse_x.setEnabled(False)
    self.reverse_y = QAction("Reverse gy", self, checkable=True)
    self.reverse_y.triggered.connect(self.reverse_gy)
    self.reverse_y.setEnabled(False)
    self.swap_xy = QAction("Swap x/y", self, checkable=True)
    self.swap_xy.triggered.connect(self.swap_x_y)
    self.swap_xy.setEnabled(False)
    self.random_processing_opt = QAction("Random mode", self, checkable=True)
    self.hanging_opt = QAction("Hanging mode", self, checkable=True)
    self.pyramid_scan = QAction("Pyramid scan", self, checkable=True)
    self.pad_recon = QAction("Padding mode", self, checkable=True)
    self.pad_recon.triggered.connect(self.padding_recon)
    file_menu = self.menu.addMenu("File")
    file_menu.addAction(self.save_result_tiff)
    file_menu.addAction(self.save_result_txt)
    file_menu.addAction(self.save_scan_params)
    file_menu.addAction(self.load_scan_params)
    file_menu.addAction(self.start_batch_gui)
    option_menu = self.menu.addMenu("Option")
    option_menu.addAction(self.reverse_x)
    option_menu.addAction(self.reverse_y)
    option_menu.addAction(self.swap_xy)
    option_menu.addAction(self.random_processing_opt)
    option_menu.addAction(self.hanging_opt)
    option_menu.addAction(self.pyramid_scan)
    option_menu.addAction(self.pad_recon)
    if hxntools is not None:
        self.monitor_scans = QAction("Monitor acquired scans", self, checkable=True)
        self.monitor_scans.triggered.connect(self.monitor_toggled)
        # NOTE(review): uid_pv is not defined anywhere in this module;
        # this line would raise NameError when hxntools is available --
        # confirm where uid_pv is supposed to come from.
        self.scan_monitor = HxnScanMonitor(uid_pv, db)
        self.scan_monitor.connect("start", self.bs_scan_started)
        self.scan_monitor.connect("stop", self.bs_scan_finished)
        option_menu.addAction(self.monitor_scans)
    self.setCentralWidget(self.main_widget)
    self.setWindowTitle(f"DPC Maps {__version__}")
    # QApplication.setStyle(QStyleFactory.create('Cleanlooks'))
    QApplication.setStyle(QStyleFactory.create("Plastique"))
    # QApplication.setStyle(QStyleFactory.create('cde'))
    self._init_settings()
    # Common range setup for all numeric parameter widgets.
    for w in [
        self.pixel_widget,
        self.focus_widget,
        self.energy_widget,
        self.dx_widget,
        self.dy_widget,
        self.rows_widget,
        self.cols_widget,
        self.roi_x1_widget,
        self.roi_x2_widget,
        self.roi_y1_widget,
        self.roi_y2_widget,
        self.first_widget,
        self.mosaic_x_widget,
        self.mosaic_y_widget,
        self.strap_start,
        self.strap_end,
    ]:
        w.setMinimum(0)
        w.setMaximum(int(2 ** 31 - 1))
        try:
            # Only the QDoubleSpinBox widgets support setDecimals; the
            # QSpinBox widgets raise AttributeError, which is ignored.
            w.setDecimals(3)
        except Exception:
            pass
    for w in [self.strap_start, self.strap_end]:
        w.setMinimum(0)
        w.setMaximum(9999)
    self.load_settings()
def monitor_toggled(self):
pass
def bs_scan_started(self, uid, hxn_info=None, **hdr):
if not self.monitoring:
return
print("Scan started")
self.set_scan_from_scaninfo(ScanInfo(hdr), load_config=True)
def bs_scan_finished(self, uid, hxn_info=None, **hdr):
if not self.monitoring:
return
print("Scan finished")
self.stop()
self.set_scan_from_scaninfo(ScanInfo(hdr), load_config=True)
    def _init_settings(self):
        """Build ``self._settings``: a map of persisted-setting name to a
        ``[getter, setter]`` pair, used by the save/load-settings code to
        move values between the GUI widgets and QSettings.
        """
        def typed_setter(fcn, type_):
            # Coerce the stored value with type_ before handing it to the
            # widget setter (QSettings round-trips values as strings).
            @wraps(fcn)
            def wrapped(value):
                return fcn(type_(value))
            return wrapped

        def checked_setter(widget, offset=1):
            # setChecked from an int-encoded stored value; the per-flag
            # offset compensates for how that flag's value was encoded.
            @wraps(widget.setChecked)
            def wrapped(value):
                widget.setChecked(int(value) + offset)
            return wrapped

        def getter(attr):
            # Read-through to an attribute of self.
            def wrapped():
                return getattr(self, attr)
            return wrapped

        def setter(attr):
            # Write-through to an attribute of self.
            def wrapped(value):
                return setattr(self, attr, value)
            return wrapped

        # NOTE(review): checked_setter offsets differ per flag (1, -1, True)
        # — presumably matching each flag's stored encoding; confirm against
        # the corresponding getters before changing.
        self._settings = {
            "file_format": [getter("file_format"), self.file_widget.setText],
            "save_path": [getter("save_path"), self.save_path_widget.setText],
            "dx": [getter("dx"), setter("dx")],
            "dy": [getter("dy"), setter("dy")],
            "x1": [getter("roi_x1"), typed_setter(self.roi_x1_widget.setValue, int)],
            "y1": [getter("roi_y1"), typed_setter(self.roi_y1_widget.setValue, int)],
            "x2": [getter("roi_x2"), typed_setter(self.roi_x2_widget.setValue, int)],
            "y2": [getter("roi_y2"), typed_setter(self.roi_y2_widget.setValue, int)],
            "pixel_size": [getter("pixel_size"), typed_setter(self.pixel_widget.setValue, float)],
            "focus_to_det": [getter("focus"), typed_setter(self.focus_widget.setValue, float)],
            "energy": [getter("energy"), typed_setter(self.energy_widget.setValue, float)],
            "rows": [getter("rows"), typed_setter(self.rows_widget.setValue, int)],
            "cols": [getter("cols"), typed_setter(self.cols_widget.setValue, int)],
            "mosaic_y": [getter("mosaic_y"), typed_setter(self.mosaic_y_widget.setValue, int)],
            "mosaic_x": [getter("mosaic_x"), typed_setter(self.mosaic_x_widget.setValue, int)],
            "swap": [getter("swap"), checked_setter(self.swap_xy, 1)],
            "reverse_x": [getter("re_x"), checked_setter(self.reverse_x, -1)],
            "reverse_y": [getter("re_y"), checked_setter(self.reverse_y, -1)],
            "random": [getter("random"), checked_setter(self.random_processing_opt, 1)],
            "pyramid": [getter("pyramid"), checked_setter(self.pyramid_scan, 1)],
            "pad": [getter("pad"), checked_setter(self.pad_recon, True)],
            "hang": [getter("hang"), checked_setter(self.hanging_opt, 1)],
            "ref_image": [getter("ref_image"), self.ref_image_path_QLineEdit.setText],
            "first_image": [getter("first_image"), typed_setter(self.first_widget.setValue, int)],
            "processes": [getter("processes"), typed_setter(self.processes_widget.setValue, int)],
            "bad_pixels": [getter("bad_pixels"), self.set_bad_pixels],
            "solver": [getter("solver"), setter("solver")],
            "last_path": [getter("last_path"), setter("last_path")],
            "scan_number": [getter("scan_number"), setter("scan_number")],
            "use_mds": [getter("use_mds"), setter("use_mds")],
            "filestore_key": [getter("filestore_key"), setter("filestore_key")],
            # 'color_map': [lambda: self._color_map,
            #               setter('last_path')],
        }
def _use_scan_number_clicked(self, checked):
self.use_mds = checked
self.file_widget.setEnabled(not self.use_mds)
self.fs_key_cbox.setVisible(self.use_mds)
self.scan_info_lbl.setVisible(self.use_mds)
self.select_ref_btn.setEnabled(not self.use_mds)
self.ref_image_path_QLineEdit.setEnabled(not self.use_mds)
if self.use_mds:
self.img_type_combobox.setCurrentIndex(TYPES.index("FileStore"))
self.first_ref_cbox.setChecked(True)
self.img_type_combobox.setEnabled(not self.use_mds)
@property
def use_mds(self):
if hxntools is None:
return False
return bool(self.use_scan_number_cb.isChecked())
@use_mds.setter
def use_mds(self, checked):
self.use_scan_number_cb.setChecked(bool(checked))
@property
def filestore_key(self):
return str(self.fs_key_cbox.currentText())
@filestore_key.setter
def filestore_key(self, key):
keys = list(sorted(self.scan.filestore_keys))
self.fs_key_cbox.setCurrentIndex(keys.index(key))
    def _load_scan_from_mds(self, scan_id, load_config=True):
        """Fetch the header(s) matching ``scan_id`` from the databroker and
        load the chosen one into the GUI.

        If several headers share the scan id, the user is asked to pick one
        via a dialog; cancelling aborts without changing any state.
        """
        hdrs = list(db(scan_id=scan_id))
        if len(hdrs) == 1:
            hdr = hdrs[0]
        else:
            def get_ts(hdr):
                # Human-readable start timestamp used to label candidates.
                return datetime.fromtimestamp(hdr["start"]["time"])
            scans = ["{} ({})".format(get_ts(hdr), hdr["start"]["uid"]) for hdr in hdrs]
            print("Multiple headers found...")
            s, ok = QInputDialog.getItem(self, "Multiple scans", "Which scan?", scans, 0, False)
            if ok:
                index = scans.index(str(s))
                hdr = hdrs[index]
            else:
                # Dialog cancelled: abort.
                return
        self.set_scan_from_scaninfo(ScanInfo(hdr), load_config=load_config)
    def set_scan_from_scaninfo(self, scan, load_config=True):
        """Adopt ``scan`` as the current scan.

        Refreshes the filestore-key combobox (restoring the previous key if
        it still exists) and, when ``load_config`` is set, derives the step
        sizes and grid shape from the scan metadata.
        """
        self.scan = scan
        selected = self.filestore_key
        self.fs_key_cbox.clear()
        if load_config:
            self.ref_image_path_QLineEdit.setText("")
        # Repopulate the key combo, restoring the old selection if present.
        for i, key in enumerate(sorted(self.scan.filestore_keys)):
            self.fs_key_cbox.addItem(key)
            if key == selected:
                self.fs_key_cbox.setCurrentIndex(i)
        self.scan.key = self.filestore_key
        self.use_mds = True
        if self.scan.dimensions is None or len(self.scan.dimensions) == 0:
            return
        elif not load_config:
            return
        scan_range = self.scan.range
        print("Scan dimensions", self.scan.dimensions)
        print("Scan range:", scan_range)
        self.pyramid_scan.setChecked(self.scan.pyramid)
        if isinstance(scan_range, dict):
            # Per-motor dict -> list ordered like self.scan.motors.
            scan_range = [scan_range[mtr] for mtr in self.scan.motors]
        if len(self.scan.dimensions) == 1:
            # 1D scan: a single row of nx points.
            nx, ny = self.scan.dimensions[0], 1
            if scan_range is not None:
                # NOTE(review): np.diff returns an array, not a scalar —
                # presumably scan_range[0] is a (start, stop) pair; confirm
                # that consumers of self.dx accept a length-1 array.
                self.dx = np.diff(scan_range[0]) / nx
                self.dy = 0.0
        else:
            nx, ny = self.scan.dimensions
            if scan_range is not None:
                self.dx = np.diff(scan_range[0]) / nx
                self.dy = np.diff(scan_range[1]) / ny
        self.cols = nx
        self.rows = ny
        self.scan_info_lbl.setText("Range: {}".format(scan_range))
def load_scan_from_mds(self, **kwargs):
return self._load_scan_from_mds(self.scan_number, **kwargs)
@property
def scan_number(self):
try:
return int(self.scan_number_text.text())
except ValueError:
return None
@scan_number.setter
def scan_number(self, value):
self.scan_number_text.setText(str(value))
def _first_changed(self, event):
if self.use_mds:
self.get_ref_from_mds()
def get_ref_from_mds(self):
if self.scan is None:
return
iter_ = iter(self.scan)
first_image = max((1, self.first_image + 1))
ref_image = None
try:
for i in range(first_image):
ref_image = next(iter_)
except StopIteration:
print("Reference image #{} does not exist with data key {}" "".format(first_image, self.scan.key))
if ref_image is not None:
self.ref_image_path_QLineEdit.setText(ref_image)
def _filestore_key_changed(self, event):
key = self.filestore_key
if self.scan is not None:
self.scan.key = key
print("MDS key set:", key)
self.get_ref_from_mds()
def on_press(self, event):
if event.inaxes:
self.crop_x0 = event.xdata
self.crop_y0 = event.ydata
    def on_release(self, event):
        """Mouse release on the reference image.

        A click in place (no drag) registers a bad pixel when bad-pixel mode
        is on; a drag previews the selected crop rectangle for the ROI.
        """
        self.crop_x1 = event.xdata
        self.crop_y1 = event.ydata
        if event.inaxes:
            if (self.crop_x0, self.crop_y0) == (self.crop_x1, self.crop_y1):
                # No movement since press: treat as a plain click.
                if self.bad_flag:
                    self.bad_pixels_widget.addItem("%d, %d" % (int(round(self.crop_x1)), int(round(self.crop_y1))))
                    # Re-apply the whole bad-pixel list to the masked image.
                    for i in range(len(self.bad_pixels)):
                        self.roi_img_masked[self.bad_pixels[i][1], self.bad_pixels[i][0]] = 0
                    self.ax.imshow(
                        self.roi_img_masked, interpolation="nearest", origin="upper", cmap=self._ref_color_map
                    )
                    self.ref_canvas.draw()
            elif self.set_roi_enabled:
                # Dragged a rectangle: show the crop (equalized variant when
                # histogram equalization is enabled) in the preview window.
                if self.his_enabled:
                    roi_crop = self.roi_img_equ[
                        int(round(self.crop_y0)) : int(round(self.crop_y1)),
                        int(round(self.crop_x0)) : int(round(self.crop_x1)),
                    ]
                else:
                    roi_crop = self.roi_img[
                        int(round(self.crop_y0)) : int(round(self.crop_y1)),
                        int(round(self.crop_x0)) : int(round(self.crop_x1)),
                    ]
                self.crop_ax.imshow(
                    roi_crop,
                    interpolation="nearest",
                    origin="upper",
                    cmap=self._ref_color_map,
                    extent=[
                        int(round(self.crop_x0)),
                        int(round(self.crop_x1)),
                        int(round(self.crop_y0)),
                        int(round(self.crop_y1)),
                    ],
                )
                tfont = {"size": "22", "weight": "semibold"}
                msg = "ROI will be set as (%d, %d) - (%d, %d)" % (
                    int(round(self.crop_x0)),
                    int(round(self.crop_y0)),
                    int(round(self.crop_x1)),
                    int(round(self.crop_y1)),
                )
                self.crop_ax.set_title(msg, **tfont)
                self.crop_canvas.draw()
                self.crop_widget.show()
def on_motion(self, event):
if self.set_roi_enabled and event.button == 1 and event.inaxes:
self.rect.set_width(event.xdata - self.crop_x0)
self.rect.set_height(event.ydata - self.crop_y0)
self.rect.set_xy((self.crop_x0, self.crop_y0))
self.ax.figure.canvas.draw()
def crop_ok(self):
self.roi_x1_widget.setValue(int(round(self.crop_x0)))
self.roi_y1_widget.setValue(int(round(self.crop_y0)))
self.roi_x2_widget.setValue(int(round(self.crop_x1)))
self.roi_y2_widget.setValue(int(round(self.crop_y1)))
self.crop_widget.hide()
def crop_cancel(self):
self.crop_widget.hide()
def set_roi_enable(self, pressed):
if pressed:
self.rect.set_visible(True)
self.ax.figure.canvas.draw()
self.set_roi_enabled = True
else:
self.rect.set_visible(False)
self.ax.figure.canvas.draw()
self.set_roi_enabled = False
    def update_display(self, a, gx, gy, phi, rx, ry, flag=None):
        """Redraw the result canvas with the current reconstruction.

        Parameters
        ----------
        a : intensity image (or line in 1D mode)
        gx, gy : phase-gradient images/lines
        phi : reconstructed phase image, or None (then the phase panel is
            omitted)
        rx, ry : residual-error images (shown only when 'show residuals'
            is enabled)
        flag : when not None (the strap workflow passes "strap"), gradient
            panels are drawn with the background-removal strap overlaid.
        """
        # ax is a pyplot object
        def show_line(ax, line):
            # 1D display: plot the data as a starred line.
            ax.plot(line, "-*")
            # return mpl.pyplot.show()

        # def show_line_T(ax, line):
        #     ax.plot(line)

        def show_image(ax, image):
            # 2D display honoring the histogram-equalization and contrast
            # settings currently selected in the GUI.
            # return ax.imshow(np.flipud(image.T), interpolation='nearest',
            # origin='upper', cmap=cm.Greys_r)
            if image is None:
                print("image is none")
                return
            if self.histequalization:
                return ax.imshow(
                    exposure.equalize_hist(image), interpolation="nearest", origin="upper", cmap=cm.Greys_r
                )
            elif self.contrastval > 0:
                # Rescale to 0..255 and clip highlights via vmax.
                adjustedimage = (image - image.min()) * 255.0 / image.ptp()
                return ax.imshow(
                    adjustedimage,
                    interpolation="nearest",
                    vmax=255 - self.contrastval * 10,
                    origin="upper",
                    cmap=cm.Greys_r,
                )
            else:
                return ax.imshow(image, interpolation="nearest", origin="upper", cmap=cm.Greys_r)

        def show_image_line(ax, image, start, end, direction=1):
            # Like show_image but with the strap region shaded; direction 1
            # draws a horizontal band, -1 a vertical one.
            if direction == 1:
                ax.axhspan(start, end, facecolor="0.5", alpha=0.5)
                return ax.imshow(image, interpolation="nearest", origin="upper", cmap=cm.Greys_r)
            if direction == -1:
                ax.axvspan(start, end, facecolor="0.5", alpha=0.5)
                return ax.imshow(image, interpolation="nearest", origin="upper", cmap=cm.Greys_r)

        tfont = {"size": "28", "weight": "semibold"}
        # plt.hold(True)
        main = DPCWindow.instance
        canvas = self.canvas
        fig = canvas.figure
        fig.clear()
        fig.subplots_adjust(top=0.95, left=0.05, right=0.95, bottom=0.03)
        # Check 2D or 1D mode
        cols_num = main.cols_widget.value()
        rows_num = main.rows_widget.value()
        oned = (cols_num == 1) or (rows_num == 1)
        if oned is True:
            # 1D: three stacked line plots (intensity, gx, gy).
            if cols_num == 1:
                gs = gridspec.GridSpec(3, 1)
                canvas.a_ax = a_ax = fig.add_subplot(gs[0, 0])
                a_ax.set_title("Intensity", **tfont)
                canvas.ima = ima = show_line(a_ax, a)
                canvas.gx_ax = gx_ax = fig.add_subplot(gs[1, 0])
                gx_ax.set_title("Phase gradient (x)", **tfont)
                # canvas.imx = imx = show_image(gx_ax, gx)
                canvas.imx = imx = show_line(gx_ax, gx)
                # fig.colorbar(imx)
                canvas.gy_ax = gy_ax = fig.add_subplot(gs[2, 0])
                gy_ax.set_title("Phase gradient (y)", **tfont)
                # canvas.imy = imy = show_image(gy_ax, gy)
                canvas.imy = imy = show_line(gy_ax, gy)
                # fig.colorbar(imy)
            else:
                # Single-row scan: plot the transposed data.
                gs = gridspec.GridSpec(3, 1)
                canvas.a_ax = a_ax = fig.add_subplot(gs[0, 0])
                a_ax.set_title("Intensity", **tfont)
                canvas.ima = ima = show_line(a_ax, a.T)
                canvas.gx_ax = gx_ax = fig.add_subplot(gs[1, 0])
                gx_ax.set_title("Phase gradient (x)", **tfont)
                canvas.imx = imx = show_line(gx_ax, gx.T)
                canvas.gy_ax = gy_ax = fig.add_subplot(gs[2, 0])
                gy_ax.set_title("Phase gradient (y)", **tfont)
                canvas.imy = imy = show_line(gy_ax, gy.T)
        else:
            # 2D: 2x2 grid, widened to 2x3 when residuals are shown.
            if self.showresiduals:
                gs = gridspec.GridSpec(2, 3)
            else:
                gs = gridspec.GridSpec(2, 2)
            """
            if main.ion_data is not None:
                pixels = a.shape[0] * a.shape[1]
                ion_data = np.zeros(pixels)
                ion_data[:len(main.ion_data)] = main.ion_data
                ion_data[len(main.ion_data):] = ion_data[0]
                ion_data = ion_data.reshape(a.shape)
                min_ = np.min(a[np.where(a > 0)])
                a[np.where(a == 0)] = min_
                canvas.a_ax = a_ax = fig.add_subplot(gs[0, 1])
                a_ax.set_title('a')
                a_data = a / ion_data * ion_data[0]
                canvas.ima = ima = show_image(a_ax, a_data)
                fig.colorbar(ima)
            """
            canvas.a_ax = a_ax = fig.add_subplot(gs[1, 0])
            a_ax.set_title("Intensity", **tfont)
            # a_data = a / ion_data * ion_data[0]
            canvas.ima = ima = show_image(a_ax, a)
            fig.colorbar(ima)
            ima.set_cmap(main._color_map)
            if flag is None:
                # Plain gradient images (no strap overlay).
                canvas.gx_ax = gx_ax = fig.add_subplot(gs[0, 0])
                gx_ax.set_title("Phase gradient (x)", **tfont)
                canvas.imx = imx = show_image(gx_ax, gx)
                # canvas.imx = imx = show_line(gx_ax, gx)
                fig.colorbar(imx)
                imx.set_cmap(main._color_map)
                canvas.gy_ax = gy_ax = fig.add_subplot(gs[0, 1])
                gy_ax.set_title("Phase gradient (y)", **tfont)
                canvas.imy = imy = show_image(gy_ax, gy)
                # canvas.imy = imy = show_line(gy_ax, gy)
                fig.colorbar(imy)
                imy.set_cmap(main._color_map)
                if self.showresiduals:
                    canvas.rx_ax = rx_ax = fig.add_subplot(gs[0, 2])
                    rx_ax.set_title("Residual error (x)", **tfont)
                    canvas.imrx = imrx = show_image(rx_ax, rx)
                    fig.colorbar(imrx)
                    imrx.set_cmap(main._color_map)
                    canvas.ry_ax = ry_ax = fig.add_subplot(gs[1, 2])
                    ry_ax.set_title("Residual error (y)", **tfont)
                    canvas.imry = imry = show_image(ry_ax, ry)
                    fig.colorbar(imry)
                    imry.set_cmap(main._color_map)
            else:
                # Strap mode: shade the strap region on every panel.
                canvas.gx_ax = gx_ax = fig.add_subplot(gs[0, 0])
                gx_ax.set_title("Phase gradient (x)", **tfont)
                main = DPCWindow.instance
                canvas.imx = imx = show_image_line(
                    gx_ax, gx, main.strap_start.value(), main.strap_end.value(), main.direction
                )
                fig.colorbar(imx)
                imx.set_cmap(main._color_map)
                canvas.gy_ax = gy_ax = fig.add_subplot(gs[0, 1])
                gy_ax.set_title("Phase gradient (y)", **tfont)
                canvas.imy = imy = show_image_line(
                    gy_ax, gy, main.strap_start.value(), main.strap_end.value(), main.direction
                )
                fig.colorbar(imy)
                imy.set_cmap(main._color_map)
                if self.showresiduals:
                    canvas.rx_ax = rx_ax = fig.add_subplot(gs[0, 2])
                    rx_ax.set_title("Residual error (x)", **tfont)
                    main = DPCWindow.instance
                    canvas.imrx = imrx = show_image_line(
                        rx_ax, rx, main.strap_start.value(), main.strap_end.value(), main.direction
                    )
                    fig.colorbar(imrx)
                    imrx.set_cmap(main._color_map)
                    canvas.ry_ax = ry_ax = fig.add_subplot(gs[1, 2])
                    ry_ax.set_title("Residual error (y)", **tfont)
                    canvas.imry = imry = show_image_line(
                        ry_ax, ry, main.strap_start.value(), main.strap_end.value(), main.direction
                    )
                    fig.colorbar(imry)
                    imry.set_cmap(main._color_map)
            if phi is not None:
                phi_ax = fig.add_subplot(gs[1, 1])
                canvas.phi_ax = phi_ax
                phi_ax.set_title("Phase", **tfont)
                canvas.imphi = imphi = show_image(phi_ax, phi)
                fig.colorbar(imphi)
                imphi.set_cmap(main._color_map)
        for splot in fig.axes:
            splot.tick_params(axis="both", which="major", labelsize=21)
        canvas.draw()
def add_strap(self, pressed):
"""
Add two lines in the gx and gy
"""
self.confirm_btn.setEnabled(False)
self.direction_btn.setEnabled(True)
self.removal_btn.setEnabled(True)
self.update_display(a, gx, gy, phi, rx, ry, "strap")
def change_direction(self, pressed):
"""
Change the orientation of the strap
"""
self.direction = -self.direction
self.update_display(a, gx, gy, phi, rx, ry, "strap")
def OnContrastSlider(self, pressed):
"""
Change the contrast of the images
"""
self.contrastval = self.slider_constrast.value()
if not self.running:
self.update_display(a, gx, gy, phi, rx, ry)
def OnCBHistEqualization(self, state):
"""
Image histogram equalization
"""
if state == QtCore.Qt.Checked:
self.histequalization = True
else:
self.histequalization = False
if not self.running:
self.update_display(a, gx, gy, phi, rx, ry)
def OnCBShowResiduals(self, state):
"""
Show residual images
"""
if state == QtCore.Qt.Checked:
self.showresiduals = True
else:
self.showresiduals = False
if not self.running:
self.update_display(a, gx, gy, phi, rx, ry)
    def remove_background(self, pressed):
        """
        Remove the background of the phase image
        """
        global gx, gy, phi, rx, ry
        self.confirm_btn.setEnabled(True)
        self.direction_btn.setEnabled(False)
        if self.direction == 1:
            # Horizontal strap: subtract the strap's mean row profile from
            # every row of gx and gy, then re-reconstruct the phase.
            strap_gx = gx[self.strap_start.value() : self.strap_end.value(), :]
            line_gx = np.mean(strap_gx, axis=0)
            self.gx_r = gx - line_gx
            strap_gy = gy[self.strap_start.value() : self.strap_end.value(), :]
            line_gy = np.mean(strap_gy, axis=0)
            self.gy_r = gy - line_gy
            self.phi_r = dpc.recon(self.gx_r, self.gy_r, self.dx_widget.value(), self.dy_widget.value())
            self.update_display(a, self.gx_r, self.gy_r, self.phi_r, rx, ry)
        if self.direction == -1:
            # Vertical strap: same idea along columns; transpose so the
            # column profile broadcasts row-wise, then transpose back.
            strap_gx = gx[:, self.strap_start.value() : self.strap_end.value()]
            line_gx = np.mean(strap_gx, axis=1)
            self.gx_r = np.transpose(gx)
            self.gx_r = self.gx_r - line_gx
            self.gx_r = np.transpose(self.gx_r)
            strap_gy = gy[:, self.strap_start.value() : self.strap_end.value()]
            line_gy = np.mean(strap_gy, axis=1)
            self.gy_r = np.transpose(gy)
            self.gy_r = self.gy_r - line_gy
            self.gy_r = np.transpose(self.gy_r)
            self.phi_r = dpc.recon(self.gx_r, self.gy_r, self.dx_widget.value(), self.dy_widget.value())
            self.update_display(a, self.gx_r, self.gy_r, self.phi_r, rx, ry)
    def confirm(self, pressed):
        """
        Confirm the background removal
        """
        # Commit the background-removed arrays as the current results and
        # snapshot each one to disk (jpg preview + txt data) in the CWD.
        global phi, gx, gy, rx, ry
        phi = self.phi_r
        imsave("phi.jpg", phi)
        np.savetxt("phi.txt", phi)
        gx = self.gx_r
        imsave("gx.jpg", gx)
        np.savetxt("gx.txt", gx)
        gy = self.gy_r
        imsave("gy.jpg", gy)
        np.savetxt("gy.txt", gy)
        # Strap workflow is complete; disable its buttons until re-armed.
        self.confirm_btn.setEnabled(False)
        self.direction_btn.setEnabled(False)
        self.removal_btn.setEnabled(False)
def bad_enable(self, pressed):
"""
Enable or disable bad pixels selection by changing the bad_flag value
"""
self.bad_flag = 1 if pressed else 0
def histgramEqua(self, pressed):
"""
Histogram equalization for the reference image
"""
if pressed:
self.his_enabled = True
im = self.ax.imshow(self.roi_img_equ, interpolation="nearest", origin="upper", cmap=cm.Greys_r)
else:
self.his_enabled = False
im = self.ax.imshow(self.roi_img, interpolation="nearest", origin="upper", cmap=cm.Greys_r)
im.set_cmap(self._ref_color_map)
self.ref_canvas.ref_im = im
self.ref_canvas.draw()
"""
def preContrast(self):
self.contrastImage = self.roi_img.convert('L')
# self.contrastImage = self.roi_img
self.enh = ImageEnhance.Contrast(self.contrastImage)
"""
def rescale_intensity(self):
"""
Stretch or shrink ROI image intensity levels
"""
min_ = self.min_box.value()
max_ = self.max_box.value()
roi_array = exposure.rescale_intensity(self.roi_img, in_range=(min_, max_))
self.ax.imshow(roi_array, interpolation="nearest", origin="upper", cmap=self._ref_color_map)
self.ref_canvas.draw()
    def calHist(self):
        """
        Calculate the histogram of the image used to select ROI
        """
        # Histogram-equalize roi_img via its cumulative distribution function
        # and cache the result for the 'equalize' display mode.
        imhist, bins = np.histogram(self.roi_img, bins=self.bin_num, range=(0, self.bin_num), density=True)
        cdf = imhist.cumsum()
        # Normalize the CDF to the 0..bin_num-1 intensity range.
        cdf = (self.bin_num - 1) * cdf / cdf[-1]
        # cdf = (self.roi_img_max-self.roi_img_min) * cdf / cdf[-1]
        equalizedImg = np.floor(np.interp(self.roi_img, bins[:-1], cdf))
        self.roi_img_equ = np.reshape(equalizedImg, self.roi_img.shape, order="C")
        # skimage histgram equalization
        # img = np.array(self.roi_img.getdata(),
        #                dtype=np.uint16).reshape(self.roi_img.size[1],
        #                                         self.roi_img.size[0])
        # equalizedImg = exposure.equalize_hist(img)
        # scipy.misc.imsave('equalizedImg.tif', equalizedImg)
def select_bri_pixels(self):
"""
Select the bad pixels (pixels with the maximum pixel value)
"""
indices = np.where(self.roi_img == self.roi_img.max())
indices_num = indices[0].size
for i in range(indices_num):
item = "%d, %d" % (indices[1][i], indices[0][i])
self.bad_pixels_widget.addItem(item)
for i in range(len(self.bad_pixels)):
self.roi_img_masked[self.bad_pixels[i][1], self.bad_pixels[i][0]] = 0
self.ax.imshow(self.roi_img_masked, interpolation="nearest", origin="upper", cmap=self._ref_color_map)
self.ref_canvas.draw()
"""
def change_contrast(self, value):
'''
Change the contrast of the ROI image by slider bar
'''
delta = value / 10.0
self.enh.enhance(delta).save('change_contrast.tif')
contrastImageTemp = QPixmap('change_contrast.tif')
self.img_lbl.setPixmap(contrastImageTemp)
"""
"""
def eventFilter(self, source, event):
'''
Event filter to enable cursor coordinates tracking on the ROI image
'''
if (event.type() == QtCore.QEvent.MouseMove and
source is self.ref_canvas):
if event.buttons() == QtCore.Qt.NoButton:
pos = event.pos()
self.txt_lbl.setText('min=%d, max=%d, x=%d, y=%d, value=%d ' %
(self.roi_img_min, self.roi_img_max,
pos.x(), pos.y(),
self.roi_img.getpixel((pos.x(),
pos.y()))))
top_left_x = pos.x()-10 if pos.x()-10>=0 else 0
top_left_y = pos.y()-10 if pos.y()-10>=0 else 0
bottom_right_x = (pos.x()+10 if pos.x()+10<self.roi_img.size[0]
else self.roi_img.size[0]-1)
bottom_right_y = (pos.y()+10 if pos.y()+10<self.roi_img.size[1]
else self.roi_img.size[1]-1)
if (pos.y()-10)<0:
self.temp_lbl.setAlignment(QtCore.Qt.AlignBottom)
if (pos.x()+10)>=self.roi_img.size[0]:
self.temp_lbl.setAlignment(QtCore.Qt.AlignLeft)
if (pos.x()-10)<0:
self.temp_lbl.setAlignment(QtCore.Qt.AlignRight)
if (pos.y()+10)>=self.roi_img.size[1]:
self.temp_lbl.setAlignment(QtCore.Qt.AlignTop)
width = bottom_right_x - top_left_x + 1
height = bottom_right_y - top_left_y+ 1
img_fraction = self.img_lbl.pixmap().copy(top_left_x,
top_left_y, width,
height)
scaled_img_fraction = img_fraction.scaled(width*8, height*8)
self.temp_lbl.setPixmap(scaled_img_fraction)
if (event.type() == QtCore.QEvent.MouseMove and
source is not self.img_lbl):
if event.buttons() == QtCore.Qt.NoButton:
self.txt_lbl.setText('min=%d, max=%d' % (self.roi_img_min,
self.roi_img_max))
self.temp_lbl.clear()
return QDialog.eventFilter(self, source, event)
"""
def select_path(self):
"""
Select path and initiate file format for the data
"""
fname = get_open_filename(self, "Open file", "/home")[0]
fname = str(fname)
basename, extension = os.path.splitext(fname)
if extension == ".h5":
self.file_widget.setText(fname)
else:
if fname != "":
index1 = fname.rfind(".")
index2 = fname.rfind("_")
digits = index1 - index2 - 1
if digits == 1:
format_str = "%d"
else:
format_str = "%" + "0%dd" % digits
modified = fname.replace(fname[index2 + 1 : index1], format_str)
self.file_widget.setText(modified)
def save_file_tiff(self):
self.save_file(save_txt=False, save_tif=True)
def save_file_txt(self):
self.save_file(save_txt=True, save_tif=False)
def save_file(self, save_txt=True, save_tif=True):
"""
Select the path where the results will be saved
"""
global a, gx, gy, phi, rx, ry
default_path = str(self.save_path_widget.text())
path = get_save_filename(self, "Select path", default_path)[0]
path = str(path)
self.save_path_widget.setText(path)
if path != "":
if save_txt:
a_path = path + "_a.txt"
# print(a_path)
np.savetxt(a_path, a)
gx_path = path + "_gx.txt"
np.savetxt(gx_path, gx)
gy_path = path + "_gy.txt"
np.savetxt(gy_path, gy)
rx_path = path + "_rx.txt"
np.savetxt(rx_path, rx)
ry_path = path + "_ry.txt"
np.savetxt(ry_path, ry)
if phi is not None:
phi_path = path + "_phi.txt"
np.savetxt(phi_path, phi)
if havetiff and save_tif:
if phi is not None:
imgs = np.stack((a, gx, gy, rx, ry, phi))
imsave(path + ".tif", imgs.astype(np.float32))
else:
imgs = np.stack((a, gx, gy, rx, ry))
imsave(path + ".tif", imgs.astype(np.float32))
def save_params_to_file(self):
self.save_settings()
path = get_save_filename(self, "Select path", "", ".txt")[0]
path = str(path)
if path != "":
print("Saving parameters to {0}".format(path))
settings = self.dpc_settings
# Save parameters to the parameter text file
param_file = open(path, "w")
param_file.write("step_size_dx_um = {0}\n".format(settings["dx"]))
param_file.write("step_size_dy_um = {0}\n".format(settings["dy"]))
param_file.write("cols_x = {0}\n".format(settings["cols"]))
param_file.write("rows_y = {0}\n".format(settings["rows"]))
param_file.write("pixel_size_um = {0}\n".format(settings["pixel_size"]))
param_file.write("detector_sample_distance = {0}\n".format(settings["focus_to_det"]))
param_file.write("energy_keV = {0}\n".format(settings["energy"]))
param_file.write("roi_x1 = {0}\n".format(settings["x1"]))
param_file.write("roi_x2 = {0}\n".format(settings["x2"]))
param_file.write("roi_y1 = {0}\n".format(settings["y1"]))
param_file.write("roi_y2 = {0}\n".format(settings["y2"]))
param_file.write("mosaic_column_number_x = {0}\n".format(settings["mosaic_x"]))
param_file.write("mosaic_column_number_y = {0}\n".format(settings["mosaic_y"]))
param_file.write("solver = {0}\n".format(settings["solver"]))
param_file.write("random = {0}\n".format(settings["random"]))
param_file.write("pyramid = {0}\n".format(settings["pyramid"]))
param_file.write("hang = {0}\n".format(settings["hang"]))
param_file.write("swap = {0}\n".format(settings["swap"]))
param_file.write("reverse_x = {0}\n".format(settings["reverse_x"]))
param_file.write("reverse_y = {0}\n".format(settings["reverse_y"]))
param_file.write("pad = {0}\n".format(1 if settings["pad"] else 0))
param_file.close()
def load_params_from_file(self):
path = get_open_filename(self, "Select path", "", "Parameter files (*.txt);;")[0]
path = str(path)
if path != "":
print("Loading parameters from {0}".format(path))
settings = self.settings
# Save parameters to the parameter text file
param_file = open(path, "r")
for line in param_file:
if line.startswith("#"):
continue
elif "step_size_dx_um" in line.lower():
slist = line.strip().split("=")
settings.setValue("dx", float(slist[1]))
elif "step_size_dy_um" in line.lower():
slist = line.strip().split("=")
settings.setValue("dy", float(slist[1]))
elif "cols_x" in line.lower():
slist = line.strip().split("=")
settings.setValue("cols", int(slist[1]))
elif "rows_y" in line.lower():
slist = line.strip().split("=")
settings.setValue("rows", int(slist[1]))
elif "pixel_size_um" in line.lower():
slist = line.strip().split("=")
settings.setValue("pixel_size", float(slist[1]))
elif "detector_sample_distance" in line.lower():
slist = line.strip().split("=")
settings.setValue("focus_to_det", float(slist[1]))
elif "energy_keV" in line.lower():
slist = line.strip().split("=")
settings.setValue("energy", float(slist[1]))
elif "roi_x1" in line.lower():
slist = line.strip().split("=")
settings.setValue("x1", int(slist[1]))
elif "roi_x2" in line.lower():
slist = line.strip().split("=")
settings.setValue("x2", int(slist[1]))
elif "roi_y1" in line.lower():
slist = line.strip().split("=")
settings.setValue("y1", int(slist[1]))
elif "roi_y2" in line.lower():
slist = line.strip().split("=")
settings.setValue("y2", int(slist[1]))
elif "mosaic_column_number_x" in line.lower():
slist = line.strip().split("=")
settings.setValue("mosaic_x", int(slist[1]))
elif "mosaic_column_number_y" in line.lower():
slist = line.strip().split("=")
settings.setValue("mosaic_y", int(slist[1]))
elif "solver" in line.lower():
slist = line.strip().split("=")
settings.setValue("solver", "{0}".format(slist[1].strip()))
elif "random" in line.lower():
slist = line.strip().split("=")
settings.setValue("random", int(slist[1]))
elif "pyramid" in line.lower():
slist = line.strip().split("=")
settings.setValue("pyramid", int(slist[1]))
elif "hang" in line.lower():
slist = line.strip().split("=")
settings.setValue("hang", int(slist[1]))
elif "swap" in line.lower():
slist = line.strip().split("=")
settings.setValue("swap", int(slist[1]))
elif "reverse_x" in line.lower():
slist = line.strip().split("=")
settings.setValue("reverse_x", int(slist[1]))
elif "reverse_y" in line.lower():
slist = line.strip().split("=")
settings.setValue("reverse_y", int(slist[1]))
elif "pad" in line.lower():
slist = line.strip().split("=")
settings.setValue("pad", int(slist[1]))
param_file.close()
loaded = {}
for key, (getter, setter) in self._settings.items():
value = settings.value(key)
try:
value = value.toPyObject()
except AttributeError:
pass
if value is not None:
try:
setter(value)
except Exception as ex:
print("Unable to set value for %s=%s (%s) %s" "" % (key, value, ex.__class__.__name__, ex))
else:
loaded[key] = value
try:
self.setGeometry(loaded["geometry"])
except Exception:
pass
try:
self.ref_widget.setGeometry(loaded["ref_geo"])
except Exception:
pass
try:
self.img_type_combobox.setCurrentIndex(loaded["image_type"])
except Exception:
pass
try:
self.ref_image_path_QLineEdit.setText(loaded["ref_image"])
except Exception:
pass
try:
self.first_ref_cbox.setChecked(loaded["first_as_ref"])
except Exception:
pass
try:
self.use_mds.setChecked(loaded["use_mds"])
except Exception:
pass
def launch_batch_gui(self):
dir_name, _ = os.path.split(__file__)
fln = os.path.join(dir_name, "dpc_batch_gui.py")
subprocess.Popen(["python", fln])
def swap_x_y(self):
global a, gx, gy, phi, rx, ry
gx, gy = gy, gx
phi = dpc.recon(gx, gy, self.dx_widget.value(), self.dy_widget.value())
self.update_display(a, gx, gy, phi, rx, ry)
def reverse_gx(self):
global a, gx, gy, phi, rx, ry
gx = -gx
phi = dpc.recon(gx, gy, self.dx_widget.value(), self.dy_widget.value())
self.update_display(a, gx, gy, phi, rx, ry)
def reverse_gy(self):
global a, gx, gy, phi, rx, ry
gy = -gy
phi = dpc.recon(gx, gy, self.dx_widget.value(), self.dy_widget.value())
self.update_display(a, gx, gy, phi, rx, ry)
def padding_recon(self):
global a, gx, gy, phi, rx, ry
if self.pad_recon.isChecked():
phi = dpc.recon(gx, gy, self.dx_widget.value(), self.dy_widget.value(), 3)
print("Padding mode enabled!")
else:
phi = dpc.recon(gx, gy, self.dx_widget.value(), self.dy_widget.value())
print("Padding mode disabled!")
self.update_display(a, gx, gy, phi, rx, ry)
def select_ref_img(self):
"""
Select the reference image and record its location and name
"""
fname = get_open_filename(self, "Open file", "/home")[0]
fname = str(fname)
if fname != "":
self.ref_image_path_QLineEdit.setText(fname)
    def hide_ref(self, pressed):
        """
        Hide/Show the reference image related widgets
        """
        if pressed:
            self.load_img_method()
            not_ref = self.first_ref_cbox.checkState() == Qt.Unchecked
            # Decide where the reference image comes from: an explicit path,
            # or (when "first image as reference" is checked and not in MDS
            # mode) the first data frame from the file pattern.
            if not_ref or self.use_mds:
                ref_path = str(self.ref_image_path_QLineEdit.text())
            else:
                if self.file_widget.text()[-3:] == ".h5":
                    ref_path = str(self.file_widget.text())
                else:
                    # Fill the printf-style pattern with the first frame no.
                    ref_path = str(self.file_widget.text()) % self.first_widget.value()
            try:
                self.roi_img = self.load_image(ref_path)
                self.roi_img_masked = self.roi_img.copy()
                self.calHist()
                ref_im = self.ax.imshow(
                    self.roi_img_masked, interpolation="nearest", origin="upper", cmap=cm.Greys_r
                )
                ref_im.set_cmap(self._ref_color_map)
                self.ref_canvas.ref_im = ref_im
                self.ref_widget.show()
                self.ref_canvas.draw()
            except Exception as ex:
                # Loading can fail (bad path, unsupported format); report
                # and pop the toggle button back out.
                logger.error("Reference image read failed", exc_info=ex)
                msg = "Could not read the reference image! \r (%s) %s" "" % (ex.__class__.__name__, ex)
                QMessageBox.information(self, "Read error", msg, QMessageBox.Ok)
                self.hide_btn.setChecked(False)
        else:
            self.ref_widget.hide()
def first_equal_ref(self, state):
"""
First image ?= reference image
"""
if state == QtCore.Qt.Checked:
palette = QPalette()
palette.setColor(QPalette.Base, QtCore.Qt.lightGray)
self.ref_image_path_QLineEdit.setPalette(palette)
self.select_ref_btn.setEnabled(False)
self.ref_image_path_QLineEdit.setEnabled(False)
else:
self.select_ref_btn.setEnabled(True)
self.ref_image_path_QLineEdit.setEnabled(True)
palette = QPalette()
palette.setColor(QPalette.Base, QtCore.Qt.white)
self.ref_image_path_QLineEdit.setPalette(palette)
def load_img_method(self):
method = str(self.img_type_combobox.currentText())
if method == "Timepix TIFF":
self.load_image = load_timepix.load
elif method == "TIFF":
self.load_image = load_image_pil
elif method == "ASCII":
self.load_image = load_image_ascii
elif method == "HDF5":
self.load_image = load_image_hdf5
elif method == "FileStore":
self.load_image = dpc.load_image_filestore
def _set_color_map(self, index):
"""
User changed color map callback.
"""
cm_ = str(self.color_map.itemText(index))
print("Color map set to: %s" % cm_)
self._color_map = mpl.cm.get_cmap(cm_)
for im in ["imphi", "imx", "imy", "ima"]:
try:
im = getattr(self.canvas, im)
im.set_cmap(self._color_map)
except Exception as ex:
print("failed to set color map: (%s) %s" "" % (ex.__class__.__name__, ex))
self.canvas.draw()
def _set_ref_color_map(self, index):
"""
User changed color map callback.
"""
cm_ = str(self.ref_color_map.itemText(index))
self._ref_color_map = mpl.cm.get_cmap(cm_)
try:
for im in [
self.ref_canvas.ref_im,
]:
im.set_cmap(self._ref_color_map)
except Exception as ex:
print("failed to set color map: (%s) %s" % (ex.__class__.__name__, ex))
finally:
self.ref_canvas.draw()
def create_cmap_previews(self):
    """
    Create the color map previews for the combobox.

    Returns
    -------
    list of (cmap_name, preview_png_path) pairs. A list (not a one-shot
    zip iterator as before), so callers may safely iterate it more than
    once. Previews that cannot be generated are simply missing on disk;
    callers check ``os.path.exists`` on the returned paths.
    """
    cm_names = sorted(_cm for _cm in mpl.cm.datad.keys() if not _cm.endswith("_r"))
    cm_filenames = [os.path.join(CMAP_PREVIEW_PATH, "%s.png" % cm_name) for cm_name in cm_names]
    ret = list(zip(cm_names, cm_filenames))
    # 10x100 horizontal gradient rendered through each colormap
    points = np.outer(np.ones(10), np.arange(0, 1, 0.01))
    if not os.path.exists(CMAP_PREVIEW_PATH):
        try:
            os.mkdir(CMAP_PREVIEW_PATH)
        except Exception as ex:
            print("Unable to create preview path: %s" % ex)
            return ret
    for cm_name, fn in ret:
        if not os.path.exists(fn):
            print("Generating colormap preview: %s" % fn)
            canvas = MplCanvas(width=2, height=0.25, dpi=50)
            fig = canvas.figure
            fig.clear()
            ax = fig.add_subplot(1, 1, 1)
            ax.axis("off")
            fig.subplots_adjust(top=1, left=0, right=1, bottom=0)
            _cm = mpl.cm.get_cmap(cm_name)
            ax.imshow(points, aspect="auto", cmap=_cm, origin="upper")
            try:
                fig.savefig(fn)
            except Exception:
                # one failure likely means the directory is unwritable;
                # stop trying (matches original behavior)
                print('Unable to create color map preview "%s"' % fn, file=sys.stderr)
                break
    return ret
def update_color_maps(self):
    """Populate the main colormap combobox with preview icons."""
    size = None
    for i, (cm_name, fn) in enumerate(self.create_cmap_previews()):
        if os.path.exists(fn):
            self.color_map.addItem(QIcon(fn), cm_name)
            if size is None:
                # use the first available preview's size for all icons
                size = QPixmap(fn).size()
                self.color_map.setIconSize(size)
        else:
            self.color_map.addItem(cm_name)
        if cm_name == self.CM_DEFAULT:
            self.color_map.setCurrentIndex(i)
def update_ref_color_maps(self):
    """Populate the reference-image colormap combobox with preview icons."""
    icon_size = None
    for idx, (cm_name, preview_fn) in enumerate(self.create_cmap_previews()):
        if not os.path.exists(preview_fn):
            self.ref_color_map.addItem(cm_name)
        else:
            self.ref_color_map.addItem(QIcon(preview_fn), cm_name)
            if icon_size is None:
                icon_size = QPixmap(preview_fn).size()
                self.ref_color_map.setIconSize(icon_size)
        if cm_name == self.CM_DEFAULT:
            self.ref_color_map.setCurrentIndex(idx)
@property
def settings(self):
    """QSettings store used to persist GUI state between sessions."""
    return QtCore.QSettings("dpcmaps", "DPC-GUI")
def save_settings(self):
    """
    Persist widget state to QSettings.

    Saves every getter registered in ``self._settings`` plus a few
    widgets tracked individually below.
    """
    settings = self.settings
    for key, (getter, setter) in self._settings.items():
        settings.setValue(key, getter())
    settings.setValue("geometry", self.geometry())
    settings.setValue("ref_geo", self.ref_widget.geometry())
    settings.setValue("image_type", self.img_type_combobox.currentIndex())
    settings.setValue("ref_image", self.ref_image_path_QLineEdit.text())
    settings.setValue("first_as_ref", self.first_ref_cbox.isChecked())
def load_settings(self):
    """
    Restore widget state from QSettings.

    Stored values are applied through the setters registered in
    ``self._settings``; values applied successfully are also collected in
    ``loaded`` for the extra best-effort restores below.
    """
    settings = self.settings
    loaded = {}
    for key, (getter, setter) in self._settings.items():
        value = settings.value(key)
        try:
            # PyQt4 QVariant compatibility; PyQt5 returns plain values
            value = value.toPyObject()
        except AttributeError:
            pass
        if value is not None:
            try:
                setter(value)
            except Exception as ex:
                print("Unable to set value for %s=%s (%s) %s" "" % (key, value, ex.__class__.__name__, ex))
            else:
                loaded[key] = value
    # Each restore below is best-effort: a missing or invalid stored value
    # is silently skipped.
    try:
        self.setGeometry(loaded["geometry"])
    except Exception:
        pass
    try:
        self.ref_widget.setGeometry(loaded["ref_geo"])
    except Exception:
        pass
    try:
        self.img_type_combobox.setCurrentIndex(loaded["image_type"])
    except Exception:
        pass
    try:
        self.ref_image_path_QLineEdit.setText(loaded["ref_image"])
    except Exception:
        pass
    try:
        self.first_ref_cbox.setChecked(loaded["first_as_ref"])
    except Exception:
        pass
    try:
        # NOTE(review): "use_mds" is restored here but save_settings() never
        # stores it -- confirm whether it should be persisted.
        self.use_mds.setChecked(loaded["use_mds"])
    except Exception:
        pass
def closeEvent(self, event=None):
    """Qt close handler: persist settings, then exit the process."""
    self.save_settings()
    # NOTE(review): sys.exit() skips normal Qt teardown -- confirm this is
    # intentional (e.g. to kill lingering worker threads/pools).
    sys.exit()
@property
def dx(self):
    """Scan step size in x, parsed from the text widget."""
    return float(self.dx_widget.text())

@dx.setter
def dx(self, dx):
    self.dx_widget.setValue(float(dx))

@property
def dy(self):
    """Scan step size in y, parsed from the text widget."""
    return float(self.dy_widget.text())

@dy.setter
def dy(self, dy):
    self.dy_widget.setValue(float(dy))
@property
def processes(self):
    # number of worker processes; 0 disables multiprocessing (see start())
    return int(self.processes_widget.text())

@property
def file_format(self):
    # printf-style pattern for frame filenames (or an .h5 path)
    return str(self.file_widget.text())

@property
def save_path(self):
    return str(self.save_path_widget.text())

@property
def pixel_size(self):
    # detector pixel size (micron, per the dpc kernel docstring)
    return self.pixel_widget.value()

@property
def focus(self):
    # focus-to-detector distance
    return self.focus_widget.value()

@property
def energy(self):
    # beam energy in keV
    return self.energy_widget.value()
@property
def rows(self):
    return self.rows_widget.value()

@rows.setter
def rows(self, rows):
    self.rows_widget.setValue(rows)

@property
def cols(self):
    return self.cols_widget.value()

@cols.setter
def cols(self, cols):
    self.cols_widget.setValue(cols)

@property
def mosaic_x(self):
    return self.mosaic_x_widget.value()

@property
def mosaic_y(self):
    return self.mosaic_y_widget.value()

@property
def monitoring(self):
    return self.monitor_scans.isChecked()

@property
def random(self):
    # 1 -> shuffle frame processing order (see dpc kernel main()), -1 -> sequential
    if self.random_processing_opt.isChecked():
        return 1
    else:
        return -1
@property
def pad(self):
    # True -> enable padding in the phase reconstruction (recon())
    if self.pad_recon.isChecked():
        return True
    else:
        return False

@property
def pyramid(self):
    # 1/-1 flag: alternate rows are reversed during processing when 1
    if self.pyramid_scan.isChecked():
        return 1
    else:
        return -1

@property
def swap(self):
    # 1/-1 flag: swap the computed x and y gradients
    if self.swap_xy.isChecked():
        return 1
    else:
        return -1

@property
def re_x(self):
    # -1 reverses the sign of the x gradient
    if self.reverse_x.isChecked():
        return -1
    else:
        return 1

@property
def re_y(self):
    # -1 reverses the sign of the y gradient
    if self.reverse_y.isChecked():
        return -1
    else:
        return 1

@property
def hang(self):
    # 1 -> wait for image files to appear on disk (see dpc kernel load_file)
    if self.hanging_opt.isChecked():
        return 1
    else:
        return -1

@property
def first_image(self):
    # index of the first frame, used to build filenames
    return self.first_widget.value()
@property
def ref_image(self):
    """Path of the reference image (explicit, or derived from the file pattern)."""
    # NOTE(review): `or self.use_mds` -- elsewhere use_mds is used with
    # .setChecked(), so if it is a widget this condition is always truthy
    # and the explicit path is always returned. Confirm whether
    # `self.use_mds.isChecked()` was intended.
    if self.first_ref_cbox.checkState() == Qt.Unchecked or self.use_mds:
        return str(self.ref_image_path_QLineEdit.text())
    else:
        if self.file_widget.text()[-3:] == ".h5":
            return str(self.file_widget.text())
        else:
            return str(self.file_widget.text()) % self.first_widget.value()
# ROI corner coordinates (inclusive), in detector pixels
@property
def roi_x1(self):
    return self.roi_x1_widget.value()

@property
def roi_x2(self):
    return self.roi_x2_widget.value()

@property
def roi_y1(self):
    return self.roi_y1_widget.value()

@property
def roi_y2(self):
    return self.roi_y2_widget.value()
@property
def bad_pixels(self):
    """List of [x, y] bad-pixel coordinates parsed from the list widget."""
    widget = self.bad_pixels_widget

    def parse_item(item):
        return [int(part) for part in str(item.text()).split(",")]

    return [parse_item(widget.item(idx)) for idx in range(widget.count())]
def _bad_pixels_menu(self, pos):
    """
    Context menu for the bad-pixel list: add, remove or clear entries,
    refreshing the masked reference image after each change.
    """

    def add():
        # prompt for an "x, y" pair, append it, and re-mask the image
        msg = "Position in the format: x, y"
        s, ok = QInputDialog.getText(self, "Position?", msg)
        if ok:
            s = str(s)
            x, y = s.split(",")
            x = int(x)
            y = int(y)
            self.bad_pixels_widget.addItem("%d, %d" % (x, y))
            for i in range(len(self.bad_pixels)):
                self.roi_img_masked[self.bad_pixels[i][1], self.bad_pixels[i][0]] = 0
            self.ax.imshow(
                self.roi_img_masked, interpolation="nearest", origin="upper", cmap=self._ref_color_map
            )
            self.ref_canvas.draw()

    def remove():
        # delete selected rows (bottom-up), then rebuild the mask
        rows = [index.row() for index in self.bad_pixels_widget.selectedIndexes()]
        for row in reversed(sorted(rows)):
            self.bad_pixels_widget.takeItem(row)
        self.roi_img_masked = self.roi_img.copy()
        for i in range(len(self.bad_pixels)):
            self.roi_img_masked[self.bad_pixels[i][1], self.bad_pixels[i][0]] = 0
        self.ax.imshow(self.roi_img_masked, interpolation="nearest", origin="upper", cmap=self._ref_color_map)
        self.ref_canvas.draw()

    def clear():
        # drop all bad pixels and restore the unmasked image
        self.bad_pixels_widget.clear()
        self.roi_img_masked = self.roi_img.copy()
        self.ax.imshow(self.roi_img_masked, interpolation="nearest", origin="upper", cmap=self._ref_color_map)
        self.ref_canvas.draw()

    self.menu = menu = QMenu()
    menu.addAction("&Add", add)
    menu.addAction("&Remove", remove)
    menu.addAction("&Clear", clear)
    menu.popup(self.bad_pixels_widget.mapToGlobal(pos))
def load_from_spec_scan(self):
    """
    Pick a SPEC file and a mesh scan from it, then fill the scan-geometry
    widgets (first frame, rows/cols, dx/dy) from the scan command.
    """
    filename = get_open_filename(self, "Scan filename", self.last_path, "*.spec")[0]
    if not filename:
        return
    self.last_path = filename
    print("Loading %s" % filename)
    with pyspecfile.SPECFileReader(filename, parse_data=False) as f:
        scans = dict((int(scan["number"]), scan) for scan in f.scans)
        # offer only mesh scans for selection
        scan_info = [
            "%04d - %s" % (number, scan["command"])
            for number, scan in scans.items()
            if "mesh" in scan["command"]
        ]
        scan_info.sort()
        print("\n".join(scan_info))
        s, ok = QInputDialog.getItem(self, "Scan selection", "Scan number?", scan_info, 0, False)
        if ok:
            print("Selected scan", s)
            number = int(s.split(" ")[0])
            sd = scans[number]
            f.parse_data(sd)
            timepix_index = sd["columns"].index("tpx_image")
            line0 = sd["lines"][0]
            timepix_first_image = int(line0[timepix_index])
            try:
                ion1_index = sd["columns"].index("Ion1")
                self.ion_data = np.array([line[ion1_index] for line in sd["lines"]])
            except Exception as ex:
                print("Failed loading Ion1 data (%s) %s" "" % (ex, ex.__class__.__name__))
                self.ion_data = None
            print("First timepix image:", timepix_first_image)
            # presumably the widget is 0-based while SPEC numbering is
            # 1-based -- hence the -1; verify against the scan files
            self.first_widget.setValue(timepix_first_image - 1)
            # NOTE(review): this replace() looks like a no-op (space -> space);
            # possibly intended to collapse double spaces -- verify.
            command = sd["command"].replace(" ", " ")
            x = [2, 3, 4]  # x start, end, points
            y = [6, 7, 8]  # y start, end, points
            info = command.split(" ")
            x_info = [float(info[i]) for i in x]
            y_info = [float(info[i]) for i in y]
            dx = (x_info[1] - x_info[0]) / (x_info[2] - 1)
            dy = (y_info[1] - y_info[0]) / (y_info[2] - 1)
            self.rows_widget.setValue(int(y_info[-1]))
            self.cols_widget.setValue(int(x_info[-1]))
            self.dx_widget.setValue(float(dx))
            self.dy_widget.setValue(float(dy))
@property
def solver(self):
    """Currently selected optimizer name (an entry of SOLVERS)."""
    return SOLVERS[self.solver_widget.currentIndex()]

@solver.setter
def solver(self, solver):
    self.solver_widget.setCurrentIndex(SOLVERS.index(solver))
def set_bad_pixels(self, pixels):
    """Replace the contents of the bad-pixel list widget with ``pixels``."""
    widget = self.bad_pixels_widget
    widget.clear()
    for x, y in pixels:
        widget.addItem("%d, %d" % (x, y))
@property
def dpc_settings(self):
    """Dict of kernel settings from ``self._settings``, minus GUI-only keys."""
    excluded = ("last_path", "scan_number", "filestore_key", "processes")
    return {key: getter() for key, (getter, setter) in self._settings.items() if key not in excluded}
def start(self):
    """
    Launch the DPC computation in a background DPCThread.

    Loads the scan from metadatastore if needed, disables the option
    widgets for the duration of the run, and hands dpc_settings to the
    worker thread.
    """
    self.load_img_method()
    self.save_settings()
    # NOTE(review): `if self.use_mds` -- elsewhere use_mds is used with
    # .setChecked(); if it is a widget this is always truthy. Confirm
    # whether .isChecked() was intended.
    if self.use_mds and self.scan is None:
        if self.scan_number is not None:
            self.load_scan_from_mds(load_config=False)
        if self.scan is None:
            not_loaded = "Scan not loaded from metadatastore"
            QMessageBox.information(self, "Load scan", not_loaded, QMessageBox.Ok)
            return
    # lock the processing options while the computation runs
    self.reverse_x.setEnabled(False)
    self.reverse_y.setEnabled(False)
    self.swap_xy.setEnabled(False)
    self.hanging_opt.setEnabled(False)
    self.random_processing_opt.setEnabled(False)
    self.pyramid_scan.setEnabled(False)
    self.pad_recon.setEnabled(False)
    self.save_result_tiff.setEnabled(False)
    self.save_result_txt.setEnabled(False)
    self.canvas_widget.show()
    self.line_btn.setEnabled(False)
    self.direction_btn.setEnabled(False)
    self.removal_btn.setEnabled(False)
    self.confirm_btn.setEnabled(False)
    # drop a finished worker so a fresh one can be created
    if self._thread is not None and self._thread.isFinished():
        self._thread = None
    if self._thread is None:
        if self.processes == 0:
            pool = None
        else:
            pool = mp.Pool(processes=self.processes)
        thread = self._thread = DPCThread(self.canvas, pool=pool)
        thread.update_signal.connect(self.update_display)
        thread.dpc_settings = self.dpc_settings
        if self.use_mds:
            thread.dpc_settings["scan"] = self.scan
        if self.load_image == load_image_hdf5:
            thread.dpc_settings["use_hdf5"] = True
        else:
            thread.dpc_settings["use_hdf5"] = False
        thread.start()
    self.set_running(True)
def set_running(self, running):
    """Flip start/stop button enablement and record the running state."""
    idle = not running
    self.start_widget.setEnabled(idle)
    self.stop_widget.setEnabled(not idle)
    self.running = running
def stop(self):
    """Terminate the worker thread and its process pool, if running."""
    if self._thread is not None:
        pool = self._thread.pool
        if pool is not None:
            pool.terminate()
            self._thread.pool = None
        # brief grace period before forcibly terminating the Qt thread
        time.sleep(0.2)
        self._thread.terminate()
        self._thread = None
    self.set_running(False)
def save(self):
    """
    Save the computed result arrays under a user-chosen filename prefix.

    Each array is written both as "<prefix>_<name>.tif" and
    "<prefix>_<name>.txt".
    """
    filename = get_save_filename(self, "Save filename prefix", "", "")[0]
    if not filename:
        return
    arrays = [
        ("gx", self.gx),
        ("gy", self.gy),
        ("phi", self.phi),
        ("a", self.a),
        ("rx", self.rx),
        ("ry", self.ry),
    ]
    for name, arr in arrays:
        im = PIL.Image.fromarray(arr)
        im.save("%s_%s.tif" % (filename, name))
        # NOTE(review): savetxt receives the PIL image (numpy coerces it);
        # passing `arr` directly would be clearer -- confirm equivalence.
        np.savetxt("%s_%s.txt" % (filename, name), im)
@QtCore.pyqtSlot(str)
def on_myStream_message(self, message):
    """Append redirected stdout text to the console widget."""
    self.console_info.moveCursor(QTextCursor.End)
    self.console_info.insertPlainText(message)
uid_pv = "XF:03IDC-ES{BS-Scan}UID-I"
def run_dpc_gui():
    """
    Entry point: create the Qt application and main window, redirect
    stdout into the GUI console, and start the event loop.

    An optional first CLI argument overrides the scan-UID PV name.
    """
    global uid_pv
    try:
        uid_pv = sys.argv[1]
    except IndexError:
        # Use default 'uid_pv'
        pass
    logging.basicConfig(level=logging.INFO)
    app = QApplication(sys.argv)
    # app.setAttribute(Qt.AA_X11InitThreads)
    window = DPCWindow()
    window.show()
    app.installEventFilter(window)
    # route print() output into the GUI console widget
    myStream = MyStream()
    myStream.message.connect(window.on_myStream_message)
    sys.stdout = myStream
    sys.exit(app.exec_())
if __name__ == "__main__":
run_dpc_gui()
|
dmgav/dpcmaps | dpcmaps/scan.py | import collections
import numpy as np
from databroker import DataBroker as db
import logging
logger = logging.getLogger(__name__)
def _eval_scan_args(scan_args):
    """Evaluate scan arguments, replacing OphydObjects with NamedObjects"""

    class NamedObject:
        # minimal stand-in exposing only the .name attribute
        def __init__(self, name):
            self.name = name

    def no_op():
        # defaultdict factory: any unknown global in the expression
        # evaluates to no_op_inner, which turns name= keyword calls into
        # NamedObjects (and returns None otherwise)
        def no_op_inner(*args, name=None, **kwargs):
            if name is not None:
                return NamedObject(name)

        return no_op_inner

    # SECURITY NOTE: eval() of stored metadata -- only safe for trusted
    # databroker documents.
    return eval(scan_args, collections.defaultdict(no_op))
# scan_type values handled as 1-D step scans in get_scan_info()
step_1d = (
    "InnerProductAbsScan",
    "HxnInnerAbsScan",
    "InnerProductDeltaScan",
    "HxnInnerDeltaScan",
    "AbsScan",
    "HxnAbsScan",
    "DeltaScan",
    "HxnDeltaScan",
)
# scan_type values handled as 2-D mesh (outer product) step scans
step_2d = ("OuterProductAbsScan", "HxnOuterAbsScan")
# scan_type values handled as fly scans
fly_scans = ("FlyPlan1D", "FlyPlan2D")
def get_scan_info(header):
    """
    Extract scan geometry from a databroker header.

    Parameters
    ----------
    header : dict-like
        databroker header; only the "start" document is inspected

    Returns
    -------
    dict with keys:
        num        -- total number of points (product of dimensions)
        dimensions -- points per axis
        motors     -- motor names, or None/[] when unknown
        range      -- (start, stop) pairs per axis, or None
        scan_args  -- raw (or evaluated) scan arguments
        pyramid    -- True for pyramid fly scans

    Raises
    ------
    RuntimeError
        if the scan type is not recognized.
    """
    start_doc = header["start"]
    scan_type = start_doc["scan_type"]
    motors = None
    range_ = None
    # .get(): some start documents carry no scan_args. (The original also
    # did start_doc["scan_args"] unconditionally first, which would have
    # raised KeyError before this fallback could apply.)
    scan_args = start_doc.get("scan_args", {})
    pyramid = False
    if scan_type in fly_scans:
        logger.debug("Scan %s (%s) is a fly scan (%s)", start_doc["scan_id"], start_doc["uid"], scan_type)
        dimensions = start_doc["dimensions"]
        motors = start_doc["axes"]
        pyramid = start_doc["fly_type"] == "pyramid"
        try:
            range_ = start_doc["scan_range"]
        except KeyError:
            try:
                range_ = [(float(scan_args["scan_start"]), float(scan_args["scan_end"]))]
            except (KeyError, ValueError):
                pass
    elif scan_type in step_2d:
        logger.debug("Scan %s (%s) is an ND scan (%s)", start_doc["scan_id"], start_doc["uid"], scan_type)
        # 2D mesh scan: arguments come in groups of five
        # (motor, start, stop, num, ...)
        scan_args = _eval_scan_args(scan_args["args"])
        motors = [arg.name for arg in scan_args[::5]]
        dimensions = scan_args[3::5]
        range0 = scan_args[1::5]
        range1 = scan_args[2::5]
        range_ = list(zip(range0, range1))
    elif scan_type in step_1d or "num" in start_doc:
        logger.debug("Scan %s (%s) is a 1D scan (%s)", start_doc["scan_id"], start_doc["uid"], scan_type)
        # 1D scans
        dimensions = [int(scan_args["num"])]
        try:
            motors = [_eval_scan_args(scan_args["motor"]).name]
        except Exception:
            motors = []
    else:
        msg = "Unrecognized scan type (uid={} {})".format(start_doc["uid"], scan_type)
        raise RuntimeError(msg)
    # np.prod: np.product is a deprecated alias removed in numpy 2.0
    num = np.prod(dimensions)
    return {
        "num": num,
        "dimensions": dimensions,
        "motors": motors,
        "range": range_,
        "scan_args": scan_args,
        "pyramid": pyramid,
    }
class Scan(object):
    """
    Thin wrapper around a databroker header exposing scan geometry.

    Everything returned by get_scan_info() (num, dimensions, motors,
    range, scan_args, pyramid) is set directly as instance attributes.
    """

    def __init__(self, header):
        self.header = header
        self.start_doc = header["start"]
        self.descriptors = header["descriptors"]
        # data key yielded by __iter__; must be assigned by the caller
        self.key = None
        for key, value in get_scan_info(self.header).items():
            logger.debug("Scan info %s=%s", key, value)
            setattr(self, key, value)

    @property
    def filestore_keys(self):
        """Yield data keys whose events are stored externally in filestore."""
        for desc in self.descriptors:
            for key, info in desc["data_keys"].items():
                try:
                    external = info["external"]
                except KeyError:
                    continue
                try:
                    # "external" looks like "SOURCE:rest"
                    source, info = external.split(":", 1)
                except Exception:
                    pass
                else:
                    source = source.lower()
                    if source in ("filestore",):
                        yield key

    @property
    def scan_id(self):
        return self.start_doc["scan_id"]

    def __repr__(self):
        return "{}(scan_id={})".format(self.__class__.__name__, self.scan_id)

    def __iter__(self):
        # yields the unfilled event data for self.key (datum references)
        if self.key:
            for event in db.fetch_events(self.header, fill=False):
                yield event["data"][self.key]
|
dmgav/dpcmaps | dpcmaps/load_timepix.py | <gh_stars>0
import numpy as np
import matplotlib.pyplot as plt
# a = lt.load_tiff(36025, 256, 256, 512, 512, 2)
def load(filename, nx_prb=256, ny_prb=256, x_raw=512, y_raw=512, threshold=0):
    """
    Load a raw Timepix frame and redistribute counts across the chip gaps.

    The file layout is an 8-byte (two int32) header followed by
    ``x_raw * y_raw`` int16 pixel values. The two center rows/columns are
    spread over the 4-pixel inter-chip gap, preserving total counts
    (edges split over 3 cells, the center cross over 9).

    Parameters
    ----------
    filename : str
        path to the raw frame
    nx_prb, ny_prb : int
        kept for interface compatibility; not used in the result
    x_raw, y_raw : int
        raw frame dimensions
    threshold : int
        counts below this value are zeroed

    Returns
    -------
    numpy array of shape (x_raw + 4, y_raw + 4)
    """
    # binary mode: the payload is raw int16 data, not text ("r" would break
    # on Python 3)
    with open(filename, "rb") as f:
        np.fromfile(f, dtype="int32", count=2)  # skip the 8-byte header
        tmp = np.fromfile(f, dtype="int16", count=x_raw * y_raw)
    tmp.resize(y_raw, x_raw)
    tmp[np.where(tmp < threshold)] = 0.0
    # integer halves: Python 3 "/" yields floats, which cannot be used as
    # slice indices (the original code raised TypeError here)
    hx = x_raw // 2
    hy = y_raw // 2
    t = np.zeros((x_raw + 4, y_raw + 4))
    # copy the four chip quadrants, leaving the center cross empty
    t[0 : hx - 1, 0 : hy - 1] = tmp[0 : hx - 1, 0 : hy - 1]
    t[hx + 5 : x_raw + 4, 0 : hy - 1] = tmp[hx + 1 : x_raw, 0 : hy - 1]
    t[0 : hx - 1, hy + 5 : y_raw + 4] = tmp[0 : hx - 1, hy + 1 : y_raw]
    t[hx + 5 : x_raw + 4, hy + 5 : y_raw + 4] = tmp[hx + 1 : x_raw, hy + 1 : y_raw]
    # spread the two center rows over three output rows each
    for i in range(y_raw):
        t[hx - 1 : hx + 2, i] = tmp[hx - 1, i] / 3.0
        t[hx + 2 : hx + 5, i] = tmp[hx, i] / 3.0
    # spread the two center columns over three output columns each
    for i in range(x_raw):
        t[i, hy - 1 : hy + 2] = tmp[i, hy - 1] / 3.0
        t[i, hy + 2 : hy + 5] = tmp[i, hy] / 3.0
    # the four pixels at the cross intersection spread over 3x3 blocks
    t[hx - 1 : hx + 2, hy - 1 : hy + 2] = tmp[hx - 1, hy - 1] / 9.0
    t[hx - 1 : hx + 2, hy + 2 : hy + 5] = tmp[hx - 1, hy] / 9.0
    t[hx + 2 : hx + 5, hy - 1 : hy + 2] = tmp[hx, hy - 1] / 9.0
    t[hx + 2 : hx + 5, hy + 2 : hy + 5] = tmp[hx, hy] / 9.0
    return t
def orig(file_name, nx_prb, ny_prb, x_raw=512, y_raw=512, threshold=0):
    """
    Original hard-coded (512x512 -> 516x516) gap-redistribution routine.

    Kept for comparison against load(). Uses synthetic arange() data --
    the file_name argument is never read. Crops a fixed 256x256 window,
    stores its square root in diff_array, displays it and returns it.
    """
    diff_array = np.zeros((nx_prb, ny_prb))
    # synthetic test pattern instead of file contents
    tmp = np.arange(x_raw * y_raw)
    tmp.resize(y_raw, x_raw)
    tmp = np.fliplr(np.transpose(tmp * 1.0))
    index = np.where(tmp < threshold)
    tmp[index] = 0.0
    t = np.zeros((516, 516))
    # copy the four chip quadrants, leaving the center cross empty
    t[:255, :255] = tmp[:255, :255].copy()
    t[:255, 516 - 255 :] = tmp[:255, 512 - 255 :].copy()
    t[516 - 255 :, :255] = tmp[512 - 255 :, :255].copy()
    t[516 - 255 :, 516 - 255 :] = tmp[512 - 255 :, 512 - 255 :].copy()
    # spread the two center columns over three output columns each
    t[:255, 255] = tmp[:255, 255] / 3.0
    t[:255, 256] = tmp[:255, 255] / 3.0
    t[:255, 257] = tmp[:255, 255] / 3.0
    t[:255, 258] = tmp[:255, 256] / 3.0
    t[:255, 259] = tmp[:255, 256] / 3.0
    t[:255, 260] = tmp[:255, 256] / 3.0
    t[516 - 255 :, 255] = tmp[512 - 255 :, 255] / 3.0
    t[516 - 255 :, 256] = tmp[512 - 255 :, 255] / 3.0
    t[516 - 255 :, 257] = tmp[512 - 255 :, 255] / 3.0
    t[516 - 255 :, 258] = tmp[512 - 255 :, 256] / 3.0
    t[516 - 255 :, 259] = tmp[512 - 255 :, 256] / 3.0
    t[516 - 255 :, 260] = tmp[512 - 255 :, 256] / 3.0
    # spread the center rows; NOTE(review): rows 258-260 reuse tmp[255]
    # here, whereas the column case above switches to tmp[..., 256] --
    # confirm whether that asymmetry is intended.
    t[255, :255] = tmp[255, :255] / 3.0
    t[256, :255] = tmp[255, :255] / 3.0
    t[257, :255] = tmp[255, :255] / 3.0
    t[258, :255] = tmp[255, :255] / 3.0
    t[259, :255] = tmp[255, :255] / 3.0
    t[260, :255] = tmp[255, :255] / 3.0
    t[255, 516 - 255 :] = tmp[255, 512 - 255 :] / 3.0
    t[256, 516 - 255 :] = tmp[255, 512 - 255 :] / 3.0
    t[257, 516 - 255 :] = tmp[255, 512 - 255 :] / 3.0
    t[258, 516 - 255 :] = tmp[255, 512 - 255 :] / 3.0
    t[259, 516 - 255 :] = tmp[255, 512 - 255 :] / 3.0
    t[260, 516 - 255 :] = tmp[255, 512 - 255 :] / 3.0
    # the four pixels at the cross intersection spread over 3x3 blocks
    for i in range(255, 258):
        for j in range(255, 258):
            t[i, j] = tmp[255, 255] / 9.0
    for i in range(258, 261):
        for j in range(255, 258):
            t[i, j] = tmp[256, 255] / 9.0
    for i in range(255, 258):
        for j in range(258, 261):
            t[i, j] = tmp[255, 256] / 9.0
    for i in range(258, 261):
        for j in range(258, 261):
            t[i, j] = tmp[256, 256] / 9.0
    # hard-coded bad region, crop window and display
    t[141:147, 110:117] = 0.0
    t2 = t[105 : 105 + 256, 395 - 256 : 395]
    diff_array[:, :] = np.sqrt(t2[:, :])
    plt.close("all")
    plt.figure()
    plt.imshow(np.log(diff_array[:, :] + 0.001))
    diff_array[106, 125] = 0.0
    return diff_array
# if 0:
# plt.figure(0)
# old = orig("", 256, 256)
# plt.figure(1)
# new = load_tiff("", 256, 256)
# print(sum(new - old))
# plt.show()
|
dmgav/dpcmaps | dpcmaps/hxn_db.py | from hxntools.handlers import register
import yaml
from metadatastore.mds import MDS
from databroker import Broker
from filestore.fs import FileStore
from hxntools.handlers.timepix import TimepixHDF5Handler
# from hxntools.handlers.xspress3 import Xspress3HDF5Handler
# Register the HXN-specific filestore handlers.
register()

# Read the beamline databroker configuration.
with open("/home/xf03id/.config/databroker/hxn.yml", "r") as read_file:
    # safe_load: the config is plain data; yaml.load() without an explicit
    # Loader is unsafe and rejected by modern PyYAML.
    data = yaml.safe_load(read_file)

# metadatastore connection settings
_mds_config = {
    "host": data["metadatastore"]["config"]["host"],
    "port": 27017,
    "database": data["metadatastore"]["config"]["database"],
    "timezone": "US/Eastern",
}
mds = MDS(_mds_config)

# filestore (asset registry) connection settings
_fs_config = {
    "host": data["assets"]["config"]["host"],
    "port": 27017,
    "database": data["assets"]["config"]["database"],
}
db = Broker(mds, FileStore(_fs_config))
db.fs.register_handler(TimepixHDF5Handler._handler_name, TimepixHDF5Handler, overwrite=True)
|
dmgav/dpcmaps | dpcmaps/dpc_kernel.py | #!/usr/bin/env python
"""
Created on May 23, 2013
@author: <NAME> (<EMAIL>)
Computer Science Group, Computational Science Center
Brookhaven National Laboratory
This code is for Differential Phase Contrast (DPC) imaging based on Fourier-shift fitting
implementation.
Reference: Yan, H. et al. Quantitative x-ray phase imaging at the nanoscale by multilayer
Laue lenses. Sci. Rep. 3, 1307; DOI:10.1038/srep01307 (2013).
Test data is available at:
https://docs.google.com/file/d/0B3v6W1bQwN_AdjZwWmE3WTNqVnc/edit?usp=sharing
"""
from __future__ import print_function, division
import os
import numpy as np
import matplotlib.pyplot as plt
import PIL
from scipy.optimize import minimize
import time
from six import StringIO
import dpcmaps.load_timepix as load_timepix
import h5py
from dpcmaps.db_config.db_config import db
# try:
# import filestore.api as fsapi
# except Exception:
# print("Filestore is not available.")
rss_cache = {}  # length -> precomputed complex frequency ramp (see get_beta)
rss_iters = 0


def get_beta(xdata):
    """
    Return the centered imaginary index ramp for a signal of ``len(xdata)``.

    beta[k] = 1j * (k - floor(len/2)). Results are memoized per length in
    ``rss_cache`` since the ramp depends only on the signal length.
    """
    length = len(xdata)
    try:
        beta = rss_cache[length]
    except KeyError:
        # only a cache miss is expected; the original broad `except
        # Exception` could mask real errors
        beta = 1j * (np.arange(length) - np.floor(length / 2.0))
        rss_cache[length] = beta
    return beta
def rss(v, xdata, ydata, beta):
    """Residual sum of squares minimized by the Nelder-Mead fit."""
    model = v[0] * xdata * np.exp(beta * v[1])
    residual = ydata - model
    return np.sum(np.abs(residual) ** 2)
def pil_load(fn):
    """
    Load a 16-bit TIFF via PIL into a native-endian uint16 numpy array.

    Only ``I;16`` modes are supported; ``I;16B`` is treated as big-endian,
    every other I;16 variant as little-endian.
    """
    im = PIL.Image.open(fn)

    def toarray(im, dtype=np.uint8):
        # tobytes/frombuffer replace Image.tostring (removed in Pillow)
        # and the deprecated np.fromstring
        x_str = im.tobytes("raw", im.mode)
        return np.frombuffer(x_str, dtype)

    assert im.mode.startswith("I;16")
    if im.mode.endswith("B"):
        x = toarray(im, ">u2")
    else:
        x = toarray(im, "<u2")
    # reshape (frombuffer arrays are read-only views); astype copies to a
    # writable native-endian array
    x = x.reshape(im.size[1], im.size[0])
    return x.astype("=u2")
def load_image_filestore(datum_id):
    """
    Retrieve a single image from filestore by datum id.

    Raises IOError when datum_id is None (image not written yet);
    re-raises any retrieval failure after logging it.
    """
    if datum_id is None:
        raise IOError("Image doesn't exist yet")
    # raise Exception(f"Reading image: datum_id = {datum_id}")
    try:
        return np.asarray(db.reg.retrieve(datum_id)).squeeze()
        # return np.asarray(fsapi.retrieve(datum_id)).squeeze()
    except Exception as ex:
        print("Filestore load failed (datum={}): ({}) {}" "".format(datum_id, ex.__class__.__name__, ex))
        raise
def load_data_hdf5(file_path):
    """
    Read the image stack at /entry/instrument/detector/data from an HDF5 file.

    Returns the dataset as a numpy array.
    """
    # context manager guarantees the file is closed even when a group is
    # missing (the original leaked the handle on exception)
    with h5py.File(str(file_path), "r") as f:
        data = f["entry"]["instrument"]["detector"]["data"][...]
    return np.array(data)
def load_file(load_image, fn, hang, roi=None, bad_pixels=[], zip_file=None):
    """
    Load an image file and compute the inverse FFTs of its line projections.

    Parameters
    ----------
    load_image : callable
        loader function (file path or filestore datum id -> 2-D array)
    fn : str
        file path (or datum id for the filestore loader)
    hang : int
        1 -> wait for the file to appear on disk before loading
    roi : tuple or None
        (x1, y1, x2, y2) inclusive crop region
    bad_pixels : list of (x, y)
        pixels zeroed before processing
    zip_file : object or None
        zip-archive loading is not implemented (raises NotImplementedError)

    Returns
    -------
    (im, fx, fy), or (None, None, None) when a filestore load fails.
    """
    if load_image == load_image_filestore:
        # ignore hanging settings, just hit filestore
        try:
            im = load_image(fn)
        except Exception:
            return None, None, None
    else:
        if hang == 1:
            # busy-wait until the frame file shows up, then load it
            # (while/else: runs once the loop condition becomes false)
            while not os.path.exists(fn):
                time.sleep(0.1)
            else:
                im = load_image(fn)
        elif os.path.exists(fn):
            im = load_image(fn)
        elif zip_file is not None:
            raise NotImplementedError
            # NOTE(review): dead code below; it would also fail because
            # StringIO is imported from six, so StringIO.StringIO does
            # not exist.
            # loading from a zip file is just about as fast (when not running in
            # parallel)
            f = zip_file.open(fn)
            stream = StringIO.StringIO()
            stream.write(f.read())
            f.close()
            stream.seek(0)
            im = plt.imread(stream, format="tif")
        else:
            raise Exception("File not found: %s" % fn)
    if bad_pixels is not None:
        for x, y in bad_pixels:
            im[y, x] = 0
    if roi is not None:
        x1, y1, x2, y2 = roi
        im = im[y1 : y2 + 1, x1 : x2 + 1]
    xline = np.sum(im, axis=0)
    yline = np.sum(im, axis=1)
    fx = np.fft.fftshift(np.fft.ifft(xline))
    fy = np.fft.fftshift(np.fft.ifft(yline))
    return im, fx, fy
def load_file_h5(im, roi=None, bad_pixels=None):
    """
    Zero out bad pixels, crop to the ROI and compute the line-projection FFTs.

    Parameters
    ----------
    im : 2-D array
        detector frame (bad pixels are zeroed in place)
    roi : tuple or None
        (x1, y1, x2, y2) inclusive crop region
    bad_pixels : list of (x, y) or None
        pixels to zero before processing; None means none

    Returns
    -------
    (im, fx, fy) : the (cropped) image and the shifted inverse FFTs of its
    column and row sums.
    """
    if bad_pixels is None:
        # avoid the original mutable default argument ([]); None is
        # treated the same way (no bad pixels)
        bad_pixels = []
    for x, y in bad_pixels:
        im[y, x] = 0
    if roi is not None:
        x1, y1, x2, y2 = roi
        im = im[y1 : y2 + 1, x1 : x2 + 1]
    xline = np.sum(im, axis=0)
    yline = np.sum(im, axis=1)
    fx = np.fft.fftshift(np.fft.ifft(xline))
    fy = np.fft.fftshift(np.fft.ifft(yline))
    return im, fx, fy
def xj_test(filename, i, j, hang, roi=None, bad_pixels=[], **kwargs):
    """
    Debug helper: crude gradients from half-frame count differences.

    Returns (0, gx, gy) where gx/gy are the differences between the two
    halves of the frame, or (0.0, 0.0, 0.0) when loading fails.
    """
    try:
        # The original called load_file() without its required load_image
        # argument and referenced an undefined global `zip_file`, so every
        # call raised and returned zeros; take both from kwargs instead.
        im, fx, fy = load_file(
            kwargs.get("load_image", load_timepix.load),
            filename,
            hang,
            roi=roi,
            bad_pixels=bad_pixels,
            zip_file=kwargs.get("zip_file"),
        )
    except Exception:
        # print('Failed to load file %s: %s' % (filename, ex))
        return 0.0, 0.0, 0.0
    wx, wy = im.shape
    gx = np.sum(im[: wx // 2, :]) - np.sum(im[wx // 2 :, :])
    gy = np.sum(im[:, : wy // 2]) - np.sum(im[:, wy // 2 :])
    return 0, gx, gy
def run_dpc(
    filename,
    i,
    j,
    ref_fx=None,
    ref_fy=None,
    start_point=[1, 0],
    pixel_size=55,
    focus_to_det=1.46,
    dx=0.1,
    dy=0.1,
    energy=19.5,
    zip_file=None,
    roi=None,
    bad_pixels=[],
    max_iters=1000,
    solver="Nelder-Mead",
    hang=True,
    reverse_x=1,
    reverse_y=1,
    load_image=load_timepix.load,
):
    """
    Fit the Fourier shift of one frame against the reference projections.

    All units in micron
    pixel_size
    focus_to_det: focus to detector distance
    dx: scan step size x
    dy: scan step size y
    energy: in keV

    Returns (a, gx, gy, rx, ry): fitted amplitude, sign-corrected x/y
    shifts and the two fit residuals.
    """
    try:
        img, fx, fy = load_file(load_image, filename, hang=hang, zip_file=zip_file, roi=roi, bad_pixels=bad_pixels)
    except IOError as ie:
        print("%s" % ie)
        return 0.0, 0.0, 0.0, 0.0, 0.0
    if img is None:
        print("Image {0} was not loaded.".format(filename))
        return 1e-5, 1e-5, 1e-5, 1e-5, 1e-5

    def _fit(ref_line, line):
        # Nelder-Mead (or selected solver) fit of amplitude and shift
        result = minimize(
            rss,
            start_point,
            args=(ref_line, line, get_beta(ref_line)),
            method=solver,
            tol=1e-6,
            options=dict(maxiter=max_iters),
        )
        return result.x, result.fun

    vx, rx = _fit(ref_fx, fx)
    vy, ry = _fit(ref_fy, fy)
    a = vx[0]
    gx = reverse_x * vx[1]
    gy = reverse_y * vy[1]
    return a, gx, gy, rx, ry
def run_dpc_h5(
    dataimg,
    i,
    j,
    ref_fx=None,
    ref_fy=None,
    start_point=[1, 0],
    pixel_size=55,
    focus_to_det=1.46,
    dx=0.1,
    dy=0.1,
    energy=19.5,
    zip_file=None,
    roi=None,
    bad_pixels=[],
    max_iters=1000,
    solver="Nelder-Mead",
    hang=True,
    reverse_x=1,
    reverse_y=1,
    load_image=None,
):
    """
    HDF5 variant of run_dpc(): fit one in-memory frame against the
    reference projections.

    All units in micron
    pixel_size
    focus_to_det: focus to detector distance
    dx: scan step size x
    dy: scan step size y
    energy: in keV

    Returns (a, gx, gy, rx, ry).
    """
    try:
        img, fx, fy = load_file_h5(dataimg, roi=roi, bad_pixels=bad_pixels)
    except IOError as ie:
        print("%s" % ie)
        return 0.0, 0.0, 0.0, 0.0, 0.0
    if img is None:
        return 1e-5, 1e-5, 1e-5, 1e-5, 1e-5

    def _fit(ref_line, line):
        # Nelder-Mead (or selected solver) fit of amplitude and shift
        result = minimize(
            rss,
            start_point,
            args=(ref_line, line, get_beta(ref_line)),
            method=solver,
            tol=1e-6,
            options=dict(maxiter=max_iters),
        )
        return result.x, result.fun

    vx, rx = _fit(ref_fx, fx)
    vy, ry = _fit(ref_fy, fy)
    a = vx[0]
    gx = reverse_x * vx[1]
    gy = reverse_y * vy[1]
    return a, gx, gy, rx, ry
def recon(gx, gy, dx=0.1, dy=0.1, pad=1, w=1.0):
    """
    Reconstruct the final phase image from its gradients (Fourier integration).

    Parameters
    ----------
    gx : 2-D numpy array
        phase gradient along x direction
    gy : 2-D numpy array
        phase gradient along y direction
    dx : float
        scanning step size in x direction (in micro-meter)
    dy : float
        scanning step size in y direction (in micro-meter)
    pad : int
        padding factor; pad = 1 --> no padding,
        pad = 3 --> the data v is surrounded by a data-sized zero border p:
            p p p
            p v p
            p p p
    w : float
        weighting parameter for the phase gradient along x and y direction
        when constructing the final phase image

    Returns
    -------
    phi : 2-D numpy array
        final phase image

    References
    ----------
    [1] Yan et al., "Quantitative x-ray phase imaging at the nanoscale by
    multilayer Laue lenses," Scientific Reports 3, 1307 (2013).
    """
    rows, cols = gx.shape
    # embed the gradients in a (pad*rows, pad*cols) zero canvas
    gx_padding = np.zeros((pad * rows, pad * cols), dtype="d")
    gy_padding = np.zeros((pad * rows, pad * cols), dtype="d")
    gx_padding[(pad // 2) * rows : (pad // 2 + 1) * rows, (pad // 2) * cols : (pad // 2 + 1) * cols] = gx
    gy_padding[(pad // 2) * rows : (pad // 2 + 1) * rows, (pad // 2) * cols : (pad // 2 + 1) * cols] = gy
    tx = np.fft.fftshift(np.fft.fft2(gx_padding))
    ty = np.fft.fftshift(np.fft.fft2(gy_padding))
    mid_col = pad * cols // 2 + 1
    mid_row = pad * rows // 2 + 1
    # centered angular-frequency axes
    ax = 2 * np.pi * (np.arange(pad * cols) + 1 - mid_col) / (pad * cols * dx)
    ay = 2 * np.pi * (np.arange(pad * rows) + 1 - mid_row) / (pad * rows * dy)
    kappax, kappay = np.meshgrid(ax, ay)
    # (the original pre-allocated c with np.zeros here, immediately
    # overwritten -- dead allocation removed)
    c = -1j * (kappax * tx + w * kappay * ty)
    # mask zeros (the DC term) to avoid dividing by zero
    c = np.ma.masked_values(c, 0)
    c /= kappax ** 2 + w * kappay ** 2
    c = np.ma.filled(c, 0)
    c = np.fft.ifftshift(c)
    phi_padding = np.fft.ifft2(c)
    phi_padding = -phi_padding.real
    # crop the original data region back out of the padded result
    phi = phi_padding[(pad // 2) * rows : (pad // 2 + 1) * rows, (pad // 2) * cols : (pad // 2 + 1) * cols]
    return phi
def main(
file_format="SOFC/SOFC_%05d.tif",
dx=0.1,
dy=0.1,
ref_image=None,
zip_file=None,
rows=121,
cols=121,
start_point=[1, 0],
pixel_size=55,
focus_to_det=1.46,
energy=19.5,
pool=None,
first_image=1,
x1=None,
x2=None,
y1=None,
y2=None,
bad_pixels=[],
solver="Nelder-Mead",
display_fcn=None,
random=1,
pyramid=-1,
hang=1,
swap=-1,
reverse_x=1,
reverse_y=1,
mosaic_x=121,
mosaic_y=121,
load_image=load_timepix.load,
use_mds=False,
use_hdf5=False,
scan=None,
save_path=None,
pad=False,
calculate_results=False,
):
print("DPC")
print("---")
print("\tFile format: %s" % file_format)
print("\tdx: %s" % dx)
print("\tdy: %s" % dy)
print("\trows: %s" % rows)
print("\tcols: %s" % cols)
print("\tstart point: %s" % start_point)
print("\tpixel size: %s" % pixel_size)
print("\tfocus to det: %s" % (focus_to_det))
print("\tenergy: %s" % energy)
print("\tfirst image: %s" % first_image)
print("\treference image: %s" % ref_image)
print("\tsolver: %s" % solver)
print("\thang : %s" % hang)
print("\tswap : %s" % swap)
print("\treverse_x : %s" % reverse_x)
print("\treverse_y : %s" % reverse_y)
print("\tROI: (%s, %s)-(%s, %s)" % (x1, y1, x2, y2))
print("\tUse mds : %s" % use_mds)
print("\tUse hdf5 : %s" % use_hdf5)
print("\tScan : %s" % scan)
if display_fcn is not None:
calculate_results = True
t0 = time.time()
roi = None
if x1 is not None and x2 is not None:
if y1 is not None and y2 is not None:
roi = (x1, y1, x2, y2)
if use_hdf5:
# load the data
datastack = load_data_hdf5(file_format)
# read the reference image hdf5: only one reference image
reference, ref_fx, ref_fy = load_file_h5(datastack[first_image - 1, :, :], roi=roi, bad_pixels=bad_pixels)
else:
# read the reference image: only one reference image
reference, ref_fx, ref_fy = load_file(
load_image, ref_image, hang, zip_file=zip_file, roi=roi, bad_pixels=bad_pixels
)
a = np.zeros((rows, cols), dtype="d")
gx = np.zeros((rows, cols), dtype="d")
gy = np.zeros((rows, cols), dtype="d")
rx = np.zeros((rows, cols), dtype="d")
ry = np.zeros((rows, cols), dtype="d")
dpc_settings = dict(
start_point=start_point,
pixel_size=pixel_size,
focus_to_det=focus_to_det,
dx=dx,
dy=dy,
energy=energy,
zip_file=zip_file,
ref_fx=ref_fx,
ref_fy=ref_fy,
roi=roi,
bad_pixels=bad_pixels,
solver=solver,
load_image=load_image,
hang=hang,
reverse_x=reverse_x,
reverse_y=reverse_y,
)
if use_mds:
image_uids = list(scan)
print("Filestore has %d images" % (len(image_uids)))
def get_filename(i, j):
idx = first_image + i * cols + j
try:
return image_uids[idx]
except IndexError:
return None
elif use_hdf5:
def get_filename(i, j):
frame_num = first_image + i * cols + j - 1
return frame_num
else:
def get_filename(i, j):
frame_num = first_image + i * cols + j
return file_format % frame_num
# Wavelength in micron
lambda_ = 12.4e-4 / energy
_t0 = time.time()
mrows = rows // mosaic_y
mcols = cols // mosaic_x
if 1:
fcn = run_dpc
else:
fcn = xj_test
gx_factor = len(ref_fx) * pixel_size / (lambda_ * focus_to_det * 1e6)
gy_factor = len(ref_fy) * pixel_size / (lambda_ * focus_to_det * 1e6)
for n in range(mosaic_y):
for m in range(mosaic_x):
if use_hdf5:
args = [
(datastack[get_filename(i, j), :, :], i, j)
for i in range(n * mrows, n * mrows + mrows)
for j in range(m * mcols, m * mcols + mcols)
]
else:
args = [
(get_filename(i, j), i, j)
for i in range(n * mrows, n * mrows + mrows)
for j in range(m * mcols, m * mcols + mcols)
]
try:
if display_fcn is not None and random == 1:
np.random.shuffle(args)
# Function call without multiprocessing for debugging
# for arg in args:
# results = fcn(arg[0],arg[1],arg[2], ref_fx=ref_fx, roi=roi)
if use_hdf5:
fcn = run_dpc_h5
results = [pool.apply_async(fcn, arg, kwds=dpc_settings) for arg in args]
if calculate_results:
total_results = len(results)
k = 0
while k < total_results:
k = 0
for arg, result in zip(args, results):
if result.ready():
_a, _gx, _gy, _rx, _ry = result.get()
fn, i, j = arg
if pyramid == 1 and i % 2 != 0:
j = mcols - j - 1
a[i, j] = _a
rx[i, j] = _rx
ry[i, j] = _ry
if swap == 1:
gy[i, j] = _gx * gx_factor
gx[i, j] = _gy * gy_factor
else:
gx[i, j] = _gx * gx_factor
gy[i, j] = _gy * gy_factor
k += 1
try:
if display_fcn is not None:
display_fcn(a, gx, gy, None, rx, ry)
except Exception as ex:
print("Failed to update display: (%s) %s" % (ex.__class__.__name__, ex))
time.sleep(1.0)
except KeyboardInterrupt:
print("Cancelled")
return
pool.close()
pool.join()
_t1 = time.time()
elapsed = _t1 - _t0
print(
"Multiprocess elapsed=%.3f frames=%d (per frame %.3fms)"
"" % (elapsed, rows * cols, 1000 * elapsed / (rows * cols))
)
dim = len(np.squeeze(gx).shape)
if dim != 1:
if pad is True:
phi = recon(gx, gy, dx, dy, 3)
print("Padding mode enabled!")
else:
phi = recon(gx, gy, dx, dy)
print("Padding mode disabled!")
t1 = time.time()
print("Elapsed", t1 - t0)
if display_fcn is not None:
display_fcn(a, gx, gy, phi, rx, ry)
return a, gx, gy, phi, rx, ry
else:
t1 = time.time()
print("Elapsed", t1 - t0)
phi = None
if display_fcn is not None:
display_fcn(a, gx, gy, phi, rx, ry)
return a, gx, gy, phi, rx, ry
if __name__ == "__main__":
    # Entry point: run the full 121x121 scan reading frames from disk.
    # ``zip_file`` stays None unless frames come from an archive (see comment);
    # it is bound at module level, presumably because helpers read it as a
    # global (as in the sibling dpc.py) -- confirm before renaming.
    zip_file = None  # zipfile.ZipFile('SOFC.zip')
    main(zip_file=zip_file, rows=121, cols=121)
|
dmgav/dpcmaps | dpcmaps/remove_ctrl_m_chars.py | import os
import sys
import tempfile
def main(filename=None):
    """Strip trailing whitespace (notably CR / "ctrl-M" characters) from every
    line of *filename*, keeping the original as ``filename + ".bak"``.

    Parameters:
        filename : path of the file to rewrite; defaults to ``sys.argv[1]``
                   so the original command-line behavior is unchanged.
    """
    if filename is None:
        filename = sys.argv[1]
    # Create the temp file next to the target: os.rename() cannot cross
    # filesystems, and the default temp dir may be on a different mount.
    out_dir = os.path.dirname(os.path.abspath(filename))
    # mode="w" is required: the old code used the default binary mode, so
    # writing str lines raised TypeError on Python 3.
    with tempfile.NamedTemporaryFile(mode="w", dir=out_dir, delete=False) as fh:
        # ``with`` also closes the reader, which the old code leaked.
        with open(filename) as src:
            for line in src:
                # rstrip() drops \r (and any other trailing whitespace);
                # a single \n is re-appended to every line.
                fh.write(line.rstrip() + "\n")
    os.rename(filename, filename + ".bak")
    os.rename(fh.name, filename)


if __name__ == "__main__":
    main()
|
dmgav/dpcmaps | dpcmaps/dpc.py | <filename>dpcmaps/dpc.py
#!/usr/bin/env python
"""
Created on May 23, 2013, last modified on June 19, 2013
@author: <NAME> (<EMAIL>)
Computer Science Group, Computational Science Center
Brookhaven National Laboratory
This code is for Differential Phase Contrast (DPC) imaging based on Fourier-shift fitting
implementation.
Reference: Yan, H. et al. Quantitative x-ray phase imaging at the nanoscale by multilayer
Laue lenses. Sci. Rep. 3, 1307; DOI:10.1038/srep01307 (2013).
Test data is available at:
https://docs.google.com/file/d/0B3v6W1bQwN_AdjZwWmE3WTNqVnc/edit?usp=sharing
"""
from __future__ import print_function
import os
import numpy as np
import matplotlib.pyplot as plt
import PIL
from scipy.misc import imsave
from scipy.optimize import minimize
import time
import cStringIO as StringIO
import load_timepix
# Cache of precomputed frequency ramps keyed by signal length, plus a
# module-level iteration counter (read by the timing printout in main()).
rss_cache = {}
rss_iters = 0


def get_beta(xdata):
    """Return (and memoize) the centered imaginary frequency ramp for *xdata*.

    The vector ``1j * (arange(n) - floor(n / 2))`` depends only on
    ``n = len(xdata)``, so it is cached in ``rss_cache`` by length.
    """
    length = len(xdata)
    try:
        beta = rss_cache[length]
    except KeyError:  # was ``except Exception``; only a cache miss is expected here
        # beta = 1j * (np.arange(length) + 1 - (np.floor(length / 2.0) + 1))
        beta = 1j * (np.arange(length) - np.floor(length / 2.0))
        rss_cache[length] = beta
    return beta
def rss(v, xdata, ydata, beta):
    """Residual sum of squares minimized by the Nelder-Mead fit.

    ``v`` is the ``[amplitude, shift]`` parameter vector; the model is the
    reference profile ``xdata`` scaled by the amplitude and phase-ramped by
    ``exp(shift * beta)``.
    """
    amplitude, shift = v[0], v[1]
    model = xdata * amplitude * np.exp(shift * beta)
    residual = ydata - model
    return np.sum(np.abs(residual) ** 2)
def pil_load(fn):
    """Load a 16-bit grayscale image via PIL and return it as a numpy array.

    Only ``I;16``-family modes are supported; byte order is taken from the
    mode suffix (``B`` = big-endian) and the result is converted to native
    16-bit unsigned integers with shape (height, width).
    """
    im = PIL.Image.open(fn)
    assert im.mode.startswith("I;16")
    # Image.tostring() / np.fromstring() were removed from Pillow / deprecated
    # in numpy; tobytes() / frombuffer() are the supported replacements.
    dtype = ">u2" if im.mode.endswith("B") else "<u2"
    x = np.frombuffer(im.tobytes("raw", im.mode), dtype)
    # im.size is (width, height); numpy wants (rows, cols). astype() also
    # copies, making the (read-only) buffer view writable.
    return x.reshape(im.size[1], im.size[0]).astype("=u2")
def load_file(fn, roi=None, bad_pixels=[], zip_file=None):
    """
    Load an image file and precompute its 1-D DPC profiles.

    Parameters:
        fn         : path of the frame to load (read via load_timepix)
        roi        : optional (x1, y1, x2, y2) crop; corners inclusive
        bad_pixels : iterable of (x, y) detector pixels to zero out
        zip_file   : optional archive handle (currently unsupported)

    Returns:
        (im, fx, fy) -- the image plus the fft-shifted inverse FFTs of its
        row sums (fx) and column sums (fy), the profiles fitted by rss().
    """
    if os.path.exists(fn):
        im = load_timepix.load(fn)
    elif zip_file is not None:
        # Archive loading is disabled: everything after this raise is
        # unreachable dead code kept from an earlier experiment.
        raise NotImplementedError
        # loading from a zip file is just about as fast (when not running in
        # parallel)
        f = zip_file.open(fn)
        stream = StringIO.StringIO()  # NOTE: cStringIO -- this path is Python 2 only
        stream.write(f.read())
        f.close()
        stream.seek(0)
        im = plt.imread(stream, format="tif")
    else:
        raise Exception("File not found: %s" % fn)
    # Zero out known-bad detector pixels before integrating
    if bad_pixels is not None:
        for x, y in bad_pixels:
            im[x, y] = 0
    # Crop to the region of interest (inclusive corners)
    if roi is not None:
        x1, y1, x2, y2 = roi
        im = im[x1 : x2 + 1, y1 : y2 + 1]
    # Collapse to 1-D profiles along each axis, then take the centered
    # inverse FFT used by the Fourier-shift fit
    xline = np.sum(im, axis=1)
    yline = np.sum(im, axis=0)
    fx = np.fft.fftshift(np.fft.ifft(xline))
    fy = np.fft.fftshift(np.fft.ifft(yline))
    return im, fx, fy
def xj_test(filename, i, j, roi=None, bad_pixels=[], **kwargs):
    """Quick DPC sanity check: half-plane intensity differences.

    Returns (0, gx, gy) where gx/gy are the differences between the summed
    intensities of the two detector halves along each axis, or
    (0.0, 0.0, 0.0) if the frame cannot be loaded.  Accepts (and ignores)
    the full ``dpc_settings`` keyword set so it can replace run_dpc().
    """
    try:
        # Use the zip_file passed in dpc_settings (the old code read an
        # undefined module global instead, ignoring the keyword argument).
        im, fx, fy = load_file(
            filename, zip_file=kwargs.get("zip_file"), roi=roi, bad_pixels=bad_pixels
        )
    except Exception as ex:
        print("Failed to load file %s: %s" % (filename, ex))
        return 0.0, 0.0, 0.0
    wx, wy = im.shape
    # // : integer division (``wx / 2`` is a float on Python 3 and breaks slicing)
    gx = np.sum(im[: wx // 2, :]) - np.sum(im[wx // 2 :, :])
    gy = np.sum(im[:, : wy // 2]) - np.sum(im[:, wy // 2 :])
    return 0, gx, gy
def run_dpc(
    filename,
    i,
    j,
    ref_fx=None,
    ref_fy=None,
    start_point=[1, 0],
    pixel_size=55,
    focus_to_det=1.46,
    dx=0.1,
    dy=0.1,
    energy=19.5,
    zip_file=None,
    roi=None,
    bad_pixels=[],
    max_iters=1000,
    solver="Nelder-Mead",
    invers=False,
):
    """Fit one frame's Fourier-shift against the reference profiles.

    All units in micron:
        pixel_size   -- detector pixel size
        focus_to_det -- focus to detector distance
        dx, dy       -- scan step sizes
        energy       -- in keV

    Returns (a, gx, gy): fitted amplitude and the raw x/y shifts
    (gx negated when ``invers`` is set).  On load failure returns
    (0.0, 0.0, 0.0) so a scan can continue past a bad frame.
    """
    try:
        img, fx, fy = load_file(filename, zip_file=zip_file, roi=roi, bad_pixels=bad_pixels)
    except Exception as ex:
        print("Failed to load file %s: %s" % (filename, ex))
        return 0.0, 0.0, 0.0

    def _fit(ref, data, tol):
        # One 1-D Fourier-shift fit; returns the optimizer's solution vector.
        result = minimize(
            rss,
            start_point,
            args=(ref, data, get_beta(ref)),
            method=solver,
            tol=tol,
            options=dict(maxiter=max_iters),
        )
        return result.x

    vx = _fit(ref_fx, fx, 1e-4)
    vy = _fit(ref_fy, fy, 1e-6)  # NOTE: the y fit uses a tighter tolerance

    a = vx[0]
    gx = -vx[1] if invers else vx[1]
    gy = vy[1]
    return a, gx, gy
def recon(gx, gy, dx=0.1, dy=0.1, pad=1, w=1.0):
    """
    Reconstruct the final phase image from its gradients.

    Parameters
    ----------
    gx, gy : 2-D numpy arrays
        phase gradients along the x and y directions
    dx, dy : float
        scanning step sizes (micro-meter)
    pad : int
        padding factor; pad = 1 means no padding, pad = 3 surrounds the
        data with a full frame of zeros on every side
    w : float
        weight of the y gradient relative to the x gradient

    Returns
    -------
    phi : 2-D numpy array
        final phase image

    References
    ----------
    Yan et al., "Quantitative x-ray phase imaging at the nanoscale by
    multilayer Laue lenses," Sci. Rep. 3, 1307 (2013).
    """
    rows, cols = gx.shape
    # Zero-pad both gradient maps, centering the data in the padded frame
    gx_padding = np.zeros((pad * rows, pad * cols), dtype="d")
    gy_padding = np.zeros((pad * rows, pad * cols), dtype="d")
    gx_padding[(pad // 2) * rows : (pad // 2 + 1) * rows, (pad // 2) * cols : (pad // 2 + 1) * cols] = gx
    gy_padding[(pad // 2) * rows : (pad // 2 + 1) * rows, (pad // 2) * cols : (pad // 2 + 1) * cols] = gy
    tx = np.fft.fftshift(np.fft.fft2(gx_padding))
    ty = np.fft.fftshift(np.fft.fft2(gy_padding))
    # Centered spatial-frequency axes.  (A dead ``c = np.zeros(...)``
    # pre-allocation used to sit here; c is assigned wholesale below.)
    mid_col = pad * cols // 2.0 + 1
    mid_row = pad * rows // 2.0 + 1
    ax = 2 * np.pi * (np.arange(pad * cols) + 1 - mid_col) / (pad * cols * dx)
    ay = 2 * np.pi * (np.arange(pad * rows) + 1 - mid_row) / (pad * rows * dy)
    kappax, kappay = np.meshgrid(ax, ay)
    # Fourier-space integration:
    #   phi_hat = -i (kx*tx + w*ky*ty) / (kx^2 + w*ky^2)
    # Zero entries of the numerator (which include the zero-frequency term)
    # are masked before the division and filled back with 0 afterwards.
    c = -1j * (kappax * tx + w * kappay * ty)
    c = np.ma.masked_values(c, 0)
    c /= kappax ** 2 + w * kappay ** 2
    c = np.ma.filled(c, 0)
    c = np.fft.ifftshift(c)
    phi_padding = np.fft.ifft2(c)
    phi_padding = -phi_padding.real
    # Crop the padding back off to recover the original grid
    phi = phi_padding[(pad // 2) * rows : (pad // 2 + 1) * rows, (pad // 2) * cols : (pad // 2 + 1) * cols]
    return phi
def main(
    file_format="SOFC/SOFC_%05d.tif",
    dx=0.1,
    dy=0.1,
    ref_image=1,
    zip_file=None,
    rows=121,
    cols=121,
    start_point=[1, 0],
    pixel_size=55,
    focus_to_det=1.46e6,
    energy=19.5,
    pool=None,
    first_image=1,
    x1=None,
    x2=None,
    y1=None,
    y2=None,
    bad_pixels=[],
    solver="Nelder-Mead",
    display_fcn=None,
    invers=False,
):
    """Run a complete DPC reconstruction over a rows x cols scan.

    Loads one reference frame, fits every scan frame with run_dpc()
    (serially, or through ``pool`` via apply_async), converts the fitted
    shifts into phase gradients gx/gy and integrates them with recon().

    Parameters (micron unless noted):
        file_format  : printf-style pattern mapping frame number -> path
        dx, dy       : scan step sizes
        ref_image    : frame number of the reference image
        zip_file     : optional archive handle forwarded to load_file()
        rows, cols   : scan grid dimensions
        start_point  : initial [amplitude, shift] guess (shared list; read-only)
        pixel_size   : detector pixel size
        focus_to_det : focus-to-detector distance (note the 1.46e6 default)
        energy       : beam energy in keV
        pool         : optional multiprocessing pool; None = serial loop
        first_image  : frame number of the first scan image
        x1..y2       : optional detector ROI corners (all four required)
        bad_pixels   : (x, y) detector pixels to zero out
        solver       : scipy.optimize.minimize method name
        display_fcn  : optional callback display_fcn(a, gx, gy, phi) for
                       live updates while the pool is running
        invers       : negate the fitted x shift (see run_dpc)

    Returns:
        (a, gx, gy, phi); phi is None when the scan squeezes to 1-D.
    """
    print("DPC")
    print("---")
    print("\tFile format: %s" % file_format)
    print("\tdx: %s" % dx)
    print("\tdy: %s" % dy)
    print("\trows: %s" % rows)
    print("\tcols: %s" % cols)
    print("\tstart point: %s" % start_point)
    print("\tpixel size: %s" % pixel_size)
    print("\tfocus to det: %s" % (focus_to_det / 1e6))
    print("\tenergy: %s" % energy)
    print("\tfirst image: %s" % first_image)
    print("\treference image: %s" % ref_image)
    print("\tsolver: %s" % solver)
    print("\tROI: (%s, %s)-(%s, %s)" % (x1, y1, x2, y2))
    t0 = time.time()

    # Crop only when all four ROI corners are given
    roi = None
    if x1 is not None and x2 is not None:
        if y1 is not None and y2 is not None:
            roi = (x1, y1, x2, y2)

    # read the reference image: only one reference image
    reference, ref_fx, ref_fy = load_file(
        file_format % ref_image, zip_file=zip_file, roi=roi, bad_pixels=bad_pixels
    )
    # Result grids: amplitude and the two phase-gradient maps
    a = np.zeros((rows, cols), dtype="d")
    gx = np.zeros((rows, cols), dtype="d")
    gy = np.zeros((rows, cols), dtype="d")
    # Keyword arguments forwarded to every run_dpc() call
    dpc_settings = dict(
        start_point=start_point,
        pixel_size=pixel_size,
        focus_to_det=focus_to_det,
        dx=dx,
        dy=dy,
        energy=energy,
        zip_file=zip_file,
        ref_fx=ref_fx,
        ref_fy=ref_fy,
        roi=roi,
        bad_pixels=bad_pixels,
        solver=solver,
        invers=invers,
    )

    def get_filename(i, j):
        # Row-major mapping of grid position (row i, col j) to a frame path
        frame_num = first_image + i * cols + j
        return file_format % frame_num

    # Wavelength in micron
    lambda_ = 12.4e-4 / energy
    # Conversion from fitted Fourier shift to physical phase gradient
    gx_factor = len(ref_fx) * pixel_size / (lambda_ * focus_to_det * 1e6)
    gy_factor = len(ref_fy) * pixel_size / (lambda_ * focus_to_det * 1e6)

    if pool is None:
        # Serial path: one frame at a time, timing each row
        for i in range(rows):
            trow = time.time()
            print("Row %d" % i, end="")
            rss_iters = 0  # local; the module-level counter is not updated by run_dpc()
            for j in range(cols):
                _a, _gx, _gy = run_dpc(get_filename(i, j), i, j, **dpc_settings)
                a[i, j] = _a
                gx[i, j] = _gx
                gy[i, j] = _gy
            row_elapsed = 1000 * (time.time() - trow)
            print(" elapsed %.3fms" % row_elapsed, end=" ")
            print(" (per frame %.3fms, rss iters %d)" % (row_elapsed / cols, rss_iters))
    else:
        # Parallel path: submit every frame to the pool up front
        args = [(get_filename(i, j), i, j) for i in range(rows) for j in range(cols)]
        _t0 = time.time()
        try:
            fcn = run_dpc  # swap in xj_test here for the quick half-plane check
            if display_fcn is not None:
                # Shuffle so the live display fills in uniformly, not row by row
                np.random.shuffle(args)
            results = [pool.apply_async(fcn, arg, kwds=dpc_settings) for arg in args]
            if display_fcn is not None:
                # Poll until every job is ready, refreshing the display each pass
                total_results = len(results)
                n_done = 0
                while n_done < total_results:
                    n_done = 0
                    for arg, result in zip(args, results):
                        if result.ready():
                            _a, _gx, _gy = result.get()
                            fn, i, j = arg
                            a[i, j] = _a
                            gx[i, j] = _gx
                            gy[i, j] = _gy
                            n_done += 1
                    try:
                        # Display scaled *copies*: the previous code did
                        # ``gx *= factor`` here, re-scaling the arrays on
                        # every polling pass and compounding the factor.
                        display_fcn(a, gx * gx_factor, gy * gy_factor, None)
                    except Exception as ex:
                        print("Failed to update display: (%s) %s" % (ex.__class__.__name__, ex))
                    time.sleep(1.0)
            pool.close()
            pool.join()
        except KeyboardInterrupt:
            print("Cancelled")
            return
        # Final collection (re-copies entries already seen by the display loop).
        # A stray ``k += 1`` used to live in this loop and raised NameError
        # whenever display_fcn was None (k was never initialized on that path).
        for arg, result in zip(args, results):
            fn, i, j = arg
            _a, _gx, _gy = result.get()
            a[i, j] = _a
            gx[i, j] = _gx
            gy[i, j] = _gy
        _t1 = time.time()
        elapsed = _t1 - _t0
        print(
            "Multiprocess elapsed=%.3f frames=%d (per frame %.3fms)"
            % (elapsed, rows * cols, 1000 * elapsed / (rows * cols))
        )

    # Convert fitted Fourier shifts to physical phase gradients (exactly once)
    gx *= gx_factor
    gy *= gy_factor
    dim = len(np.squeeze(gx).shape)
    if dim != 1:
        # 2-D scan: save the intermediate maps and reconstruct the phase
        imsave("a.jpg", a)
        np.savetxt("a.txt", a)
        imsave("gx.jpg", gx)
        np.savetxt("gx.txt", gx)
        imsave("gy.jpg", gy)
        np.savetxt("gy.txt", gy)
        # -------------reconstruct the final phase image using gx and gy--------------------#
        phi = recon(gx, gy, dx, dy)
        imsave("phi.jpg", phi)
        np.savetxt("phi.txt", phi)
        t1 = time.time()
        print("Elapsed", t1 - t0)
        return a, gx, gy, phi
    else:
        # 1-D scan: phase integration is skipped.  (An old block of plotting
        # code lived here as an inert string literal; it has been removed.)
        phi = None
        return a, gx, gy, phi
if __name__ == "__main__":
    # ``zip_file`` is a module-level binding: xj_test()/main() receive it
    # explicitly; None means frames are read from the filesystem.
    zip_file = None  # zipfile.ZipFile('SOFC.zip')
    # The previous call passed ``processes=0``, but main() has no such
    # parameter (parallelism is controlled via ``pool=``), so the script
    # crashed with TypeError before doing any work.
    main(zip_file=zip_file, rows=121, cols=121)
|
dmgav/dpcmaps | dpcmaps/db_config/db_config.py | <filename>dpcmaps/db_config/db_config.py
import json
import os
import platform
beamline_name = ""
# Bind both exported names up front: previously ``db_analysis`` was left
# undefined on the HXN branch and whenever an exception was caught, so
# ``from dpcmaps.db_config.db_config import db_analysis`` could fail.
db = None
db_analysis = None

# The following code is borrowed from PyXRF.  It is supposed to determine the
# beamline name based on the PyXRF configuration file '/etc/pyxrf/pyxrf.json'.
try:
    beamline_name = ""
    # Attempt to find the configuration file first
    config_path = "/etc/pyxrf/pyxrf.json"
    if os.path.isfile(config_path):
        try:
            with open(config_path, "r") as beamline_pyxrf:
                beamline_config_pyxrf = json.load(beamline_pyxrf)
                beamline_name = beamline_config_pyxrf["beamline_name"]
        except Exception as ex:
            raise IOError(f"Error while opening configuration file {config_path!r}") from ex
    else:
        # Otherwise try to identify the beamline using host name
        hostname = platform.node()
        beamline_names = {
            "xf03id": "HXN",
            "xf05id": "SRX",
            "xf08bm": "TES",
            "xf04bm": "XFM",
        }
        for k, v in beamline_names.items():
            if hostname.startswith(k):
                beamline_name = v
        if not beamline_name:
            raise Exception("Beamline is not identified")

    if beamline_name == "HXN":
        from dpcmaps.db_config.hxn_db_config import db
    # elif beamline_name == "SRX":
    #     from dpcmaps.db_config.srx_db_config import db
    # elif beamline_name == "XFM":
    #     from dpcmaps.db_config.xfm_db_config import db
    # elif beamline_name == "TES":
    #     from dpcmaps.db_config.tes_db_config import db
    else:
        db = None
        db_analysis = None
        print(f"Beamline Database is not used in DpcMaps: beamline {beamline_name!r} is not supported")
except Exception as ex:
    db = None
    print(f"Beamline Database is not used in DpcMaps: {ex}")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.