# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import tabletmanagerdata_pb2 as tabletmanagerdata__pb2
class TabletManagerStub(object):
"""TabletManager is a service definition for tabletmanagerdata.TabletManager.
Various read-only methods
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Ping = channel.unary_unary(
'/tabletmanagerservice.TabletManager/Ping',
request_serializer=tabletmanagerdata__pb2.PingRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.PingResponse.FromString,
)
self.Sleep = channel.unary_unary(
'/tabletmanagerservice.TabletManager/Sleep',
request_serializer=tabletmanagerdata__pb2.SleepRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SleepResponse.FromString,
)
self.ExecuteHook = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ExecuteHook',
request_serializer=tabletmanagerdata__pb2.ExecuteHookRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ExecuteHookResponse.FromString,
)
self.GetSchema = channel.unary_unary(
'/tabletmanagerservice.TabletManager/GetSchema',
request_serializer=tabletmanagerdata__pb2.GetSchemaRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.GetSchemaResponse.FromString,
)
self.GetPermissions = channel.unary_unary(
'/tabletmanagerservice.TabletManager/GetPermissions',
request_serializer=tabletmanagerdata__pb2.GetPermissionsRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.GetPermissionsResponse.FromString,
)
self.SetReadOnly = channel.unary_unary(
'/tabletmanagerservice.TabletManager/SetReadOnly',
request_serializer=tabletmanagerdata__pb2.SetReadOnlyRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SetReadOnlyResponse.FromString,
)
self.SetReadWrite = channel.unary_unary(
'/tabletmanagerservice.TabletManager/SetReadWrite',
request_serializer=tabletmanagerdata__pb2.SetReadWriteRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SetReadWriteResponse.FromString,
)
self.ChangeType = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ChangeType',
request_serializer=tabletmanagerdata__pb2.ChangeTypeRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ChangeTypeResponse.FromString,
)
self.RefreshState = channel.unary_unary(
'/tabletmanagerservice.TabletManager/RefreshState',
request_serializer=tabletmanagerdata__pb2.RefreshStateRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.RefreshStateResponse.FromString,
)
self.RunHealthCheck = channel.unary_unary(
'/tabletmanagerservice.TabletManager/RunHealthCheck',
request_serializer=tabletmanagerdata__pb2.RunHealthCheckRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.RunHealthCheckResponse.FromString,
)
self.IgnoreHealthError = channel.unary_unary(
'/tabletmanagerservice.TabletManager/IgnoreHealthError',
request_serializer=tabletmanagerdata__pb2.IgnoreHealthErrorRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.IgnoreHealthErrorResponse.FromString,
)
self.ReloadSchema = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ReloadSchema',
request_serializer=tabletmanagerdata__pb2.ReloadSchemaRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ReloadSchemaResponse.FromString,
)
self.PreflightSchema = channel.unary_unary(
'/tabletmanagerservice.TabletManager/PreflightSchema',
request_serializer=tabletmanagerdata__pb2.PreflightSchemaRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.PreflightSchemaResponse.FromString,
)
self.ApplySchema = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ApplySchema',
request_serializer=tabletmanagerdata__pb2.ApplySchemaRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ApplySchemaResponse.FromString,
)
self.LockTables = channel.unary_unary(
'/tabletmanagerservice.TabletManager/LockTables',
request_serializer=tabletmanagerdata__pb2.LockTablesRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.LockTablesResponse.FromString,
)
self.UnlockTables = channel.unary_unary(
'/tabletmanagerservice.TabletManager/UnlockTables',
request_serializer=tabletmanagerdata__pb2.UnlockTablesRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.UnlockTablesResponse.FromString,
)
self.ExecuteFetchAsDba = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ExecuteFetchAsDba',
request_serializer=tabletmanagerdata__pb2.ExecuteFetchAsDbaRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ExecuteFetchAsDbaResponse.FromString,
)
self.ExecuteFetchAsAllPrivs = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ExecuteFetchAsAllPrivs',
request_serializer=tabletmanagerdata__pb2.ExecuteFetchAsAllPrivsRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ExecuteFetchAsAllPrivsResponse.FromString,
)
self.ExecuteFetchAsApp = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ExecuteFetchAsApp',
request_serializer=tabletmanagerdata__pb2.ExecuteFetchAsAppRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ExecuteFetchAsAppResponse.FromString,
)
self.SlaveStatus = channel.unary_unary(
'/tabletmanagerservice.TabletManager/SlaveStatus',
request_serializer=tabletmanagerdata__pb2.SlaveStatusRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SlaveStatusResponse.FromString,
)
self.MasterPosition = channel.unary_unary(
'/tabletmanagerservice.TabletManager/MasterPosition',
request_serializer=tabletmanagerdata__pb2.MasterPositionRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.MasterPositionResponse.FromString,
)
self.StopSlave = channel.unary_unary(
'/tabletmanagerservice.TabletManager/StopSlave',
request_serializer=tabletmanagerdata__pb2.StopSlaveRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.StopSlaveResponse.FromString,
)
self.StopSlaveMinimum = channel.unary_unary(
'/tabletmanagerservice.TabletManager/StopSlaveMinimum',
request_serializer=tabletmanagerdata__pb2.StopSlaveMinimumRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.StopSlaveMinimumResponse.FromString,
)
self.StartSlave = channel.unary_unary(
'/tabletmanagerservice.TabletManager/StartSlave',
request_serializer=tabletmanagerdata__pb2.StartSlaveRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.StartSlaveResponse.FromString,
)
self.StartSlaveUntilAfter = channel.unary_unary(
'/tabletmanagerservice.TabletManager/StartSlaveUntilAfter',
request_serializer=tabletmanagerdata__pb2.StartSlaveUntilAfterRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.StartSlaveUntilAfterResponse.FromString,
)
self.TabletExternallyReparented = channel.unary_unary(
'/tabletmanagerservice.TabletManager/TabletExternallyReparented',
request_serializer=tabletmanagerdata__pb2.TabletExternallyReparentedRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.TabletExternallyReparentedResponse.FromString,
)
self.TabletExternallyElected = channel.unary_unary(
'/tabletmanagerservice.TabletManager/TabletExternallyElected',
request_serializer=tabletmanagerdata__pb2.TabletExternallyElectedRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.TabletExternallyElectedResponse.FromString,
)
self.GetSlaves = channel.unary_unary(
'/tabletmanagerservice.TabletManager/GetSlaves',
request_serializer=tabletmanagerdata__pb2.GetSlavesRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.GetSlavesResponse.FromString,
)
self.VReplicationExec = channel.unary_unary(
'/tabletmanagerservice.TabletManager/VReplicationExec',
request_serializer=tabletmanagerdata__pb2.VReplicationExecRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.VReplicationExecResponse.FromString,
)
self.VReplicationWaitForPos = channel.unary_unary(
'/tabletmanagerservice.TabletManager/VReplicationWaitForPos',
request_serializer=tabletmanagerdata__pb2.VReplicationWaitForPosRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.VReplicationWaitForPosResponse.FromString,
)
self.ResetReplication = channel.unary_unary(
'/tabletmanagerservice.TabletManager/ResetReplication',
request_serializer=tabletmanagerdata__pb2.ResetReplicationRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.ResetReplicationResponse.FromString,
)
self.InitMaster = channel.unary_unary(
'/tabletmanagerservice.TabletManager/InitMaster',
request_serializer=tabletmanagerdata__pb2.InitMasterRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.InitMasterResponse.FromString,
)
self.PopulateReparentJournal = channel.unary_unary(
'/tabletmanagerservice.TabletManager/PopulateReparentJournal',
request_serializer=tabletmanagerdata__pb2.PopulateReparentJournalRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.PopulateReparentJournalResponse.FromString,
)
self.InitSlave = channel.unary_unary(
'/tabletmanagerservice.TabletManager/InitSlave',
request_serializer=tabletmanagerdata__pb2.InitSlaveRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.InitSlaveResponse.FromString,
)
self.DemoteMaster = channel.unary_unary(
'/tabletmanagerservice.TabletManager/DemoteMaster',
request_serializer=tabletmanagerdata__pb2.DemoteMasterRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.DemoteMasterResponse.FromString,
)
self.PromoteSlaveWhenCaughtUp = channel.unary_unary(
'/tabletmanagerservice.TabletManager/PromoteSlaveWhenCaughtUp',
request_serializer=tabletmanagerdata__pb2.PromoteSlaveWhenCaughtUpRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.PromoteSlaveWhenCaughtUpResponse.FromString,
)
self.SlaveWasPromoted = channel.unary_unary(
'/tabletmanagerservice.TabletManager/SlaveWasPromoted',
request_serializer=tabletmanagerdata__pb2.SlaveWasPromotedRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SlaveWasPromotedResponse.FromString,
)
self.SetMaster = channel.unary_unary(
'/tabletmanagerservice.TabletManager/SetMaster',
request_serializer=tabletmanagerdata__pb2.SetMasterRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SetMasterResponse.FromString,
)
self.SlaveWasRestarted = channel.unary_unary(
'/tabletmanagerservice.TabletManager/SlaveWasRestarted',
request_serializer=tabletmanagerdata__pb2.SlaveWasRestartedRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.SlaveWasRestartedResponse.FromString,
)
self.StopReplicationAndGetStatus = channel.unary_unary(
'/tabletmanagerservice.TabletManager/StopReplicationAndGetStatus',
request_serializer=tabletmanagerdata__pb2.StopReplicationAndGetStatusRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.StopReplicationAndGetStatusResponse.FromString,
)
self.PromoteSlave = channel.unary_unary(
'/tabletmanagerservice.TabletManager/PromoteSlave',
request_serializer=tabletmanagerdata__pb2.PromoteSlaveRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.PromoteSlaveResponse.FromString,
)
self.Backup = channel.unary_stream(
'/tabletmanagerservice.TabletManager/Backup',
request_serializer=tabletmanagerdata__pb2.BackupRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.BackupResponse.FromString,
)
self.RestoreFromBackup = channel.unary_stream(
'/tabletmanagerservice.TabletManager/RestoreFromBackup',
request_serializer=tabletmanagerdata__pb2.RestoreFromBackupRequest.SerializeToString,
response_deserializer=tabletmanagerdata__pb2.RestoreFromBackupResponse.FromString,
)
class TabletManagerServicer(object):
"""TabletManager is a service definition for tabletmanagerdata.TabletManager.
Various read-only methods
"""
def Ping(self, request, context):
"""Ping returns the input payload
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Sleep(self, request, context):
"""Sleep sleeps for the provided duration
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ExecuteHook(self, request, context):
"""ExecuteHook executes the hook remotely
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSchema(self, request, context):
"""GetSchema asks the tablet for its schema
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetPermissions(self, request, context):
"""GetPermissions asks the tablet for its permissions
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetReadOnly(self, request, context):
"""
Various read-write methods
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetReadWrite(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ChangeType(self, request, context):
"""ChangeType asks the remote tablet to change its type
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RefreshState(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RunHealthCheck(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def IgnoreHealthError(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReloadSchema(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PreflightSchema(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ApplySchema(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def LockTables(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def UnlockTables(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ExecuteFetchAsDba(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ExecuteFetchAsAllPrivs(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ExecuteFetchAsApp(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SlaveStatus(self, request, context):
"""
Replication related methods
SlaveStatus returns the current slave status.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def MasterPosition(self, request, context):
"""MasterPosition returns the current master position
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StopSlave(self, request, context):
"""StopSlave makes mysql stop its replication
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StopSlaveMinimum(self, request, context):
"""StopSlaveMinimum stops the mysql replication after it reaches
the provided minimum point
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StartSlave(self, request, context):
"""StartSlave starts the mysql replication
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StartSlaveUntilAfter(self, request, context):
"""StartSlave starts the mysql replication until and including
the provided position
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TabletExternallyReparented(self, request, context):
"""TabletExternallyReparented tells a tablet that its underlying MySQL is
currently the master. It is only used in environments (such as Vitess+MoB)
in which MySQL is reparented by some agent external to Vitess, and then
that agent simply notifies Vitess.
This call is idempotent with respect to a single target tablet.
However, the tablet assumes there is a cooling-off period following the
initial external reparent from A to B, before this call is repeated on any
tablet other than B. This assumption is configurable with the vttablet flag
"finalize_external_reparent_timeout".
For more information, see the design doc at go/vt-fast-failover.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def TabletExternallyElected(self, request, context):
"""TabletExternallyElected is an notification that may be sent in
anticipation of potentially later sending TabletExternallyReparented.
The tablet can use this extra lead time to prepare to react quickly if
TabletExternallyReparented does follow.
This call is effectively a no-op if it is not followed by a call to
TabletExternallyReparented, so the external agent doing the reparent can
still change its mind.
The agent does not need to wait for this call or cancel it before calling
TabletExternallyReparented if the external reparent operation finishes
before TabletExternallyElected returns.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetSlaves(self, request, context):
"""GetSlaves asks for the list of mysql slaves
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def VReplicationExec(self, request, context):
"""VReplication API
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def VReplicationWaitForPos(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ResetReplication(self, request, context):
"""
Reparenting related functions
ResetReplication makes the target not replicating
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InitMaster(self, request, context):
"""InitMaster initializes the tablet as a master
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PopulateReparentJournal(self, request, context):
"""PopulateReparentJournal tells the tablet to add an entry to its
reparent journal
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def InitSlave(self, request, context):
"""InitSlave tells the tablet to reparent to the master unconditionnally
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DemoteMaster(self, request, context):
"""DemoteMaster tells the soon-to-be-former master it's gonna change
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PromoteSlaveWhenCaughtUp(self, request, context):
"""PromoteSlaveWhenCaughtUp tells the remote tablet to catch up,
and then be the master
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SlaveWasPromoted(self, request, context):
"""SlaveWasPromoted tells the remote tablet it is now the master
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SetMaster(self, request, context):
"""SetMaster tells the slave to reparent
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def SlaveWasRestarted(self, request, context):
"""SlaveWasRestarted tells the remote tablet its master has changed
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StopReplicationAndGetStatus(self, request, context):
"""StopReplicationAndGetStatus stops MySQL replication, and returns the
replication status
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PromoteSlave(self, request, context):
"""PromoteSlave makes the slave the new master
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Backup(self, request, context):
"""
Backup related methods
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def RestoreFromBackup(self, request, context):
"""RestoreFromBackup deletes all local data and restores it from the latest backup.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_TabletManagerServicer_to_server(servicer, server):
rpc_method_handlers = {
'Ping': grpc.unary_unary_rpc_method_handler(
servicer.Ping,
request_deserializer=tabletmanagerdata__pb2.PingRequest.FromString,
response_serializer=tabletmanagerdata__pb2.PingResponse.SerializeToString,
),
'Sleep': grpc.unary_unary_rpc_method_handler(
servicer.Sleep,
request_deserializer=tabletmanagerdata__pb2.SleepRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SleepResponse.SerializeToString,
),
'ExecuteHook': grpc.unary_unary_rpc_method_handler(
servicer.ExecuteHook,
request_deserializer=tabletmanagerdata__pb2.ExecuteHookRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ExecuteHookResponse.SerializeToString,
),
'GetSchema': grpc.unary_unary_rpc_method_handler(
servicer.GetSchema,
request_deserializer=tabletmanagerdata__pb2.GetSchemaRequest.FromString,
response_serializer=tabletmanagerdata__pb2.GetSchemaResponse.SerializeToString,
),
'GetPermissions': grpc.unary_unary_rpc_method_handler(
servicer.GetPermissions,
request_deserializer=tabletmanagerdata__pb2.GetPermissionsRequest.FromString,
response_serializer=tabletmanagerdata__pb2.GetPermissionsResponse.SerializeToString,
),
'SetReadOnly': grpc.unary_unary_rpc_method_handler(
servicer.SetReadOnly,
request_deserializer=tabletmanagerdata__pb2.SetReadOnlyRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SetReadOnlyResponse.SerializeToString,
),
'SetReadWrite': grpc.unary_unary_rpc_method_handler(
servicer.SetReadWrite,
request_deserializer=tabletmanagerdata__pb2.SetReadWriteRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SetReadWriteResponse.SerializeToString,
),
'ChangeType': grpc.unary_unary_rpc_method_handler(
servicer.ChangeType,
request_deserializer=tabletmanagerdata__pb2.ChangeTypeRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ChangeTypeResponse.SerializeToString,
),
'RefreshState': grpc.unary_unary_rpc_method_handler(
servicer.RefreshState,
request_deserializer=tabletmanagerdata__pb2.RefreshStateRequest.FromString,
response_serializer=tabletmanagerdata__pb2.RefreshStateResponse.SerializeToString,
),
'RunHealthCheck': grpc.unary_unary_rpc_method_handler(
servicer.RunHealthCheck,
request_deserializer=tabletmanagerdata__pb2.RunHealthCheckRequest.FromString,
response_serializer=tabletmanagerdata__pb2.RunHealthCheckResponse.SerializeToString,
),
'IgnoreHealthError': grpc.unary_unary_rpc_method_handler(
servicer.IgnoreHealthError,
request_deserializer=tabletmanagerdata__pb2.IgnoreHealthErrorRequest.FromString,
response_serializer=tabletmanagerdata__pb2.IgnoreHealthErrorResponse.SerializeToString,
),
'ReloadSchema': grpc.unary_unary_rpc_method_handler(
servicer.ReloadSchema,
request_deserializer=tabletmanagerdata__pb2.ReloadSchemaRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ReloadSchemaResponse.SerializeToString,
),
'PreflightSchema': grpc.unary_unary_rpc_method_handler(
servicer.PreflightSchema,
request_deserializer=tabletmanagerdata__pb2.PreflightSchemaRequest.FromString,
response_serializer=tabletmanagerdata__pb2.PreflightSchemaResponse.SerializeToString,
),
'ApplySchema': grpc.unary_unary_rpc_method_handler(
servicer.ApplySchema,
request_deserializer=tabletmanagerdata__pb2.ApplySchemaRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ApplySchemaResponse.SerializeToString,
),
'LockTables': grpc.unary_unary_rpc_method_handler(
servicer.LockTables,
request_deserializer=tabletmanagerdata__pb2.LockTablesRequest.FromString,
response_serializer=tabletmanagerdata__pb2.LockTablesResponse.SerializeToString,
),
'UnlockTables': grpc.unary_unary_rpc_method_handler(
servicer.UnlockTables,
request_deserializer=tabletmanagerdata__pb2.UnlockTablesRequest.FromString,
response_serializer=tabletmanagerdata__pb2.UnlockTablesResponse.SerializeToString,
),
'ExecuteFetchAsDba': grpc.unary_unary_rpc_method_handler(
servicer.ExecuteFetchAsDba,
request_deserializer=tabletmanagerdata__pb2.ExecuteFetchAsDbaRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ExecuteFetchAsDbaResponse.SerializeToString,
),
'ExecuteFetchAsAllPrivs': grpc.unary_unary_rpc_method_handler(
servicer.ExecuteFetchAsAllPrivs,
request_deserializer=tabletmanagerdata__pb2.ExecuteFetchAsAllPrivsRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ExecuteFetchAsAllPrivsResponse.SerializeToString,
),
'ExecuteFetchAsApp': grpc.unary_unary_rpc_method_handler(
servicer.ExecuteFetchAsApp,
request_deserializer=tabletmanagerdata__pb2.ExecuteFetchAsAppRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ExecuteFetchAsAppResponse.SerializeToString,
),
'SlaveStatus': grpc.unary_unary_rpc_method_handler(
servicer.SlaveStatus,
request_deserializer=tabletmanagerdata__pb2.SlaveStatusRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SlaveStatusResponse.SerializeToString,
),
'MasterPosition': grpc.unary_unary_rpc_method_handler(
servicer.MasterPosition,
request_deserializer=tabletmanagerdata__pb2.MasterPositionRequest.FromString,
response_serializer=tabletmanagerdata__pb2.MasterPositionResponse.SerializeToString,
),
'StopSlave': grpc.unary_unary_rpc_method_handler(
servicer.StopSlave,
request_deserializer=tabletmanagerdata__pb2.StopSlaveRequest.FromString,
response_serializer=tabletmanagerdata__pb2.StopSlaveResponse.SerializeToString,
),
'StopSlaveMinimum': grpc.unary_unary_rpc_method_handler(
servicer.StopSlaveMinimum,
request_deserializer=tabletmanagerdata__pb2.StopSlaveMinimumRequest.FromString,
response_serializer=tabletmanagerdata__pb2.StopSlaveMinimumResponse.SerializeToString,
),
'StartSlave': grpc.unary_unary_rpc_method_handler(
servicer.StartSlave,
request_deserializer=tabletmanagerdata__pb2.StartSlaveRequest.FromString,
response_serializer=tabletmanagerdata__pb2.StartSlaveResponse.SerializeToString,
),
'StartSlaveUntilAfter': grpc.unary_unary_rpc_method_handler(
servicer.StartSlaveUntilAfter,
request_deserializer=tabletmanagerdata__pb2.StartSlaveUntilAfterRequest.FromString,
response_serializer=tabletmanagerdata__pb2.StartSlaveUntilAfterResponse.SerializeToString,
),
'TabletExternallyReparented': grpc.unary_unary_rpc_method_handler(
servicer.TabletExternallyReparented,
request_deserializer=tabletmanagerdata__pb2.TabletExternallyReparentedRequest.FromString,
response_serializer=tabletmanagerdata__pb2.TabletExternallyReparentedResponse.SerializeToString,
),
'TabletExternallyElected': grpc.unary_unary_rpc_method_handler(
servicer.TabletExternallyElected,
request_deserializer=tabletmanagerdata__pb2.TabletExternallyElectedRequest.FromString,
response_serializer=tabletmanagerdata__pb2.TabletExternallyElectedResponse.SerializeToString,
),
'GetSlaves': grpc.unary_unary_rpc_method_handler(
servicer.GetSlaves,
request_deserializer=tabletmanagerdata__pb2.GetSlavesRequest.FromString,
response_serializer=tabletmanagerdata__pb2.GetSlavesResponse.SerializeToString,
),
'VReplicationExec': grpc.unary_unary_rpc_method_handler(
servicer.VReplicationExec,
request_deserializer=tabletmanagerdata__pb2.VReplicationExecRequest.FromString,
response_serializer=tabletmanagerdata__pb2.VReplicationExecResponse.SerializeToString,
),
'VReplicationWaitForPos': grpc.unary_unary_rpc_method_handler(
servicer.VReplicationWaitForPos,
request_deserializer=tabletmanagerdata__pb2.VReplicationWaitForPosRequest.FromString,
response_serializer=tabletmanagerdata__pb2.VReplicationWaitForPosResponse.SerializeToString,
),
'ResetReplication': grpc.unary_unary_rpc_method_handler(
servicer.ResetReplication,
request_deserializer=tabletmanagerdata__pb2.ResetReplicationRequest.FromString,
response_serializer=tabletmanagerdata__pb2.ResetReplicationResponse.SerializeToString,
),
'InitMaster': grpc.unary_unary_rpc_method_handler(
servicer.InitMaster,
request_deserializer=tabletmanagerdata__pb2.InitMasterRequest.FromString,
response_serializer=tabletmanagerdata__pb2.InitMasterResponse.SerializeToString,
),
'PopulateReparentJournal': grpc.unary_unary_rpc_method_handler(
servicer.PopulateReparentJournal,
request_deserializer=tabletmanagerdata__pb2.PopulateReparentJournalRequest.FromString,
response_serializer=tabletmanagerdata__pb2.PopulateReparentJournalResponse.SerializeToString,
),
'InitSlave': grpc.unary_unary_rpc_method_handler(
servicer.InitSlave,
request_deserializer=tabletmanagerdata__pb2.InitSlaveRequest.FromString,
response_serializer=tabletmanagerdata__pb2.InitSlaveResponse.SerializeToString,
),
'DemoteMaster': grpc.unary_unary_rpc_method_handler(
servicer.DemoteMaster,
request_deserializer=tabletmanagerdata__pb2.DemoteMasterRequest.FromString,
response_serializer=tabletmanagerdata__pb2.DemoteMasterResponse.SerializeToString,
),
'PromoteSlaveWhenCaughtUp': grpc.unary_unary_rpc_method_handler(
servicer.PromoteSlaveWhenCaughtUp,
request_deserializer=tabletmanagerdata__pb2.PromoteSlaveWhenCaughtUpRequest.FromString,
response_serializer=tabletmanagerdata__pb2.PromoteSlaveWhenCaughtUpResponse.SerializeToString,
),
'SlaveWasPromoted': grpc.unary_unary_rpc_method_handler(
servicer.SlaveWasPromoted,
request_deserializer=tabletmanagerdata__pb2.SlaveWasPromotedRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SlaveWasPromotedResponse.SerializeToString,
),
'SetMaster': grpc.unary_unary_rpc_method_handler(
servicer.SetMaster,
request_deserializer=tabletmanagerdata__pb2.SetMasterRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SetMasterResponse.SerializeToString,
),
'SlaveWasRestarted': grpc.unary_unary_rpc_method_handler(
servicer.SlaveWasRestarted,
request_deserializer=tabletmanagerdata__pb2.SlaveWasRestartedRequest.FromString,
response_serializer=tabletmanagerdata__pb2.SlaveWasRestartedResponse.SerializeToString,
),
'StopReplicationAndGetStatus': grpc.unary_unary_rpc_method_handler(
servicer.StopReplicationAndGetStatus,
request_deserializer=tabletmanagerdata__pb2.StopReplicationAndGetStatusRequest.FromString,
response_serializer=tabletmanagerdata__pb2.StopReplicationAndGetStatusResponse.SerializeToString,
),
'PromoteSlave': grpc.unary_unary_rpc_method_handler(
servicer.PromoteSlave,
request_deserializer=tabletmanagerdata__pb2.PromoteSlaveRequest.FromString,
response_serializer=tabletmanagerdata__pb2.PromoteSlaveResponse.SerializeToString,
),
'Backup': grpc.unary_stream_rpc_method_handler(
servicer.Backup,
request_deserializer=tabletmanagerdata__pb2.BackupRequest.FromString,
response_serializer=tabletmanagerdata__pb2.BackupResponse.SerializeToString,
),
'RestoreFromBackup': grpc.unary_stream_rpc_method_handler(
servicer.RestoreFromBackup,
request_deserializer=tabletmanagerdata__pb2.RestoreFromBackupRequest.FromString,
response_serializer=tabletmanagerdata__pb2.RestoreFromBackupResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'tabletmanagerservice.TabletManager', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
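# Usage sketch (an illustration, not part of the generated bindings): this
# assumes a vttablet serving gRPC on localhost:15991 (hypothetical address)
# and that PingRequest/PingResponse carry a 'payload' field, as the Ping
# docstring suggests.
if __name__ == '__main__':
    channel = grpc.insecure_channel('localhost:15991')
    stub = TabletManagerStub(channel)
    reply = stub.Ping(tabletmanagerdata__pb2.PingRequest(payload='ping'))
    print(reply.payload)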
|
"""
You have a set of integers s, which originally contains all the numbers from 1 to n. Unfortunately, due to some error, one of the numbers in s got duplicated to another number in the set, which results in repetition of one number and loss of another number.
You are given an integer array nums representing the data status of this set after the error.
Find the number that occurs twice and the number that is missing and return them in the form of an array.
Example 1:
Input: nums = [1,2,2,4]
Output: [2,3]
Example 2:
Input: nums = [1,1]
Output: [1,2]
"""
from typing import List
class Solution:
def findErrorNums(self, nums: List[int]) -> List[int]:
reference = range(1, len(nums) + 1)
dedup = set(nums)
return [sum(nums) - sum(dedup), sum(reference) - sum(dedup)]
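if __name__ == '__main__':
    # Sanity checks mirroring the two examples in the docstring above.
    assert Solution().findErrorNums([1, 2, 2, 4]) == [2, 3]
    assert Solution().findErrorNums([1, 1]) == [1, 2]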
|
import requests
import pygame
import os
import math
x, y = 0, 0
w, h = 0, 0
def_l = 'map'
def_spn = '0.01,0.01'
def_ll = '30.25,59.947176'
def_pt = '30.25,59.947176'
lst = ['map', 'sat', 'skl']
n = 0
address = ''
size = 0.01
mp = False
index_status = False
index = ' '
sz_lst = [0.01, 0.1, 0.9, 1.8]
size_lst = [[0.013, 0.00545], [0.21, 0.0865], [1.65, 0.685], [3.3, 1.37]]
mashtab_lst = [0.002, 0.025, 0.1, 0.2]
s = 0
dr = False
def draw_button_reset(screen, text):
global box_reset
font1 = pygame.font.Font(None, 50)
text = font1.render(text, 1, (0, 0, 255))
text_x = 10
text_y = 10
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
pygame.draw.rect(screen, (0, 0, 255), (text_x - 10, text_y - 10,
text_w + 20, text_h + 20), 1)
box_reset = pygame.Rect(text_x - 10, text_y - 10,
text_w + 20, text_h + 20)
def draw_text_find(screen, text):
global input_box2
font1 = pygame.font.Font(None, 50)
text = font1.render(text, 1, (0, 0, 255))
text_x = 600 - text.get_width() - 10 - 30
text_y = 450 - text.get_height() - 10
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
pygame.draw.rect(screen, (0, 0, 255), (text_x - 10, text_y - 10,
text_w + 20, text_h + 20), 1)
input_box2 = pygame.Rect(text_x - 10, text_y - 10,
text_w + 20, text_h + 20)
def draw_button_switch(screen, text):
global input_box3
font1 = pygame.font.Font(None, 50)
text = font1.render(text, 1, (0, 0, 255))
text_x = 10
text_y = 450 - text.get_height() - 10
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
pygame.draw.rect(screen, (0, 0, 255), (text_x - 10, text_y - 10,
text_w + 20, text_h + 20), 1)
input_box3 = pygame.Rect(text_x - 10, text_y - 10,
text_w + 20, text_h + 20)
def address_text(screen, text):
font1 = pygame.font.Font(None, 15)
text = font1.render(text, 1, (0, 0, 255))
text_x = 100
text_y = 10
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
def draw_text_for_index_box(screen, text):
font1 = pygame.font.Font(None, 18)
text = font1.render(text, 1, (0, 0, 255))
text_x = 325
text_y = 410
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
def draw_box_index(screen):
global index_status, box_index
x, y, w, h = 300, 400, 20, 20
if index_status:
pygame.draw.rect(screen, (0, 0, 255), (x, y, w, h))
else:
pygame.draw.rect(screen, (0, 0, 255), (x, y, w, h), 3)
box_index = pygame.Rect(x, y, w, h)
def draw_button_plus(screen, text):
global box_plus
font1 = pygame.font.Font(None, 50)
text = font1.render(text, 1, (0, 0, 255))
text_x = 600 - text.get_width() - 10
text_y = 10
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
pygame.draw.rect(screen, (0, 0, 255), (text_x - 10, text_y - 10,
text_w + 20, text_h + 20), 1)
box_plus = pygame.Rect(text_x - 10, text_y - 10,
text_w + 20, text_h + 20)
def draw_button_minus(screen, text):
global box_minus
font1 = pygame.font.Font(None, 50)
text = font1.render(text, 1, (0, 0, 255))
text_x = 600 - text.get_width() - 15
text_y = 10 + text.get_height() + 20
text_w = text.get_width()
text_h = text.get_height()
screen.blit(text, (text_x, text_y))
pygame.draw.rect(screen, (0, 0, 255), (560, text_y - 10,
40, text_h + 20), 1)
box_minus = pygame.Rect(560, text_y - 10,
40, text_h + 20)
def get_request(l=def_l, spn=def_spn, ll=def_ll, pt=def_pt):
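# Yandex Static Maps parameters: l = layer ('map', 'sat' or 'skl'),
# spn = visible span in degrees (lon,lat), ll = map center (lon,lat),
# pt = marker position plus a style suffix ('pm2bll' below).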
global def_ll, def_spn, def_l, def_pt
server = 'https://static-maps.yandex.ru/1.x/'
params = {
'l': l,
'spn': spn,
'll': ll,
'pt': f'{pt},pm2bll'
}
response = requests.get(server, params=params)
def_spn = spn
def_ll = ll
return response
def write_image():
global response, map_file
with open(map_file, "wb") as file:
file.write(response.content)
def draw_image():
global screen, mp
if mp:
screen.blit(pygame.image.load(map_file), (0, 0))
pygame.display.flip()
def draw_text_input():
global input_box, color, text
font = pygame.font.Font(None, 32)
txt_surface = font.render(text, True, color)
width = 300  # max(200, 300) always yields 300, so use the fixed width directly
input_box.w = width
screen.blit(txt_surface, (input_box.x + 5, input_box.y + 5))
pygame.draw.rect(screen, color, input_box, 2)
pygame.display.flip()
def draw():
global screen, lst, n, address
screen.fill((0, 0, 0))
draw_image()
draw_text_find(screen, 'Искать')
draw_button_switch(screen, lst[n])
draw_button_plus(screen, '+')
draw_button_minus(screen, '-')
draw_button_reset(screen, 'reset')
address_text(screen, address)
draw_box_index(screen)
draw_text_for_index_box(screen, 'Индекс')
draw_text_input()
def search_coord(text):
global address, index, index_status
if text != '':
server = 'https://geocode-maps.yandex.ru/1.x/'
params = {
"apikey": "40d1649f-0493-4b70-98ba-98533de7710b",
'geocode': text,
'format': 'json'
}
resp = requests.get(server, params=params).json()
if resp['response']['GeoObjectCollection']['featureMember'] != []:
address_ll = resp['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']['Point']['pos']
address = resp['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']['metaDataProperty']['GeocoderMetaData']['Address']['formatted']
if 'postal_code' in resp['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']['metaDataProperty']['GeocoderMetaData']['Address']:
index = resp['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']['metaDataProperty']['GeocoderMetaData']['Address']['postal_code']
if index_status:
address += ' ' + index
address_ll = ','.join(i for i in address_ll.split(' '))
return address_ll
return 0
return 0
def lonlat_distance(a, b):
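# Equirectangular approximation: one degree of latitude is ~111 km, and
# east-west degrees shrink by cos(latitude); good enough for the short
# distances (tens of meters) this script compares against.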
degree_to_meters_factor = 111 * 1000
a_lon, a_lat = a
b_lon, b_lat = b
radians_latitude = math.radians((a_lat + b_lat) / 2.)
lat_lon_factor = math.cos(radians_latitude)
dx = abs(a_lon - b_lon) * degree_to_meters_factor * lat_lon_factor
dy = abs(a_lat - b_lat) * degree_to_meters_factor
distance = math.sqrt(dx * dx + dy * dy)
return distance
def get_organization():
global def_pt, address, mp, dr  # mp and dr are reassigned below, so they must be declared global
server = 'https://geocode-maps.yandex.ru/1.x/'
params = {
"apikey": "40d1649f-0493-4b70-98ba-98533de7710b",
'geocode': def_pt,
'format': 'json'
}
res1 = requests.get(server, params=params).json()
ad = res1['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']['metaDataProperty']['GeocoderMetaData']['Address']['formatted']
server = 'https://search-maps.yandex.ru/v1/'
api_key = "dda3ddba-c9ea-4ead-9010-f43fbc15c6e3"
params = {
"apikey": api_key,
"text": ad,
"lang": "ru_RU",
'type': 'biz'
}
res2 = requests.get(server, params=params).json()
org = res2["features"][0]
ps = org["geometry"]["coordinates"]
if lonlat_distance([float(i) for i in def_pt.split(',')], ps) <= 50:
address = org["properties"]["CompanyMetaData"]["name"]
mp = False
dr = False
write_image()
else:
mp = False
address = ''
dr = False
write_image()
response = get_request()
map_file = "map.png"
write_image()
pygame.init()
screen = pygame.display.set_mode((600, 450))
clock = pygame.time.Clock()
input_box = pygame.Rect(300, 418 - 55, 240, 32)
# Placeholder rects (real positions are set by draw()); using Rects from the
# start keeps collidepoint() safe even before the first draw.
input_box2 = pygame.Rect(0, 0, 0, 0)
input_box3 = pygame.Rect(0, 0, 0, 0)
box_plus = pygame.Rect(0, 0, 0, 0)
box_minus = pygame.Rect(0, 0, 0, 0)
box_reset = pygame.Rect(0, 0, 0, 0)
box_index = pygame.Rect(0, 0, 0, 0)
color_inactive = pygame.Color('lightskyblue3')
color_active = pygame.Color('dodgerblue2')
color = color_inactive
active = False
text = ''
t = True
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 3:
t = True
get_organization()
if event.button == 1:
t = True
if box_index.collidepoint(event.pos):
index_status = not index_status
search_coord(address)
elif box_plus.collidepoint(event.pos) and s > 0:
s -= 1
def_spn = str(sz_lst[s]) + ',' + str(sz_lst[s])
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
elif box_minus.collidepoint(event.pos) and s < 3:
s += 1
def_spn = str(sz_lst[s]) + ',' + str(sz_lst[s])
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
elif input_box2.collidepoint(event.pos):
ll = search_coord(text)
if ll != 0:
dr = True
mp = True
t = True
def_pt = ll
def_ll = ll
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
text = ''
elif input_box3.collidepoint(event.pos):
n += 1
if n > 2:
n = 0
def_l = lst[n]
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
elif box_reset.collidepoint(event.pos):
mp = False
address = ''
dr = False
write_image()
elif not input_box.collidepoint(event.pos) and not box_minus.collidepoint(event.pos) and not box_plus.collidepoint(event.pos):
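# Map a window click to geographic coordinates: the 600x450 window is centered
# on def_ll, and size_lst[s] holds the visible half-width and half-height in
# degrees for zoom level s (empirically tuned values).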
xr = size_lst[s][0] / 300
yr = size_lst[s][1] / 225
xx, yy = event.pos
xx, yy = xx * xr, yy * yr
def_ll = [float(i) for i in def_ll.split(',')]
def_pt = def_ll[0] - size_lst[s][0] + xx, def_ll[1] + size_lst[s][1] - yy
def_pt = ','.join(str(i) for i in def_pt)
def_ll = ','.join(str(i) for i in def_ll)
if dr:
search_coord(def_pt)
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
if input_box.collidepoint(event.pos):
# Toggle the active variable.
active = not active
else:
active = False
# Change the current color of the input box.
color = color_active if active else color_inactive
if event.type == pygame.KEYDOWN:
if event.key in [pygame.K_UP, pygame.K_DOWN, pygame.K_LEFT, pygame.K_RIGHT]:
pos = [float(i) for i in def_ll.split(',')]
if event.key == pygame.K_UP:
pos[1] += mashtab_lst[s]
def_ll = ','.join(str(i) for i in pos)
t = True
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
if event.key == pygame.K_DOWN:
pos[1] -= mashtab_lst[s]
def_ll = ','.join(str(i) for i in pos)
t = True
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
if event.key == pygame.K_LEFT:
pos[0] -= mashtab_lst[s]
def_ll = ','.join(str(i) for i in pos)
t = True
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
if event.key == pygame.K_RIGHT:
pos[0] += mashtab_lst[s]
def_ll = ','.join(str(i) for i in pos)
t = True
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
elif active:
if event.key == pygame.K_RETURN:
t = True
dr = True
ll = search_coord(text)
if ll != 0:
mp = True
def_pt = ll
def_ll = ll
response = get_request(ll=def_ll, pt=def_pt, l=def_l, spn=def_spn)
write_image()
text = ''
elif event.key == pygame.K_BACKSPACE:
t = True
text = text[:-1]
else:
t = True
text += event.unicode
if t:
draw()
t = False
pygame.quit()
# Clean up the map image file on exit.
os.remove(map_file)
|
# Dependencies
import requests as req
# Performing a GET Request and saving the API's response within a variable
url = "https://api.spacexdata.com/v2/rockets/falcon9"
response = req.get(url)
response_json = response.json()
# It is possible to grab a specific value from within the JSON object
print(response_json["cost_per_launch"])
# It is also possible to perform some analyses on values stored within the JSON object
print("There are " + str((len(response_json["payload_weights"]))) + " payloads.")
# Finally, it is possible to reference the values stored within sub-dictionaries and sub-lists
print("The first payload weighed " + str(response_json["payload_weights"][0]["kg"]) + " Kilograms")
|
__author__ = 'ou3'
from foqus_lib.gui.common.InputPriorTable import InputPriorTable
class InferenceInputsTable(InputPriorTable):
def __init__(self, parent=None):
super(InferenceInputsTable, self).__init__(parent)
self.typeItems = ['Variable', 'Fixed', 'Design', 'Uncertain']
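# Note (assumption): typeItems appears to populate the per-input type dropdown
# of the base InputPriorTable; this subclass narrows the choices to the four
# categories relevant for inference inputs.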
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 1 15:55:42 2018
@author: andr
"""
import os
import numpy as np
add_period_list = list(range(1000))
pdot_list = [i/100 for i in range(-400, 400)]
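# Grid search: for each trial period offset and period-derivative value
# (written into the .par file as TEMPO's F1 parameter, in units of 1e-15),
# patch the parameter file, rerun TEMPO, and record the standard deviation
# of the timing residuals.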
for add in add_period_list:
for pdot in pdot_list:
with open('0943_fk_start.par', 'r') as file:
lines = file.readlines()
start_period = lines[5][:-1]
lines[5] = start_period + str(add) + ' 1' + '\n'
lines[8] = 'F1 ' + str(pdot) + 'D-15 1' + '\n'
with open('0943_fk.par', 'w') as file:
for line in lines:
file.write(line)
os.system("tempo 0943_fk.tim")
os.system("~/work/tempo/util/print_resid/./print_resid -mre > resid.ascii")
data = np.genfromtxt("resid.ascii").T
with open('res_p_pdot.txt', 'a') as file:
file.write(start_period[11:] + str(add) + ' ')
file.write(str(pdot) + 'D-15' + ' ')
file.write(str(np.std(data[1])))
file.write('\n')
|
import pytest
from asn1PERser.codec.per.encoder import encode as per_encoder
from asn1PERser.codec.per.encoder import SizeConstrainViolated, InvalidComponentIndexIntoStructuredType
from asn1PERser.classes.data.builtin.SequenceOfType import SequenceOfType
from asn1PERser.classes.data.builtin.IntegerType import IntegerType
from asn1PERser.classes.types.constraint import SequenceOfValueSize
I0_VAL = 34567
I1_VAL = -845
I2_VAL = 23
I3_VAL = 200000
I4_VAL = 5
def SCHEMA_no_constrains_sequence_of():
class MySeqOf(SequenceOfType):
'''
MySeqOf::= SEQUENCE OF INTEGER
'''
componentType = IntegerType()
return MySeqOf()
def SCHEMA_constrained_seq_of_no_extension(lb, ub):
class MySeqOf(SequenceOfType):
'''
MySeqOf::= SEQUENCE (SIZE(lowerEndpoint..upperEndpoint)) OF INTEGER
'''
subtypeSpec = SequenceOfValueSize(lb, ub)
componentType = IntegerType()
return MySeqOf()
def SCHEMA_constrained_seq_of_extension_present(lb, ub):
class MySeqOf(SequenceOfType):
'''
MySeqOf::= SEQUENCE (SIZE(lowerEndpoint..upperEndpoint,...)) OF INTEGER
'''
subtypeSpec = SequenceOfValueSize(lb, ub, extensionMarker=True)
componentType = IntegerType()
return MySeqOf()
def DATA_seq_of(schema_seq_of, i0_is, i1_is, i2_is, i3_is, i4_is):
seq_of = schema_seq_of
if i0_is:
seq_of.extend([IntegerType(I0_VAL)])
if i1_is:
seq_of.extend([IntegerType(I1_VAL)])
if i2_is:
seq_of.extend([IntegerType(I2_VAL)])
if i3_is:
seq_of.extend([IntegerType(I3_VAL)])
if i4_is:
seq_of.extend([IntegerType(I4_VAL)])
return seq_of
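# Reading the expected hex in the unconstrained cases below: aligned PER
# prefixes the SEQUENCE OF with a one-byte length determinant ('05' = five
# elements), then encodes each INTEGER as a length-prefixed two's-complement
# value: 34567 -> '03 008707', -845 -> '02 FCB3', 23 -> '01 17',
# 200000 -> '03 030D40', 5 -> '01 05'.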
@pytest.mark.parametrize("sequence_of, encoded", [
(DATA_seq_of(SCHEMA_no_constrains_sequence_of(), i0_is=False, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'00'),
(DATA_seq_of(SCHEMA_no_constrains_sequence_of(), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'0103008707'),
(DATA_seq_of(SCHEMA_no_constrains_sequence_of(), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'020300870702FCB3'),
(DATA_seq_of(SCHEMA_no_constrains_sequence_of(), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
'030300870702FCB30117'),
(DATA_seq_of(SCHEMA_no_constrains_sequence_of(), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=False),
'040300870702FCB3011703030D40'),
(DATA_seq_of(SCHEMA_no_constrains_sequence_of(), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'050300870702FCB3011703030D400105'),
])
def test_no_constrains_sequence_of_integer_can_be_encoded(sequence_of, encoded):
assert per_encoder(sequence_of) == bytearray.fromhex(encoded)
@pytest.mark.parametrize("sequence_of, encoded", [
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=0, ub=2), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'4003008707'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=0, ub=2), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'800300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'000300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
'400300870702FCB30117'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=False),
'800300870702FCB3011703030D40'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'C00300870702FCB3011703030D400105'),
])
def test_constrained_sequence_of_no_extension_can_be_encoded(sequence_of, encoded):
assert per_encoder(sequence_of) == bytearray.fromhex(encoded)
@pytest.mark.parametrize("sequence_of, encoded", [
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=1, ub=1), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'03008707'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=2, ub=2), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'0300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=3, ub=3), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
'0300870702FCB30117'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=4, ub=4), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=False),
'0300870702FCB3011703030D40'),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=5, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'0300870702FCB3011703030D400105'),
])
def test_constrained_sequence_of_of_fixed_length_no_extension_can_be_encoded(sequence_of, encoded):
assert per_encoder(sequence_of) == bytearray.fromhex(encoded)
@pytest.mark.parametrize("sequence_of, encoded", [
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=0, ub=2), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'2003008707'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=0, ub=2), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'400300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'000300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
'200300870702FCB30117'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=False),
'400300870702FCB3011703030D40'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=2, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'600300870702FCB3011703030D400105'),
])
def test_constrained_sequence_of_with_extension_and_num_of_elems_is_within_extension_root_can_be_encoded(sequence_of, encoded):
assert per_encoder(sequence_of) == bytearray.fromhex(encoded)
@pytest.mark.parametrize("sequence_of, encoded", [
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=0, ub=0), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'800103008707'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=0, ub=0), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'80020300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=0, ub=0), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
'80030300870702FCB30117'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=1, ub=1), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
'80020300870702FCB3'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=1, ub=1), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'80050300870702FCB3011703030D400105'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=1, ub=3), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=False),
'80040300870702FCB3011703030D40'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=1, ub=3), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'80050300870702FCB3011703030D400105'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=3, ub=4), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
'80050300870702FCB3011703030D400105'),
(DATA_seq_of(SCHEMA_constrained_seq_of_extension_present(lb=3, ub=4), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
'800103008707'),
])
def test_constrained_sequence_of_with_extension_and_num_of_elems_is_not_within_extension_root_can_be_encoded(sequence_of, encoded):
assert per_encoder(sequence_of) == bytearray.fromhex(encoded)
@pytest.mark.parametrize("sequence_of, exception", [
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=4, ub=5), i0_is=False, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
pytest.raises(SizeConstrainViolated)),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=4, ub=5), i0_is=True, i1_is=False, i2_is=False, i3_is=False, i4_is=False),
pytest.raises(SizeConstrainViolated)),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=4, ub=5), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
pytest.raises(SizeConstrainViolated)),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=4, ub=5), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
pytest.raises(SizeConstrainViolated)),
])
def test_constrained_sequence_of_with_num_of_elems_less_than_lower_bound_and_no_extension_raises(sequence_of, exception):
with exception:
per_encoder(sequence_of)
@pytest.mark.parametrize("sequence_of, exception", [
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=1, ub=2), i0_is=True, i1_is=True, i2_is=True, i3_is=False, i4_is=False),
pytest.raises(InvalidComponentIndexIntoStructuredType)),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=1, ub=2), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=False),
pytest.raises(InvalidComponentIndexIntoStructuredType)),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=1, ub=2), i0_is=True, i1_is=True, i2_is=True, i3_is=True, i4_is=True),
pytest.raises(InvalidComponentIndexIntoStructuredType)),
(DATA_seq_of(SCHEMA_constrained_seq_of_no_extension(lb=1, ub=1), i0_is=True, i1_is=True, i2_is=False, i3_is=False, i4_is=False),
pytest.raises(InvalidComponentIndexIntoStructuredType)),
])
def test_constrained_sequence_of_with_num_of_elems_greater_than_upper_bound_and_no_extension_raises(sequence_of, exception):
with exception:
per_encoder(sequence_of)
|
#system lib
from time import time
import fileinput
import os
#hash lib
import hashlib as hasher
## encryption libs
from Crypto.Hash import SHA256, SHA, SHA512
from Crypto.PublicKey import RSA
import Crypto.Cipher.PKCS1_v1_5
import Crypto.Random
import Crypto.Signature.PKCS1_v1_5
import Crypto.Util.number
import rsa
## project lib
import get_key
import my_log as log
## windows
import win32api,win32con
### global variables
file_need_encrypt_test = "./test_file/test1.txt"
blockchain_file = "./blockchain.txt"
encrypt_step = 20
'''
Todo:
### history data
record any history
'''
## get mac id
def get_id_of_computer():
import uuid
node = uuid.getnode()
mac = uuid.UUID(int = node).hex[-12:]
return mac
# class define, for generate block unit
class MyBlock:
    def __init__(self, index, timestamp, id, previous_hash = "0"):
        self.index = index
        self.timestamp = timestamp
        self.id = id
        self.previous_hash = previous_hash # previous block's hash value
        need_hash = (str(index) + str(timestamp) + str(id) + str(previous_hash)).encode("utf-8")
        self.hash = self.hash_block(need_hash) # current block's hash value
@staticmethod
def hash_block(need_hash):
h = SHA256.new()
h.update(need_hash)
return h.hexdigest()
# Manually construct a block with
# index zero and arbitrary previous hash
def create_genesis_block(id):
return MyBlock(0, time(), get_id_of_computer(), id)
## get next block by last block
def next_block(last_block, mac_id):
this_index = str(int(last_block.index) + 1)
this_timestamp = time()
this_id = mac_id
this_hash = last_block.hash
return MyBlock(this_index, this_timestamp, this_id, this_hash)
def block_member_print(block):
log.log_raw("index, timestamp, id, previous_hash, hash==========>")
log.log_raw(block.index)
log.log_raw(block.timestamp)
log.log_raw(block.id)
log.log_raw(block.previous_hash)
log.log_raw(block.hash)
log.log_raw("<==========index, timestamp, id, previous_hash, hash")
def block_chain_check(blockchain, block_size):
i = 0
if block_size == 1:
return True
for block in blockchain:
if(block.hash != blockchain[i + 1].previous_hash):
log.log_out("check fail")
return False
if i >= (block_size - 2):
break
i = i + 1
log.debug("check pass")
return True
def get_hash_length():
    block = MyBlock(0, time(), get_id_of_computer())
    return len(block.hash)
def read_and_print_file():
    hash_len = get_hash_length()
#hash_len = 10
total_len = 0
    with open(file_need_encrypt_test, 'rb') as f:
        log.debug("file open success:" + file_need_encrypt_test)
        #with open(file_need_encrypt, 'rb') as f:
words = f.read(10)
        while True:
log.log_str(words)
if (len(words) == 10):
total_len = total_len + 10
words = f.read(10)
continue
else:
total_len = total_len + len(words)
log.debug("len :" + str(len(words)))
log.debug("total len:" + str(total_len))
break
def check_mac_id_in_blockchain(blockchain ,mac_id):
i = 0
for block in blockchain:
log.debug("id mac_id")
log.log_raw(blockchain[i].id)
log.log_raw(mac_id)
if blockchain[i].id == mac_id:
log.debug("check %d times" %i)
return True
i += 1
return False
def save_blockchain(blockchain, block_size):
i = 0
with open(blockchain_file, 'w') as f:
f.write(str(block_size) + '\n')
for block in blockchain:
if i == 0:
## add one blank line
f.write(str(block.previous_hash) + '\n' + '\n')
f.write(str(block.index) + '\n')
f.write(str(block.timestamp) + '\n')
f.write(str(block.id) + '\n')
f.write(str(block.hash) + '\n')
else:
f.write(str(block.index) + '\n')
f.write(str(block.timestamp) + '\n')
f.write(str(block.id) + '\n')
f.write(str(block.previous_hash) + '\n')
f.write(str(block.hash) + '\n')
i = i + 1
def get_blockchain(blockchain_file):
blockchain = []
    if not os.path.exists(blockchain_file):
        log.debug("blockchain file does not exist, exit")
        return False
with open(blockchain_file, 'r') as f:
blockchain_size = f.readline()
priv = ''
for i in range(0, int(blockchain_size)):
block = create_genesis_block("0")
if i == 0:
while True:
current_line = f.readline()
if current_line != "\n":
priv = priv + current_line
else:
break
block.previous_hash = priv
block.index = f.readline().split("\n")[0]
block.timestamp = f.readline().split("\n")[0]
block.id = f.readline().split("\n")[0]
block.hash = f.readline().split("\n")[0]
else:
block.index = f.readline().split("\n")[0]
block.timestamp = f.readline().split("\n")[0]
block.id = f.readline().split("\n")[0]
block.previous_hash = f.readline().split("\n")[0]
block.hash = f.readline().split("\n")[0]
blockchain.append(block)
return blockchain
def get_private_key_from_block(blockchain):
key_raw = blockchain[0].previous_hash
key = RSA.importKey(key_raw)
return key
def get_max_length(rsa_key, encrypt=True):
    # PKCS#1 v1.5 reserves 11 bytes of padding per block when encrypting
    blocksize = Crypto.Util.number.size(rsa_key.n) // 8
    reserve_size = 11
    if not encrypt:
        reserve_size = 0
    maxlength = blocksize - reserve_size
    return maxlength
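# A minimal usage sketch of the block helpers above (illustrative only; the "0"
# passed to create_genesis_block stands in for a real key string):
if __name__ == "__main__":
    chain = [create_genesis_block("0")]
    for _ in range(3):
        chain.append(next_block(chain[-1], get_id_of_computer()))
    block_member_print(chain[-1])
    log.log_out("chain valid: " + str(block_chain_check(chain, len(chain))))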
|
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 22 00:12:32 2018
@author: nakul
"""
#Kmeans Clustering
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Mall_Customers.csv')
X = dataset.iloc[:,[3,4]].values
#Use elbow method to find optimal number of clusters
from sklearn.cluster import KMeans
wcss = []
for i in range(1,11):
kmeans = KMeans(n_clusters = i, init = 'k-means++', n_init = 10, max_iter = 300, random_state = 0)
kmeans.fit(X)
wcss.append(kmeans.inertia_)
plt.plot(range(1,11), wcss)
plt.title('The Elbow Method')
plt.xlabel('Number of Clusters')
plt.ylabel('WCSS')
plt.show()
#The elbow plot suggests 5 clusters
#Applying K-means with 5 clusters
kmeans = KMeans(n_clusters = 5, init = 'k-means++', n_init = 10, max_iter = 300, random_state = 0)
y_kmeans = kmeans.fit_predict(X)
#Visualizing the Result
plt.scatter(X[y_kmeans == 0, 0], X[y_kmeans == 0,1], s= 100, c = 'red', label = 'Cluster1')
plt.scatter(X[y_kmeans == 1, 0], X[y_kmeans == 1,1], s= 100, c = 'green', label = 'Cluster2')
plt.scatter(X[y_kmeans == 2, 0], X[y_kmeans == 2,1], s= 100, c = 'blue', label = 'Cluster3')
plt.scatter(X[y_kmeans == 3, 0], X[y_kmeans == 3,1], s= 100, c = 'orange', label = 'Cluster4')
plt.scatter(X[y_kmeans == 4, 0], X[y_kmeans == 4,1], s= 100, c = 'cyan', label = 'Cluster5')
plt.scatter(kmeans.cluster_centers_[:,0], kmeans.cluster_centers_[:,1], s = 600, c = 'yellow', label = 'centroid')
plt.title('Clusters of Customers')
plt.xlabel('Annual Income')
plt.ylabel('Spending Score')
plt.legend()
plt.show()
#Hierarchical Clustering
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Mall_Customers.csv')
X = dataset.iloc[:,[3,4]].values
#Use dendrogram to find the optimal number of clusters
import scipy.cluster.hierarchy as sch
dendrogram = sch.dendrogram(sch.linkage(X, method = 'ward'))
plt.title('Dendrogram')
plt.xlabel('Customers')
plt.ylabel('Euclidean Distances')
plt.show()
#Fitting hierarchical clustering to the dataset
from sklearn.cluster import AgglomerativeClustering
hc = AgglomerativeClustering(n_clusters = 5, affinity = 'euclidean', linkage = 'ward')
y_hc = hc.fit_predict(X)
plt.scatter(X[y_hc == 0, 0], X[y_hc == 0,1], s= 100, c = 'red', label = 'Cluster1')
plt.scatter(X[y_hc == 1, 0], X[y_hc == 1,1], s= 100, c = 'green', label = 'Cluster2')
plt.scatter(X[y_hc == 2, 0], X[y_hc == 2,1], s= 100, c = 'blue', label = 'Cluster3')
plt.scatter(X[y_hc == 3, 0], X[y_hc == 3,1], s= 100, c = 'orange', label = 'Cluster4')
plt.scatter(X[y_hc == 4, 0], X[y_hc == 4,1], s= 100, c = 'cyan', label = 'Cluster5')
plt.title('Clusters of Customers (Hierarchical)')
plt.xlabel('Annual Income')
plt.ylabel('Spending Score')
plt.legend()
plt.show()
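#An alternative check on the choice of k (a sketch, not part of the original analysis):
#the silhouette score peaks for the best-separated clustering
from sklearn.metrics import silhouette_score
for i in range(2, 11):
    labels = KMeans(n_clusters = i, init = 'k-means++', n_init = 10, random_state = 0).fit_predict(X)
    print(i, silhouette_score(X, labels))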
|
#slicing a list
players = ['delgado', 'abrams', 'beto', 'gillam']
print("slice 1 = ")
print(players[0:3]) #don't forget to use [], not ()
print("slice 2 = ")
print(players[2:4])
print("slice without a beginning = ") #starts at beginning of list
print(players[:4])
print("slice without an end = ")
print(players[2:])
print("slicing from a distance = ")
print(players[-2:]) #last two
print(players[:-2]) #first two
#looping through a slice
print("All of these people should have won the midterms: ")
for player in players[:4]:
print(player.title())
pokemon = ['pikachu','oddish','pidgey','rattata','clefairy','psyduck']
print("team 1 = ")
print(pokemon[0:3])
print("team 2 = ")
print(pokemon[-4:])
print("team 3 = ")
print(pokemon[:-2])
|
for i in range(3):
    inArr = list(map(int, input().split()))
    gcd = 1
    # scan downward from the smallest value; the first common divisor found is the GCD
    for j in range(min(inArr), 0, -1):
        if all(n % j == 0 for n in inArr):
            gcd = j
            break
    print(gcd)
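# The standard library computes the same result; an equivalent sketch:
# from functools import reduce
# from math import gcd
# print(reduce(gcd, [12, 18, 24]))  # -> 6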
|
a = True
temp = [int(x) for x in input().split()]
dmax = pow(temp[1],2) # squared maximum allowed distance
x = 0 # horizontal position
y = 0 # vertical position
for i in range(temp[0]):
    m = input().split() # movement for this line
    d = int(m[1]) # distance traveled
    if m[0] == "N":
        y += d
    elif m[0] == "S":
        y -= d
    elif m[0] == "L": # L = leste (east)
        x += d
    elif m[0] == "O": # O = oeste (west)
        x -= d
if pow(x,2)+pow(y,2) > dmax:
print(1)
a = False
break
if a:
print(0)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
from django.template import loader, RequestContext
from .models import BucketInfo
# Create your views here.
def buckets(request):
    return render(request, 'buckets/index.html', {'title':'Bucket List', 'bucket_list':BucketInfo.objects.all()})
def bucketDetails(request, id):
    images = BucketInfo.objects.get(id=id).imageinfo_set.all()
    return render(request, 'buckets/bucketDetail.html', {'title':'Bucket Contents', 'image_list':images})
|
import datetime
import factory
from django.db.models import signals
from elections.models import (
ElectedRole,
Election,
ElectionType,
ModerationHistory,
ModerationStatus,
ModerationStatuses,
)
from organisations.tests.factories import (
DivisionGeographyFactory,
OrganisationDivisionFactory,
OrganisationFactory,
)
class ElectionTypeFactory(factory.django.DjangoModelFactory):
class Meta:
model = ElectionType
django_get_or_create = ("election_type",)
name = "Local elections"
election_type = "local"
# default_voting_system
class ElectedRoleFactory(factory.django.DjangoModelFactory):
class Meta:
model = ElectedRole
django_get_or_create = ("election_type",)
election_type = factory.SubFactory(ElectionTypeFactory)
organisation = factory.SubFactory(OrganisationFactory)
elected_title = "Councillor"
elected_role_name = "Councillor"
@factory.django.mute_signals(signals.post_save)
class ElectionFactory(factory.django.DjangoModelFactory):
class Meta:
model = Election
django_get_or_create = ("election_id",)
@classmethod
def _get_manager(cls, model_class):
return model_class.private_objects
election_id = factory.Sequence(
lambda n: "local.place-name-%d.2017-03-23" % n
)
election_title = factory.Sequence(lambda n: "Election %d" % n)
election_type = factory.SubFactory(ElectionTypeFactory)
poll_open_date = "2017-03-23"
organisation = factory.SubFactory(OrganisationFactory)
elected_role = factory.SubFactory(ElectedRoleFactory)
division = factory.SubFactory(OrganisationDivisionFactory)
division_geography = factory.SubFactory(DivisionGeographyFactory)
organisation_geography = None
seats_contested = 1
seats_total = 1
group = factory.SubFactory(
"elections.tests.factories.ElectionFactory",
election_id="local.2017-03-23",
group=None,
group_type="election",
)
group_type = None
class ModerationStatusFactory(factory.django.DjangoModelFactory):
class Meta:
model = ModerationStatus
django_get_or_create = ("short_label",)
short_label = ModerationStatuses.approved.value
long_label = "long label"
class ModerationHistoryFactory(factory.django.DjangoModelFactory):
class Meta:
model = ModerationHistory
election = factory.SubFactory(ElectionFactory)
status = factory.SubFactory(ModerationStatusFactory)
    created = factory.LazyFunction(datetime.datetime.now)  # evaluated per instance, not once at import
    modified = factory.LazyFunction(datetime.datetime.now)
class ElectionWithStatusFactory(ElectionFactory):
moderation_status = factory.RelatedFactory(
ModerationHistoryFactory,
"election",
status__short_label=ModerationStatuses.approved.value,
)
def related_status(status):
return factory.RelatedFactory(
ModerationHistoryFactory,
"election",
status__short_label=ModerationStatuses(status.capitalize()).value,
)
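# A minimal usage sketch (assumes a Django test database, e.g. via pytest-django;
# the reverse-manager name below is Django's default and is an assumption):
#
#   election = ElectionWithStatusFactory()
#   assert election.moderationhistory_set.filter(
#       status__short_label=ModerationStatuses.approved.value
#   ).exists()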
|
from .base_sequence import Sequence
from .minibatch_sequence import MiniBatchSequence, SAGEMiniBatchSequence, FastGCNBatchSequence
from .fullbatch_sequence import FullBatchSequence
from .sample_sequence import SBVATSampleSequence
from .null_sequence import NullSequence
|
def minMinMax(arr):
arr = sorted(arr)
minMiss = arr[0]
for x in range(1,len(arr)):
if minMiss+x not in arr:
minMiss = minMiss+x
break
return [arr[0], minMiss, arr[-1]]
'''
Given an unsorted array of integers, find the smallest number in the array,
the largest number in the array, and the smallest number between the two
array bounds that is not in the array.
For instance, given the array [-1, 4, 5, -23, 24], the smallest number is -23,
the largest number is 24, and the smallest number between the array bounds
is -22. You may assume the input is well-formed.
Your solution should return an array [smallest, minimumAbsent, largest].
The smallest integer should be the integer from the array with the lowest value.
The largest integer should be the integer from the array with the highest value.
The minimumAbsent is the smallest number between the largest and the smallest
number that is not in the array.
minMinMax([-1, 4, 5, -23, 24]); //[-23, -22, 24]
minMinMax([1, 3, -3, -2, 8, -1]); //[-3, 0, 8]
minMinMax([2, -4, 8, -5, 9, 7]); //[-5, -3, 9]
'''
|
# Basic functions
def createSkeleton(name, id, health=20, damage=10):
    id = id + 1 # only the stored id is incremented; the caller's value is unchanged
return {
'name' : name,
'health' : health,
'damage': damage,
'id': id
}
def add(skeleton, army):
army.append(skeleton)
def find(id, army):
for unit in army:
if (unit['id'] == id): return unit
return 0
def delete(id, army):
for i in range(len(army)):
if (army[i]['id'] == id):
army.pop(i)
return 1
return 0
def sendToHero(skeleton, hero, army):
round = 1
while (skeleton['health'] > 0 and hero['health'] > 0):
print('Round ', round)
skeleton['health'] -= hero['damage']
hero['health'] -= skeleton['damage']
status = f"hero health - {hero['health']}\nSkeleton health - {skeleton['health']}"
print(status)
print('\n')
round += 1
print('Result:')
if(hero['health'] <= 0):
print('Hero defeated')
return 1
if(skeleton['health'] <= 0):
print(skeleton['name'], ' defeated')
delete(skeleton['id'], army)
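# A minimal usage sketch (names and stats below are illustrative):
if __name__ == '__main__':
    army = []
    add(createSkeleton('Bones', 0), army)
    hero = {'name': 'Hero', 'health': 50, 'damage': 15}
    sendToHero(army[0], hero, army)
    print('skeletons left:', len(army))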
|
from discord.ext import commands
import discord
import random
import time
import datetime
from pytz import timezone
import pytz
from random import randrange
class CommandsCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.bot.remove_command('help')
@commands.command(name="next")
async def next(self, ctx):
def format(d, e=False):
date_format='%H:%M'
return d.strftime(date_format)
def getDay(d):
date_format='%m/%d'
return d.strftime(date_format)
tz_pst = pytz.timezone('US/Pacific')
now = datetime.datetime.now(tz_pst).replace(tzinfo=None)
ev = {
"Hacking Starts":(datetime.datetime(2021, 1, 15, 0, 0),),
"Among Us Hype Night":(datetime.datetime(2021, 1, 15, 21, 0),datetime.datetime(2021, 1, 15, 23, 0)),
"Opening Ceremony, Team Mixer":(datetime.datetime(2021, 1, 16, 10, 0),datetime.datetime(2021, 1, 16, 11, 30)),
"Alon Grinshpoon - Intro to AR (Workshop)":(datetime.datetime(2021, 1, 16, 12, 0),datetime.datetime(2021, 1, 16, 13, 00)),
"Rohan Bansal - Electronics and the C Language (Workshop)":(datetime.datetime(2021, 1, 16, 14, 0),datetime.datetime(2021, 1, 16, 15, 0)),
"Chinmayi Balasu - Networking in STEM (Speaker)":(datetime.datetime(2021, 1, 16, 16, 0),datetime.datetime(2021, 1, 16, 17, 0)),
"Aldrin Brillante - Emoji Prediction (Workshop)":(datetime.datetime(2021, 1, 16, 18, 0),datetime.datetime(2021, 1, 16, 19, 0)),
"Ivy Xu - Journey to Entrepreneurship (Speaker)":(datetime.datetime(2021, 1, 16, 20, 0),datetime.datetime(2021, 1, 16, 21, 0)),
"Among Us Game Night":(datetime.datetime(2021, 1, 16, 21, 30),datetime.datetime(2021, 1, 17, 0, 0)),
"Mr. John Smale - Building a CS Resume (Speaker)":(datetime.datetime(2021, 1, 17, 10, 0),datetime.datetime(2021, 1, 17, 11, 00)),
"Mr. Chris Fairley - CAD with Fusion 360 (Workshop)":(datetime.datetime(2021, 1, 17, 14, 0),datetime.datetime(2021, 1, 17, 15, 00)),
"Steven Puri (Speaker)":(datetime.datetime(2021, 1, 17, 16, 0),datetime.datetime(2021, 1, 17, 17, 00)),
"Anshul Gupta - Intro to Web Dev (Workshop)":(datetime.datetime(2021, 1, 17, 18, 0),datetime.datetime(2021, 1, 17, 19, 00)),
"Minecraft Hunger Games":(datetime.datetime(2021, 1, 17, 20, 30),datetime.datetime(2021, 1, 17, 23, 0)),
"Hacking Ends":(datetime.datetime(2021, 1, 18, 8, 0),),
"Judging":(datetime.datetime(2021, 1, 18, 8, 0),datetime.datetime(2021, 1, 18, 10, 30)),
"Closing/Awards Ceremony":(datetime.datetime(2021, 1, 18, 11, 0),datetime.datetime(2021, 1, 18, 12, 0))}
for e,t in ev.items():
if len(t) == 2:
if t[0] < now and t[1] > now:
embed = discord.Embed(
title="Current Event", description="\n**" + getDay(t[0]) + " " + format(t[0])+"-"+format(t[1], True) + " | " + e + "**\n\nZoom link: https://thetahacks.tech/zoom", color=0x00ff9d)
await ctx.send(embed=embed)
return 0
l=-1
for e,t in ev.items():
l+=1
if t[0] > now:
c = list(ev.items())[l]
final="\n**"
if(len(c[1])==1):
final += getDay(c[1][0]) + " " + format(c[1][0], True) + " | " + c[0]
else:
final += getDay(c[1][0]) + " " + format(c[1][0])+"-"+format(c[1][1], True) + " | " + c[0]
final += "**\n\nZoom link: https://thetahacks.tech/zoom"
embed = discord.Embed(
title="Next Event", description=final, color=0x00ff9d)
await ctx.send(embed=embed)
return 0
embed = discord.Embed(
title="ThetaHacks Virtual has ended.", description="", color=0x00ff9d)
await ctx.send(embed=embed)
return 0
@commands.command(name="events")
async def events(self, ctx):
def format(d, e=False):
date_format='%H:%M'
return d.strftime(date_format)
def getDay(d):
return int(d.strftime("%d"))-15
ev = {"Hacking Starts":(datetime.datetime(2021, 1, 15, 0, 0),),
"Among Us Hype Night":(datetime.datetime(2021, 1, 15, 21, 0),datetime.datetime(2021, 1, 15, 23, 0)),
"Opening Ceremony, Team Mixer":(datetime.datetime(2021, 1, 16, 10, 0),datetime.datetime(2021, 1, 16, 11, 30)),
"Alon Grinshpoon - Intro to AR (Workshop)":(datetime.datetime(2021, 1, 16, 12, 0),datetime.datetime(2021, 1, 16, 13, 00)),
"Rohan Bansal - Electronics and the C Language (Workshop)":(datetime.datetime(2021, 1, 16, 14, 0),datetime.datetime(2021, 1, 16, 15, 00)),
"Chinmayi Balasu - Networking in STEM (Speaker)":(datetime.datetime(2021, 1, 16, 16, 0),datetime.datetime(2021, 1, 16, 17, 00)),
"Aldrin Brillante - Emoji Prediction (Workshop)":(datetime.datetime(2021, 1, 16, 18, 0),datetime.datetime(2021, 1, 16, 19, 00)),
"Ivy Xu - Journey to Entrepreneurship (Speaker)":(datetime.datetime(2021, 1, 16, 20, 0),datetime.datetime(2021, 1, 16, 21, 00)),
"Among Us Game Night":(datetime.datetime(2021, 1, 16, 21, 30),datetime.datetime(2021, 1, 17, 0, 0)),
"Mr. John Smale - Building a CS Resume (Speaker)":(datetime.datetime(2021, 1, 17, 10, 0),datetime.datetime(2021, 1, 17, 11, 00)),
"Mr. Chris Fairley - CAD with Fusion 360 (Workshop)":(datetime.datetime(2021, 1, 17, 14, 0),datetime.datetime(2021, 1, 17, 15, 00)),
"Steven Puri (Speaker)":(datetime.datetime(2021, 1, 17, 16, 0),datetime.datetime(2021, 1, 17, 17, 00)),
"Anshul Gupta - Intro to Web Dev (Workshop)":(datetime.datetime(2021, 1, 17, 18, 0),datetime.datetime(2021, 1, 17, 19, 00)),
"Minecraft Hunger Games":(datetime.datetime(2021, 1, 17, 20, 30),datetime.datetime(2021, 1, 17, 23, 00)),
"Hacking Ends":(datetime.datetime(2021, 1, 18, 8, 0),),
"Judging":(datetime.datetime(2021, 1, 18, 8, 0),datetime.datetime(2021, 1, 18, 10, 30)),
"Closing/Awards Ceremony":(datetime.datetime(2021, 1, 18, 11, 0),datetime.datetime(2021, 1, 18, 12, 00))}
days = [{},{},{},{}]
for e, t in ev.items():
days[getDay(t[0])][e]=t
final = "**——1/15——**\n"
for e, t in days[0].items():
if(len(t)==1):
final += format(t[0], True) + " | " + e
else:
final += format(t[0])+"-"+format(t[1], True) + " | " + e
final += "\n"
final += "\n"
final += "**——1/16——**\n"
for e, t in days[1].items():
final += format(t[0])+"-"+format(t[1], True) + " | " + e
final += "\n"
final += "\n"
final += "**——1/17——**\n"
for e, t in days[2].items():
final += format(t[0])+"-"+format(t[1], True) + " | " + e
final += "\n"
final += "\n"
final += "**——1/18——**\n"
for e, t in days[3].items():
if(len(t)==1):
final += format(t[0], True) + " | " + e
else:
final += format(t[0])+"-"+format(t[1], True) + " | " + e
final += "\n"
final += "\n\n**Zoom link:** https://thetahacks.tech/zoom\n\nAll times are in PST"
        # the schedule assembled in `final` above is not displayed; the embed
        # only reports that the event has ended
        embed = discord.Embed(
            title="ThetaHacks Virtual has ended.", description="", color=0x00ff9d)
        await ctx.send(embed=embed)
@commands.command(name="signup")
async def signup(self, ctx):
embed = discord.Embed(
title="Sign Up", description="Signups for the next event coming soon!", color=0xb134eb)
await ctx.send(embed=embed)
@commands.command(name="raffle")
    async def raffle(self, ctx, roleName="Attendees"):
role = discord.utils.get(ctx.guild.roles, name=roleName)
l = len([member for member in ctx.guild.members if role in member.roles])
if role is None:
await ctx.send('There is no such role on this server!')
return
if l == 0:
await ctx.send("Nobody has the role.")
return
randomRaffle = int(randrange(l))
await ctx.send([member for member in ctx.guild.members if role in member.roles][randomRaffle].display_name)
@commands.command(name="info")
async def info(self, ctx):
embed = discord.Embed(
title="Information", description="The original ThetaHacks Virtual occurred from Jan. 15-18, 2021. But now, ThetaHacks is becoming something bigger... Stay tuned for more info! \n\nLinks:\nMore info and signups on our website: https://thetahacks.tech \n Devpost: https://thetahacks.devpost.com", color=0xc0e8f9)
await ctx.send(embed=embed)
@commands.command(name="ping")
async def ping(self, ctx):
start = time.perf_counter()
message = await ctx.send("Ping...")
end = time.perf_counter()
duration = (end - start) * 1000
await message.edit(content='Pong! {:.2f}ms'.format(duration))
@commands.command(name="dice")
async def dice(self, ctx, n=1):
try:
if n < 1 or n > 20:
await ctx.send("Invalid arguments for command `dice`.")
else:
# roll N dice
await ctx.send(" ".join(str(random.randint(1, 6)) for i in range(n)))
except: # error
await ctx.send("Invalid arguments for command `dice`.")
@commands.command(name="magic8")
async def magic8(self, ctx):
bm = ("It is certain.", "It is decidedly so.", "Without a doubt.", "Yes – definitely.", "Most likely.", "Outlook good.", "Signs point to yes.", "Reply hazy, try again.", "Ask again later.",
"Better not tell you now.", "Cannot predict now.", "Concentrate and ask again.", "Don't count on it.", "My reply is no.", "My sources say no.", "Outlook not so good.", "Very doubtful.")
await ctx.send(random.choice(bm))
@commands.command(name="help")
async def help(self, ctx):
embed = discord.Embed(
title="Help", description="Valid commands:\n\n**Utility**\n`!events` - List event times and Zoom link\n`!next` - Show next event and Zoom link\n`!signup` - Signup form link\n`!info` - ThetaHacks information\n`!help` - \
View valid commands\n`!stats` - See server statistics\n`!rules` - See server rules\n\n**Fun**\n`!kill @user`\n`!ping` - pong\n`!magic8` - \
                Magic 8 ball\n`!dice N` - Roll N dice (1 <= N <= 20)", color=0x0027ff)
await ctx.send(embed=embed)
@commands.command(name="rules")
async def rules(self, ctx):
embed = discord.Embed(
title="Server Rules", description="1. Do not bully or harass others. Homophobia, racism and other discrimination is not allowed. \
Treat others the way you wish to be treated.\n\n2. Spamming, messages that do not contribute to the general conversation and \
non-English messages are not allowed. With this in mind, please also send content to its relevant channels.\n\n3. \
Excessive or toxic swearing, as well as generally distasteful or NSFW content is not allowed.\n\n4. Do not partake in \
activity against any Terms of Service within our community. This includes but is not limited to, the act of purchasing \
and selling accounts.\n\n5. Do not promote your personal material on our server without consent of a mod or admin. \
If you would like to partner with us, please contact an admin.\n\n6. Discord statuses/nicknames/names should be clean, \
this means no slurs, nothing that breaks TOS, no promotion, etc. Failure to comply with a mod’s request to change your \
status in a timely manner will deem a punishment proportionate to how severe your status is.\n\n7. Logical extensions of \
rules may also be enforced.", color=0xaa00ff)
await ctx.send(embed=embed)
@commands.command(name="kill")
async def kill(self, ctx, member: discord.Member):
if not member:
return await ctx.send("Invalid arguments for command `kill`")
kill_messages = ["barbecued", "disintegrated", "360-no-scoped",
"eaten alive", "yeeted out of existence", "squashed", "smited", "dropped in the void"]
# choose random message
this_msg = random.choice(kill_messages)
embed = discord.Embed(
title="K-O!", description="%s was %s by %s" % (member.display_name, this_msg, ctx.author.display_name), color=0xff00d1)
await ctx.send(embed=embed)
@commands.command(name="stats")
async def stats(self, ctx):
everyone = ctx.guild.get_role(717170061382516736)
attendees = ctx.guild.get_role(721874238801313884)
partners = ctx.guild.get_role(741822062221459568)
bots = ctx.guild.get_role(721827685990531113)
mentors = ctx.guild.get_role(722143200910901250)
staff = ctx.guild.get_role(730445847938203718)
coordinators = ctx.guild.get_role(717171411692683275)
text = f"`{len(coordinators.members)}` Coordinators\n`{len(staff.members)}` Staff\n`{len(mentors.members)}` Mentors\n`{len(partners.members)}` \
Partners\n`{len(bots.members)}` Bots\n`{len(attendees.members)}` Attendees\n`{len(everyone.members)}` All Members"
embed = discord.Embed(
title="ThetaHacks Stats", description=text, color=0x00ff9d)
await ctx.send(embed=embed)
@commands.command(name="ping")
async def ping(self, ctx):
start = time.perf_counter()
message = await ctx.send("Ping...")
end = time.perf_counter()
duration = (end - start) * 1000
await message.edit(content='Pong! {:.2f}ms'.format(duration))
@commands.command(name="raffle")
async def raffle(self, ctx):
roleName = "Attendees"
role = discord.utils.get(message.guild.roles, name=roleName)
i = 0
empty = True
randomRaffle = 0
randomRaffle = randrange(494)
if role is None:
await message.channel.send(f'There is no {roleName} role on this server!')
return
for member in message.guild.members:
if role in member.roles:
i+=1
if (i == randomRaffle):
await message.channel.send("{0.name}".format(member))
else:
await message.channel.send(f"No User at {randomRaffle}")
empty = False
if empty:
await message.channel.send(f"Nobody has the role {roleName}".format(role.mention))
def setup(bot):
print('commands')
bot.add_cog(CommandsCog(bot))
|
from typing import Union
from entities.employee import Employee
from entities.manager import Manager
from utils.connection_util import connection
from daos.employee_dao import EmployeeDAO
from exceptions.resource_not_found import ResourceNotFound
from exceptions.user_credential_failed import UserCredentialFailError
class EmployeeDaoPostgres(EmployeeDAO):
    def check_user_credential(self, first_name: str, last_name: str) -> Union[Employee, Manager]:
# manager
sql = """ select * from manager where first_name =%s and
last_name =%s"""
cursor = connection.cursor()
cursor.execute(sql, [first_name, last_name])
connection.commit()
record = cursor.fetchone()
if record is not None:
manager = Manager(*record)
return manager
# employee
sql = """ select * from employee where first_name = %s and
last_name= %s"""
cursor = connection.cursor()
cursor.execute(sql, [first_name, last_name])
connection.commit()
record = cursor.fetchone()
if record is not None:
employee = Employee(*record)
return employee
raise UserCredentialFailError
def get_all_employees(self) -> list[Employee]:
sql = """select * from employee """
cursor = connection.cursor()
cursor.execute(sql)
all_employee = cursor.fetchall()
        if not all_employee:  # fetchall() returns an empty list, never None
            raise ResourceNotFound
employees = [Employee(*a_employee) for a_employee in all_employee]
return employees
def get_employee_by_eid(self, employee_id: int) -> Employee:
sql = """select * from employee where employee_id = %s"""
cursor = connection.cursor()
cursor.execute(sql, [employee_id])
record = cursor.fetchone()
if record is None:
raise ResourceNotFound
employee = Employee(*record)
return employee
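# A minimal usage sketch (assumes the postgres connection in utils.connection_util
# is configured; the name below is illustrative):
if __name__ == "__main__":
    dao = EmployeeDaoPostgres()
    try:
        user = dao.check_user_credential("Jane", "Doe")
        print(type(user).__name__, "authenticated")
    except UserCredentialFailError:
        print("no such user")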
|
"""
Galois Field by user6655984 on StackOverflow
https://stackoverflow.com/questions/48065360/interpolate-polynomial-over-a-finite-field
"""
import itertools
from sympy.polys.domains import ZZ
from sympy.polys.galoistools import (gf_irreducible_p, gf_add, gf_sub, gf_mul, gf_rem, gf_gcdex)
from sympy.ntheory.primetest import isprime
class GF:
def __init__(self, p, n=1):
p, n = int(p), int(n)
if not isprime(p):
raise ValueError("p must be a prime number, not %s" % p)
if n <= 0:
raise ValueError("n must be a positive integer, not %s" % n)
self.p = p
self.n = n
if n == 1:
self.reducing = [1, 0]
else:
for c in itertools.product(range(p), repeat=n):
poly = (1, *c)
if gf_irreducible_p(poly, p, ZZ):
self.reducing = poly
break
def add(self, x, y):
return gf_add(x, y, self.p, ZZ)
def sub(self, x, y):
return gf_sub(x, y, self.p, ZZ)
def mul(self, x, y):
return gf_rem(gf_mul(x, y, self.p, ZZ), self.reducing, self.p, ZZ)
    def inv(self, x):
        # extended Euclid: s*x + t*reducing = gcd; for nonzero x the gcd is 1, so s = x^-1
        s, t, h = gf_gcdex(x, self.reducing, self.p, ZZ)
        return s
    def eval_poly(self, poly, point):
        # Horner's method over the field
        val = []
        for c in poly:
            val = self.mul(val, point)
            val = self.add(val, c)
        return val
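# A minimal usage sketch (illustrative values; elements are coefficient lists
# over GF(p), highest degree first):
if __name__ == "__main__":
    F = GF(2, 3)                  # GF(8), reduced modulo x^3 + x + 1
    a, b = [1, 0, 1], [1, 1]      # x^2 + 1 and x + 1
    print(F.add(a, b))            # coefficientwise sum mod 2
    print(F.mul(a, b))            # product reduced by the irreducible polynomial
    print(F.mul(a, F.inv(a)))     # [1]: every nonzero element is invertible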
|
'''pca_svd.py
Subclass of PCA_COV that performs PCA using the singular value decomposition (SVD)
YOUR NAME HERE
CS 251 Data Analysis Visualization, Spring 2020
'''
import numpy as np
import pca_cov
class PCA_SVD(pca_cov.PCA_COV):
def pca(self, vars, normalize=False):
'''Performs PCA on the data variables `vars` using SVD
'''
# Find the data to use for self.A
self.vars = vars
        if not isinstance(self.data[self.vars], np.ndarray):
            self.A = self.data[self.vars].to_numpy()
        else:
            self.A = self.data[self.vars]
        self.orgscales = np.max(self.A, axis=0) - np.min(self.A, axis=0)
        if normalize:
            self.A = (self.A - np.min(self.A, axis=0)) / self.orgscales
# Store means
self.means = np.mean(self.A, axis = 0)
# Find eigen values/vectors
U, S, V = np.linalg.svd(self.A - self.means, full_matrices = False)
        self.e_vals = S**2 / (self.A.shape[0] - 1)  # singular values give the covariance eigenvalues
self.e_vecs = V.T
# Set instance variables
self.prop_var = self.compute_prop_var(self.e_vals)
self.cum_var = self.compute_cum_var(self.prop_var)
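# A quick standalone sanity check of the SVD/covariance equivalence used above
# (illustrative data; covariance eigenvalues equal s_i**2 / (N-1)):
if __name__ == "__main__":
    A = np.random.rand(100, 3)
    Ac = A - A.mean(axis=0)
    _, S, _ = np.linalg.svd(Ac, full_matrices=False)
    evals_svd = S**2 / (A.shape[0] - 1)
    evals_cov = np.sort(np.linalg.eigvalsh(np.cov(Ac, rowvar=False)))[::-1]
    print(np.allclose(evals_svd, evals_cov))  # True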
|
#!/usr/bin/python3
"""
PYTHON OBJECT RELATIONAL MAPPING MODULE
model_state_update_id_2 module provides function to change the name of a
State object from the DB.
"""
import sys
from model_state import Base, State
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
def model_state_update_id_2():
"""
changes the name of a State object where id = 2 from the database.
Takes 3 arguments: mysql username, mysql password and database name.
"""
engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.format(
sys.argv[1], sys.argv[2], sys.argv[3]), pool_pre_ping=True)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
session.query(State).filter(State.id == 2).update({"name": "New Mexico"})
session.commit()
session.close()
if __name__ == "__main__":
model_state_update_id_2()
|
from datetime import datetime
class Bomba:
_tipo = None
__valor = 0
__quantidade = 0
def __init__(self, tipo, valor, quantidade):
self._tipo = tipo
self.__valor = valor
self.__quantidade = quantidade
def desconto(self, desconto):
self.__valor -= self.__valor * desconto
    @property
    def vitoria(self):
        # 10% discount applies on the 8th of each month
        if 8 == datetime.now().day:
            return self.__valor - (self.__valor * 0.1)
        return self.__valor
@property
def quantidade(self):
return self.__quantidade
@quantidade.setter
def quantidade(self, quantidade):
self.__quantidade = quantidade
class Frentista:
nome = None
__cpf = None
def __init__(self, nome, cpf):
self.nome = nome
self.__cpf = cpf
if __name__ == "__main__":
    bg = Bomba('G', 4.5, 100)
    print(f'Type: {bg._tipo}\nPrice: {bg.vitoria}')
    bg.quantidade = 150
    print('->>>>>>>>>', bg.quantidade)
    ba = Bomba('A', 3.5, 100)
    print(f'Type: {ba._tipo}\nPrice: {ba.vitoria}')
|
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 1 00:22:09 2019
@author: HP
"""
import cv2
num=cv2.imread(r'F:\CS Projects\Coding\Python\Input\Num_3.jpg')
gray=cv2.cvtColor(num,cv2.COLOR_BGR2GRAY)
gray=cv2.GaussianBlur(gray,(7,7),0)
edge=cv2.Canny(gray,50,100)
#edge=cv2.dilate(edge,None,iterations=1)
#edge=cv2.erode(edge,None,iterations=1)
cv2.imshow('Number',edge)
if cv2.waitKey(0)==ord('q'):
cv2.destroyAllWindows()
|
# -*- coding: utf-8 -*-
"""Tests for MacOS fseventsd files."""
import unittest
import pygzipf
from dtformats import fseventsd
from tests import test_lib
class FseventsFileTest(test_lib.BaseTestCase):
"""MacOS fseventsd file tests."""
# pylint: disable=protected-access
def testReadDLSPageHeaderV1(self):
"""Tests the _ReadDLSPageHeader function on format version 1."""
output_writer = test_lib.TestOutputWriter()
test_file = fseventsd.FseventsFile(output_writer=output_writer)
test_file_path = self._GetTestFilePath(['fsevents-0000000002d89b58'])
self._SkipIfPathNotExists(test_file_path)
gzipf_file = pygzipf.file()
gzipf_file.open(test_file_path)
try:
test_file._ReadDLSPageHeader(gzipf_file, 0)
finally:
gzipf_file.close()
def testReadDLSPageHeaderV2(self):
"""Tests the _ReadDLSPageHeader function on format version 2."""
output_writer = test_lib.TestOutputWriter()
test_file = fseventsd.FseventsFile(output_writer=output_writer)
test_file_path = self._GetTestFilePath(['fsevents-00000000001a0b79'])
self._SkipIfPathNotExists(test_file_path)
gzipf_file = pygzipf.file()
gzipf_file.open(test_file_path)
try:
test_file._ReadDLSPageHeader(gzipf_file, 0)
finally:
gzipf_file.close()
def testReadDLSRecordV1(self):
"""Tests the _ReadDLSRecord function on format version 1."""
output_writer = test_lib.TestOutputWriter()
test_file = fseventsd.FseventsFile(output_writer=output_writer)
test_file_path = self._GetTestFilePath(['fsevents-0000000002d89b58'])
self._SkipIfPathNotExists(test_file_path)
gzipf_file = pygzipf.file()
gzipf_file.open(test_file_path)
try:
test_file._ReadDLSRecord(gzipf_file, 12, 1)
finally:
gzipf_file.close()
def testReadDLSRecordV2(self):
"""Tests the _ReadDLSRecord function on format version 2."""
output_writer = test_lib.TestOutputWriter()
test_file = fseventsd.FseventsFile(output_writer=output_writer)
test_file_path = self._GetTestFilePath(['fsevents-00000000001a0b79'])
self._SkipIfPathNotExists(test_file_path)
gzipf_file = pygzipf.file()
gzipf_file.open(test_file_path)
try:
test_file._ReadDLSRecord(gzipf_file, 12, 2)
finally:
gzipf_file.close()
def testReadFileObjectV1(self):
"""Tests the ReadFileObject function on format version 1."""
output_writer = test_lib.TestOutputWriter()
test_file = fseventsd.FseventsFile(debug=True, output_writer=output_writer)
test_file_path = self._GetTestFilePath(['fsevents-0000000002d89b58'])
self._SkipIfPathNotExists(test_file_path)
test_file.Open(test_file_path)
test_file.Close()
def testReadFileObjectV2(self):
"""Tests the ReadFileObject function on format version 2."""
output_writer = test_lib.TestOutputWriter()
test_file = fseventsd.FseventsFile(debug=True, output_writer=output_writer)
test_file_path = self._GetTestFilePath(['fsevents-00000000001a0b79'])
self._SkipIfPathNotExists(test_file_path)
test_file.Open(test_file_path)
test_file.Close()
if __name__ == '__main__':
unittest.main()
|
from flask import render_template
from app import app
from app.forms import LoginForm, RegistrationForm
@app.route('/')
@app.route('/index')
def index():
return render_template('base.html', title='Display this shit')
@app.route('/register')
def register():
form = RegistrationForm()
return render_template('registration.html', form=form)
@app.route('/login')
def login():
form = LoginForm()
return render_template('login.html', form=form)
|
"""
"""
__all__ = [
"ERR_CHAN",
"DATA_CHAN",
"netPeer",
"sendCmd",
"sendData",
"disconnectPeer",
]
from edh import *
# conventional Nedh channel for error reporting
ERR_CHAN = "err"
# conventional Nedh channel for data exchange
DATA_CHAN = "data"
# effectful identifier of the peer object
netPeer = Symbol("@netPeer")
# effectful identifier of the method procedure for normal command sending
sendCmd = Symbol("@sendCmd")
# effectful identifier of the method procedure for normal data sending
sendData = Symbol("@sendData")
# effectful identifier of the method procedure to disconnect the connection
disconnectPeer = Symbol("@disconnectPeer")
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 15 07:39:40 2020
@author: adonay
"""
import os.path as op
import numpy as np
import pandas as pd
import pickle
import matplotlib.pyplot as plt
import utils_io as uio
import utils_signal_processing as sig_proc
import utils_feature_extraction as feat_ext
from mne.filter import filter_data
from sklearn.decomposition import PCA
import matplotlib
font = {'family' : 'sans-serif',
        'size'   : 15}
matplotlib.rc('font', **font)
def zscore(x):
x = (x - np.nanmean(x))/ np.nanstd(x)
return x
def minmax_scaler(x):
xmin, xmax = np.min(x), np.max(x)
x = (x - xmin)/ (xmax - xmin)
return x
def make_fig():
fig = plt.figure(figsize=(15, 20), constrained_layout=True)
gs = fig.add_gridspec(4, 4)
ax1 = fig.add_subplot(gs[0, :])
ax1.set_title('TS, peaks and pk slopes')
ax2 = fig.add_subplot(gs[1,:-2])
ax3 = fig.add_subplot(gs[2, :-2])
ax4 = fig.add_subplot(gs[3, 0])
ax5 = fig.add_subplot(gs[3, 1])
ax6 = fig.add_subplot(gs[1, 2:])
ax7 = fig.add_subplot(gs[2, 2:])
ax8 = fig.add_subplot(gs[3, 2:])
return fig, [ax1, ax2, ax3, ax4, ax5, ax6, ax7, ax8]
def filename_creator(subj, folder_name):
diagn = df_beh.loc[subj, 'gen_diagnosis']
diag_num = df_beh.loc[subj, 'gen_diagnosis_num']
age = df_beh.loc[subj, 'age']
if diagn == "Control":
severity = 0
ttl = f"{n} {subj}, age {df_beh.loc[subj, 'age']} ,{diagn}"
elif diagn == "PD":
ttl = f"{n} {subj}, age {df_beh.loc[subj, 'age']} ,{diagn}, {df_beh.loc[subj, 'updrs_arm_total_R']}"
severity = df_beh.loc[subj, 'updrs_arm_total_R']
else:
ttl = f"{n} {subj}, age {df_beh.loc[subj, 'age']} ,{diagn}, {df_beh.loc[subj, 'common_arm_score_L']}"
severity = df_beh.loc[subj, 'common_arm_score_L']
fname = f"{folder_name}/{severity}_{diag_num}_{age}_{subj}.png"
return fname
# def save_plot(x, y, folder):
# Path definitions
root_dir = '/home/adonay/Desktop/projects/Ataxia'
model_name = '_resnet152_FingerTappingJan29shuffle1_650000'
paths = uio.get_paths(model_name, root_dir)
sfreq_common = 60
BP_filr = [1, 10]
# load data
df_beh = pd.read_csv(paths['beh'], index_col=0)
# fname = f"TS_filt_{BP_filr[0]}_{BP_filr[1]}hz_{sfreq_common}Fs_{model_name}.pickle"
fname = f"TS_{sfreq_common}Fs_{model_name}.pickle"
with open(paths['out'] + fname, 'rb') as f:
TS_preds = pickle.load( f)
ord_num = 3
## Initialize dataframe for storing features and patient data
subjs = list(TS_preds.keys())
init_var = []
for s in ['r', 'l']:
t = np.arange(200)
v = np.sin(t)
pk_pos, pk_neg = feat_ext.get_peaks(v, height=.5, prominence=.1, time=[])
feat_thth = feat_ext.get_pkpk_feat(v, t, pk_neg, "thth", s)
feat_pkth = feat_ext.get_pktrough_feat(v, t, pk_pos, pk_neg, "pkth", s)
feat_pks = feat_ext.get_peak_shape_feat(v, t, pk_pos, 'pk', s, ord_num)
feat_ths = feat_ext.get_peak_shape_feat(v, t, pk_neg, 'th', s, ord_num)
feat_TS = feat_ext.get_TS_features(v, t, 'ts', s, ord_num, exclude_order=[0])
keys = [list(k.keys()) for k in [feat_TS, feat_pks, feat_ths, feat_pkth, feat_thth]]
keys = [i for kk in keys for i in kk]
df = pd.DataFrame(columns=keys, index=subjs)
fig0, ax = plt.subplots(1)
do_plot = False
fig, axs= make_fig()
# run analysis
for n, subj in enumerate(subjs):
ts = TS_preds[subj]['TS_filt']
times = TS_preds[subj]['times_filt']
for ix, s in enumerate(['r', 'l']):
ix_inx = 0 + (ix*3)
ix_thb = 1 + (ix*3)
min_sz = min(ts[ix_inx].shape[1], ts[ix_thb].shape[1])
ts_inx = ts[ix_inx][:,:min_sz]
ts_thb = ts[ix_thb][:,:min_sz]
        tapping = np.sqrt(np.sum(ts_inx - ts_thb, axis=0)**2) # |summed coordinate difference| (not the Euclidean norm)
tapping1 = filter_data(tapping, sfreq_common, BP_filr[0], BP_filr[1], pad='reflect', verbose=0)
tapping1 = zscore(tapping1)
tapping2 = filter_data(tapping, sfreq_common, None, BP_filr[1], pad='reflect', verbose=0)
tapping2 = zscore(tapping2)
time = times[ix_inx][:min_sz]
_, freq, line = ax.psd(tapping1, Fs=sfreq_common, return_line=True)
px = line[0].get_ydata()
mx = np.argmax(px[6:])+6
pk_fq = freq[mx]
pk_pos, pk_neg = feat_ext.get_peaks(tapping1, height=.05, prominence=.1, time=time, do_plot=do_plot, ax=axs[0])
feat_thth = feat_ext.get_pkpk_feat(tapping2, time, pk_neg, "thth", s, do_plot=do_plot, axs=axs[3:5])
feat_pkth = feat_ext.get_pktrough_feat(tapping2, time, pk_pos, pk_neg, "pkth", s, do_plot=do_plot, axs=axs[1:3])
feat_pks = feat_ext.get_peak_shape_feat(tapping2, time, pk_pos, 'pk', s, ord_num, do_plot=do_plot, ax=axs[0])
feat_ths = feat_ext.get_peak_shape_feat(tapping2, time, pk_neg, 'th', s, ord_num, do_plot=do_plot, ax=axs[0])
feat_TS = feat_ext.get_TS_features(tapping2, time, 'ts', s, ord_num, exclude_order=[0], do_plot=do_plot, axs=axs[5:])
feat = {**feat_TS, **feat_pks, **feat_ths, **feat_thth, **feat_pkth}
for k, v in feat.items():
df.loc[subj, k]= v
# Calculate finger correlations
ts_inx_f = np.diff(filter_data(ts_inx, sfreq_common, 1, None, pad='reflect', verbose=0))
ts_thb_f = np.diff(filter_data(ts_thb, sfreq_common, 1, None, pad='reflect', verbose=0))
neg_corr = min(np.corrcoef(ts_inx_f, ts_thb_f)[0,2], np.corrcoef(ts_inx_f, ts_thb_f)[1,3])
pos_corr = max(np.corrcoef(ts_inx_f, ts_thb_f)[0,2], np.corrcoef(ts_inx_f, ts_thb_f)[1,3])
df.loc[subj, "ts_vel_corr_pos_" + s] = pos_corr
df.loc[subj, "ts_vel_corr_neg_" + s] = neg_corr
df.loc[subj, "pk_freq_" + s] = pk_fq
df.loc[subj, "out_times_" + s] = times[ix_inx][-1] - times[ix_inx][0]
df.loc[subj, "out_fps_" + s] = TS_preds[subj]['sfreq_ori'][0][ix_inx]
# r_feat = [c for c in df.columns if c[-1] == "r"]
# l_feat = [c for c in df.columns if c[-1] == "l"]
# len_sum = (times[0].size + times[3].size )
# weigh_rl = [times[0].size/len_sum, times[3].size/len_sum]
# for c_r, c_l in zip(r_feat, l_feat):
# df[c_r[:-1]+"b"] = df[c_r]*weigh_rl[0] + df[c_l]*weigh_rl[1]
if do_plot:
fname = filename_creator(subj, paths['out']+"TS_feats_imgs")
fig.savefig(fname)
_ = [a.cla() for a in axs]
Finger_tapping = pd.concat([df, df_beh], axis=1, join='inner')
n_sbj = Finger_tapping.shape[0]
fname_out = paths['out'] + f'FT_feat_{n_sbj}subj_{BP_filr[0]}_{BP_filr[1]}hz_{sfreq_common}Fs_{model_name}.csv'
assert(n_sbj == len(subjs))
Finger_tapping.to_csv(fname_out)
runfile('/data/github/DeepNMA/SS04_feature_ML_class.py', wdir='/data/github/DeepNMA') # Spyder-only helper, undefined in plain Python
# runfile('/data/github/DeepNMA/SS04_feature_ML_regres.py', wdir='/data/github/DeepNMA')
|
import asyncio
async def count(limit=3):
    for step in range(1, limit+1):
        print("Counting:", step)
        await asyncio.sleep(0)
coroutine = count(5)
while True:
    try:
        coroutine.send(None) # drive the coroutine by hand, one step per send
    except StopIteration:
        break
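# The same coroutine run by the event loop instead of manual .send(None)
# (asyncio.run is available from Python 3.7):
asyncio.run(count(5))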
|
import os
os.sys.path.insert(0, os.path.abspath('../settings_folder'))
import settings
import ddpg_airsim
import dqn_airsim
import dqn_baselines
import ppo_airsim
#import sac_airsim
from game_handler_class import *
import msgs
import file_handling
from utils import *
import utils # needed for the qualified utils.reset_msg_logs() call below
def runTask(task):
# decide on the algorithm
# DQN-B is the stable-baselines version of DQN
# DQN is the Keras-RL version of DQN
if ("algo" in task.keys()):
if (task["algo"] in ["DDPG", "DQN", "PPO", "SAC", "DQN-B"]):
if (task["algo"] == "DDPG"):
msgs.algo = "DDPG"
train_class = ddpg_airsim
elif (task["algo"] == "PPO"):
msgs.algo = "PPO"
train_class = ppo_airsim
elif (task["algo"] == "DQN"):
train_class = dqn_airsim
msgs.algo = "DQN"
elif (task["algo"] == "DQN-B"):
train_class = dqn_baselines
msgs.algo = "DQN-B"
elif (task["algo"] == "SAC"):
train_class = sac_airsim
else:
print("this algorithm is not supported")
exit(0)
if (task["task_type"] == "backup"):
backup_obj = file_handling.Backup()
backup_obj.get_backup()
if (task["task_type"] == "train"):
train_obj, env = train_class.setup(env_name=task["env_name"], \
difficulty_level=task["difficulty_level"])
if (task["algo"] == "DQN"):
train_class.train(train_obj, env)
if(task["algo"] == "DQN-B"):
train_class.train(train_obj, env)
if (task["algo"] == "PPO"):
train_class.train(train_obj, env)
if (task["algo"] == "SAC"):
train_class.train(train_obj, env)
if (task["task_type"] == "test"):
if (len(task["weights"]) == 0):
task["weights"] = file_handling.find_all_weight_files(msgs.algo, settings.proj_root_path)
for weights in task["weights"]:
utils.reset_msg_logs()
train_obj, env = train_class.setup(env_name=task["env_name"], \
difficulty_level=task["difficulty_level"])
train_class.test(train_obj, env, weights)
if (task["task_type"] == "start_game"):
game_handler = GameHandler()
game_handler.start_game_in_editor()
if (task["task_type"] == "restart_game"):
game_handler = GameHandler()
game_handler.restart_game()
if task["task_type"] == "generate_csv":
msgs.algo = task["algo"] = "DQN"
csv_file = os.path.join(settings.proj_root_path, "data", msgs.algo, task["data_file"])
generate_csv(csv_file)
if task["task_type"] == "plot_data":
data_file = os.path.join(settings.proj_root_path, "data", task["algo"], task["data_file"])
plot_data(data_file, task["data_to_plot"], task["plot_data_mode"])
def main():
taskList = []
model_weights_list_to_test = ["C:/workspace/airlearning-rl/data/PPO/zone0/0.hf5"]
task1 = {"task_type": "start_game"}
task2 = {"algo": "DQN-B", "task_type": "train", "difficulty_level": "easy", "env_name": "AirSimEnv-v42",
"weights": model_weights_list_to_test}
taskList.append(task1)
taskList.append(task2)
for task_el in taskList:
runTask(task_el)
if __name__ == "__main__":
main()
|
from rest_framework import serializers
from apps.jogo.models import Jogo
class JogoSerializer(serializers.ModelSerializer):
class Meta:
model = Jogo
fields = '__all__'
    def create(self, validated_data):
        jogo = Jogo()
jogo.iniciar_jogo()
for n in range(1, 301):
jogo.iniciar_partida(n)
return jogo.save_historico()
|
class Product(object):
def __init__(self, price, item_name, weight, brand, cost=0.0, status='for sale'):
self.price = price
self.item_name = item_name
self.weight = weight
self.brand = brand
self.cost = cost
self.status = status
def sell(self):
self.status = 'sold'
return self
    def add_tax(self):
        # use a local so the add_tax method itself is not overwritten
        tax = 0.15 * self.price
        self.price += tax
        return float(self.price)
    def return_me(self):
        if self.status == 'defective':
            self.cost = 0.0
            return self.cost
        elif self.status == 'opened':
            # 20% restocking fee; a local again avoids clobbering return_me
            restocking_fee = self.price * .20
            self.cost = self.add_tax() - restocking_fee
            return self.cost
        else:
            return self.add_tax()
def display_info(self):
print 'Status:{} Cost:{}'.format(self.status, self.return_me())
return 'Price w/ Tax: {}, Item: {}, Weight: {}, Brand: {}'.format(self.price, self.item_name, self.weight, self.brand)
a = Product(price=50.0, item_name='t-shirt', weight=0.2,
cost=0.0, brand='diamond', status='defective')
b = Product(price=50.0, item_name='t-shirt', weight=0.2, cost=0.0, brand='diamond', status='opened')
c = Product(price=100.0, item_name='t-shirt', weight=0.2, cost=0.0, brand='diamond', status='none')
d = Product(price=20.0, item_name='t-shirt', weight=0.2, cost=0.0, brand='diamond', status='opened')
# b.price
# b.cost
# b.status
# b.add_tax()
# b.price
# b.return_me()
print b.display_info()
print a.display_info()
print c.display_info()
print d.display_info()
|
# Generated by Django 2.2.2 on 2019-09-04 09:41
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0006_auto_20190829_0910'),
]
operations = [
migrations.CreateModel(
name='City',
fields=[
('id', models.IntegerField(primary_key=True, serialize=False)),
('code', models.CharField(default='', max_length=50)),
('name', models.CharField(max_length=250)),
('parent_id', models.IntegerField()),
('first_letter', models.CharField(max_length=50)),
('level', models.IntegerField()),
],
options={
'db_table': 'city',
},
),
migrations.DeleteModel(
name='UserToken',
),
]
|
import cv2
import numpy as np
import imutils
from calibration import Contour, ReferenceLine, SquaresIdentifier, PeaksIdentifier, CalibrationRig
USE_VIDEO = False
def process_image(image):
triangles, squares = Contour.find_triangles_and_squares(image)
ref_line = ReferenceLine(triangles)
ref_line.draw(image)
peaks_identifier = PeaksIdentifier(triangles, ref_line)
peaks = peaks_identifier.identify()
    for label, peak in peaks.items():
peak = np.int0(peak)
cv2.circle(image, tuple(peak), 5, (0, 0, 255), 2, lineType=cv2.LINE_AA)
cv2.putText(image, str(label), (peak[0], peak[1] - 10), cv2.FONT_HERSHEY_SIMPLEX,
0.4, (0, 0, 255), 2)
sqr_identifier = SquaresIdentifier(squares, ref_line)
identified_squares = sqr_identifier.identify(image, triangles)
    for label, s in identified_squares.items():
cv2.putText(image, str(label), tuple(np.int0(s.centroid)), cv2.FONT_HERSHEY_SIMPLEX,
0.5, (0, 255, 255), 2)
calib = CalibrationRig(peaks, identified_squares)
calib.draw_axes(image)
return image
if USE_VIDEO:
cap = cv2.VideoCapture("assets/video.mp4")
wait_dur = 0
while True:
ret, frame = cap.read()
if not ret:
break
image = process_image(frame)
cv2.imshow("Frame", image)
if cv2.waitKey(wait_dur) & 0xFF == ord('q'):
break
wait_dur = 1
cap.release()
cv2.destroyAllWindows()
else:
image = cv2.imread("assets/IMG_4205.JPG")
image = imutils.resize(image, width=720)
image = process_image(image)
cv2.imshow("Image", image)
cv2.waitKey(0)
|
from fastapi import APIRouter
from fastapi.exceptions import HTTPException
from app.schemas import questions
from app.api.operation.question import get_question, create_question
from app.models.questions import QuestionChoices
router = APIRouter()
@router.post("/create_question/", response_model=questions.Question)
async def create_questions(question: QuestionChoices):
obj = await get_question(question)
if obj:
        raise HTTPException(
            status_code=400,
            detail="Question already exists"
        )
return await create_question(question)
|
from app.DAOs.MasterDAO import MasterDAO
from psycopg2 import sql, errors,errorcodes
from app.DAOs.AuditDAO import AuditDAO
from app.handlers.WebsiteHandler import WebsiteHandler
from app.DAOs.WebsiteDAO import WebsiteDAO
from app.DAOs.PhoneDAO import PhoneDAO
from flask import jsonify
class ServiceDAO(MasterDAO):
"""
Data access object for transactions involving services.
"""
def serviceInfoArgs(self, service):
"""
        Build a list of 'key = value' assignment strings from a service's fields.
:param service: contains service fields
:type service: dict
:return list: list of strings with 'key = value' structure.
"""
fields = []
for key in service:
if key == 'rid':
fields.append(key + " = " + str(service[key]))
if key == 'sname':
fields.append(key + " = " + "'"+str(service[key])+"'")
if key == 'sdescription':
fields.append(key + " = " + "'"+str(service[key])+"'")
if key == 'sschedule':
fields.append(key + " = " + "'"+str(service[key])+"'")
return fields
def deleteService(self, sid, uid):
"""
Remove a service from the database,given a service ID.
Uses :func:`~app.DAOs.AuditDAO.AuditDAO.getTableValueByIntID` &
:func:`~app.DAOs.AuditDAO.AuditDAO.insertAuditEntry`
:param sid: Service ID
:type sid: int
:param uid: User ID
:type uid: int
:return Tuple: SQL result of Query as a tuple.
"""
cursor = self.conn.cursor()
audit = AuditDAO()
tablename = "services"
pkey = "sid"
oldValue = audit.getTableValueByIntID(
table=tablename, pkeyname=pkey, pkeyval=sid, cursor=cursor)
query = sql.SQL("update {table1} set isdeleted = true "
"where {pkey1} = %s "
"returning sid,rid,sname,sdescription,sschedule ").format(
table1=sql.Identifier('services'),
pkey1=sql.Identifier('sid'))
cursor.execute(query, (int(sid), ))
result = cursor.fetchone()
newValue = audit.getTableValueByIntID(
table=tablename, pkeyname=pkey, pkeyval=sid, cursor=cursor)
audit.insertAuditEntry(changedTable=tablename, changeType=audit.UPDATEVALUE, oldValue=oldValue,
newValue=newValue, uid=uid, cursor=cursor)
self.conn.commit()
if result is None:
return None
return result
def createService(self, uid, rid, sname, sdescription, sschedule, websites, numbers):
"""
Creates a new service and adds websites and phones to it.
Uses :func:`~app.DAOs.AuditDAO.AuditDAO.getTableValueByIntID` &
:func:`~app.DAOs.AuditDAO.AuditDAO.insertAuditEntry`
:param uid: The user ID for the creator of the service
:type uid: int
:param rid: The ID for the room that would provide the service
:type rid: int
:param sname: The name of the service
:type sname: string
:param sdescription: A description of the service
:type sdescription: string
:param sschedule: The service's schedule
:type sschedule: string
        :param websites: Websites to be associated with the service
:type websites: array
:param numbers: Phone numbers to be added to the service
:type numbers: array
:return: results from :func:`~app.DAOs.ServiceDAO.ServiceDAO.getServiceByID` used with
the new service's sid.
"""
cursor = self.conn.cursor()
# Build the query to create an event entry.
try:
audit = AuditDAO()
tablename = "services"
pkeys = ["rid", "sname"]
oldValue = audit.getTableValueByPkeyPair(table=tablename, pkeyname1=pkeys[0], pkeyname2=pkeys[1],
pkeyval1=rid, pkeyval2=sname, cursor=cursor)
query = sql.SQL("insert into {table1} ({insert_fields})"
"values (%s, %s, %s, %s, %s) "
"ON CONFLICT (rid,sname) "
"do update set sdescription=%s, sschedule=%s, isdeleted=false "
"where services.isdeleted = true "
"returning {keys} ").format(
table1=sql.Identifier('services'),
insert_fields=sql.SQL(',').join(
[
sql.Identifier('rid'),
sql.Identifier('sname'),
sql.Identifier('sdescription'),
sql.Identifier('sschedule'),
sql.Identifier('isdeleted'),
]),
keys=sql.SQL(',').join(
[ sql.Identifier('sid'),
sql.Identifier('rid'),
sql.Identifier('sname'),
sql.Identifier('sdescription'),
sql.Identifier('sschedule'),
sql.Identifier('isdeleted'),
]))
cursor.execute(query, (int(rid), str(sname), str(
sdescription), str(sschedule), False, str(
sdescription), str(sschedule)))
result = cursor.fetchone()
            try:
                sid = result[0]
            except TypeError:  # result is None when the ON CONFLICT clause suppressed the insert
                return jsonify(Error='Room with service already exists'), 401
newValue = audit.getTableValueByPkeyPair(table=tablename, pkeyname1=pkeys[0], pkeyname2=pkeys[1],
pkeyval1=rid, pkeyval2=sname, cursor=cursor)
if not oldValue:
changeType = audit.INSERTVALUE
else:
changeType = audit.UPDATEVALUE
audit.insertAuditEntry(changedTable=tablename, changeType=changeType, oldValue=oldValue,
newValue=newValue, uid=uid, cursor=cursor)
for site in websites:
website = (WebsiteDAO.addWebsite(
self, url=site['url'], cursor=cursor, uid=uid))
if website is None:
return jsonify(Error='Website problem '+site['url']+" Not valid"),400
else:
WebsiteDAO().addWebsitesToService(
sid=sid, wid=website[0], wdescription=site['wdescription'], cursor=cursor, uid=uid)
for num in numbers:
phone = PhoneDAO.addPhone(
self, pnumber=num['pnumber'], ptype=num['ptype'], cursor=cursor, uid=uid)
PhoneDAO().addPhoneToService(
sid=sid, pid=phone[0], cursor=cursor, uid=uid)
# Commit changes if no errors occur.
self.conn.commit()
return result
except errors.UniqueViolation as badkey:
return jsonify(Error="Room has service with the same name"+str(badkey)), 401
def getServiceByID(self, sid):
"""
        Query Database for a Service's information by its sid.
:param sid: Service ID
:type sid: int
:return Tuple: SQL result of Query as a tuple.
"""
cursor = self.conn.cursor()
query = sql.SQL("select {fields} from {table} "
"where {pkey}= %s;").format(
fields=sql.SQL(',').join([
sql.Identifier('sid'),
sql.Identifier('rid'),
sql.Identifier('sname'),
sql.Identifier('sdescription'),
sql.Identifier('sschedule'),
sql.Identifier('isdeleted')
]),
table=sql.Identifier('services'),
pkey=sql.Identifier('sid'))
cursor.execute(query, (int(sid),))
result = cursor.fetchone()
return result
def getServicesByRoomID(self, rid):
"""
        Query Database for all services in a given room ID.
:param rid: Room ID.
:type rid: int
:return Tuple: SQL result of Query as a tuple.
"""
cursor = self.conn.cursor()
query = sql.SQL(
"select sid,sname,sdescription,sschedule from services WHERE rid = %s and isdeleted = false ").format()
cursor.execute(query, (rid, ))
result = []
for row in cursor:
result.append(row)
return result
def getServicesSegmented(self, offset, limit):
"""
        Query Database for all services, segmented.
:param offset: Number of rows to ignore from top results.
:type offset: int
:param limit: Maximum number of rows to return from query results.
:type limit: int
:return Tuple: SQL result of Query as a tuple.
"""
cursor = self.conn.cursor()
query = sql.SQL("select * from services WHERE isdeleted = false "
"offset %s "
"limit %s ").format()
cursor.execute(query, (offset, limit))
result = []
for row in cursor:
result.append(row)
return result
def getServicesByKeywords(self, searchstring, offset, limit):
"""
        Query Database for all services matching a given keyword.
        :param searchstring: Keyword to search services by.
:type searchstring: string
:param offset: Number of rows to ignore from top results.
:type offset: int
:param limit: Maximum number of rows to return from query results.
:type limit: int
:return Tuple: SQL result of Query as a tuple.
"""
cursor = self.conn.cursor()
query = sql.SQL("select {fields} from {table} "
"where isdeleted = False and "
"({pkey1} @@ to_tsquery(%s) "
"or {pkey2} @@ to_tsquery(%s)) "
"offset %s "
"limit %s;").format(
fields=sql.SQL(',').join([
sql.Identifier('sid'),
sql.Identifier('rid'),
sql.Identifier('sname'),
sql.Identifier('sdescription'),
sql.Identifier('sschedule'),
sql.Identifier('isdeleted')
]),
table=sql.Identifier('services'),
pkey1=sql.Identifier('sname_tokens'),
pkey2=sql.Identifier('sdescription_tokens'))
cursor.execute(query, (str(searchstring), str(
searchstring), int(offset), int(limit)))
result = []
for row in cursor:
result.append(row)
return result
def updateServiceInformation(self, sid, service, uid):
"""
Update the information about a service.
Parameters:
:param sid: Service ID.
:type sid: int
:param service: Dictionary with the service information to update.
:type service: string
:param uid: User ID of the caller of this function.
:type uid: int
:return Tuple: SQL result of Query as a tuple.
"""
cursor = self.conn.cursor()
try:
fields_list = self.serviceInfoArgs(service)
audit = AuditDAO()
tablename = "services"
pkey = "sid"
oldValue = audit.getTableValueByIntID(
table=tablename, pkeyname=pkey, pkeyval=sid, cursor=cursor)
query = sql.SQL("update {table1} set {fields} "
"where {pkey1} = %s AND isdeleted=false "
"returning {pkey1} ").format(
table1=sql.Identifier('services'),
fields=sql.SQL(",").join(map(sql.SQL, fields_list)),
pkey1=sql.Identifier('sid'))
cursor.execute(query, (int(sid), ))
result = cursor.fetchone()
newValue = audit.getTableValueByIntID(
table=tablename, pkeyname=pkey, pkeyval=sid, cursor=cursor)
audit.insertAuditEntry(changedTable=tablename, changeType=audit.UPDATEVALUE, oldValue=oldValue,
newValue=newValue, uid=uid, cursor=cursor)
self.conn.commit()
return result
except errors.UniqueViolation as badkey:
return jsonify(Error="anonther service is using the same name, within the same room"),403
except errors.TypeError as badkey:
return jsonify(Error = "Sid problem")
|
class LivingThing():
def breathe(self):
return "I'm breathing as a living thing."
class Mammal(LivingThing):
def breathe(self):
return "I'm breathing as a mammal."
class Animal(LivingThing):
def sound(self):
return "I am speaking out as an animal"
class Bird(LivingThing):
pass
if __name__ == '__main__':
bird = Bird()
mammal = Mammal()
print(mammal.breathe())
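    # Hedged illustration: Bird defines no breathe() of its own, so attribute
    # lookup walks the MRO and falls back to LivingThing.breathe().
    print(bird.breathe())   # -> I'm breathing as a living thing.
    print(Bird.__mro__)     # (Bird, LivingThing, object)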
|
number_to_guess = 5
times = 5
while times != 0:
    user_number = int(input("Guess the number from 0 to 10: "))
    if number_to_guess == user_number:
        print("You won, champ!\n")
        times = 0
    else:
        times -= 1
        if times == 0:
            print("You lost!\n")
|
import datastore
import encryptordatastore
def initialize():
datastore.initialize()
encryptordatastore.initialize()
|
from . import (parsers,
common,
io,
multiprocessing_tools,
analysis,
diagnostics,
stochastic_processes,
stats_utils,
plotting)
|
import math
import pylo
class StaticMagneticFieldForTilt(pylo.Device):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.init_event_id = "static_magnetic_field_init"
self.modify_step_event_id = "static_magnetic_field_modify_step"
self.clearEvents()
pylo.init_ready[self.init_event_id] = self.initialize
pylo.before_approach[self.modify_step_event_id] = self.modifyStep
self._logger = pylo.logginglib.get_logger(self)
self.hint_shown = False
def __del__(self) -> None:
self.clearEvents()
def clearEvents(self) -> None:
"""Clear the events from the bound functions"""
if self.init_event_id in pylo.init_ready:
del pylo.init_ready[self.init_event_id]
if self.modify_step_event_id in pylo.before_approach:
del pylo.before_approach[self.modify_step_event_id]
def initialize(self, controller, *args, **kwargs) -> None:
"""Initialize the plugin."""
        # define the configuration options again; this time the controller
        # is known and therefore options can be generated
StaticMagneticFieldForTilt.defineConfigurationOptions(
controller.configuration, self.config_group_name,
self.config_defaults, controller=controller)
self.hint_shown = False
def getMeasurementVariableIdByName(self, controller: pylo.Controller,
name: str) -> str:
"""Get the measurement variable id from the given measurement variable
        `name`.
Raises
------
KeyError
When the `name` is not found
Parameters
----------
controller : pylo.Controller
The controller
name : str
The measurement variable name
Returns
-------
str
The measurement variable id
"""
for var in controller.microscope.supported_measurement_variables:
if ((var.has_calibration and var.calibrated_name == name) or
var.name == name):
return var.unique_id
raise KeyError(("Could not find a measurement variable with the " +
"name '{}'.").format(name))
def modifyStep(self, controller: pylo.Controller, *args, **kwargs):
"""Modify the step to keep a constant field."""
if self.hint_shown:
return
try:
tilt_name = controller.configuration.getValue(self.config_group_name,
"correct-variable")
except KeyError:
return
try:
tilt_id = self.getMeasurementVariableIdByName(controller, tilt_name)
except KeyError:
return
try:
field_name = controller.configuration.getValue(self.config_group_name,
"magnetic-field")
except KeyError:
return
try:
field_id = self.getMeasurementVariableIdByName(controller, field_name)
except KeyError:
if not self.hint_shown:
self.hint_shown = True
controller.view.showHint("The magnetic field cannot be kept " +
"constant because the magnetic field " +
"measurement variable is not set. " +
"The tilt correction plugin is now " +
"switched off.")
return
constant = controller.configuration.getValue(self.config_group_name,
"keep-constant")
in_deg = controller.configuration.getValue(self.config_group_name,
"tilt-in-degree",
datatype=bool)
pylo.logginglib.log_debug(self._logger, ("Preparing to correct H-field " +
"to keep it constant. Found " +
"tilt id '{}' and field id " +
"'{}', '{}'-field should be " +
"kept constant and tilt is " +
"{}measured in degrees").format(
tilt_id, field_id,
constant, "" if in_deg else "not "))
if (tilt_id in controller.measurement.current_step and
field_id in controller.measurement.current_step and
isinstance(controller.measurement.current_step[tilt_id], (int, float)) and
isinstance(controller.measurement.current_step[field_id], (int, float))):
tilt = controller.measurement.current_step[tilt_id]
field = controller.measurement.current_step[field_id]
if in_deg:
calc = "[rad({})".format(tilt)
tilt = math.radians(tilt)
calc += "={}]".format(tilt)
else:
calc = "{}".format(tilt)
if constant == "In-plane":
controller.measurement.current_step[field_id] = field / math.sin(tilt)
calc = "sin({})".format(calc)
elif constant == "Out-of-plane":
controller.measurement.current_step[field_id] = field / math.cos(tilt)
calc = "cos({})".format(calc)
pylo.logginglib.log_debug(self._logger, ("Changing '{}' to new " +
"value '{}' = {} / {}").format(
field_id,
controller.measurement.current_step[field_id],
field, calc))
@staticmethod
def defineConfigurationOptions(configuration, group, defaults, *args, **kwargs):
if ("controller" in kwargs and
isinstance(kwargs["controller"], pylo.Controller)):
field_type = [v.calibrated_name
if v.has_calibration and v.calibrated_name is not None
else v.name for v
in kwargs["controller"].microscope.supported_measurement_variables]
correction_type = field_type.copy()
field_type.insert(0, "Please select...")
correction_type.insert(0, "Off")
field_type = pylo.Datatype.options(field_type)
correction_type = pylo.Datatype.options(correction_type)
else:
field_type = str
correction_type = str
configuration.addConfigurationOption(group, "correct-variable",
datatype=correction_type,
description=("The tilt measurement variable id to correct. "+
"Use 'Off' to prevent tilt correction"))
configuration.addConfigurationOption(group, "magnetic-field",
datatype=field_type,
description=("The magnetic field measurement variable id. "+
"If the 'correct-variable' is 'Off', this value is " +
"ignored."))
configuration.addConfigurationOption(group, "keep-constant",
datatype=pylo.Datatype.options(("In-plane", "Out-of-plane")),
description=("The magnetic field to keep constant. Ignored if " +
"'correct-variable' is 'Off'."))
configuration.addConfigurationOption(group, "tilt-in-degree",
datatype=bool, default_value=True,
description=("Whether the tilt (the value of the measurement " +
"variable with the id of the 'correct-variable') is " +
"measured in degree (True) or in radians (False). " +
"Ignored if 'correct-variable' is 'Off'."))
|
from rest_framework.viewsets import GenericViewSet
from rest_framework.mixins import CreateModelMixin, ListModelMixin
from rest_framework.permissions import IsAuthenticated
from ..models import Order
from ..permissions import IsRegularUser
from .serializers import OrderSerializer
class OrderViewSet(CreateModelMixin, ListModelMixin, GenericViewSet):
queryset = Order.objects.all()
permission_classes = [IsAuthenticated, IsRegularUser]
serializer_class = OrderSerializer
def get_queryset(self):
return Order.objects.filter(user=self.request.user)
def perform_create(self, serializer):
serializer.save(user=self.request.user)
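# Hedged wiring sketch: how this viewset would typically be exposed in a
# urls.py via DRF's DefaultRouter (the 'orders' prefix is an assumption):
# from rest_framework.routers import DefaultRouter
# router = DefaultRouter()
# router.register('orders', OrderViewSet, basename='order')
# urlpatterns = router.urls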
|
# -*- coding: UTF-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
import os
import time
import json
import csv
import re
from math import *
from datetime import datetime
from bs4 import BeautifulSoup
BASE_URL = 'https://www.whoscored.com'
STATS_KEYS = [u'team', u'name', u'position', u'playMins', u'result', u'goals', u'assists', u'goalsConceded', u'penaltyConceded',
u'cornersTotal', u'aerialsWon', u'dribblesLost', u'shotsTotal', u'passesAccurate', u'tackleUnsuccesful',
u'defensiveAerials', u'aerialsTotal', u'offensiveAerials', u'passesTotal', u'throwInsTotal',
u'offsidesCaught', u'interceptions', u'ratings', u'touches', u'dispossessed', u'parriedSafe', u'claimsHigh',
u'clearances', u'throwInAccuracy', u'collected', u'parriedDanger', u'possession', u'shotsOffTarget', u'dribblesAttempted',
u'shotsOnPost', u'dribblesWon', u'cornersAccurate', u'tackleSuccess', u'throwInsAccurate', u'dribbleSuccess', u'errors',
u'aerialSuccess', u'shotsBlocked', u'tacklesTotal', u'tackleSuccessful', u'shotsOnTarget', u'dribbledPast',
u'passesKey', u'foulsCommited', u'totalSaves', u'passSuccess']
RATING_KEYS = ['team', 'name', 'position', 'adjustedRating', 'overallRating', 'parriedDangerRating', 'cccPassesRating', 'shotsAccuracyRating',
'errorsRating', 'goalsConcededRating', 'dribbleSuccessRating', 'aerialSuccessRating', 'collectedRating',
'totalSavesRating', 'dribbledPastRating', 'goalsRating', 'defenseThreeRatting', 'passesAccuracyRating']
RATING_CONFIG = {
}
def load_json(file_name):
with open(file_name) as json_data:
d = json.load(json_data)
return d
def write_json(file_name, json_data):
with open(file_name, 'w') as outfile:
json.dump(json_data, outfile)
return json_data
# script = soup.find('script', text=re.compile('calendarParameter'))
# json_text = re.search(r'calendarParameter\),\s+(\[\[(.|\n)*\])',
# script.string, flags=re.DOTALL | re.MULTILINE).group(1).replace("'", '"').rstrip('\r\n')
def get_fixtures(driver, league_url, only_now=True):
driver.get(league_url)
html = driver.page_source
fixture_url = driver.find_elements_by_xpath(
'//*[@id="sub-navigation"]/ul/li[2]/a')[0].get_attribute('href')
driver.get(fixture_url)
print(fixture_url)
fixtures = {}
total = len(fixtures)
season = '-'.join(fixture_url.split('-')[-2:])
while True:
html = driver.page_source
        soup = BeautifulSoup(html, 'html.parser')
table = soup.find('table', id='tournament-fixture')
trs = table.find_all('tr')
date = None
for tr in trs:
print(tr.text)
            if 'rowgroupheader' in (tr.get('class') or []):
date = str(datetime.strptime(tr.text.split(', ')[-1], '%b %d %Y').date())
else:
id = tr.attrs['data-id']
print(id)
if fixtures.has_key(id) and fixtures[id].get('status', 0) == 'FT':
continue
status = tr.find('td', class_='status').text
time_str = tr.find('td', class_='time').text
home = tr.find('td', class_='home').text
home_url = tr.find('td', class_='home').find('a').get('href')
away = tr.find('td', class_='away').text
away_url = tr.find('td', class_='away').find('a').get('href')
url = tr.find('td', class_='result').find('a').get('href')
result = tr.find('td', class_='result').text
fixtures[id] = {'status': status, 'time': time_str, 'date': date, 'season': season,
'home': home, 'home_url': home_url, 'away': away,
'away_url': away_url, 'result': result, 'url': url, 'id': id}
if len(fixtures) == total:
break
total = len(fixtures)
time.sleep(3)
print('Current Counts:', total)
elements = driver.find_elements_by_xpath(
'//*[@id="date-controller"]/a[1]')
if len(elements) == 0:
break
prev = driver.find_elements_by_xpath(
'//*[@id="date-controller"]/a[1]')[0]
ajax_click(driver, prev)
path = os.path.join('matches',
league_url.split('/')[-1], season)
if not os.path.exists(path):
os.makedirs(path)
write_json(path + '/fixtures.json', fixtures)
def parse_match(html):
    soup = BeautifulSoup(html, 'html.parser')
script = soup.find('script', text=re.compile('matchCentreData'))
json_text = re.search(r'matchCentreData\s+=\s+(\{.*?\});\n',
script.string, flags=re.DOTALL | re.MULTILINE).group(1)
data = json.loads(json_text)
return data
def parse_stats(file_name):
# keys:
# [u'startDate', u'periodCode', u'home', u'attendance', u'expandedMinutes',
# u'away', u'timeStamp', u'score', u'etScore', u'commonEvents', u'events', u'referee',
# u'maxMinute', u'elapsed', u'pkScore', u'startTime', u'weatherCode',
# u'expandedMaxMinute', u'periodMinuteLimits', u'timeoutInSeconds', u'periodEndMinutes',
# u'htScore', u'playerIdNameDictionary', u'maxPeriod', u'minuteExpanded', u'venueName',
# u'statusCode', u'ftScore']
data = load_json(file_name)
stats = []
penaltyConceded = {}
assists = {}
team_fields = ['home', 'away']
goals = {}
goals_mins = {'home': [], 'away': []}
for event in data['events']:
# penaltyConceded: 133
if 133 in event.get(u'satisfiedEventsTypes', []):
penaltyConceded[event[u'playerId']] = penaltyConceded.get(event['playerId'], 0) + 1
# assist: 91
if 91 in event.get(u'satisfiedEventsTypes', []):
assists[event[u'playerId']] = assists.get(event['playerId'], 0) + 1
for team in team_fields:
# keys:
# [u'averageAge', u'stats', u'name', u'incidentEvents', u'players', u'formations',
# u'countryName', u'field', u'teamId', u'scores', u'shotZones', u'managerName']
for event in data[team].get('incidentEvents', []):
if event.has_key('isGoal'):
goals[event[u'playerId']] = goals.get(event['playerId'], 0) + 1
goals_mins[team].append(int(event['expandedMinute']))
for team in team_fields:
other_team = team_fields[1 - team_fields.index(team)]
for player in data[team]['players']:
# keys:
# [u'shirtNo', u'stats', u'name', u'weight', u'playerId', u'age',
# u'height', u'isManOfTheMatch', u'field', u'isFirstEleven', u'position']
# stats keys:
# [u'cornersTotal', u'aerialsWon', u'dribblesLost', u'shotsTotal', u'passesAccurate',
# u'tackleUnsuccesful', u'defensiveAerials', u'aerialsTotal', u'offensiveAerials',
# u'passesTotal', u'throwInsTotal', u'dispossessed', u'interceptions', u'ratings',
# u'touches', u'offsidesCaught', u'parriedSafe', u'clearances', u'throwInAccuracy',
# u'collected', u'parriedDanger', u'possession', u'shotsOffTarget', u'dribblesAttempted',
# u'dribblesWon', u'cornersAccurate', u'tackleSuccess', u'throwInsAccurate', u'dribbleSuccess',
# u'errors', u'aerialSuccess', u'tacklesTotal', u'tackleSuccessful', u'shotsOnTarget',
# u'passesKey', u'dribbledPast', u'foulsCommited', u'shotsBlocked', u'totalSaves', u'passSuccess']
player_stats = {u'name': player['name'],
u'position': player['position'], u'team': data[team]['name']}
for key, values in player['stats'].iteritems():
                if key != u'ratings':
player_stats[key] = sum(values.values())
player_stats['goals'] = goals.get(player['playerId'], 0)
player_stats['penaltyConceded'] = penaltyConceded.get(player['playerId'], 0)
player_stats['assists'] = assists.get(player['playerId'], 0)
if player.has_key(u'isFirstEleven'):
player_stats['playMins'] = player.get('subbedOutExpandedMinute', 90)
player_stats['goalsConceded'] = sum(x <= player.get('subbedOutExpandedMinute', data['expandedMaxMinute']) for x in goals_mins[other_team])
else:
player_stats['playMins'] = abs(data['expandedMaxMinute'] - player.get('subbedInExpandedMinute', data['expandedMaxMinute']))
player_stats['goalsConceded'] = sum(x >= player.get('subbedInExpandedMinute', data['expandedMaxMinute']) for x in goals_mins[other_team])
stats.append(player_stats)
with open(file_name.replace('.json', '.csv'), 'wb') as output_file:
dict_writer = csv.DictWriter(
output_file, fieldnames=STATS_KEYS, restval=0)
dict_writer.writeheader()
dict_writer.writerows(stats)
def safe_division(a, b):
if b == 0:
return 0
else:
return a / b
def erfcc(x):
"""Complementary error function."""
z = abs(x)
t = 1. / (1. + 0.5*z)
r = t * exp(-z*z-1.26551223+t*(1.00002368+t*(.37409196+
t*(.09678418+t*(-.18628806+t*(.27886807+
t*(-1.13520398+t*(1.48851587+t*(-.82215223+
t*.17087277)))))))))
if (x >= 0.):
return r
else:
return 2. - r
def normcdf(x, mu, sigma):
t = x - mu
y = 0.5 * erfcc(-t / (sigma * sqrt(2.0)))
if y > 1.0:
y = 1.0
return y
def normpdf(x, mu, sigma):
u = (x - mu) / abs(sigma)
y = (1 / (sqrt(2 * pi) * abs(sigma))) * exp(-u * u / 2)
return y
def normdist(x, mu, sigma, f):
if f:
y = normcdf(x, mu, sigma)
else:
y = normpdf(x, mu, sigma)
return y
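# Hedged sanity check for the approximation above (a Numerical Recipes-style
# rational fit of the complementary error function): normcdf(0, 0, 1) should
# be 0.5 and normcdf(1.96, 0, 1) roughly 0.975, matching the standard normal CDF.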
def calculate_scores(file_name):
# stats keys:
# [u'cornersTotal', u'aerialsWon', u'dribblesLost', u'shotsTotal', u'passesAccurate',
# u'tackleUnsuccesful', u'defensiveAerials', u'aerialsTotal', u'offensiveAerials',
# u'passesTotal', u'throwInsTotal', u'dispossessed', u'interceptions', u'ratings',
# u'touches', u'offsidesCaught', u'parriedSafe', u'clearances', u'throwInAccuracy',
# u'collected', u'parriedDanger', u'possession', u'shotsOffTarget', u'dribblesAttempted',
# u'dribblesWon', u'cornersAccurate', u'tackleSuccess', u'throwInsAccurate', u'dribbleSuccess',
# u'errors', u'aerialSuccess', u'tacklesTotal', u'tackleSuccessful', u'shotsOnTarget',
# u'passesKey', u'dribbledPast', u'foulsCommited', u'shotsBlocked', u'totalSaves', u'passSuccess']
results = []
with open(file_name, "rb") as theFile:
reader = csv.DictReader(theFile)
keys = []
for line in reader:
team, name, position = line['team'], line['name'], line['position']
result = {}
if float(line['playMins']) > 0:
defenseThree = float(line['interceptions']) + float(line['tackleSuccessful']) + float(line['clearances'])
aerialSuccess = safe_division(float(line['aerialsWon']), float(line['aerialsTotal']))
shotsAccuracy = safe_division(float(line['shotsOnTarget']), float(line['shotsTotal']))
passesAccuracy = safe_division(float(line['passesAccurate']), float(line['passesTotal']))
errors = float(line['errors']) + float(line['penaltyConceded']) + float(line['dispossessed'])
cccPasses = float(line['passesKey']) + float(line['assists'])
if position in ['FW', 'FWL', 'FWR']:
result['shotsAccuracyRating'] = normdist(shotsAccuracy, 0.33, 0.21, True) * 0.2 * 100
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.33, 0.21, True) * 0.1 * 100
result['dribbleSuccessRating'] = normdist(float(line['dribbleSuccess']), 1, 1.42, True) * 0.05 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.7, 0.13, True) * 0.2 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 1.1, 0.7, True) * 0.05 * 100
result['cccPassesRating'] = float(line['passesKey']) * 6.0 * 90 / float(line['playMins'])# * 0.2
result['goalsRating'] = float(line['goals']) * 17.0 * 90 / float(line['playMins'])# * 0.2
elif position in ['AMC', 'SS', 'AML', 'AMR']:
result['shotsAccuracyRating'] = normdist(shotsAccuracy, 0.3, 0.09, True) * 0.05 * 100
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.3, 0.09, True) * 0.05 * 100
result['dribbleSuccessRating'] = normdist(float(line['dribbleSuccess']), 2, 1.42, True) * 0.15 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.8, 0.12, True) * 0.2 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 1.5, 0.43, True) * 0.1 * 100
result['cccPassesRating'] = float(line['passesKey']) * 5 * 90 / float(line['playMins'])# * 0.3
result['goalsRating'] = float(line['goals']) * 25 * 90 / float(line['playMins'])# * 0.2
elif position in ['WF', 'IF']:
result['shotsAccuracyRating'] = normdist(shotsAccuracy, 0.33, 0.15, True) * 0.05 * 100
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.3, 0.09, True) * 0.05 * 100
result['dribbleSuccessRating'] = normdist(float(line['dribbleSuccess']), 2, 1.42, True) * 0.15 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.75, 0.08, True) * 0.15 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 1.5, 0.75, True) * 0.1 * 100
result['cccPassesRating'] = float(line['passesKey']) * 6.25 * 90 / float(line['playMins'])# * 0.35
result['goalsRating'] = float(line['goals']) * 20 * 90 / float(line['playMins'])# * 0.15
result['errorsRating'] = 0.05 * 100 - errors * 25 / 90 * float(line['playMins'])
elif position in ['CM', 'LCM', 'RCM', 'MC', 'ML', 'MR']:
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.5, 0.21, True) * 0.05 * 100
result['dribbleSuccessRating'] = normdist(float(line['dribbleSuccess']), 3, 1.5, True) * 0.1 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.83, 0.08, True) * 0.2 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 3, 2.29, True) * 0.25 * 100
result['cccPassesRating'] = float(line['passesKey']) * 5 * 90 / float(line['playMins'])# * 0.25
result['goalsRating'] = float(line['goals']) * 20 * 90 / float(line['playMins'])# * 0.1
result['errorsRating'] = 0.1 * 100 - errors * 15 / 90 * float(line['playMins'])
result['dribbledPastRating'] = 0.05 * 100 - float(line['dribbledPast']) * 5 * 90 / float(line['playMins'])
elif position in ['DM', 'DMC']:
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.4, 0.21, True) * 0.1 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.85, 0.08, True) * 0.2 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 6.5, 3.6, True) * 0.3 * 100
result['cccPassesRating'] = float(line['passesKey']) * 5 * 90 / float(line['playMins'])# * 0.1
result['goalsRating'] = float(line['goals']) * 25 * 90 / float(line['playMins'])# * 0.05
result['errorsRating'] = 0.15 * 100 - errors * 25 / 90 * float(line['playMins'])
result['dribbledPastRating'] = 0.1 * 100 - (float(line['dribbledPast']) * 4 * 90 / float(line['playMins']))
elif position in ['DL', 'DR']:
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.5, 0.21, True) * 0.05 * 100
result['dribbleSuccessRating'] = normdist(float(line['dribbleSuccess']), 1, 0.7, True) * 0.15 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.75, 0.08, True) * 0.1 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 8, 3.6, True) * 0.3 * 100
result['cccPassesRating'] = float(line['passesKey']) * 5 * 90 / float(line['playMins'])# * 0.1
result['goalsRating'] = float(line['goals']) * 25 * 90 / float(line['playMins'])# * 0.05
result['errorsRating'] = 0.09 * 100 - errors * 15 * 90 / float(line['playMins'])
result['dribbledPastRating'] = 0.16 * 100 - (float(line['dribbledPast']) * 8 * 90 / float(line['playMins']))
elif position in ['DC']:
result['aerialSuccessRating'] = normdist(aerialSuccess, 0.5, 0.21, True) * 0.25 * 100
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.8, 0.08, True) * 0.1 * 100
result['defenseThreeRatting'] = normdist((defenseThree) * 90 / float(line['playMins']), 10, 7, True) * 0.3 * 100
result['cccPassesRating'] = float(line['passesKey']) * 5 * 90 / float(line['playMins'])# * 0.05
result['goalsRating'] = float(line['goals']) * 15 * 90 / float(line['playMins'])# * 0.05
result['errorsRating'] = 0.15 * 100 - errors * 30 * 90 / float(line['playMins'])
result['dribbledPastRating'] = 0.1 * 100 - (float(line['dribbledPast']) * 8 * 90 / float(line['playMins']))
elif position in ['GK']:
result['passesAccuracyRating'] = normdist(passesAccuracy, 0.67, 0.22, True) * 0.1 * 100
result['totalSavesRating'] = normdist(safe_division(float(line['totalSaves']), float(line['playMins'])) * 90, 3, 1.29, True) * 0.25 * 100
result['collectedRating'] = normdist(safe_division(float(line['collected']) + float(line['claimsHigh']), float(line['playMins'])) * 90, 1, 1.29, True) * 0.15 * 100
result['parriedDangerRating'] = normdist(safe_division(float(line['parriedDanger']), float(line['playMins'])) * 90, 0.1, 0.3, True) * 0.15 * 100
result['goalsConcededRating'] = 0.2 * 100 - (float(line['goalsConceded']) * 10 * 90 / float(line['playMins']))
result['errorsRating'] = 0.15 * 100 - errors * 25 * 90 / float(line['playMins'])
            result['overallRating'] = sum(result.values()) / 10
result['adjustedRating'] = result['overallRating'] * 0.4
result.update({'team': team, 'name': name, 'position': position})
results.append(result)
with open(file_name.replace('match.csv', 'rating.csv'), 'wb') as output_file:
dict_writer = csv.DictWriter(
output_file, fieldnames=RATING_KEYS, restval=0)
dict_writer.writeheader()
dict_writer.writerows(results)
# Url sample
# https://www.whoscored.com/Matches/1190270/Live/England-Premier-League-2017-2018-Liverpool-Manchester-United
def get_match(driver, url):
driver.get(url)
print(url)
time.sleep(3)
print('Load HTML Done')
season = re.search(r"2[0-9]{3}-2[0-9]{3}",url).group(0)
league = url.split('/')[-1].split(season)[0][:-1]
html = driver.page_source
data = parse_match(html)
print('Load JSON From HTML')
date = data['startDate'].split('T')[0]
home = data['home']['name']
away = data['away']['name']
path = os.path.join('matches', league, season, date, '-'.join([home, away]))
if not os.path.exists(path):
os.makedirs(path)
with open(path + '/match.html', 'w') as file:
file.write(html)
write_json(path + '/match.json', data)
parse_stats(path + '/match.json')
calculate_scores(path + '/match.csv')
print('All Done')
def element_visiable(driver, class_name):
    # Returns True if at least one element with the class is actually displayed.
    return len([
        e for e in driver.find_elements_by_class_name(class_name)
        if e.value_of_css_property('display') != 'none'
    ]) > 0
def ajax_complete(driver):
try:
return 0 == driver.execute_script(
"return jQuery.active"
) # and ((not element_visiable(driver, 'loading-wrapper')) or element_visiable(driver, 'statistics-table-tab'))
except WebDriverException:
pass
def ajax_click(driver, element):
driver.execute_script("arguments[0].click();", element)
WebDriverWait(driver, 10000).until(ajax_complete,
"Timeout waiting for page to load")
time.sleep(1)
BASE_URL = "https://www.whoscored.com"
pl_url = "https://www.whoscored.com/Regions/252/Tournaments/2/England-Premier-League"
ia_url = "https://www.whoscored.com/Regions/108/Tournaments/5/Italy-Serie-A"
league_urls = [
"https://www.whoscored.com/Regions/252/Tournaments/2/England-Premier-League",
"https://www.whoscored.com/Regions/81/Tournaments/3/Germany-Bundesliga",
"https://www.whoscored.com/Regions/108/Tournaments/5/Italy-Serie-A",
"https://www.whoscored.com/Regions/206/Tournaments/4/Spain-La-Liga",
"https://www.whoscored.com/Regions/74/Tournaments/22/France-Ligue-1",
"https://www.whoscored.com/Regions/250/Tournaments/12/Europe-UEFA-Champions-League",
"https://www.whoscored.com/Regions/250/Tournaments/30/Europe-UEFA-Europa-League",
"https://www.whoscored.com/Regions/177/Tournaments/21/Portugal-Liga-NOS",
"https://www.whoscored.com/Regions/155/Tournaments/13/Seasons/6826/Netherlands-Eredivisie",
]
def get_all_fixtures(driver, league_urls):
for league_url in league_urls:
get_fixtures(driver, league_url)
def get_all_matches(driver):
for dir_name in os.listdir('matches'):
league_path = os.path.join('matches', dir_name)
if os.path.isdir(league_path):
for season in os.listdir(league_path):
season_path = os.path.join(league_path, season)
if os.path.isdir(season_path):
                print(season_path)
fixtures = load_json(os.path.join(season_path, 'fixtures.json'))
log_path = os.path.join(season_path, 'log.json')
logs = []
if os.path.exists(log_path):
logs = load_json(log_path)
for id, match in fixtures.iteritems():
url = BASE_URL+match['url']
                    if id not in logs and match.get('status', 0) == 'FT':
print(url, 'Start')
start_time = time.time()
get_match(driver, url)
time.sleep(5)
print("Match cost: --- %s seconds ---" % (time.time() - start_time))
print(url, 'Done')
logs.append(id)
write_json(log_path, logs)
options = webdriver.ChromeOptions()
# options.add_argument('headless')
# options.add_argument('window-size=1200x600')
driver = webdriver.Chrome(executable_path="chromedriver", chrome_options=options)
driver.implicitly_wait(100)
get_all_matches(driver)
driver.quit()
|
import random
from collections import Counter
import matplotlib.pyplot as plt
import math
# multiply a vector by a scalar
def scalar_multiply(escalar, vetor):
return [escalar * i for i in vetor]
def vector_sum(vetores):
resultado = vetores[0]
for vetor in vetores[1:]:
resultado = [resultado[i] + vetor[i] for i in range(len(vetor))]
return resultado
def vector_mean(vetores):
return scalar_multiply(1 / len(vetores), vector_sum(vetores))
def dot(v, w):
return sum(v_i * w_i for v_i, w_i in zip(v, w))
def vector_subtract(v, w):
return [v_i - w_i for v_i, w_i in zip(v, w)]
def sum_of_squares(v):
return dot(v, v)
def squared_distance(v, w):
return sum_of_squares(vector_subtract(v, w))
class KMeans:
def __init__(self, k, means=None):
self.k = k
self.means = means
def classify(self, ponto):
return min(range(self.k), key=lambda i: squared_distance(ponto, self.means[i]))
def train(self, pontos):
        # choose k random points as the initial means
        self.means = random.sample(pontos, self.k)
        assignments = None
        while True:
            # assign each point an integer 0 <= i < k (index of its closest mean)
            new_assignments = list(map(self.classify, pontos))
            if new_assignments == assignments:
                return
            assignments = new_assignments
            for i in range(self.k):
                # e.g. [1, 2, 3] and [1, 2, 2]
                i_points = [p for p, a in zip(pontos, assignments) if a == i]
                if i_points:
                    self.means[i] = vector_mean(i_points)
def test_kmeans():
dados = [[1], [2], [3], [6], [7], [10], [11]]
kmeans = KMeans(3, [[2], [10], [11]])
kmeans.train(dados)
print(kmeans.means)
def gera_sexo(quantidade_de_usuarios_na_rede):
    sexo = []
    for i in range(quantidade_de_usuarios_na_rede):
        if random.randint(0, 10) % 2 == 0:
            sexo.append((i, "M"))
        else:
            sexo.append((i, "F"))
return sexo
def quantidade_de_amigos_por_sexo(gera_sexo):
cont_sexo = []
m = 0
f = 0
for j , i in gera_sexo:
if (i == "M"):
m += 1
else:
f += 1
cont_sexo.append(m)
cont_sexo.append(f)
return (cont_sexo)
def gera_idade(quantidade_de_usuarios_na_rede):
idades = []
for i in range(quantidade_de_usuarios_na_rede):
idades.append((i,random.randint(18,60)))
return idades
def gera_histograma_amigo_por_sexo(quantidade_de_amigos_por_sexo, qtde_usuarios_na_rede):
x= ["M", "F"]
xs = x
ys = quantidade_de_amigos_por_sexo
plt.bar(xs, ys)
plt.axis([-1, 2, 0, qtde_usuarios_na_rede ])
plt.title("Histograma da Contagem de Amigos por sexo")
plt.xlabel("sexo")
plt.ylabel("# de amigos")
plt.show()
def quantidade_de_usuarios_na_rede():
return 100
def gera_amizades(numero_conexoes_desejado, qtde_usuarios_na_rede):
conexoes = []
for i in range(numero_conexoes_desejado):
while True:
u1 = random.randint(0, qtde_usuarios_na_rede - 1)
u2 = random.randint(0, qtde_usuarios_na_rede - 1)
if u1 != u2:
conexoes.append((u1, u2))
break
return [aux for aux in set(conexoes)]
def quantidade_de_amigos(amizades):
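    # Count outgoing and incoming connections per user, add them into a total
    # degree, then histogram the degrees (friend count -> number of users).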
a = Counter(i for i, _ in amizades)
b = Counter(i for _, i in amizades)
tudo = a + b
return Counter(x for x in tudo.values())
def gera_intencao(quantidade_de_usuarios_na_rede):
    intencao = []
    for i in range(quantidade_de_usuarios_na_rede):
        if random.randint(0, 10) % 2 == 0:
            intencao.append((i, "Haddad"))
        else:
            intencao.append((i, "Bolsonaro"))
return intencao
def main():
print (gera_intencao(100))
main()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render, reverse, HttpResponse, redirect
from models import *
from django.contrib import messages
import bcrypt
import re
from django.core.urlresolvers import reverse
from django.core import serializers
import json
from django.http import JsonResponse
from django.utils import timezone
from django.http import HttpResponseRedirect
from django.contrib.auth import authenticate
from django.contrib.auth import logout as logout_funct
from django.contrib.auth import login as login_funct
def home(request):
return redirect(reverse('resources:home'))
def login(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        user = authenticate(request, username=username, password=password)
        if user is not None:
            login_funct(request, user)
            return HttpResponseRedirect(request.META.get('HTTP_REFERER'))
return redirect(reverse('main:home'))
def logout(request):
logout_funct(request)
return redirect(reverse('main:home'))
# def authenticate(request):
# if request.method !='POST':
# return redirect(reverse('main:register'))
# username = request.POST['username']
# password = request.POST['password']
# check = password
# this_user = User.objects.get(username = username)
# this_user.save()
# if bcrypt.checkpw(password.encode(), this_user.password.encode()):
# request.session['current_user_id'] = this_user.id
# return redirect(reverse('resources:home'))
# else:
# return redirect(reverse('main:login'))
# def register(request):
# return render(request, "main/register.html")
# def create_user(request):
# if request.method !='POST':
# return redirect(reverse('main:register'))
# # errors=User.objects.register_validator(request.POST)
# # if len(errors):
# # for error,error_message in errors.iteritems():
# # messages.error(error_message)
# # return redirect(reverse('main:register'))
# email = request.POST['email']
# username = request.POST['username']
# first_name = request.POST['first_name']
# last_name = request.POST['last_name']
# password = request.POST['password']
# confirm = request.POST['confirm']
# if password != confirm:
# return redirect(reverse('main:register'))
# hashed_key=bcrypt.hashpw(password.encode('utf-8'), bcrypt.gensalt())
# User.objects.create(first_name = first_name, last_name = last_name, email = email, password=hashed_key,username=username)
# request.session['current_first_name'] = first_name
# request.session['current_last_name'] = last_name
# request.session['current_email'] = email
# return redirect(reverse('main:new_user_success'))
# def new_user_success(request):
# return render(request, 'main/success.html')
# Create your views here.
|
import datetime as dt
import matplotlib.pyplot as plt
from matplotlib import style
import pandas as pd
import pandas_datareader.data as web
style.use('ggplot')
start = dt.datetime(2000,1,1)
end = dt.datetime(2016,12,31)
# Here in DataReader, parameters are stock_tick, website/source, start, end.
df = web.DataReader('TSLA','yahoo',start,end)
# print(df.head())
# It seems that this is not working properly. There is some error when using pandas_datareader.
# Instead, I'll use my own web scraper to fetch all of the relevant data.
df.to_csv('tsla.csv')
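# Hedged follow-up sketch: reading the snapshot back with dates as the index.
# The 'Date'/'Adj Close' column names follow Yahoo's usual layout (an assumption):
# df = pd.read_csv('tsla.csv', parse_dates=True, index_col='Date')
# print(df[['Adj Close']].head())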
|
import sys,pygame
from pygame.locals import *
class State():
def __init__(self,screen,rm):
self.screen = screen
self.rm = rm
self.last_keystate = {}
def _draw(self):
pass
def _step(self):
pass
def _input(self,im):
# key input handling
if im.keystate[K_ESCAPE]:
pygame.display.quit()
sys.exit()
|
#!/usr/bin/env python
# encoding: utf-8
"""
Created by 'bens3' on 2013-06-21.
Copyright (c) 2013 'bens3'. All rights reserved.
python tasks/mongo_multimedia.py --local-scheduler --date 20150115
"""
import luigi
from ke2mongo.tasks.mongo import MongoTask
class MongoMultimediaTask(MongoTask):
"""
Import Multimedia Export file into MongoDB
"""
module = 'emultimedia'
def process_record(self, data):
        # Add embargo date = 0 so we don't have to query on field existence (which doesn't use the index)
        if 'NhmSecEmbargoDate' not in data:
            data['NhmSecEmbargoDate'] = 0
        # As above - make the field indexable
        if 'GenDigitalMediaId' not in data:
            data['GenDigitalMediaId'] = 0
return super(MongoMultimediaTask, self).process_record(data)
def on_success(self):
"""
On completion, add mime format index
http://www.nhm.ac.uk/emu-classes/class.EMuMedia.php only works with jpeg + jp2 so we need to filter images
@return: None
"""
self.collection = self.get_collection()
# Need to filter on web publishable
self.collection.ensure_index('AdmPublishWebNoPasswordFlag')
# And embargo date
self.collection.ensure_index('NhmSecEmbargoDate')
# Add MAM GUID field
self.collection.ensure_index('GenDigitalMediaId')
if __name__ == "__main__":
luigi.run(main_task_cls=MongoMultimediaTask)
|
"""
The url shortener backend.
"""
from fastapi import FastAPI
from backend.routes import router
def build_app(app: FastAPI) -> FastAPI:
app.include_router(router)
return app
app = build_app(FastAPI())
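# Hedged run/debug notes (the module path 'backend.app' is an assumption):
#   uvicorn backend.app:app --reload
# or exercise the app in-process with FastAPI's test client:
# from fastapi.testclient import TestClient
# client = TestClient(app)
# print(client.get("/").status_code)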
|
def wordBreak(s, dict):
    # True if s can be segmented into a sequence of words from dict.
    if s == "":
        return True
    for word in dict:
        # if word is a prefix of s, try to break the remainder of the string
        if s.startswith(word) and wordBreak(s[len(word):], dict):
            return True
    return False
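# Hedged improvement sketch: the recursion above re-solves the same suffixes,
# which is exponential in the worst case. Caching the result per suffix keeps
# it polynomial. (word_break_memo is a new name, not the original function.)
def word_break_memo(s, words, _cache=None):
    if _cache is None:
        _cache = {}
    if s == "":
        return True
    if s in _cache:
        return _cache[s]
    _cache[s] = any(s.startswith(w) and word_break_memo(s[len(w):], words, _cache)
                    for w in words)
    return _cache[s]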
|
from django.shortcuts import render, Http404, HttpResponse, HttpResponseRedirect
#import matplotlib.pyplot as plt
import pygal
import pygal.style
import io
from .utils import *
from .forms import NewDeviceForm, RemoveDeviceForm, EditDeviceForm
from .models import Device, LastUpdated, MACtoPort, IgnoredPort
from django.core.urlresolvers import reverse
from django.core.exceptions import ObjectDoesNotExist
from easysnmp import exceptions
import socket
import time
def main(request):
"""
Returns the front page of the website.
Please note that the online chart is generated separately and embedded into this page.
"""
pagevars = {'title': "NetStatus Dashboard"}
return render(request, 'base_index.html', pagevars)
def piechart_online(request):
"""
Generates a pie chart based on the number of online/offline devices on the network (those tracked by NetStatus).
Returns an SVG image of the chart (which is never stored on disk - only in memory).
"""
# Queries the database to get all the device rows
devlist = Device.objects.all()
online = 0
offline = 0
    # Checks if every device in the list is online or offline by using the 'ping' function. A total of online/offline
# devices is created and each individual device's status is updated in the database.
for device in devlist:
if ping(device.ipv4_address):
online = online + 1
device.online = True
device.save()
else:
offline = offline + 1
device.online = False
device.save()
custom_style = pygal.style.Style(
background='transparent',
colors=("#006600", "#ff0000") # Colours (red and green) for the offline/online status
)
pie_chart = pygal.Pie(style=custom_style, human_readable=True, print_values=True)
pie_chart.title = "Number of online and offline devices on page load"
pie_chart.add("Online devices", online)
pie_chart.add("Offline devices", offline)
#pie_chart.render_to_file(:memory:)
# Returns a SVG image only - not a web page
# The browser and user wouldn't know this is dynamically generated.
return pie_chart.render_django_response()
def device_list(request):
"""
Returns a list of SNMP enabled devices in the school. These will most likely be switches.
Gets data from backend database.
"""
set_of_devices = Device.objects.all()
pagevars = {'title': "NetStatus Device List", 'set_of_devices': set_of_devices}
return render(request, 'base_device_list.html', pagevars)
def device_new(request):
"""
A page for creating a new entry in the database for a new device, gets the user submitted values from the form.
X and Y co-ordinates are also obtained from the Javascript in the HTML page.
"""
# Only go if user submits a form
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = NewDeviceForm(request.POST)
# Checks if x and y are actually numbers, in case the user has played with the Javascript
if form.is_valid():
if not ping(form.cleaned_data['ipv4_address']):
# If the server cannot contact the device that the user has been specified, then let the user know
# and do not let them add the device.
return render(request, 'base_device_new.html', {'title': "NetStatus New Device",
'error': "Error: A SNMP session could not be set up "
"with the device you entered. Please make sure"
" that the IPv4 address is correct and the "
"device is online before continuing.",
'form': form.as_p()})
# Connect to SNMP agent, which we already know is online, so don't need to check status again.
session = setup_snmp_session(form.cleaned_data['ipv4_address'])
description = session.get('sysDescr')
online = True # We know this because we just 'pinged' the device
# Create Device object with information the user submitted
device = Device(name=form.cleaned_data['name'], ipv4_address=form.cleaned_data['ipv4_address'],
location_x=form.cleaned_data['location_x'], location_y=form.cleaned_data['location_y'],
online=online, system_version=description)
# Add the entry to the database
device.save()
# Redirect the user to the success page
return HttpResponseRedirect(reverse('new-device-success'))
# if a GET (or any other method) we'll create a blank form
else:
form = NewDeviceForm()
# Output the page to the user
pagevars = {'title': "NetStatus New Device", 'form': form.as_p()}
return render(request, 'base_device_new.html', pagevars)
def device_new_success(request):
"""
Lets the user know their device was added successfully and gives them options on what to do next.
This is effectively a static view.
"""
# Output the page to the user
pagevars = {'title': "NetStatus New Device Success"}
return render(request, 'base_device_new_success.html', pagevars)
def device_remove(request):
"""
    A page for removing devices from the database. Can be used for mis-entered devices, or devices that are no longer
in use.
"""
# Only go if user submits a form
if request.method == 'POST':
# create a form instance and populate it with data from the request:
form = RemoveDeviceForm(request.POST)
if form.is_valid():
# Get the primary key of the item chosen in the dropdown menu
id = form.cleaned_data['choose_device'].id
# Get the Device object relating to that primary key
device = Device.objects.get(pk=id)
# Remove this from the database
device.delete()
# Redirect the user back to the remove device page
return HttpResponseRedirect(reverse('remove-device'))
# if a GET (or any other method) we'll create a blank form
else:
form = RemoveDeviceForm()
    # Output page to the user's browser
pagevars = {'title': "NetStatus Remove Device", 'form': form.as_p()}
return render(request, 'base_device_remove.html', pagevars)
def device_edit_db(request, id):
"""
A page for editing the database attributes of a device, uses a ModelForm to populate data from the database, and
changes are reflected from user changes when submitting the form.
"""
# Checks that the requested ID does actually belong to a device
try:
device = Device.objects.get(pk=id)
except ObjectDoesNotExist:
# ID does not belong to a device
raise Http404
except ValueError:
        # Text in an int-only field
raise Http404
# If the user submits the form...
if request.method == "POST":
# Using the POST data to populate the EditDeviceForm, editing an existing object, in this case the device
# defined earlier
form = EditDeviceForm(request.POST, instance=device)
if form.is_valid():
# Update form information in the database
form.save()
# Redirect the user to the editing device success page
return HttpResponseRedirect(reverse('device-edit-success'))
else:
# Create a new form instance, pre populating it with data from the device object so it can be edited by the
# user.
form = EditDeviceForm(instance=device)
    # Output page to the user's browser with the following information sent to the page template
pagevars = {'title': "NetStatus Edit Device", 'form': form.as_p(), 'id': id, 'device': device}
return render(request, "base_device_edit_db.html", pagevars)
def device_edit_snmp(request, id):
"""
Page for editing the SNMP attributes of a device. Returns form with pre-entered values from the device.
Sets SNMP attributes based on submitted form.
"""
# Checks that the requested ID does actually belong to a device
try:
device = Device.objects.get(pk=id)
except ObjectDoesNotExist:
# ID does not belong to a device
raise Http404
except ValueError:
# Text in int field
raise Http404
# Check the device is online before getting or changing attributes - this is important as we are editing SNMP
# attributes, which are stored directly on the device.
if not ping(device.ipv4_address):
# If its not online, then we won't be able to get and therefore change these attributes
pagevars = {'title': 'Connection to device failed', 'info': 'Error, connection to the device specified failed. '
'The device may be offline, or not accepting SNMP '
'requests. Sorry, this means that the system will '
'be unable to edit the SNMP based attributes of the'
' device.'}
return render(request, "base_error.html", pagevars)
# If the user submits the form
if request.method == "POST":
# Get users input from form
sysName = request.POST.get('sysName')
sysLocation = request.POST.get('sysLocation')
sysContact = request.POST.get('sysContact')
# Establish SNMP session with device
session = setup_snmp_session(device.ipv4_address)
# Set device SNMP variables to user input values
# .0 is required here to edit the element
try:
session.set("sysName.0", sysName)
session.set("sysLocation.0", sysLocation)
session.set("sysContact.0", sysContact)
except (exceptions.EasySNMPTimeoutError, exceptions.EasySNMPError):
# For some reason the EasySNMP library returns a timeout error when it cannot set attributes for certain
# models of switches. (HP 1910-16G). The EasySNMPError exception covers noAccess (permission denied to edit)
pagevars = {'title': 'Editing attributes failed!', 'info': 'Error! The device you are trying to edit has'
' its community string set to read only mode. '
'Unfortunately, this means NetStatus cannot edit'
' the SNMP attributes of the device.'}
return render(request, "base_error.html", pagevars)
# To get to this stage, editing will have been successful so redirect the user to the editing device success
# page
return HttpResponseRedirect(reverse('device-edit-success'))
# Establish SNMP session with device
session = setup_snmp_session(device.ipv4_address)
# Get the system attributes we want the user to be able to edit from the device
sysName = session.get("sysName.0")
sysLocation = session.get("sysLocation.0")
sysContact = session.get("sysContact.0")
# Output the page to the user, with the following information sent to the page template
pagevars = {'title': "NetStatus Edit Device", 'device': device, 'sysName': sysName, 'sysLocation': sysLocation,
'sysContact': sysContact, 'id': id}
return render(request, "base_device_edit_snmp.html", pagevars)
def device_edit_success(request):
"""
Tell the user that editing the device was a success.
"""
pagevars = {'title': "NetStatus Edit Device success"}
return render(request, "base_device_edit_success.html", pagevars)
def device_info(request, id):
"""
Page for getting information via SNMP from a device, and outputting it to the user. Gets system based attributes
and logging information.
"""
# Checks that the requested ID does actually belong to a device
try:
device = Device.objects.get(pk=id)
except ObjectDoesNotExist:
# ID does not belong to a device
raise Http404
except ValueError:
# Text in an int only field
raise Http404
# Make sure device is online before trying to get any information from it
if not ping(device.ipv4_address):
pagevars = {'title': 'Connection to device failed', 'info': 'Error, connection to the device specified failed. '
'The device may be offline, or not accepting SNMP '
'requests.'}
return render(request, "base_error.html", pagevars)
# Establish SNMP session with the device
session = setup_snmp_session(device.ipv4_address)
# Get a list of system items from the device
system_items = session.walk('system')
system_information = {}
# Iterate over this list and put the items in a dictionary with OID -> OID Value (the rest of the information has
# little use to us)
for i in system_items:
if i.oid != 'sysUpTimeInstance':
system_information[i.oid] = i.value
else:
            # Convert the timeticks value of this OID to days so it's easier for the user to read
system_information[i.oid] = int(timeticks_to_days(int(i.value)))
# Get a list of log items from the device
log_items = session.walk('mib-2.16.9.2.1.4')
log_items_strings = []
# Iterate over this list
for item in log_items:
# This means that only items that are classed as warnings will be shown to the user.
# Informational alerts are less useful eg. show when a port has been connected and disconnected.
if item.value.startswith('W'):
log_items_strings.append(item.value)
# Output the page to the user with the following attributes sent to the template
pagevars = {'title': "NetStatus for " + device.name, 'system_information': system_information,
'log_items_strings': log_items_strings, 'device': device}
return render(request, "base_device_info.html", pagevars)
def search(request):
"""
Lets the user search for a device on the whole network, assuming it is connected to one of the switches tracked
by NetStatus.
First gets the MAC address of the device to find, and then gets the LLDP port tables (to get a list of ports to
ignore), MAC address tables and port tables from all of the switches on the system.
Checks the MAC address against the filtered MAC address tables to see where on the network the device is.
"""
# If the user submits the form...
if request.method == "POST":
# Get the (hopefully) IPv4 address the user entered
user_input = request.POST.get('ipv4_address')
# If the user has requested to delete the cached objects
if 'delcache' in request.POST:
# The user has requested that we delete all cached items
try:
# Get first row of the LastUpdated model
last_updated = LastUpdated.objects.get(pk=1)
# Setting the time stamps to 0 will force the system to regrab any results as 0 indicates that the
# last updated time was Thurs 1st Jan 1970 at 00:00:00 GMT.
last_updated.ignored_port = 0
last_updated.mac_to_port = 0
# Update database entry
last_updated.save()
            except ObjectDoesNotExist:
# This is in case the user clicks 'Delete cache' before any searches have even been made!
# Initialises the first object in the LastUpdated table
last_updated = LastUpdated(mac_to_port=0, ignored_port=0)
last_updated.save()
# Delete all the objects in the IgnoredPort and MACtoPort models
IgnoredPort.objects.all().delete()
MACtoPort.objects.all().delete()
# Output page with message telling user the cache was cleared
pagevars = {'title': "Search for a device", 'message': "Cache cleared successfully!"}
return render(request, "base_search.html", pagevars)
try:
# Tries to establish a socket with the IP address the user provided. Will error if the address is not
# correct/valid etc.
socket.inet_aton(user_input)
except socket.error:
pagevars = {'title': "Search for a device", 'message': "Error: The IPv4 address you specified was not "
"valid!"}
return render(request, "base_search.html", pagevars)
# Call the get_mac_address function in utils.py to get the MAC address of the IP address the user provided.
mac_to_find = get_mac_address(user_input)
# ERR_PING_FAIL returned when get_mac_address cannot ping the specified device
if mac_to_find == "ERR_PING_FAIL":
pagevars = {'title': "Search for a device", 'message': "Error: The system could not ping the device you "
"specified! The device firewall may be "
"preventing this."}
return render(request, "base_search.html", pagevars)
# get_mac_address either didn't get a MAC address at all, or the one it did get wasn't the correct number of
# characters
if mac_to_find == "ERR_ARP_FAIL" or mac_to_find == "ERR_MALFORMED_MAC":
pagevars = {'title': "Search for a device", 'message': "Error: The system could not get the MAC address of "
"the device you specified."}
return render(request, "base_search.html", pagevars)
device_list = Device.objects.all()
# To speed things up, the search system will generally used cached results in the database.
# The ignored ports will only be rechecked if a week has passed, as these are likely to rarely change.
# The MAC address to port results need to be updated more regularly as this data changes more often, so a value
# of 1 day has been chosen. The user can always choose to clear the cached results and start a search from
# scratch if they are having problems finding a correct location.
# Note: EasySNMPTimeoutError will be thrown when EasySNMP has problems connecting to a switch (even if it is
# online)
# A try statement is used in case this is the first run of the program. In this instance, the LastUpdated object
# with PK 1 will not exist yet. If the search function has been run before, then PK 1 will exist so the system
# can continue as normal.
try:
last_updated = LastUpdated.objects.get(pk=1)
# 604800 seconds = 1 week
if int(time.time()) >= last_updated.ignored_port + 604800:
last_updated.ignored_port = int(time.time())
last_updated.save()
# We delete the existing objects every time to 'purge' the cache
IgnoredPort.objects.all().delete()
try:
# Run the update_ignored_ports function in utils.py
update_ignored_ports(device_list)
except exceptions.EasySNMPTimeoutError:
                    # If connecting to a switch does fail...
pagevars = {'title': "Search for a device", 'message':
"Error: The system could not contact a switch during the search."}
return render(request, "base_search.html", pagevars)
            # 86400 seconds = 1 day
            if int(time.time()) >= last_updated.mac_to_port + 86400:
last_updated.mac_to_port = time.time()
last_updated.save()
# We delete the existing objects every time to 'purge' the cache
MACtoPort.objects.all().delete()
try:
# Run the update_mac_to_port function in utils.py
update_mac_to_port(device_list)
except exceptions.EasySNMPTimeoutError:
# If connecting to a switch does fail...
pagevars = {'title': "Search for a device", 'message':
"Error: The system could not contact a switch during the search."}
return render(request, "base_error.html", pagevars)
except ObjectDoesNotExist:
# This is the first run of the searching algorithm, so we need to initialise the PK 1 of LastUpdated
last_updated = LastUpdated(mac_to_port=int(time.time()), ignored_port=int(time.time()))
last_updated.save()
try:
# Run through the algorithm fully. We don't need to check last updated times as this is the first run.
update_ignored_ports(device_list)
update_mac_to_port(device_list)
except exceptions.EasySNMPTimeoutError:
# If connecting to a switch does fail...
pagevars = {'title': "Search for a device", 'message':
"Error: The system could not contact a switch during the search."}
return render(request, "base_search.html", pagevars)
        # .1.3.6.1.2.1.17.4.3.1.1 (dot1dTpFdbAddress, the BRIDGE-MIB MAC address table)
try:
# Get the first result from the database in the MACtoPort table corresponding with the MAC address of the
# device the user entered.
mac_to_port_info = MACtoPort.objects.all().filter(mac_address__exact=mac_to_find).first()
if mac_to_port_info is not None:
# Get the corresponding switch attributes from the database, this is so we can tell the user the switch
# and which port the device is connected to.
device = Device.objects.get(id=mac_to_port_info.device_id)
pagevars = {'title': "Device search results", 'device': device, 'mac_to_port_info': mac_to_port_info}
return render(request, "base_search_result.html", pagevars)
else:
# Sometimes mac_to_port_info will return None when nothing is found, so this fixes that issue and
# tells the user that no results have been found
pagevars = {'title': "Device search returned no results"}
return render(request, "base_search_noresult.html", pagevars)
except ObjectDoesNotExist:
# The MAC address could not be found in the database - so the device could have been added recently,
# or its on a switch that we just don't track (eg. behind an IP phone).
pagevars = {'title': "Device search returned no results"}
return render(request, "base_search_noresult.html", pagevars)
# Output the search page to the user
pagevars = {'title': "Search for a device"}
return render(request, "base_search.html", pagevars)
|
"""Module for the root endpoint of the debug routes.
Contains the blueprint to avoid circular dependencies."""
from flask import Blueprint, render_template
DEBUG_BLP = Blueprint(
"debug-routes", __name__, template_folder="templates", url_prefix="/debug"
)
@DEBUG_BLP.route("/")
@DEBUG_BLP.route("/index")
def index():
return render_template("debug/index.html", title="Flask Template – Debug")
|
import os
import sys
import numpy as np
from setuptools import setup, Extension, find_packages
# HIDE WARNING:
# cc1plus: warning: command line option "-Wstrict-prototypes" is valid for C/ObjC but not for C++
from distutils.sysconfig import get_config_vars
cfg_vars = get_config_vars()
for k, v in cfg_vars.items():
if isinstance(v, str):
cfg_vars[k] = v.replace("-Wstrict-prototypes", "")
print('Begin: %s' % ' '.join(sys.argv))
instdir_env = os.environ.get('INSTDIR')
if not instdir_env:
raise Exception('Parameter --instdir is missing (INSTDIR environment variable is not set)')
instdir = instdir_env
# A shorter BUILD_LIST can be used to speed up the development loop.
# Command example: ./build_all.sh -b PEAKFINDER:HEXANODE:CFD -md
BUILD_LIST = ('PSANA','SHMEM','PEAKFINDER','HEXANODE','DGRAM','HSD','CFD','NDARRAY') # 'XTCAV' currently excluded
build_list_env = os.environ.get('BUILD_LIST')
if build_list_env:
BUILD_LIST = build_list_env.split(':')
# allows a version number to be passed to the setup
VERSION = '0.0.0'
version_env = os.environ.get('VERSION')
if version_env:
VERSION = version_env
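# Example invocation (an assumed command line; builds are normally driven by build_all.sh):
#   INSTDIR=$PWD/install BUILD_LIST=PSANA:DGRAM VERSION=1.2.3 python setup.py install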
print('-- psana.setup.py build extensions : %s' % ' '.join(BUILD_LIST))
print('-- psana.setup.py install directory : %s' % instdir)
print('-- psana.setup.py include sys.prefix: %s' % sys.prefix)
print('-- psana.setup.py np.get_include() : %s' % np.get_include())
if sys.platform == 'darwin':
# Flag -Wno-cpp hides warning:
#warning "Using deprecated NumPy API, disable it with " "#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION" [-W#warnings]
macos_sdk_version_arg = '-mmacosx-version-min=10.9'
extra_c_compile_args = ['-Wno-#warnings', macos_sdk_version_arg]
extra_cxx_compile_args = ['-std=c++11', '-Wno-#warnings', macos_sdk_version_arg]
extra_link_args = [macos_sdk_version_arg]
# Use libgomp instead of the version provided by the compiler. Passing plain -fopenmp uses the llvm version of OpenMP
# which appears to have a conflict with the numpy we use from conda. numpy uses Intel MKL which itself uses OpenMP,
# but this seems to cause crashes if you use the llvm OpenMP in the same process.
openmp_compile_args = ['-fopenmp=libgomp']
openmp_link_args = ['-fopenmp=libgomp']
else:
# Flag -Wno-cpp hides warning:
#warning "Using deprecated NumPy API, disable it with " "#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION" [-Wcpp]
extra_c_compile_args=['-Wno-cpp']
extra_cxx_compile_args=['-std=c++11', '-Wno-cpp']
extra_link_args = []
# Use the version of openmp provided by the compiler
openmp_compile_args = ['-fopenmp']
openmp_link_args = ['-fopenmp']
extra_link_args_rpath = extra_link_args + ['-Wl,-rpath,'+ os.path.abspath(os.path.join(instdir, 'lib'))]
CYT_BLD_DIR = 'build'
from Cython.Build import cythonize
# defaults if the build list is empty
PACKAGES = []
EXTS = []
CYTHON_EXTS = []
INSTALL_REQS = []
PACKAGE_DATA = {}
ENTRY_POINTS = {}
if 'PSANA' in BUILD_LIST :
dgram_module = Extension('psana.dgram',
sources = ['src/dgram.cc'],
libraries = ['xtc'],
include_dirs = ['src', np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
extra_link_args = extra_link_args_rpath,
extra_compile_args = extra_cxx_compile_args)
container_module = Extension('psana.container',
sources = ['src/container.cc'],
libraries = ['xtc'],
include_dirs = [np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
extra_link_args = extra_link_args_rpath,
extra_compile_args = extra_cxx_compile_args)
PACKAGES = find_packages()
PACKAGE_DATA = {'psana.graphqt': ['data/icons/*.png','data/icons/*.gif']}
EXTS = [dgram_module, container_module]
INSTALL_REQS = [
'numpy',
]
ENTRY_POINTS = {
'console_scripts': [
'convert_npy_to_txt = psana.pyalgos.app.convert_npy_to_txt:do_main',
'convert_txt_to_npy = psana.pyalgos.app.convert_txt_to_npy:do_main',
'merge_mask_ndarrays = psana.pyalgos.app.merge_mask_ndarrays:do_main',
'merge_max_ndarrays = psana.pyalgos.app.merge_max_ndarrays:do_main',
'cdb = psana.pscalib.app.cdb:cdb_cli',
'proc_info = psana.pscalib.app.proc_info:do_main',
'proc_control = psana.pscalib.app.proc_control:do_main',
'proc_new_datasets = psana.pscalib.app.proc_new_datasets:do_main',
'timeconverter = psana.graphqt.app.timeconverter:timeconverter',
'calibman = psana.graphqt.app.calibman:calibman_gui',
'hdf5explorer = psana.graphqt.app.hdf5explorer:hdf5explorer_gui',
'screengrabber = psana.graphqt.ScreenGrabberQt5:run_GUIScreenGrabber',
'detnames = psana.app.detnames:detnames',
'config_dump = psana.app.config_dump:config_dump',
'xtcavDark = psana.xtcav.app.xtcavDark:__main__',
'xtcavLasingOff = psana.xtcav.app.xtcavLasingOff:__main__',
'xtcavLasingOn = psana.xtcav.app.xtcavLasingOn:__main__',
'xtcavDisplay = psana.xtcav.app.xtcavDisplay:__main__',
'shmemClientSimple = psana.app.shmemClientSimple:main',
'epix10ka_pedestals_calibration = psana.app.epix10ka_pedestals_calibration:do_main',
'epix10ka_deploy_constants = psana.app.epix10ka_deploy_constants:do_main',
'epix10ka_raw_calib_image = psana.app.epix10ka_raw_calib_image:do_main',
'datinfo = psana.app.datinfo:do_main',
'det_dark_proc = psana.app.det_dark_proc:do_main',
'parallel_proc = psana.app.parallel_proc:do_main',
'iv = psana.graphqt.app.iv:image_viewer',
]
}
if 'SHMEM' in BUILD_LIST and sys.platform != 'darwin':
ext = Extension('shmem',
sources=["psana/shmem/shmem.pyx"],
libraries = ['xtc','shmemcli'],
include_dirs = [np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
language="c++",
extra_compile_args = extra_cxx_compile_args,
extra_link_args = extra_link_args_rpath,
)
CYTHON_EXTS.append(ext)
if 'PEAKFINDER' in BUILD_LIST :
ext = Extension("peakFinder",
sources=["psana/peakFinder/peakFinder.pyx",
"psana/peakFinder/src/PeakFinderAlgos.cc",
"psana/peakFinder/src/LocalExtrema.cc"],
libraries = ['utils'], # for SysLog
language="c++",
extra_compile_args = extra_cxx_compile_args,
extra_link_args = extra_link_args_rpath,
include_dirs=[np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
)
CYTHON_EXTS.append(ext)
# direct LCLS1 version of peak-finders
ext = Extension("psalg_ext",
sources=["psana/peakFinder/psalg_ext.pyx",
"psana/peakFinder/src/PeakFinderAlgosLCLS1.cc",
"psana/peakFinder/src/LocalExtrema.cc"],
libraries = ['utils'], # for SysLog
language="c++",
extra_compile_args = extra_cxx_compile_args,
extra_link_args = extra_link_args_rpath,
include_dirs=[np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
)
CYTHON_EXTS.append(ext)
ext = Extension("peakfinder8",
sources=["psana/peakFinder/peakfinder8.pyx",
"psana/peakFinder/peakfinder8.cc"],
libraries = ['utils'], # for SysLog
language="c++",
extra_compile_args = extra_cxx_compile_args,
extra_link_args = extra_link_args_rpath,
include_dirs=[np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
)
CYTHON_EXTS.append(ext)
if 'HEXANODE' in BUILD_LIST :
# ugly: only build hexanode apps if the roentdek software exists.
# this is a rough python equivalent of the way cmake finds out whether
# packages exist. - cpo
if(os.path.isfile(os.path.join(sys.prefix, 'lib', 'libResort64c_x64.a'))):
ext = Extension("hexanode",
sources=["psana/hexanode/hexanode_ext.pyx",
"psana/hexanode/src/cfib.cc",
"psana/hexanode/src/wrap_resort64c.cc",
"psana/hexanode/src/SortUtils.cc",
"psana/hexanode/src/LMF_IO.cc"],
language="c++",
extra_compile_args = extra_cxx_compile_args,
include_dirs=[os.path.join(sys.prefix,'include'), np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib'), os.path.join(sys.prefix, 'lib')],
libraries=['Resort64c_x64'],
extra_link_args = extra_link_args,
)
CYTHON_EXTS.append(ext)
if 'HEXANODE_TEST' in BUILD_LIST :
if(os.path.isfile(os.path.join(sys.prefix, 'lib', 'libResort64c_x64.a'))):
ext = Extension("hexanode",
sources=["psana/hexanode/test_ext.pyx",
"psana/hexanode/src/LMF_IO.cc",
"psana/hexanode/src/cfib.cc"],
language="c++",
extra_compile_args = extra_cxx_compile_args,
include_dirs=[np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
extra_link_args = extra_link_args,
)
CYTHON_EXTS.append(ext)
if 'CFD' in BUILD_LIST :
ext = Extension("constFracDiscrim",
sources=["psana/constFracDiscrim/constFracDiscrim.pyx",
"psana/constFracDiscrim/src/ConstFracDiscrim.cc"],
language="c++",
extra_compile_args = extra_cxx_compile_args,
extra_link_args = extra_link_args,
include_dirs=[os.path.join(sys.prefix,'include'), np.get_include(), os.path.join(instdir, 'include')],
)
CYTHON_EXTS.append(ext)
if 'DGRAM' in BUILD_LIST :
ext = Extension('dgramCreate',
#packages=['psana.peakfinder',],
sources=["psana/peakFinder/dgramCreate.pyx"],
libraries = ['xtc'],
include_dirs = [np.get_include(), os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
language="c++",
extra_compile_args = extra_cxx_compile_args,
extra_link_args = extra_link_args_rpath,
# include_dirs=[np.get_include(), "../install/include"]
)
CYTHON_EXTS.append(ext)
ext = Extension("psana.dgramchunk",
sources=["src/dgramchunk.pyx"],
extra_compile_args=extra_c_compile_args,
extra_link_args=extra_link_args,
)
CYTHON_EXTS.append(ext)
ext = Extension("psana.smdreader",
sources=["psana/smdreader.pyx"],
include_dirs=["psana"],
#extra_compile_args=extra_c_compile_args,
extra_compile_args=extra_c_compile_args + openmp_compile_args,
#extra_link_args=extra_link_args,
extra_link_args=extra_link_args + openmp_link_args,
)
CYTHON_EXTS.append(ext)
ext = Extension("psana.eventbuilder",
sources=["psana/eventbuilder.pyx"],
include_dirs=["psana"],
extra_compile_args=extra_c_compile_args,
extra_link_args=extra_link_args,
)
CYTHON_EXTS.append(ext)
ext = Extension("psana.parallelreader",
sources=["psana/parallelreader.pyx"],
include_dirs=["psana"],
extra_compile_args=extra_c_compile_args + openmp_compile_args,
extra_link_args=extra_link_args + openmp_link_args,
)
CYTHON_EXTS.append(ext)
ext = Extension("psana.dgrmdsource",
sources=["psana/dgrmdsource.pyx"],
include_dirs=["psana"],
extra_compile_args=extra_c_compile_args,
extra_link_args=extra_link_args,
)
CYTHON_EXTS.append(ext)
if 'HSD' in BUILD_LIST :
ext = Extension("hsd",
sources=["psana/hsd/hsd.pyx"],
libraries=[],
language="c++",
extra_compile_args=extra_cxx_compile_args,
include_dirs=[np.get_include(),
"../install/include",
os.path.join(instdir, 'include')],
library_dirs = [os.path.join(instdir, 'lib')],
extra_link_args = extra_link_args_rpath,
)
CYTHON_EXTS.append(ext)
#if 'NDARRAY' in BUILD_LIST :
# ext = Extension("ndarray",
# sources=["psana/pycalgos/NDArray_ext.pyx",
# "psana/peakFinder/src/WFAlgos.cc"],
# language="c++",
# extra_compile_args = extra_cxx_compile_args,
# include_dirs=[os.path.join(sys.prefix,'include'), np.get_include(), os.path.join(instdir, 'include')],
# library_dirs = [os.path.join(instdir, 'lib')],
# libraries=[],
# extra_link_args = extra_link_args,
# )
# CYTHON_EXTS.append(ext)
setup(
name = 'psana',
version = VERSION,
license = 'LCLS II',
description = 'LCLS II analysis package',
install_requires = INSTALL_REQS,
packages = PACKAGES,
package_data = PACKAGE_DATA,
#cmdclass={'build_ext': my_build_ext},
ext_modules = EXTS + cythonize(CYTHON_EXTS, build_dir=CYT_BLD_DIR, language_level=2, annotate=True),
entry_points = ENTRY_POINTS,
)
|
# Calculator in Python
operacao = ''
while operacao != "sair":
print()
operacao = input(" Qual operacao desejada - soma, sub, mult, div ou sair: ")
if operacao == "sair":
break
numero_1 = float(input(" Digite primeiro número: "))
numero_2 = float(input(" Digite segundo número: "))
if operacao == "soma":
resultado = numero_1 + numero_2
print("{} + {} ".format(numero_1, numero_2))
elif operacao == "sub":
resultado = numero_1 - numero_2
print("{} - {} ".format(numero_1, numero_2))
elif operacao == "mult":
resultado = numero_1 * numero_2
print("{} * {} ".format(numero_1, numero_2))
elif operacao == "div":
# guard against division by zero
if numero_2 == 0:
resultado = "Divisão por zero não é permitida."
else:
resultado = numero_1 / numero_2
print("{} / {} ".format(numero_1, numero_2))
else:
resultado = "Operador não válido. Utilizar apenas SOMA, SUB, MULT, DIV ou SAIR. "
print(resultado, ' - Resultado dessa operação')
print("Fim do programa")
|
name = "moona"
print("hi", name)
|
def create_list():
a = [1, 2, 3, 5, 6, 7, 7, 8, 8, 5, 9, 1]
return a
def set_list(a):
a = set(a)
return a
print(create_list())
print(set_list(create_list()))
|
from django import forms
from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from phonenumber_field.formfields import PhoneNumberField
from phonenumber_field.widgets import PhoneNumberPrefixWidget
from .models import CustomUser
class CustomUserCreationForm(UserCreationForm):
phone = PhoneNumberField(widget=PhoneNumberPrefixWidget)
class Meta(UserCreationForm.Meta):
model = CustomUser
fields = ('name', 'email', 'phone', 'password1', 'password2')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['name'].widget.attrs.update({'autofocus': True,'placeholder':('Full Name'),'class':('name')})
self.fields['email'].widget.attrs.update({'placeholder':('Email'),'class':('email')})
self.fields['phone'].widget.attrs.update({'placeholder':('Phone Number'),'class':('phone')})
self.fields['password1'].widget.attrs.update({'placeholder':('Password'),'class':('password1')})
self.fields['password2'].widget.attrs.update({'placeholder':('Repeat password'),'class':('password2')})
class LoginForm(forms.Form):
phone = PhoneNumberField(widget=PhoneNumberPrefixWidget)
password = forms.CharField(max_length=32, widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['phone'].widget.attrs.update({'placeholder':('Phone Number'),'class':('phone')})
self.fields['password'].widget.attrs.update({'placeholder':('password'),'class':('password')})
class CustomUserChangeForm(UserChangeForm):
class Meta:
model = CustomUser
fields = ('phone',)
|
from datetime import datetime
import string
import numpy as np
import pandas
def get_house_bill_df(file):
df = pandas.read_excel(file, header=None)
index_to_leave = [1, 2, 4, 6, 7, 12, 18, 29, 30, 32, 40, 42, 43, 54]
# keep only the relevant columns
df = df[index_to_leave]
df.columns = ['Date Created', 'HBL', 'Customer',
'CI Reference', 'MBL', 'HBL T/R', 'Move Type',
'Loading Port', 'Final Destination', 'Operator',
'Overseas Agent', 'Consignee', 'Notify Party', 'Discharge Port']
return df
def get_containers_df(file):
df = pandas.read_excel(file, header=None, usecols='A,C')
df.columns = ['Containers', 'CI Reference']
return df
def transform_containers_df(df):
return df.fillna('').groupby(['CI Reference'])['Containers'].apply(', '.join).reset_index(drop=True)
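# Illustrative example (hypothetical data): rows ('ABCD1', 'REF1') and ('ABCD2', 'REF1')
# collapse to a single 'ABCD1, ABCD2' entry for CI Reference 'REF1'.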
def transform_house_bill_df(df):
df['Operator'] = df['Operator'].str.upper()
df['HBL T/R'] = df['HBL T/R'].apply(lambda x: 1 if 'Express' in x else None)
df['Consignee'] = df['Customer'].str.translate(str.maketrans('', '', string.punctuation))
df['Notify Party'] = df['Notify Party'].str.translate(str.maketrans('', '', string.punctuation))
df['Trucking'] = np.where(df['Move Type'] == 'CY/DOOR', 'Pending', 'N/A')
df['SOC G/L'] = np.where(df['CI Reference'].str[-1] == '^', 'Pending', 'N/A')
df['Payment'] = 'Pending'
df = df.loc[df['Operator'] == 'BRYAN']
df = df.loc[df['Date Created'] == datetime.today().strftime('%Y-%m-%d 00:00:00')]
df['Status'] = 'On Vessel'
return df
def join_dataframes(df1, df2):
return pandas.concat([df1, df2], axis=1, join='inner')
if __name__ == '__main__':
# the spreadsheet paths below are placeholders; point them at the real export files
df1 = transform_house_bill_df(get_house_bill_df('house_bills.xlsx'))
df2 = transform_containers_df(get_containers_df('containers.xlsx'))
df3 = join_dataframes(df1, df2)
df3.to_csv('{}_new_records.csv'.format(datetime.today().strftime('%m%d%Y')), index=False)
|
# -*- coding: utf-8 -*-
# -*- author: hechao -*-
import os
from core.config import config
from core.db_info import get_db_table_info_data
from jinja2 import PackageLoader, Environment
def render_html(db_name, db_real_name, tables):
env = Environment(loader=PackageLoader("html", "templates"))
template = env.get_template('index.html')
table_count = len(tables)
html = template.render(tables=tables, db_name=db_name, table_count=table_count, db_real_name=db_real_name)
out_put_path = os.path.join("html", "output")
if not os.path.exists(out_put_path):
os.makedirs(out_put_path)
with open(os.path.join(out_put_path, "{}.html".format(db_name)), "w", encoding="utf8") as file:
file.write(html)
def output_html():
for db in config():
name = db.get("name")
try:
db_info = get_db_table_info_data(**db)
render_html(db_name=name, db_real_name=db.get("db"), tables=db_info)
except Exception as e:
print("exception {}".format(e))
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding unique constraint on 'Matches', fields ['source', 'value']
db.create_unique(u'ontol1_matches', ['source_id', 'value'])
def backwards(self, orm):
# Removing unique constraint on 'Matches', fields ['source', 'value']
db.delete_unique(u'ontol1_matches', ['source_id', 'value'])
models = {
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ontol1.matches': {
'Meta': {'unique_together': "(('source', 'value'),)", 'object_name': 'Matches'},
'count': ('django.db.models.fields.IntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'real_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['ontol1.Sources']"}),
'type_of_word': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'user_assigned_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'user_comment': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'})
},
'ontol1.sources': {
'Meta': {'object_name': 'Sources'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'difficulty': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'real_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'type_of_word': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'user_assigned_name': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'user_comment': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '250', 'unique': 'True', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['ontol1']
|
import random
from game import constants
from game.actor import Actor
from game.point import Point
class Food(Actor):
'''This keeps track of the food item and how much each piece is worth.
attributes:
'''
def __init__(self):
'''constructor
'''
super().__init__()
self.set_text('@')
self._points = 0
self.reset()
def reset(self):
'''changes the position of the food and its worth
'''
self._points = random.randint(1,5)
pos_x = random.randint(0, constants.MAX_X)
pos_y = random.randint(0, constants.MAX_Y)
position = Point(pos_x, pos_y)
self.set_position(position)
def get_points(self):
'''returns points
'''
return self._points
|
import requests
BASE = 'http://127.0.0.1:5000/'
response = requests.get(BASE + 'product')
# json() must be called; assuming the endpoint returns a JSON list of products
for item in response.json():
print(item)
|
# Author: Jack (z5129432) for COMP9021 Assignment 1
# Date: 23/08/2017
# Description:
import sys
# function: calculate_next_step
# version: v01
# dependency: transformation()
# input: this_step[]
# output: next_step[]
# description: build the set of all states reachable by applying one transformation to a state in this_step
def calculate_next_step(this_step):
this_step = list(this_step)
next_step = set()
for i in range(len(this_step)): # visit every element this_step[i]
for type in range(3): # apply the 3 transformations to each element
next_step.add(transformation(this_step[i], type))
return(next_step)
# function: count_steps
# dependency: check_steps()
# input: state_1, state_2
# output: steps
# description: use check_steps() to calculate steps from state_1 to state_2
def count_steps(state_1, state_2):
steps = 0
if state_1 == state_2:
return(steps)
this_step = [state_1]
while True:
steps += 1
this_step = calculate_next_step(this_step)
for e in this_step:
if state_2 == e:
return(steps)
# function: interface_input
# dependency: sys
# input: user input
# output: final_configuration[]
def interface_input():
try:
user_input = input('Input final configuration: ')
user_input = user_input.replace(" ", "") # remove space
if len(user_input) != 8: # check the length of input is equal to 8
raise ValueError
final_configuration = []
for i in range(8):
if int(list(user_input)[i]) in range(1, 9): # check that each character is a digit between 1 and 8
final_configuration.append(int(list(user_input)[i]))
else:
raise ValueError
if len(set(final_configuration)) < 8: # check if there are duplicate value
raise ValueError
except ValueError:
print('Incorrect configuration, giving up...')
sys.exit()
return(tuple(final_configuration))
# function: interface_output
# input: steps
# output: print
def interface_output(steps):
if steps == 0 or steps == 1:
unit = 'step is'
else:
unit = 'steps are'
print(f'{steps} {unit} needed to reach the final configuration.')
return
# function: transformation
# version: v02
# input: before_transformation(), type
# 0 for row Exchange
# 1 for right circular Shift
# 2 for middle clockwise rotation
# output: after_transformation()
# description:
# this function transforms the given state
# initial state 1 2 3 4 5 6 7 8
# E 8 7 6 5 4 3 2 1
# S 4 1 2 3 6 7 8 5
# R 1 7 2 4 5 3 6 8
def transformation(before_transformation, type):
before_transformation = list(before_transformation)
after_transformation = before_transformation[:]
if type == 0: # row Exchange
after_transformation[0] = before_transformation[7]
after_transformation[1] = before_transformation[6]
after_transformation[2] = before_transformation[5]
after_transformation[3] = before_transformation[4]
after_transformation[4] = before_transformation[3]
after_transformation[5] = before_transformation[2]
after_transformation[6] = before_transformation[1]
after_transformation[7] = before_transformation[0]
if type == 1: # right circular Shift
after_transformation[0] = before_transformation[3]
after_transformation[1] = before_transformation[0]
after_transformation[2] = before_transformation[1]
after_transformation[3] = before_transformation[2]
after_transformation[4] = before_transformation[5]
after_transformation[5] = before_transformation[6]
after_transformation[6] = before_transformation[7]
after_transformation[7] = before_transformation[4]
if type == 2: # middle clockwise rotation
after_transformation[0] = before_transformation[0]
after_transformation[1] = before_transformation[6]
after_transformation[2] = before_transformation[1]
after_transformation[3] = before_transformation[3]
after_transformation[4] = before_transformation[4]
after_transformation[5] = before_transformation[2]
after_transformation[6] = before_transformation[5]
after_transformation[7] = before_transformation[7]
return(tuple(after_transformation))
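# Sanity checks against the table above (illustrative only):
# transformation((1, 2, 3, 4, 5, 6, 7, 8), 0) == (8, 7, 6, 5, 4, 3, 2, 1) # row Exchange
# transformation((1, 2, 3, 4, 5, 6, 7, 8), 1) == (4, 1, 2, 3, 6, 7, 8, 5) # right circular Shift
# transformation((1, 2, 3, 4, 5, 6, 7, 8), 2) == (1, 7, 2, 4, 5, 3, 6, 8) # middle clockwise rotation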
# initialization
current_state = (1, 2, 3, 4, 5, 6, 7, 8)
final_configuration = interface_input()
steps = count_steps(current_state, final_configuration)
interface_output(steps)
|
from musket_text import text_datasets
from musket_core import datasets
@datasets.dataset_provider(origin="train.csv",kind="TextClassificationDataSet")
def get_sample():
return text_datasets.BinaryTextClassificationDataSet("train.csv","question_text","target")
@datasets.dataset_provider(origin="test.csv",kind="TextClassificationDataSet")
def get_test():
return text_datasets.BinaryTextClassificationDataSet("test.csv","question_text","target")
|
import requests
from requests.exceptions import HTTPError
url = "https://covid19-us-api.herokuapp.com/twitter"
response = requests.get(url)
#print(response.text.encode('utf8'))
response.raise_for_status()
json_response = response.json()
message = json_response["message"]
tweets = message["tweets"]
print(message["username"])
for tweet in tweets:
text = tweet["full_text"]
date = tweet["created_at"]
print(text)
n = date.split("T")
print(n[0])
|
import os
from flask import json, jsonify
from app import app
AUTH_TOKEN = ""
def test_signup():
print('testing /auth/signup')
# Create new user from sign in form
#'''
with app.test_client() as c:
rv = c.post('/auth/signup', json={
'email': "ralf_stone@host.com", #'bill_xavier@host.com',
'password': 'jkht6fd4le,*', # 'nnjk4cb&%d3', # 'dfgvd564rf'
})
json_data = rv.get_json()
# print(json_data)
print(rv.data)
token = rv.data
os.environ["AUTH_TOKEN"] = token.decode("utf-8")
print('status: ', rv.status_code)
#'''
# Create new user from admin panel
'''
with app.test_client() as c:
token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTczOTA5NjEsImlhdCI6MTYxNzM2OTM2MSwiaWQiOiJiNTM1MGQ1YS03ZmYyLTQwM2ItOTMzNC00Y2UzNjQ0M2YzYTYiLCJlbWFpbCI6ImJpbGxfeGF2aWVyQGhvc3QuY29tIn0.GHzmEbzhMj5z3Zesr4wR_sIr9Od0SZ6SxRzO9gb780o"
authorization = 'Bearer ' + str(token)
headers = {
# 'Access-Control-Allow-Origin': '*',
# 'Content-Type': 'application/json',
'Authorization': authorization
}
password = 'sdvw3HGY' # '&hgvdsdf4xf'
rv = c.post('/auth/signup', json={
'email': 'uman_lesset@host.com', 'password': password
}, headers=headers)
# json_data = rv.get_json()
# print(json_data)
print(rv.data)
print('status: ', rv.status_code)
'''
def test_verify_email():
print('testing /verify-email')
token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc5Njk3NzUsImlhdCI6MTYxNzk2OTQxNSwic3ViIjoiN2YxZTVhMTAtNTdkNC00MGJmLThlZDAtODAzYzgzMTBlZjc3In0.oIFpXNmD2Ao1CT9GC9I426NUkmAHilVbV0RbUtFjtoQ"
# os.environ["EMAIL_VERIFY_TOKEN"] # AUTH_TOKEN"]
print(token)
with app.test_client() as c:
rv = c.put('/auth/verify-email', json={
'token': token
})
# json_data = rv.get_json()
print(rv.data)
print('status: ' , rv.status_code)
#print(json_data)
def test_send_email_address_verification_email():
print('test_send_email_address_verification_email')
#print(os.environ["AUTH_TOKEN"])
with app.test_client() as c:
token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc5OTA2MjAsImlhdCI6MTYxNzk2OTAyMCwiaWQiOiI3ZjFlNWExMC01N2Q0LTQwYmYtOGVkMC04MDNjODMxMGVmNzciLCJlbWFpbCI6InJhbGZfc3RvbmVAaG9zdC5jb20ifQ.Rzz27cK0SXsSA2Tgbg3V2lA5pnvL5BUzMHU0T4DL4s4" # os.environ["AUTH_TOKEN"]
#token = os.environ["AUTH_TOKEN"]
authorization = 'Bearer ' + str(token)
headers = {
# 'Content-Type': 'application/json',
'Authorization': authorization
}
password = 'sdvw3HGY' # '&hgvdsdf4xf'
rv = c.post('/auth/send-email-address-verification-email', json={}, headers=headers)
# json_data = rv.get_json()
# print(json_data)
print(rv.data)
print('status: ', rv.status_code)
def test_signin_local():
print('testing /signin/local')
with app.test_client() as c:
rv = c.post('/auth/signin/local', json={
'email': "ralf_stone@host.com", # 'billy_xavier@host.com', # 'billy_xavier@host.com',
'password': "dfgvd564rf", # 'jkht6fd4le,*', # "dfgvd564rf", # 'nnjk4cb&%d3', # 'dfgvd564rf'
})
json_data = rv.get_json()
#print(json_data)
token = rv.data
os.environ["AUTH_TOKEN"] = token.decode("utf-8")
token_file = 'auth_token.txt'
with open(token_file, 'w') as f:
f.write(token.decode("utf-8"))
print(token)
print('status: ' , rv.status_code)
def test_password_update():
print('testing /signin/password-update')
#print(os.environ["AUTH_TOKEN"])
token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc5OTA2MjAsImlhdCI6MTYxNzk2OTAyMCwiaWQiOiI3ZjFlNWExMC01N2Q0LTQwYmYtOGVkMC04MDNjODMxMGVmNzciLCJlbWFpbCI6InJhbGZfc3RvbmVAaG9zdC5jb20ifQ.Rzz27cK0SXsSA2Tgbg3V2lA5pnvL5BUzMHU0T4DL4s4" # os.environ["AUTH_TOKEN"]
#token = os.environ["AUTH_TOKEN"] #token # "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTczODUxNjEsImlhdCI6MTYxNzM2MzU2MSwiaWQiOiIyYTE2ZTRmYy0xNmNkLTRlYTktOTNhZS0wZTIwZjg0ZWUzMjAiLCJlbWFpbCI6ImJpbGxfeGF2aWVyQGhvc3QuY29tIn0.49G21qLF1QFeE3y77z8FTwId5R7suxuDaitovl4oMoo"
authorization = 'Bearer ' + str(token)
headers = {
# 'Access-Control-Allow-Origin': '*',
# 'Content-Type': 'application/json',
'Authorization': authorization
}
with app.test_client() as c:
rv = c.put('/auth/password-update', json={
"current_password": 'jkht6fd4le,*', # "dfgvd564rf", # "dfgvd564rf",
"new_password": "dfgvd564rf", # "2as25Ifzr"
}, headers=headers)
json_data = rv.get_json()
print(rv.data)
print('status: ' , rv.status_code)
# print(json_data)
def test_send_password_reset_email():
with app.test_client() as c:
rv = c.post('/auth/send-password-reset-email', json={
'email': "ralf_stone@host.com" # 'bill_xavier@host.com'
})
print(rv.data)
print('status: ', rv.status_code)
def test_password_reset():
print('testing /auth/password-reset')
#print(os.environ["AUTH_TOKEN"])
auth_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc5OTA2MjAsImlhdCI6MTYxNzk2OTAyMCwiaWQiOiI3ZjFlNWExMC01N2Q0LTQwYmYtOGVkMC04MDNjODMxMGVmNzciLCJlbWFpbCI6InJhbGZfc3RvbmVAaG9zdC5jb20ifQ.Rzz27cK0SXsSA2Tgbg3V2lA5pnvL5BUzMHU0T4DL4s4" # os.environ["AUTH_TOKEN"]
#auth_token = os.environ["AUTH_TOKEN"] # token # "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc2MzcwNzQsImlhdCI6MTYxNzYxNTQ3NCwiaWQiOiIyMjhmNGJiZS0yM2VjLTRhZmYtYTg3NC0yNTZlMWM4ZGVjZmMiLCJlbWFpbCI6ImJpbGxfeGF2aWVyQGhvc3QuY29tIn0.B1FXZ-UfjKq-HILHZuoVTV6C3uzlYhq54HI9b5hKb4w"
authorization = 'Bearer ' + str(auth_token)
password_reset_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc5NzE2MDYsImlhdCI6MTYxNzk3MTI0Niwic3ViIjoiN2YxZTVhMTAtNTdkNC00MGJmLThlZDAtODAzYzgzMTBlZjc3In0.PWsReE41SFZB4cye_DFY-fXAJCZJ1VAur-aCFC3QUJQ"
headers = {
# 'Access-Control-Allow-Origin': '*',
# 'Content-Type': 'application/json',
#'Authorization': authorization
}
with app.test_client() as c:
rv = c.put('/auth/password-reset', json={
"password": 'jkht6fd4le,*', # "dfgvd564rf",
"token": password_reset_token, # "2as25Ifzr"
}, headers=headers)
print(rv.data)
print('status: ', rv.status_code)
def test_profile():
print('testing /auth/profile')
print(os.environ["AUTH_TOKEN"])
auth_token = os.environ["AUTH_TOKEN"]
authorization = 'Bearer ' + str(auth_token)
id = "188b7d6a-3bbd-44bd-8e07-6be6bc4b1e1f"
profile_data = {
#"id": id,
"email": "ralf_stone@host.com", #"bill_xavier@host.com",
"firstName": "Tailor", # "Billy",
"lastName": None, # "Xavier1",
"phoneNumber": "250051342", # "2211945",
"role": "admin",
"disabled": False,
"avatar": [
{
"id": '79c2b036-2efb-4f82-bd8b-6158fe0f36de',
"name": "tynvnmm.jpeg", # "test1.png",
"new": True,
"sizeInBytes": 321800,
"privateUrl": "users/avatar/79c2b036-2efb-4f82-bd8b-6158fe0f36de.jpeg", # "users/avatar/eb4b4851-a5e4-483a-a1d0-3f3feedae3a6.png",
"publicUrl": "http://127.0.0.1:5000/files/download?privateUrl=users/avatar/avatar/79c2b036-2efb-4f82-bd8b-6158fe0f36de.jpeg" # "http://127.0.0.1:5000/files/download?privateUrl=users/avatar/avatar/eb4b4851-a5e4-483a-a1d0-3f3feedae3a6.png"
},
{
"id": '27e264dd-aa46-4c98-8c04-aecace218d9e',
"name": "heyhere.png",
"new": True,
"sizeInBytes": 213500,
"privateUrl": "users/avatar/27e264dd-aa46-4c98-8c04-aecace218d9e.png",
"publicUrl": "http://127.0.0.1:5000/files/download?privateUrl=users/avatar/avatar/27e264dd-aa46-4c98-8c04-aecace218d9e.png"
},
]
# "avatar": []
}
headers = {
# 'Access-Control-Allow-Origin': '*',
'Content-Type': 'application/json',
'Authorization': authorization
}
with app.test_client() as c:
rv = c.put('/auth/profile', json={
"profile": profile_data
}, headers=headers)
print(rv.data)
print('status: ', rv.status_code)
def test_me():
print('testing /auth/me')
#print(os.environ["AUTH_TOKEN"])
auth_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE2MTc5OTA2MjAsImlhdCI6MTYxNzk2OTAyMCwiaWQiOiI3ZjFlNWExMC01N2Q0LTQwYmYtOGVkMC04MDNjODMxMGVmNzciLCJlbWFpbCI6InJhbGZfc3RvbmVAaG9zdC5jb20ifQ.Rzz27cK0SXsSA2Tgbg3V2lA5pnvL5BUzMHU0T4DL4s4" # os.environ["AUTH_TOKEN"]
#auth_token = os.environ["AUTH_TOKEN"]
authorization = 'Bearer ' + str(auth_token)
headers = {
'Authorization': authorization
}
with app.test_client() as c:
rv = c.get('/auth/me', json={}, headers=headers)
print(rv.data)
print('status: ', rv.status_code)
def test_email_configured():
print('testing /auth/email-configured')
with app.test_client() as c:
rv = c.get('/auth/email-configured', json={})
print(rv.data)
print('status: ', rv.status_code)
if __name__ == '__main__':
#test_signup()
#test_signin_local()
#test_send_email_address_verification_email()
#test_verify_email()
#token = test_signin_local()
# test_password_update(token)
# Test password reset and update
#test_signup()
#test_verify_email()
#test_signin_local()
#test_send_password_reset_email()
# test_password_reset()
#test_password_update()
# Test sending email verification email
#test_send_email_address_verification_email()
#test_verify_email()
# Test profile and me
test_signin_local()
#test_me()
test_profile()
#test_email_configured()
|
#!/usr/bin/env python
import rospy
from std_msgs.msg import Int32
from geometry_msgs.msg import PoseStamped, TwistStamped
from styx_msgs.msg import Lane, Waypoint, TrafficLight, TrafficLightArray
import math
import copy
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
TODO (for Yousuf and Aaron): Stopline location for each traffic light.
'''
LOOKAHEAD_WPS = 200 # Number of waypoints we will publish. You can change this number
class WaypointUpdater(object):
def __init__(self):
rospy.init_node('waypoint_updater')
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
rospy.Subscriber( '/current_velocity', TwistStamped, self.velocity_cb )
# TODO: Add a subscriber for /traffic_waypoint and /obstacle_waypoint below
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
rospy.Subscriber('/obstacle_waypoint', Int32, self.obstacle_cb)
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
# TODO: Add other member variables you need below
self.base_wps = None
self.traffic_wp = -1
self.velocity = 0
v = rospy.get_param('/waypoint_loader/velocity', 15. )
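# convert the waypoint_loader velocity parameter from km/h to m/s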
self.vel_max = ( v * 1000. ) / ( 60. * 60. )
rospy.spin()
def pose_cb(self, msg):
# TODO: Implement
# pos = msg.pose.position
# ort = msg.pose.orientation
# rospy.loginfo( "[waypoint_updater.pose_cb] position = (%f, %f, %f)", \
# pos.x, pos.y, pos.z )
# rospy.loginfo( "[waypoint_updater.pose_cb] orientation = (%f, %f, %f, %f)", \
# ort.x, ort.y, ort.z, ort.w )
if self.base_wps is None:
rospy.loginfo( "[waypoint_updater.pose_cb] No base_waypoints." )
return
cpos = msg.pose.position
cort = msg.pose.orientation
near_i = self.get_closest_waypoint( cpos ) + 1
num_wps = len( self.base_wps.waypoints )
lane = Lane()
lane.header.frame_id = '/world'
lane.header.stamp = rospy.Time(0)
# self.traffic_wp = -1
if self.traffic_wp == -1:
if near_i + LOOKAHEAD_WPS > num_wps:
lane.waypoints = self.base_wps.waypoints[ near_i : ] + \
self.base_wps.waypoints[ : near_i + LOOKAHEAD_WPS - num_wps ]
else:
lane.waypoints = self.base_wps.waypoints[ near_i : near_i + LOOKAHEAD_WPS ]
elif self.traffic_wp >= near_i:
lane.waypoints = self.base_wps.waypoints[ near_i : self.traffic_wp + 1 ]
elif near_i - self.traffic_wp <= 10:
lane.waypoints = self.base_wps.waypoints[ near_i : near_i + 1 ]
else:
lane.waypoints = self.base_wps.waypoints[ near_i : ] + \
self.base_wps.waypoints[ : self.traffic_wp + 1 ]
# Set velocities
if self.traffic_wp == -1:
self.accelerate( lane.waypoints, near_i )
else:
self.decelerate( lane.waypoints, near_i )
# rospy.loginfo( "[waypoint_updater ===>] car = %d, red = %d", near_i, self.traffic_wp )
self.final_waypoints_pub.publish( lane )
def velocity_cb( self, msg ):
self.velocity = msg.twist.linear.x
def get_closest_waypoint( self, pos ):
if self.base_wps is None:
return None
dist = float( "inf" )
wpi = None
for i, wp in enumerate( self.base_wps.waypoints ):
p = wp.pose.pose.position
d = math.sqrt( ( p.x - pos.x ) ** 2 + ( p.y - pos.y ) ** 2 + ( p.z - pos.z ) ** 2 )
if d < dist:
wpi = i
dist = d
return wpi
def waypoints_cb(self, lane):
# TODO: Implement
self.base_wps = lane
def traffic_cb(self, msg):
# TODO: Callback for /traffic_waypoint message. Implement
if self.traffic_wp != msg.data:
rospy.loginfo( "[waypoint_updater.traffic_cb] traffic_wp = %d", msg.data )
self.traffic_wp = msg.data
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def _distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
def distance(self, waypoints, wp1, wp2):
num = len( waypoints )
wp1, wp2 = wp1 % num, wp2 % num
if wp2 > wp1:
return self._distance( waypoints, wp1, wp2 )
else:
num_wps = len( self.base_wps.waypoints )
return self._distance( waypoints, wp1, num_wps - 1 ) + \
self._distance( waypoints, 0, wp2 )
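# e.g. with a 100-waypoint loop, distance(wps, 95, 5) sums the span 95 -> 99
# plus 0 -> 5, so a range that wraps past the end of the list is handled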
def accelerate( self, waypoints, near_i ):
for wp in waypoints:
wp.twist.twist.linear.x = self.vel_max
def decelerate( self, waypoints, near_i ):
MAX_DECEL = 0.5
MAX_ACCEL = 1.0
last = waypoints[-1]
last.twist.twist.linear.x = 0.
num = len( waypoints )
if num > 1:
d0 = self.distance( self.base_wps.waypoints, near_i, near_i + 1 )
meet = False
for i in range( num - 1 ):
dist = self.distance( waypoints, i, num - 1 )
vel = math.sqrt(2 * MAX_DECEL * dist)
if vel < 1.:
vel = 0.
vel = min( vel, self.vel_max )
waypoints[ i ].twist.twist.linear.x = vel
# if not meet:
# dist2 = self.distance( waypoints, 0, i ) + d0
# vel2 = math.sqrt( self.velocity ** 2 + 2 * MAX_ACCEL * dist2 )
# vel2 = min( vel2, self.vel_max )
# meet = vel2 >= vel
# waypoints[ i ].twist.twist.linear.x = min( vel, vel2 )
# else:
# waypoints[ i ].twist.twist.linear.x = vel
return waypoints
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
|
"""
Import and parse football data.
Author: Alton Wiggers, Kadan Lottick
Date: 12/17/19
"""
import csv
source_file = "spreadspoke_scores.csv"
#dest_file
def main():
read_csv(source_file,1979)
def get_bookie_score(source_file):
"""
check predicted winners against actual results
for all games in the 2015-2018 seasons
"""
with open(source_file) as csv_file:
total = 0
correct = 0
csv_reader = csv.reader(csv_file, delimiter =",")
line_count = 1
for row in csv_reader:
if line_count >= 2503 and int(row[1]) >= 2015 and int(row[1]) < 2019:
if int(row[13]) > int(row[14]):
if row[3] == row[6]:
correct+=1
elif int(row[13]) < int(row[14]):
if (row[3] != row[6] and row[6] != "PICK"):
correct+=1
else:
if (row[6] == "PICK"):
correct+=1
total+=1
line_count+=1
return correct, total, correct /total
def read_csv(source_file, min_year):
"""
Parse in each line of .csv file as a datapoint.
Convert relevant lines to analyzable features.
Divide points into training and testing data.
Discard datapoints from seasons before given min_year.
"""
train_data = []
test_data = []
current_data = []
# array indexed by years (subtract 1979 to get the index),
#teams (assigned indexes by team_vals), wins/losses/ties (0/1/2)
records = []
for i in range(41):
year = []
records.append(year)
for j in range(32):
team = []
records[i].append(team)
for k in range(3):
records[i][j].append(0)
# array indexed by years (subtract 1979 to get the index),
#teams (assigned indexes by team_vals), teams they've played
#the value is the number of times the 1st team has beaten the 2nd that season
winMatrix = []
for i in range(41):
year = []
winMatrix.append(year)
for j in range(32):
team = []
winMatrix[i].append(team)
for k in range(32):
winMatrix[i][j].append(0)
with open(source_file) as csv_file:
csv_reader = csv.reader(csv_file, delimiter =",")
line_count = 1
#creating one datapoint for each game
for row in csv_reader:
#only using games after odds were given
if line_count >= 2503 and row[8] != ' ' and row[8] != '' and \
row[10] != ' ' and row[10] != '':
datapoint = []
#schedule season
year = int(row[1])-1979
datapoint.append(year)
#week
datapoint = datapoint + weeks(row[2])
#home team
home_vals,home_id = team_vals(row[3])
datapoint = datapoint + home_vals
#away team
away_vals,away_id = team_vals(row[4])
datapoint = datapoint + away_vals
#spread
if row[3] == row[6]:
datapoint.append(float(row[7]))
else:
datapoint.append(-1*float(row[7]))
#overUnder
datapoint.append(float(row[8]))
#temperature
datapoint.append(float(row[10]))
#wind
datapoint.append(float(row[11]))
#stadium neutral
if row[15] == "FALSE":
datapoint.append(0)
else:
datapoint.append(1)
#add records for home and away teams this season
if not year +1979 == 2019:
datapoint.append(records[year][home_id][0])
datapoint.append(records[year][home_id][1])
datapoint.append(records[year][home_id][2])
datapoint.append(records[year][away_id][0])
datapoint.append(records[year][away_id][1])
datapoint.append(records[year][away_id][2])
else:
datapoint.append(float(row[17]))
datapoint.append(float(row[18]))
datapoint.append(float(row[19]))
datapoint.append(float(row[20]))
datapoint.append(float(row[21]))
datapoint.append(float(row[22]))
# append matrix of who has beaten whom
for i in range(32):
for j in range(32):
datapoint.append(winMatrix[year][i][j])
#assign label and update records
label = 0
#home wins
if int(row[13]) > int(row[14]):
label = 0
records[year][home_id][0] +=1
records[year][away_id][1] +=1
winMatrix[year][home_id][away_id] +=1
#away wins
elif int(row[13]) < int(row[14]):
label = 1
records[year][home_id][1] +=1
records[year][away_id][0] +=1
winMatrix[year][away_id][home_id] +=1
#tie
else:
label = 2
records[year][home_id][2] +=1
records[year][away_id][2] +=1
datapoint.append(label)
if year + 1979 >= min_year and label < 2:
if year + 1979 < 2015:
train_data.append(datapoint)
elif year + 1979 < 2019:
test_data.append(datapoint)
else:
current_data.append(datapoint)
line_count +=1
return train_data,test_data,current_data
def weeks(week):
"""
Given a week, create a list of one-hot
features for each possible week and
enable only the given value
"""
switcher = {
"1": 0,
"2": 1,
"3": 2,
"4": 3,
"5": 4,
"6": 5,
"7": 6,
"8": 7,
"9": 8,
"10": 9,
"11": 10,
"12": 11,
"13": 12,
"14": 13,
"15": 14,
"16": 15,
"17": 16,
"18": 17,
"Wildcard": 18,
"WildCard": 18,
"Division": 19,
"Conference": 20,
"Superbowl": 21,
"SuperBowl": 21
}
features = [0] * 22
features[switcher[week]] = 1
return features
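# e.g. weeks("3") returns a 22-element one-hot list with index 2 set:
# [0, 0, 1, 0, ..., 0]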
def team_vals(team_id):
"""
Given a team, create a list of one-hot
features for each possible team and
enable only the given value.
Return the team value for generating
other features.
"""
switch = {
"NE":0,
"BUF":1,
"NYJ":2,
"MIA":3,
"KC":4,
"OAK":5,
"DEN":6,
"LAC":7,
"BAL":8,
"PIT":9,
"CLE":10,
"CIN":11,
"HOU":12,
"TEN":13,
"IND":14,
"JAX":15,
"DAL":16,
"PHI":17,
"WAS":18,
"NYG":19,
"SEA":20,
"SF":21,
"LAR":22,
"ARI":23,
"GB":24,
"MIN":25,
"CHI":26,
"DET":27,
"NO":28,
"TB":29,
"CAR":30,
"ATL":31
}
pos_id = switch[team_id] # raises KeyError for an unknown team id
team_features = [0] * 32
team_features[pos_id] = 1
return team_features,pos_id
if __name__ == "__main__" :
main()
|
def merge(patch, name):
# Flag lines for removal
to_remove = []
for n in range(len(patch)):
if 'solid' in patch[n]:
to_remove.append(n)
# Remove lines using list of flags
i = 0
for n in to_remove:
del patch[n-i]
i += 1
# Add beginning and end solid tags for patch
patch.insert(0, 'solid ' + name + '\n')
patch.append('endsolid ' + name + '\n')
# print(patch)
return patch
def splitter(input_file, name, v1, v2):
# For the defined box, rearrange into p1 and p2, with lower values in p1
p1 = []
p2 = []
for n in range(3):
if v1[n] <= v2[n]:
p1.append(v1[n])
p2.append(v2[n])
else:
p1.append(v2[n])
p2.append(v1[n])
line_number = 0
patches = []
for line in input_file:
# Check for word solid in line to indicate patch start or end
if 'solid' in line:
if line.index('solid') == 0:
patch_start = line_number
patch_valid = True
elif line.index('endsolid') == 0:
if patch_valid == True:
patches.append(patch_start)
patches.append(line_number)
# Look for vertex and flag if outside points
if 'vertex' in line:
# Convert string to vector:
vertex = line.replace('vertex', '')
vertex = vertex.split()
# Flag if vector is outside prescribed points
for n in range(3):
vertex[n] = float(vertex[n])
if vertex[n] < p1[n] or vertex[n] > p2[n]:
patch_valid = False
line_number += 1
patch = []
a = 0
# If a patch is valid, add it to new variable
for n in range(0,len(patches),2):
for i in range(patches[n],patches[n+1]+1):
patch.append(input_file[i-a])
del input_file[i-a]
a += 1
if len(patch) > 0:
patch = merge(patch, name)
return [patch, input_file]
input_stl = 'region1.stl'
# Read input file and load into variable/RAM
with open(input_stl, encoding='ASCII') as infile:
stl= infile.readlines()
inlet,stl = splitter(stl, 'inlet', [-0.211,0,0], [-0.209,0.005,0.003])
outlet1,stl = splitter(stl, 'outlet1', [0.2, 0, 0], [0.2,0.2,0.002])
outlet2,stl = splitter(stl, 'outlet2', [0, 0.2,0], [0.2,0.2,0.002])
sym,stl = splitter(stl, 'sym',[-0.3, 0, 0], [0.3, 0, 0.002])
outlet = merge(outlet1+outlet2, 'outlet')
wall = merge(stl, 'wall')
output = inlet+outlet+sym+wall
with open('regionFinal.stl', 'w') as outfile:
outfile.writelines(output)
|
# @see https://adventofcode.com/2015/day/14
# Note:
# -----------------------------------------------------------
# I apologise in advance to you (or to my future self) if this
# isn't readable enough :/
import re
def parse_line(s: str):
r = re.match(r'([A-Z][a-z]+) can fly ([\d]+) km/s for ([\d]+) seconds, but then must rest for ([\d]+) seconds.', s.strip())
return r[1], r[2], r[3], r[4] # name, speed, burst, rest
with open('day14_input.txt', 'r') as f:
reindeers = list()
for l in f:
n, speed, burst, rest = parse_line(l)
reindeers.append({'name': n, 'speed': int(speed), 'burst': int(burst), 'rest': int(rest)})
def calc_dist_covered(speed: int, burst: int, rest: int, t: int):
dist = speed * burst * int(t / (burst + rest))
rem = t % (burst + rest)
dist += speed * (rem if rem <= burst else burst)
return dist
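# Worked example from the puzzle statement: Comet flies 14 km/s for 10 s,
# then rests 127 s. At t = 1000 s:
#   full cycles: int(1000 / 137) = 7 -> 14 * 10 * 7 = 980 km
#   remainder: 1000 % 137 = 41 > 10 -> one full burst = 140 km
#   total: 1120 km, i.e. calc_dist_covered(14, 10, 127, 1000) == 1120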
def calc_winning_dist(data: list, t: int):
wd = 0
for x in data:
wd = max(wd, calc_dist_covered(x['speed'], x['burst'], x['rest'], t))
return wd
# List of leading reindeer at a given time t
def leading_at_t(data: list, t: int):
# 1. Distance covered is mapped to contestants in reverse
# 2. The key is the distance covered, the value is a list of contestants
# 3. Multiple contestants might cover the same distance at time t
# 4. Structure should look like so:
# {23: ['Comet', 'Dancer'], 17: ['Cupid'], 10: ['Dasher', 'Donner']}
leaderboard = dict()
for x in data:
d = calc_dist_covered(x['speed'], x['burst'], x['rest'], t)
if d in leaderboard:
leaderboard[d].append(x['name'])
else:
leaderboard[d] = [x['name']]
return leaderboard[max(leaderboard.keys())]
def overall_winner(data: list, tt: int):
# Initialise our leaderboard...
# key is contestant's name, value is points accumulated
leaderboard = dict()
for rd in data: leaderboard[rd['name']] = 0
for t in range(1, tt+1):
for k in leading_at_t(data, t):
leaderboard[k] += 1
return max(leaderboard.values())
# Duration of contest
time_t = 2503
print('------------ PART 01 -------------')
print('Winning reindeer traveled:', calc_winning_dist(reindeers, time_t), 'km')
print('\n------------ PART 02 -------------')
print('Winning reindeer has:', overall_winner(reindeers, time_t), 'points')
|
#coding:utf-8
#!/usr/bin/env python
import json
import gclib.curl
from gclib import cacheobj
class MMPersistent:
@staticmethod
def install(obj, roleid):
"""
Install the extended columns as attributes on obj
"""
for column in obj.extend_columns:
setattr(obj, column['name'], column['value'])
@staticmethod
def get(tp, roleid):
"""
Fetch the persisted object for roleid
"""
key = cacheobj.makeMKey(tp, roleid)
conn = DBConnection.getConnection()
res = conn.query("SELECT * FROM " + tp.__name__ + " WHERE roleid = %s", [roleid])
if len(res) == 1:
obj = tp()
obj.id = res[0][0]
obj.roleid = res[0][1]
obj.load(roleid, json.loads(res[0][2]))
i = 0
for column in obj.extend_columns:
setattr(obj, column['name'], res[0][3 + i])
i = i + 1
return obj
return None
@staticmethod
def save(obj):
"""
Save
"""
conn = DBConnection.getConnection()
data = obj.getData()
dumpstr = json.dumps(data)
update_columns = ['object = %s']
update_value = [dumpstr]
for column in obj.extend_columns:
update_columns.append(column['name'] + ' = %s')
update_value.append(getattr(obj, column['name']))
update_value.append(obj.id)
sql = "UPDATE " + obj.__class__.__name__ + " SET " + ', '.join(update_columns) + " WHERE id = %s"
conn.excute(sql, update_value)
@staticmethod
def delete(obj):
"""
Delete
"""
conn = DBConnection.getConnection()
conn.excute("DELETE FROM " + obj.__class__.__name__ + " WHERE id = %s", [obj.id])
return
|
"""
Week 5, Day 1: Course Schedule
There are a total of numCourses courses you have to take, labeled from 0 to numCourses-1.
Some courses may have prerequisites, for example to take course 0 you have to first
take course 1, which is expressed as a pair: [0,1]
Given the total number of courses and a list of prerequisite pairs, is it possible for
you to finish all courses?
Example 1:
Input: numCourses = 2, prerequisites = [[1,0]]
Output: true
Explanation: There are a total of 2 courses to take.
To take course 1 you should have finished course 0. So it is possible.
Example 2:
Input: numCourses = 2, prerequisites = [[1,0],[0,1]]
Output: false
Explanation: There are a total of 2 courses to take.
To take course 1 you should have finished course 0, and to take course 0
you should also have finished course 1. So it is impossible.
Constraints:
The input prerequisites is a graph represented by a list of edges, not adjacency
matrices. Read more about how a graph is represented.
You may assume that there are no duplicate edges in the input prerequisites.
1 <= numCourses <= 10^5
Hints:
This problem is equivalent to finding if a cycle exists in a directed graph.
If a cycle exists, no topological ordering exists and therefore it will be
impossible to take all courses.
"""
from collections import defaultdict
from typing import List
class Solution:
def canFinish(self, numCourses: int, prerequisites: List[List[int]]) -> bool:
follower_of = defaultdict(list)
for a, b in prerequisites:
follower_of[a].append(b)
def is_cyclic(course: int, visited: List[bool], memo: List[bool]) -> bool:
visited[course] = True
memo[course] = True
for neighbour in follower_of[course]:
if not visited[neighbour]:
if is_cyclic(neighbour, visited, memo):
return True
elif memo[neighbour]:
return True
memo[course] = False
return False
visited = [False] * numCourses
stack = [False] * numCourses
for course in range(numCourses):
if not visited[course]:
if is_cyclic(course, visited, stack):
return False
return True
if __name__ == '__main__':
o = Solution()
print(o.canFinish(2, [[1, 0]]), True)
print(o.canFinish(2, [[1, 0], [0, 1]]), False)
print(o.canFinish(4, [[0, 1], [1, 2], [2, 3], [0, 2], [2, 0], [3, 3]]), False)
print(o.canFinish(4, [[0, 1], [1, 2], [2, 3], [0, 2]]), True)
# last line of code
|
import pytest
# See: https://docs.pytest.org/en/latest/how-to/writing_plugins.html#assertion-rewriting
pytest.register_assert_rewrite("pants.testutil.pytest_util", "pants.testutil.process_util")
|
#
# @lc app=leetcode.cn id=77 lang=python3
#
# [77] Combinations
#
# @lc code=start
from typing import List
class Solution:
def combine(self, n: int, k: int) -> List[List[int]]:
res, track = [], []
def backtrack(n, depth):
# track is the current path
if len(track) == k:
# print(track)
res.append(track[:])
return
for i in range(depth, n+1):
# choose i
track.append(i)
backtrack(n, i + 1)
# undo the choice
track.pop()
backtrack(n, 1)
return res
# @lc code=end
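# Illustrative check (outside the submission block):
# Solution().combine(4, 2) -> [[1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]]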
|
# -*- coding: utf-8 -*-
'''
Created on 05-08-2013
@author: klangner
'''
from bluenotepad.storage.log import read_folder_sessions, read_sessions
from collections import defaultdict
import os
DATA_ROOT = os.path.join(os.path.dirname(__file__), '../../data/')
def event_before(sessions, event_name):
events = defaultdict(int)
for records in sessions.itervalues():
last_record = None
for record in records:
if record['event'] == event_name and last_record is not None:
events[last_record['event']] += 1
last_record = record
for name, counter in events.iteritems():
print name, ":\t", counter
if __name__ == '__main__':
# sessions = read_folder_sessions(DATA_ROOT)
sessions = read_sessions(DATA_ROOT + '2013-08-14.log')
event_before(sessions, 'Show preview')
|
"""
Copyright MIT and Harvey Mudd College
MIT License
Summer 2020
Defines the interface of the Display module of the racecar_core library.
"""
import abc
import numpy as np
import math
from typing import List, Tuple, Any
from nptyping import NDArray
import racecar_utils as rc_utils
class Display(abc.ABC):
"""
Allows the user to print images to the screen.
"""
# The radii of the dots used to indicate points
__BIG_DOT_RADIUS = 8
__SMALL_DOT_RADIUS = 4
__LIDAR_CAR_RADIUS = 2
def __init__(self, isHeadless: bool) -> None:
self.__isHeadless = isHeadless
@abc.abstractmethod
def create_window(self) -> None:
"""
Creates an empty window into which images will be displayed.
Note:
It is not necessary to call create_window before any of the other display
methods (show_color_image, show_depth_image, etc.). These methods will
automatically create a new window if one was not already created.
Example::
# Creates a window
rc.camera.create_window()
# Display an image in this window
image = rc.camera.get_color_image()
rc.display.show_color_image(image)
"""
pass
@abc.abstractmethod
def show_color_image(self, image: NDArray) -> None:
"""
Displays a color image in a window.
Args:
image: The color image to display to the screen.
Example::
image = rc.camera.get_color_image()
# Show the image captured by the camera
rc.display.show_color_image(image)
"""
pass
def show_depth_image(
self,
image: NDArray[(Any, Any), np.float32],
max_depth: int = 1000,
points: List[Tuple[int, int]] = [],
) -> None:
"""
Displays a depth image in grayscale in a window.
Args:
image: The depth image to display to the screen.
max_depth: The farthest depth to show in the image in cm. Anything past
this depth is shown as black.
points: A list of points in (pixel row, pixel column) format to show on
the image as colored dots.
Example::
depth_image = rc.camera.get_depth_image()
# Show the depth_image captured by the camera.
rc.display.show_depth_image(depth_image)
# Show anything that is at most 500 cm away, and show a black cross at
# row 3, column 5
rc.display.show_depth_image(depth_image, 500, [(3, 5)])
"""
if self.__isHeadless:
return
assert max_depth > 0, "max_depth must be positive."
for point in points:
assert (
0 <= point[0] < image.shape[0] and 0 <= point[1] < image.shape[1]
), f"The point [{point}] is not a valid pixel row and column within image."
color_image = rc_utils.colormap_depth_image(image, max_depth)
# Draw a dot at each point in points
for point in points:
rc_utils.draw_circle(
color_image,
point,
rc_utils.ColorBGR.green.value,
radius=self.__BIG_DOT_RADIUS,
)
rc_utils.draw_circle(
color_image,
point,
rc_utils.ColorBGR.blue.value,
radius=self.__SMALL_DOT_RADIUS,
)
self.show_color_image(color_image)
def show_lidar(
self,
samples: NDArray[Any, np.float32],
radius: int = 128,
max_range: int = 1000,
highlighted_samples: List[Tuple[float, float]] = [],
) -> None:
"""
Displays a set of LIDAR samples.
Args:
samples: A complete LIDAR scan.
radius: Half of the width or height (in pixels) of the generated image.
max_range: The farthest depth to show in the image in cm. Anything past
this depth is shown as black.
highlighted_samples: A list of samples in (angle, distance) format to show
as light blue dots. Angle must be in degrees from straight ahead
(clockwise), and distance must be in cm.
Note:
Each sample in samples is shown as a red pixel. Each sample in
highlighted_samples is shown as a blue pixel. The car is shown as a green
dot at the center of the visualization.
Warning:
            samples must be a complete LIDAR scan. This function assumes that the
            samples are equally spaced in angle and span the entire 360 degrees.
            If this is not the case, the visualization will be inaccurate.
Example::
lidar_scan = rc.lidar.get_samples()
# Show the lidar scan
rc.display.show_lidar(lidar_scan)
# Show the lidar scan out to 500 cm with the closest point highlighted
closest_point = rc_utils.get_lidar_closest_point(lidar_scan)
rc.display.show_lidar(lidar_scan, 500, [closest_point])
"""
assert radius > 0, "radius must be positive."
assert max_range > 0, "max_range must be positive."
if self.__isHeadless:
return
# Create a square black image with the requested radius
image = np.zeros((2 * radius, 2 * radius, 3), np.uint8, "C")
num_samples: int = len(samples)
# Draw a red pixel for each non-zero sample less than max_range
for i in range(num_samples):
if 0 < samples[i] < max_range:
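                # Convert the sample's polar coordinates (clockwise angle from
                # straight ahead, distance scaled to the image radius) into a
                # pixel (row, col): rows decrease as the point moves forward
                # (cos term), columns increase to the right (sin term)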
angle: float = 2 * math.pi * i / num_samples
length: float = radius * samples[i] / max_range
r: int = int(radius - length * math.cos(angle))
c: int = int(radius + length * math.sin(angle))
image[r][c][2] = 255
# Draw a green dot to denote the car
rc_utils.draw_circle(
image,
(radius, radius),
rc_utils.ColorBGR.green.value,
self.__LIDAR_CAR_RADIUS,
)
# Draw a light blue pixel for each point in highlighted_samples
for (angle, distance) in highlighted_samples:
if 0 < distance < max_range:
angle_rad = angle * math.pi / 180
length: float = radius * distance / max_range
r: int = int(radius - length * math.cos(angle_rad))
c: int = int(radius + length * math.sin(angle_rad))
image[r][c][0] = 255
image[r][c][1] = 255
image[r][c][2] = 0
self.show_color_image(image)
|
#!/usr/bin/env python
# A simple script to suck up HTML, convert any images to inline Base64
# encoded format and write out the converted file.
#
# Usage: python standalone_html.py <input_file.html> <output_file.html>
#
# TODO: Consider MHTML format: https://en.wikipedia.org/wiki/MHTML
import os
# from bs4 import BeautifulSoup
from lxml import etree, html
import urllib3
import urllib.parse as urlparse
import base64
import re
import mimetypes
def guess_type(filepath):
"""
    Return the mimetype of a file, given its path.
    This is a wrapper around two alternative methods - Unix 'file'-style
    magic which guesses the type based on file content (if available),
    and simple guessing based on the file extension (e.g. '.jpg').
:param filepath: Path to the file.
:type filepath: str
:return: Mimetype string.
:rtype: str
"""
try:
import magic # python-magic
return magic.from_file(filepath, mime=True)
    except ImportError:
        return mimetypes.guess_type(filepath)[0]
def file_to_base64(filepath):
"""
Returns the content of a file as a Base64 encoded string.
:param filepath: Path to the file.
:type filepath: str
:return: The file content, Base64 encoded.
:rtype: str
"""
with open(filepath, 'rb') as f:
encoded_str = base64.b64encode(f.read())
return encoded_str
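# Usage sketch for the helpers above (hypothetical file path):
# print(guess_type('photo.jpg'))            # e.g. 'image/jpeg'
# print(file_to_base64('photo.jpg')[:16])   # first bytes of the Base64 payload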
def make_html_images_inline(url):
"""
Takes an HTML file and writes a new version with inline Base64 encoded
images.
:param in_filepath: Input file path (HTML)
:type in_filepath: str
:param out_filepath: Output file path (HTML)
:type out_filepath: str
"""
# basepath = os.path.split(in_filepath.rstrip(os.path.sep))[0]
# soup = BeautifulSoup(open(in_filepath, 'r'), 'html.parser')
urlparsed = urlparse.urlparse(url)
http = urllib3.PoolManager()
parser = etree.HTMLParser(remove_blank_text=True)
htmltree = html.parse(url, parser=parser)
for img in htmltree.findall(".//img"):
img_path = urlparse.urlparse(img.attrib['src']).path
img_url = urlparsed._replace(path=img_path)
img_mimetype = mimetypes.guess_type(img_path)[0]
img_data = http.request('GET', img_url.geturl()).data
img.attrib['src'] = "data:%s;base64,%s" % (img_mimetype, base64.b64encode(img_data).decode('utf-8'))
htmlstring = etree.tostring(htmltree, pretty_print=False, with_tail=False).decode("utf-8")
htmlstring_1line = re.sub(r"\r?\n|\r", '', htmlstring)
return htmlstring_1line
# with open(out_filepath, 'w') as of:
# of.write(str(soup))
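# Usage sketch (hypothetical URL; the page's <img> tags are fetched and inlined):
# html_one_liner = make_html_images_inline('https://example.com/report.html')
# with open('report_standalone.html', 'w') as of:
#     of.write(html_one_liner)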
|
from Batch import batch2TrainData
from LoadFile import loadPrepareData, trimRareWords
from Algorithm import EncoderRNN, LuongAttnDecoderRNN
from torch import optim
from Train import trainIters
import torch.nn as nn
import random
import os
import torch
import argparse
parser = argparse.ArgumentParser(description='Train Data')
parser.add_argument("-l", "--loadtrain", action="store_true",
help="Load checkpoint train iterate")
parser.add_argument("-c", "--checkpoint", type=int,
help="Input checkpoint number")
parser.add_argument("-s", "--save", type=int,
help="save train iterate checkpoint every that number")
args = vars(parser.parse_args())
USE_CUDA = torch.cuda.is_available()
device = torch.device("cuda" if USE_CUDA else "cpu")
# Load/Assemble voc and pairs
corpus = "data"
datafile = os.path.join(corpus, "formatted_movie_lines.txt")
save_dir = os.path.join("model", "save")
voc, pairs = loadPrepareData(corpus, datafile)
MIN_COUNT = 3 # Minimum word count threshold for trimming
# Trim voc and pairs
pairs = trimRareWords(voc, pairs, MIN_COUNT)
# Example for validation
small_batch_size = 5
batches = batch2TrainData(voc, [random.choice(pairs)
for _ in range(small_batch_size)])
input_variable, lengths, target_variable, mask, max_target_len = batches
print("input_variable:", input_variable)
print("lengths:", lengths)
print("target_variable:", target_variable)
print("mask:", mask)
print("max_target_len:", max_target_len)
# Configure models
model_name = 'cb_model'
attn_model = 'dot'
hidden_size = 500
encoder_n_layers = 2
decoder_n_layers = 2
dropout = 0.1
batch_size = 64
# Set checkpoint to load from; set to None if starting from scratch
checkpoint_iter = args['checkpoint'] if args['checkpoint'] is not None else 4000
loadFilename = None
if args['loadtrain']:
loadFilename = os.path.join(save_dir, model_name, corpus,
'{}-{}_{}'.format(encoder_n_layers,
decoder_n_layers, hidden_size),
'{}_checkpoint.tar'.format(checkpoint_iter))
# Load model if a loadFilename is provided
if loadFilename:
checkpoint = torch.load(loadFilename, map_location=device)
encoder_sd = checkpoint['en']
decoder_sd = checkpoint['de']
encoder_optimizer_sd = checkpoint['en_opt']
decoder_optimizer_sd = checkpoint['de_opt']
embedding_sd = checkpoint['embedding']
voc.__dict__ = checkpoint['voc_dict']
else:
checkpoint = None
print('Building encoder and decoder ...')
# Initialize word embeddings
embedding = nn.Embedding(voc.num_words, hidden_size)
if loadFilename:
embedding.load_state_dict(embedding_sd)
# Initialize encoder & decoder models
encoder = EncoderRNN(hidden_size, embedding, encoder_n_layers, dropout)
decoder = LuongAttnDecoderRNN(
attn_model, embedding, hidden_size, voc.num_words, decoder_n_layers, dropout)
if loadFilename:
    encoder.load_state_dict(encoder_sd)
    decoder.load_state_dict(decoder_sd)
# Move models to the appropriate device whether or not a checkpoint was loaded
encoder = encoder.to(device)
decoder = decoder.to(device)
print('Models built and ready to go!')
# Configure training/optimization
clip = 50.0
teacher_forcing_ratio = 1.0
learning_rate = 0.0001
decoder_learning_ratio = 5.0
n_iteration = 4000
print_every = 1
save_every = args['save'] if args['save'] is not None else 4000
# Ensure dropout layers are in train mode
encoder.train()
decoder.train()
# Initialize optimizers
print('Building optimizers ...')
encoder_optimizer = optim.Adam(encoder.parameters(), lr=learning_rate)
decoder_optimizer = optim.Adam(
decoder.parameters(), lr=learning_rate * decoder_learning_ratio)
if loadFilename:
encoder_optimizer.load_state_dict(encoder_optimizer_sd)
decoder_optimizer.load_state_dict(decoder_optimizer_sd)
# If CUDA is available, move the optimizer state tensors to the GPU
if USE_CUDA:
for states in [encoder_optimizer.state.values(), decoder_optimizer.state.values()]:
for state in states:
for k, v in state.items():
if isinstance(v, torch.Tensor):
state[k] = v.cuda()
# Run training iterations
print("Starting Training!")
trainIters(model_name, voc, pairs, encoder, decoder, encoder_optimizer, decoder_optimizer,
embedding, encoder_n_layers, decoder_n_layers, save_dir, n_iteration, batch_size,
print_every, save_every, clip, corpus, loadFilename, teacher_forcing_ratio, hidden_size, checkpoint)
|
import argparse
import gym
import algs.q_learning
import test_envs
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Run Q-Learning algorithm in Gridworld environment."
)
parser.add_argument("--height", type=int, help="height of the grid", default=10)
parser.add_argument("--width", type=int, help="width of the grid", default=10)
parser.add_argument(
"--num-goals",
type=int,
help="number of goals/good states which ends the episode with +1 reward",
default=1,
)
parser.add_argument(
"--num-traps",
type=int,
help="number of traps/bad states which ends the episode with -1 reward",
default=8,
)
parser.add_argument(
"--display-delay",
type=float,
help="delay in seconds between each render frame",
default=0.01,
)
parser.add_argument(
"--path-noise-prob",
type=float,
help="probability of noise movements while generating gridworld",
default=0.25,
)
parser.add_argument(
"--blind-switch-prob",
type=float,
help="probability disabling/enabling informed search while generating the environment",
default=0.10,
)
parser.add_argument(
"--reward-per-action",
type=float,
help="reward given to the agent per action/timestep",
default=-0.001,
)
parser.add_argument(
"--env-seed",
type=int,
help="environment seed for pseudo-random numbers",
default=16,
)
parser.add_argument(
"--model-seed",
type=int,
help="model seed for pseudo-random numbers",
default=None,
)
parser.add_argument(
"--num-episodes", type=int, help="number of episodes to optimize", default=1024
)
parser.add_argument(
"--epsilon",
metavar=("START", "END"),
type=float,
nargs=2,
help="epsilon (e-greedy argument) in the form [start, end]",
default=(0.90, 0.05),
)
parser.add_argument(
"--epsilon-decay-steps",
type=int,
help="Number of steps (episodes) to decay epsilon (e-greedy argument)",
default=512,
)
parser.add_argument(
"--episodes-to-print",
type=int,
help="optimization delay (in episodes) between each log",
default=32,
)
parser.add_argument(
"--episodes-to-render",
type=int,
help="optimization delay (in episodes) between each render",
default=128,
)
parser.add_argument(
"--no-load",
help="if given, do not load any saved modes (WARNING: this operation will overwrite any previously saved models if --no-save is not given!)",
action="store_true",
)
parser.add_argument(
"--no-save",
help="do not save the optimized model",
action="store_true",
)
parser.add_argument(
"--reset-epsilon",
help="if given, reset the model epsilon (e-greedy) to the new given schedule ('start', 'end' and 'epsilon-decay-steps')",
action="store_true",
)
args = parser.parse_args()
try:
if args.no_load:
raise FileNotFoundError
model = algs.q_learning.QLearning.load("models/q_learning")
except FileNotFoundError:
env = gym.make(
id="Gridworld-v0",
height=args.height,
width=args.width,
num_traps=args.num_traps,
num_goals=args.num_goals,
display_delay=args.display_delay,
reward_per_action=args.reward_per_action,
path_noise_prob=args.path_noise_prob,
blind_switch_prob=args.blind_switch_prob,
)
env.seed(args.env_seed)
model = algs.q_learning.QLearning(
env,
epsilon=args.epsilon,
epsilon_decay_steps=args.epsilon_decay_steps,
random_state=args.model_seed,
)
model.connect_values_to_env()
if args.reset_epsilon:
model.reset_epsilon(
new_epsilon=args.epsilon, new_epsilon_decay_steps=args.epsilon_decay_steps
)
try:
model.optimize(
num_episodes=args.num_episodes,
episodes_to_print=args.episodes_to_print,
episodes_to_render=args.episodes_to_render,
)
except KeyboardInterrupt:
pass
model.run(render=True)
if not args.no_save:
model.save("models/q_learning")
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class UserPersonalInfo(models.Model):
user=models.OneToOneField(User,on_delete=models.CASCADE)
#Additional Story:
Profile_Link=models.URLField(blank=True)
Image_Profile=models.ImageField(upload_to='profile_pics',blank=True)
|
import logging
from pyfiles.db import player
from pyfiles.model import session
# Current connected but unauthenticated sessions
_connected_sessions = []
# Dict mapping of session IDs to usernames once authenticated/logged_in
_active_sessions = {}
def add_connected_session(session_id: str) -> None:
logging.info('NEW CONNECTED SESSION ' + session_id)
_connected_sessions.append(session_id)
def add_active_session(session_id: str, username: str) -> None:
logging.info('NEW ACTIVE SESSION ' + session_id)
_active_sessions[session_id] = username
def get_active_username(session_id: str) -> str:
    return _active_sessions[session_id]
def list_sessions() -> None:
"""Prints the connected sessionIds and activeSessions to show auth handling"""
logging.info('---SESSION-LOG to follow---')
logging.info('Currently connected sessions: ')
    if not _connected_sessions:
        logging.info('NONE')
    else:
        for session in _connected_sessions:
            logging.info(session)
logging.info('Currently active (authenticated) sessions: ')
if not _active_sessions:
logging.info('NONE')
else:
for session in _active_sessions:
logging.info(session + ' | ' + _active_sessions[session])
def remove_connected_sessions() -> None:
    """ Clears all connected sessions yet to be authenticated """
    del _connected_sessions[:]
def remove_connected_session(session_id: str) -> bool:
try:
logging.debug('Removing connected session: ' + session_id)
_connected_sessions.remove(session_id)
return True
except ValueError:
return False
def active_session_exists(session_id: str) -> bool:
""" Checks to see if there's a username assigned to a specific sessionId """
return session_id in _active_sessions and _active_sessions[session_id] is not None
def connected_session_exists(session_id: str) -> bool:
return session_id in _connected_sessions
def check_active_session(session_id: str, username: str) -> bool:
""" Checks to see if an active session exists matching session_id to uername"""
if session_id in _active_sessions:
if _active_sessions[session_id] == username:
print(_active_sessions[session_id])
return True
else:
logging.info('SessionID / Username mismatch! (' + session_id + ',' + username + ')')
else:
logging.info(username + ' not logged in or session not active')
return False
def remove_active_session(session_id: str) -> (bool, str):
    if session_id in _active_sessions:
        username = _active_sessions[session_id]
        del _active_sessions[session_id]  # Remove the sessionId from our activeSessions dict
        return True, username
    return False, None
""" Checks that a player with username exists and has a valid active session (logged in)
returns (bool, bool) meaning (found_player, valid_session_exists)
or that the check otherwise failed (bad data)
"""
def valid_player_session(self, username : str, session_id : str, found_player : player.Player) -> (bool, bool):
if username and session_id and found_player is not None:
if check_active_session(session_id, username):
return True, True
else:
return True, False
else:
return False, False
def contains_session_json(data: dict) -> bool:
# The client should pass this param in
return 'sessionJson' in data and 'sessionId' in data['sessionJson'] and 'username' in data['sessionJson']
def extract_session_json(json_data):
return json_data[session.SESSION_JSON_NAME]
# Returns True if there's a connected or active session
def is_sessionid_connected_or_active(sid):
logging.info('Validating SID: ')
logging.info(sid)
    return connected_session_exists(sid) or active_session_exists(sid)
"""
Checks for a valid (active) session ID and proxies to the right event handler if true
callback - the function to pass to if there's an active session
data - The data from SocketIO
"""
def verify_active_and_call(callback, data):
if data is not None:
if contains_session_json(data):
session_json = extract_session_json(data)
sid = session_json[session.SESSION_ID_JSON_NAME]
if is_sessionid_connected_or_active(sid) and active_session_exists(sid):
logging.info('Proxying event for request SID: '+sid)
callback(data)
else:
logging.error('Checking for active session before proxying to: ' + callback.__name__ +
'.. Could not find an active session for SID: ' + sid);
else:
logging.error('Checking for active session before proxying to: ' + callback.__name__ +
'.. sessionJson not provided!');
else:
# + ' SocketIO Event: ' + json.dumps(request.event)
logging.info('No data for proxy call to: ' + callback.__name__)
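# Usage sketch (hypothetical handler and payload; assumes session.SESSION_JSON_NAME
# is 'sessionJson' and SESSION_ID_JSON_NAME is 'sessionId'):
# def handle_move(data):
#     ...  # the real SocketIO event handler
# verify_active_and_call(handle_move,
#                        {'sessionJson': {'sessionId': 'abc123', 'username': 'alice'}})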
|
from django.contrib import admin
from Materia.models import Materia
from Materia.Turma.models import Turma
class MateriaAdmin(admin.ModelAdmin):
fieldsets = [
('Materia', {'fields': ['nome','sigla']}),
]
list_display = ('nome','sigla',)
class TurmaAdmin(admin.ModelAdmin):
pass
admin.site.register(Materia, MateriaAdmin)
admin.site.register(Turma, TurmaAdmin)
|
from django.core.management.base import BaseCommand, CommandError
from bay.models import Torrent, File
from unidecode import unidecode
import traceback
import csv
from pprint import pformat
import codecs
import os
import progressbar
def utf_8_encoder(unicode_csv_data):
for line in unicode_csv_data:
yield line.encode('utf-8')
class Command(BaseCommand):
args = '<data-directory>'
help = 'Imports the specified torrent from a data directory'
def scale(self, size, unit):
return {
'B': 1,
'K': 1024,
'M': 1024 * 1024,
'G': 1024 * 1024 * 1024
}[unit] * size
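    # For example, scale(1.5, 'M') returns 1572864.0 (1.5 * 1024 * 1024)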
    def create_file(self, torrent_id, file_details):
        _, extension = os.path.splitext(file_details['Filename'])
        if extension == '.mp3':
            size = self.scale(file_details['Size'], file_details['Unit'])
            file = File(
                torrent_id=torrent_id,
                name=file_details['Filename'],
                extension=extension,
                size=size,
            )
            file.save()
            # self.stdout.write('Created file "%s"' % file_details['Filename'])
            return True
        return False
def create_torrent(self, torrent_id, details, description):
torrent = Torrent(
id=torrent_id,
description=description,
title=details['Title'],
seeders=details['Seeders'],
leechers=details['Leechers'],
uploaded=details['Uploaded'],
uploader=details['By'],
infohash=details['Info Hash']
)
torrent.save()
return torrent
def process_torrent_directory(self, torrent_directory):
torrent_id = int(os.path.split(torrent_directory)[1])
description_file_path = torrent_directory + '/description.txt'
with codecs.open(description_file_path, 'r', 'utf-8-sig') as description_file:
description = description_file.read()
details_file_path = torrent_directory + '/details.csv'
try:
with open(details_file_path, 'r') as details_file:
                details_file.read(3)  # skip BOM
                details_reader = csv.reader(details_file.readlines())
                column_names = next(details_reader)
                column_values = next(details_reader)
details = dict(zip(column_names, column_values))
except:
print('Error when importing torrent details for torrent %i' % torrent_id)
raise
filelist_file_path = torrent_directory + '/filelist.csv'
with open(filelist_file_path, 'r') as filelist_file:
filelist_file.read(3) # skip BOM
filelist_reader = csv.reader(filelist_file.readlines())
            column_names = next(filelist_reader)
filelist = []
for file_info in filelist_reader:
filelist.append(dict(zip(column_names, file_info)))
try:
for idx, file_details in enumerate(filelist):
filelist[idx]['Size'] = float(filelist[idx]['Size'])
except:
# print('Failed to process the size value for torrent %i. Probably an encoding issue.' % torrent_id)
raise
details['Seeders'] = int(details['Seeders'])
details['Leechers'] = int(details['Leechers'])
details['Type'] = int(details['Type'])
title = details['Title']
if details['Type'] != 101:
# self.stdout.write('Skipping non-music torrent %i ("%s")' % (torrent_id, title))
# self.stdout.write('.', ending='')
pass
else:
torrent = self.create_torrent(torrent_id, details, description)
files_count = 0
for file in filelist:
success = self.create_file(torrent_id, file)
if success:
files_count += 1
if files_count == 1:
plural = ''
else:
plural = 's'
if files_count:
pass
# self.stdout.write('\rImported torrent %i "%s" containing %i .mp3 file%s' % (torrent_id, title, files_count, plural))
else:
# self.stdout.write('\rSkipping torrent %i "%s" containing no .mp3 files' % (torrent_id, title))
torrent.delete()
def handle(self, *args, **options):
torrent_data_dir = args[0]
file_count = 0
for (dirpath, dirnames, filenames) in os.walk(torrent_data_dir):
if filenames:
file_count += 1
progress_count = 0
pbar = progressbar.ProgressBar(
widgets=['Importing: ', progressbar.Percentage(),
' ', progressbar.Bar(marker=progressbar.RotatingMarker()),
' ', progressbar.ETA()],
maxval=file_count
).start()
for (dirpath, dirnames, filenames) in os.walk(torrent_data_dir):
if filenames:
try:
self.process_torrent_directory(dirpath)
except:
pass
# print("Failed to import torrent %s. Callstack:" % dirpath)
# traceback.print_exc()
# print("Skipping torrent.")
progress_count += 1
pbar.update(progress_count)
pbar.finish()
|
# Graph Theory Project - Quentin Lautischer and Joshua White
"""
Assignment 3 - Directions
DRIVING ROUTE FINDER
Part 1: Server
Part 2: Client
"""
|
import time
import chromedriver_binary
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from balance_check import logger, config
from balance_check.utils.captcha import CaptchaSolver
from balance_check.utils.browser import get_image_b64_by_id
from balance_check.provider import BalanceCheckProvider
from balance_check.validators.credit_card import Issuer, CreditCardSchema
class PrepaidGiftBalance(BalanceCheckProvider):
def __init__(self):
super().__init__()
self.website_url = "https://www.prepaidgiftbalance.com"
self.schema = CreditCardSchema([Issuer.Visa, Issuer.MasterCard], exp_date=False)
def scrape(self, fields):
# Open Selenium browser
browser = webdriver.Chrome()
browser.set_window_size(600, 800)
logger.info("Fetching balance check page")
browser.get(self.website_url)
try:
form = browser.find_elements_by_tag_name("form")[1]
except (NoSuchElementException, IndexError):
raise RuntimeError("Unable to find login form on page")
logger.info("Filling login form 1/2")
try:
form.find_element_by_name("accountNumber").send_keys(
fields["accountNumber"]
)
time.sleep(1)
except NoSuchElementException:
browser.close()
raise RuntimeError(f"Unable to find 'accountNumber' field on page")
# Click continue button
form.find_element_by_css_selector("input[type='submit']").click()
# Wait for page to load
try:
WebDriverWait(browser, 3).until(
EC.presence_of_element_located((By.ID, "login-form"))
)
except TimeoutException:
browser.close()
raise RuntimeError("Login page took too long to load")
try:
form = browser.find_element_by_id("login-form")
except NoSuchElementException:
browser.close()
raise RuntimeError("Unable to find login form on page")
logger.info("Solving CAPTCHA (~10s)")
# Extract CAPTCHA image from page
captcha_b64 = get_image_b64_by_id(browser, "captchaImg")
captcha_solver = CaptchaSolver(api_key=config.ANTI_CAPTCHA_KEY)
captcha = captcha_solver.solve_image_b64(captcha_b64)
if captcha["errorId"] != 0:
browser.close()
raise RuntimeError(
"Unable to solve CAPTCHA ({})".format(captcha["errorDescription"])
)
logger.info("Filling login form 2/2")
try:
form.find_element_by_name("cv2").send_keys(fields["cv2"])
time.sleep(1)
except NoSuchElementException:
browser.close()
raise RuntimeError("Unable to find 'cv2' field on page")
try:
form.find_element_by_id(
"_MultiStageFSVpasswordloginresponsive_WAR_cardportalresponsive_captchaText"
).send_keys(captcha["solution"]["text"])
time.sleep(1)
except NoSuchElementException:
browser.close()
raise RuntimeError("Unable to find CAPTCHA field on page")
# Click continue button
form.find_element_by_css_selector("input[type='submit']").click()
# Wait for page to load
try:
WebDriverWait(browser, 3).until(
EC.presence_of_element_located((By.ID, "cardBalanceInfo"))
)
except TimeoutException:
browser.close()
raise RuntimeError("Balance page took too long to load")
logger.info("Obtaining card information")
try:
avail_balance = browser.find_element_by_class_name("cardBalanceText").text
except NoSuchElementException:
browser.close()
raise RuntimeError("Could not find available card balance")
browser.close()
logger.info(f"Success! Card balance: {avail_balance}")
return {"initial_balance": None, "available_balance": avail_balance}
def check_balance(self, **kwargs):
if self.validate(kwargs):
logger.info("Checking balance for card: {}".format(kwargs["card_number"]))
return self.scrape(
{"accountNumber": kwargs["card_number"], "cv2": kwargs["cvv"]}
)
|
from django.db import models
from django.utils import timezone
from projects.models.project import Project
class TechnicalSheet(models.Model):
class Meta:
verbose_name = 'technicalsheet'
verbose_name_plural = 'technicalsheets'
created = models.DateTimeField(editable=False, auto_now_add=True)
last_modified = models.DateTimeField(editable=False, auto_now=True)
project = models.ForeignKey(Project, on_delete=models.CASCADE)
test_file = models.FileField(upload_to='documents/tech/', null=True, blank=True)
## other data
## auto save time
''' ## incase if auto_now, auto_now_add work
def save(self, *args, **kwargs):
if not self.created:
self.created = timezone.now()
self.last_modified = timezone.now()
return super(TechnicalSheet, self).save(*args, **kwargs)
'''
|
import serial
import time
# define version one object which does basic communication with arduino
class serialCommObj:
def __init__(self, port, baud):
self.Connection = serial.Serial(port,baud)
time.sleep(2)
def setFreqAndDuty(self,freq, duty):
        # maximum frequency is 500 Hz
        # duty is a fraction in [0, 1]
        if duty > 1:
            duty = 1
        if duty < 0:
            duty = 0
        roundedPeriod = round(1000/freq)
        highTimeInMilliseconds = round(duty*roundedPeriod)
lowTimeInMilliseconds = roundedPeriod - highTimeInMilliseconds
UpTimeString = 'u'+str(highTimeInMilliseconds)+'\n'
DownTimeString = 'd'+str(lowTimeInMilliseconds)+'\n'
print(bytes(UpTimeString,'ASCII'))
print(bytes(DownTimeString,'ASCII'))
self.Connection.write(bytes(UpTimeString,'ASCII'))
self.Connection.write(bytes(DownTimeString,'ASCII'))
return True
def singleDutyPhase(self, freq, phaseIndex):
qPeriod = round(0.25*(1000/freq))
comStr = phaseIndex+str(qPeriod)+'\n'
self.Connection.write(bytes(comStr,'ASCII'))
return True
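# Usage sketch (hypothetical port name; assumes an Arduino is attached that
# understands the 'u<ms>' / 'd<ms>' protocol above):
# comm = serialCommObj('/dev/ttyACM0', 9600)
# comm.setFreqAndDuty(100, 0.5)  # 100 Hz square wave at 50% duty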
|
from datetime import datetime
from django.views.generic import ListView
from .models import Battle
# Create your views here.
class UpcomingBattlesView(ListView):
    template_name = 'battles.html'
    context_object_name = 'battles'

    def get_queryset(self):
        # Evaluate now() per request; a class-level queryset would freeze
        # datetime.now() at import time
        return Battle.objects.filter(start_time__gte=datetime.now(), is_active=True)
class ResultsView(ListView):
queryset = Battle.objects.filter(is_active=True, is_complete=True).order_by('end_time')
template_name = 'results.html'
context_object_name = 'battles'
|
#!/usr/bin/env python3
import os
import signal
import sys
from time import strftime, sleep
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
def signal_handler(signal, frame):
if WEBDRIVER is not None:
WEBDRIVER.quit()
sys.exit(0)
def set_webdriver(driver):
options = Options()
options.headless = True
driver = webdriver.Firefox(executable_path=os.path.abspath("geckodriver"), options=options)
driver.implicitly_wait(5)
return driver
if __name__ == '__main__':
WEBDRIVER = None
WEBDRIVER = set_webdriver(WEBDRIVER)
signal.signal(signal.SIGINT, signal_handler)
print("Please Wait Starting Notify Me When is Up")
try:
TAIL = input("Enter URL > ")
URL_WEB = "https://downforeveryoneorjustme.com/" + (TAIL)
if URL_WEB == "https://downforeveryoneorjustme.com/":
raise Exception("No website to check status")
print("checking every 10 sec " + URL_WEB)
OFFLINE = True
ONLINE = True
ISSUE = 5
while True:
WEBDRIVER.get(URL_WEB)
if WEBDRIVER.current_url == "https://downforeveryoneorjustme.com/error":
raise Exception("This website doesn't exists")
array = WEBDRIVER.find_elements_by_class_name("is-size-6")
if len(array) != 0:
t = strftime("%Y-%m-%d %H:%M:%S")
if "It's not just you!" in array[0].text:
ISSUE = 5
print(t[11:], "-->", array[0].text)
if OFFLINE:
os.system('notify-send "-i" error "Notify Me!" "{0} "'
.format(array[0].text))
os.system('espeak -ven-us+f4 -s140 -a 500 "{0}"'.format(array[0].text))
OFFLINE = False
ONLINE = True
elif "It's just you" in array[0].text:
ISSUE = 5
print(t[11:], "-->", array[0].text)
if ONLINE:
os.system('notify-send "-i" info "Notify Me!" "{0} "'
.format(array[0].text))
os.system('espeak -ven-us+f4 -s140 -a 500 "{0}"'.format(array[0].text))
ONLINE = False
OFFLINE = True
else:
if ISSUE > 10:
os.system('notify-send "-i" critical "Notify Me!'
'" I have problems with downforeveryoneorjustme keep calm "')
sleep(ISSUE)
ISSUE = ISSUE + 2
sleep(2)
else:
raise Exception("Element is missing")
except Exception as error:
print("Panic exit", error)
if WEBDRIVER is not None:
WEBDRIVER.quit()
sys.exit(1)
|
#!/usr/bin/env python
# encoding: utf-8
import tornado.ioloop
import tornado.web
import tornado.autoreload
from tornado.options import options
import logging
from settings import settings
from gelyung.urls import url_patterns
from gelyung.sched import MonitTask
class MainApplication(tornado.web.Application):
def __init__(self):
logging.info("init MainApplication with settings: %s" % str(settings))
tornado.web.Application.__init__(self, url_patterns, **settings)
def main():
app = MainApplication()
app.listen(options.port)
MonitTask()
tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main()
|
from sys import argv
from os.path import exists
script, from_file, to_file = argv
print("Copying from {0} to {1}".format(from_file, to_file))
# we could do these two in one line, how?
# in_file = open(from_file)
# indata = in_file.read()
indata = open(from_file).read()
print("input file is {0} bytes long".format(len(indata)))
# print("does the output file exist? {0}".format(exists(to_file)))
# input()
# out_file = open(to_file, 'w')
# out_file.write(indata)
open(to_file, "w").write(indata)
print("Alright, all done")
# out_file.close()
# in_file.close()
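# A safer idiom closes both handles automatically (hedged as an alternative,
# not a fix to the exercise):
# with open(from_file) as in_file, open(to_file, 'w') as out_file:
#     out_file.write(in_file.read())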
|
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 2 12:33:02 2018
@author: Estagio
"""
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
cancer = load_breast_cancer()
x = cancer['data']
y = cancer['target']
X_train, X_test, y_train, y_test = train_test_split(x, y)
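# For a reproducible, class-balanced split one could pass extra arguments
# (values here are illustrative, not from the original script):
# X_train, X_test, y_train, y_test = train_test_split(
#     x, y, test_size=0.25, stratify=y, random_state=42)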
|
import pandas as pd
loop = True
chunkSize = 10000
chunks = []
index=0
reader=pd.read_csv('H:\\SMPData\\Weibo.Corpus\\Weibo.data\\merge\\weibodata.csv',iterator = True)
while loop:
    try:
        chunk = reader.get_chunk(chunkSize)
        chunks.append(chunk)
        index = index + 1
        print("Iteration %d" % index)
        if index > 2499:
            loop = False
    except StopIteration:
        loop = False
        print("Iteration is stopped.")
df = pd.concat(chunks, ignore_index=True)
df.columns=['id','reports','comments','source','time','text']
# print df['text'][0:10]
group=df['text'].groupby(df['id']).count()
# print group.size()
group.to_csv('message_count.csv',encoding='utf-8')
|
import pygame
from math import sqrt
pygame.init()
window = pygame.display.set_mode((600, 600))
pygame.display.set_caption('Tic-tac-toe')
current_player = 'X'
board = ["-", "-", "-",
"-", "-", "-",
"-", "-", "-"]
game_going_on = True
winner = None
font = pygame.font.Font('freesansbold.ttf', 42)
ls = []
clock = pygame.time.Clock()
def draw_board():
pygame.draw.rect(window, (0, 0, 0), (220, 90, 5, 420))
pygame.draw.rect(window, (0, 0, 0), (380, 90, 5, 420))
pygame.draw.rect(window, (0, 0, 0), (90, 220, 420, 5))
pygame.draw.rect(window, (0, 0, 0), (90, 380, 420, 5))
# pygame.draw.rect(window, (0, 0, 0), (105 + 105 // 2 + 135, 105 + 105 // 2 + 135, 20, 20))
def handle_turn():
global current_player
if current_player == 'X':
current_player = 'O'
else:
current_player = 'X'
def check_user_input(x, y):
x_rec = 105 + 105 // 2
y_rec = 105 + 105 // 2
r = 60
for m in range(9):
if m % 3 == 0 and m != 0:
x_rec = 105 + 105 // 2
y_rec += 135
distance = sqrt((x - x_rec) ** 2 + (y - y_rec) ** 2)
if distance < r:
return [x_rec, y_rec, m]
x_rec += 135
def win():
    global winner
    if check_rows():
        winner = check_rows()
        return True
    elif check_dial():
        winner = check_dial()
        return True
    elif check_col():
        winner = check_col()
        return True
    winner = None
    return False
# Check the rows for a win
def check_rows():
global game_going_on
if board[0] == board[1] == board[2] != "-":
game_going_on = False
return board[0]
elif board[3] == board[4] == board[5] != "-":
game_going_on = False
return board[3]
elif board[6] == board[7] == board[8] != "-":
game_going_on = False
return board[6]
else:
return None
# Check the columns for a win
def check_col():
global game_going_on
if board[0] == board[3] == board[6] != "-":
game_going_on = False
return board[0]
elif board[1] == board[4] == board[7] != "-":
game_going_on = False
return board[1]
elif board[2] == board[5] == board[8] != "-":
game_going_on = False
return board[2]
else:
return None
# Check the diagonals for a win
def check_dial():
global game_going_on
if board[0] == board[4] == board[8] != "-":
game_going_on = False
return board[0]
elif board[2] == board[4] == board[6] != "-":
game_going_on = False
return board[2]
else:
return None
# Check if there is a tie
def tie():
global game_going_on, winner
if "-" not in board:
game_going_on = False
winner = None
return True
while game_going_on:
clock.tick(60)
window.fill((255, 25, 255))
draw_board()
for event in pygame.event.get():
if event.type == pygame.QUIT:
game_going_on = False
if event.type == pygame.MOUSEBUTTONDOWN:
try:
x, y, n = check_user_input(*pygame.mouse.get_pos())
if len(ls) > 0:
for i in ls:
if x == i[0] and y == i[1]:
break
else:
ls.append([x, y, current_player])
board[n] = current_player
handle_turn()
else:
board[n] = current_player
ls.append([x, y, current_player])
handle_turn()
except TypeError:
pass
for i in ls:
window.blit(font.render(f'{i[2]}', 1, (255, 255, 255)), (i[0], i[1]))
if win():
handle_turn()
pygame.display.update()
pygame.time.delay(2000)
window.fill((255, 25, 255))
window.blit(font.render(f'Player {current_player} won', 1, (0, 0, 0)), (150, 300))
pygame.display.update()
pygame.time.delay(2000)
continue
if tie():
pygame.display.update()
pygame.time.delay(2000)
window.fill((255, 25, 255))
window.blit(font.render('It is a draw', 1, (0, 0, 0)), (200, 300))
pygame.display.update()
pygame.time.delay(2000)
pygame.display.update()
print(winner)
|
# Echo client program
import socket
import time
HOST = "10.0.0.2" # The remote host
PORT = 30002 # The same port as used by the server
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
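# speedj is a URScript command: drive the six joints toward the given joint
# speeds (rad/s) with acceleration a (rad/s^2) for t seconds (assumption: the
# server at HOST:PORT is a Universal Robots controller; 30002 is its secondary
# client interface)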
s.send(("speedj([0, 0, 0, 0, 0, 10], a=5.0, t=10)" + "\n").encode())
s.close()
|
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import preprocessing
abHeaders = ['sex', 'length', 'diameter', 'height', 'wholeWeight', 'shuckedWeight', 'visceraWeight', 'shellWeight', 'rings']
dfAb = pd.read_csv('abalone.data', sep=",", header=None, names=abHeaders, na_values=[" ?"])
#select categorical features to one hot encode
#this is only the gender feature
cat_dfAb = dfAb.select_dtypes(include=[object])
#print(cat_dfAb.head(30))
le = preprocessing.LabelEncoder()
cat2_dfAb = cat_dfAb.apply(le.fit_transform)
#print(cat2_dfHab.head(10))
enc = preprocessing.OneHotEncoder()
enc.fit(cat2_dfAb)
onehotlabels = enc.transform(cat2_dfAb).toarray()
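#note: pd.get_dummies(dfAb['sex']) would yield the same one-hot columns in a
#single step (an alternative, not the approach used here)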
#print(onehotlabels)
nocat_dfAb = dfAb.select_dtypes(exclude=[object])
#now lets remove the related terms: length
nocat_dfAb=nocat_dfAb.drop(columns=['diameter', 'shuckedWeight'])
nocatlabels = nocat_dfAb.to_numpy()
allabdata= np.concatenate((onehotlabels, nocatlabels), axis=1)
X_abalone = allabdata[:, :-1]
y_abalone= allabdata[:, -1]
#print(allabdata.shape)
#print(allabdata)
#print(X_ab)
#print(y_ab)
#GRAPHS
#bar graph by sex
dfAb["sex"].value_counts().plot(kind='bar')
#plt.show()
#bar graph length
out = pd.cut(dfAb['length'], bins=[0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
#plt.show()
#bar graph diameter
out = pd.cut(dfAb['diameter'], bins=[0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
#plt.show()
#bar graph height
out = pd.cut(dfAb['height'], bins=[0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
plt.title("Height")
#plt.show()
#bar graph wholeWeight
out = pd.cut(dfAb['wholeWeight'], bins=[0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.2, 2.4, 2.6, 2.8, 3.0], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
plt.title("Whole Weight")
#plt.show()
#bar graph shuckedWeight
out = pd.cut(dfAb['shuckedWeight'], bins=[0, 0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
plt.title("Shucked Weight")
#plt.show()
#bar graph visceraWeight
out = pd.cut(dfAb['visceraWeight'], bins=[0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
plt.title("Visera Weight")
#plt.show()
#bar graph shellWeight
out = pd.cut(dfAb['shellWeight'], bins=[0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0], include_lowest=True)
ax = out.value_counts(sort=False).plot.bar(rot=0, figsize=(6,4))
plt.title("Shell Weight")
#plt.show()
#bar graph rings
dfAb["rings"].value_counts().sort_index(ascending=True).plot(kind='bar')
plt.title("Rings")
#plt.show()
#pairwise scatter
#sns.pairplot(dfAb)
#plt.savefig('ab.png')
#scatter class vs age
#this is the one that worked
"""
g = sns.FacetGrid(dfHab, col="survival")
g = g.map(plt.hist, "age")
plt.show()
#scatter class vs opyear
g = sns.FacetGrid(dfHab, col="survival")
g = g.map(plt.hist, "opYear")
plt.show()
#scatter class vs nodes
g = sns.FacetGrid(dfHab, col="survival")
g = g.map(plt.hist, "nodes")
plt.show()
#scatter age vs nodes
dfHab.plot(kind='scatter',x='age',y='nodes',color='blue')
plt.show()
#scatter Opyear vs nodes
dfHab.plot(kind='scatter',x='opYear',y='nodes',color='blue')
plt.show()
sns.pairplot(dfHab)
plt.savefig('a.png')
"""
|
import tensorflow as tf
import numpy as np
from dps import cfg
from dps.env import TensorFlowEnv
from dps.register import RegisterBank
from dps.utils import Param, Config
from dps.rl.policy import ProductDist, Normal, Gamma, Policy
def build_env():
if cfg.room_angular:
return RoomAngular()
else:
return Room()
def build_policy(env, **kwargs):
if cfg.room_angular:
action_selection = ProductDist(Normal(), Normal(), Gamma())
else:
action_selection = ProductDist(Normal(), Normal())
return Policy(action_selection, env.obs_shape, **kwargs)
config = Config(
build_env=build_env,
n_controller_units=128,
build_policy=build_policy,
env_name='room',
T=20,
restart_prob=0.0,
max_step=0.3,
room_angular=False,
l2l=False,
reward_radius=0.5,
n_val=100,
)
class Room(TensorFlowEnv):
action_names = ['delta_x', 'delta_y']
T = Param()
reward_radius = Param()
max_step = Param()
restart_prob = Param()
l2l = Param()
n_val = Param()
def __init__(self, **kwargs):
self.val_input = self._make_input(self.n_val)
self.test_input = self._make_input(self.n_val)
self.rb = RegisterBank(
'RoomRB', 'x y r dx dy', 'goal_x goal_y', [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 'x y'
)
super(Room, self).__init__()
def _make_input(self, batch_size):
if self.l2l:
return np.random.uniform(low=-1.0, high=1.0, size=(batch_size, 4))
else:
return np.concatenate(
[np.random.uniform(low=-1.0, high=1.0, size=(batch_size, 2)),
np.zeros((batch_size, 2))], axis=1)
def _build_placeholders(self):
self.input = tf.placeholder(tf.float32, (None, 4))
def _make_feed_dict(self, n_rollouts, T, mode):
if mode == 'train':
inp = self._make_input(n_rollouts)
elif mode == 'val':
inp = self.val_input
elif mode == 'test':
inp = self.test_input
else:
raise Exception("Unknown mode: {}.".format(mode))
if n_rollouts is not None:
inp = inp[:n_rollouts, :]
return {self.input: inp}
def build_init(self, r):
batch_size = tf.shape(r)[0]
return self.rb.wrap(
x=self.input[:, 0:1], y=self.input[:, 1:2],
goal_x=self.input[:, 2:3], goal_y=self.input[:, 3:4],
dx=tf.fill((batch_size, 1), 0.0),
dy=tf.fill((batch_size, 1), 0.0),
r=tf.fill((batch_size, 1), 0.0))
def _process_actions(self, a):
delta_x, delta_y = tf.split(a, 2, axis=1)
return delta_x, delta_y
def build_step(self, t, r, a):
x, y, _, _, _, goal_x, goal_y = self.rb.as_tuple(r)
delta_x, delta_y = self._process_actions(a)
if self.max_step > 0:
delta_x = tf.clip_by_value(delta_x, -self.max_step, self.max_step)
delta_y = tf.clip_by_value(delta_y, -self.max_step, self.max_step)
new_x = tf.clip_by_value(x + delta_x, -1.0, 1.0)
new_y = tf.clip_by_value(y + delta_y, -1.0, 1.0)
if self.restart_prob > 0:
restart = tf.contrib.distributions.Bernoulli(self.restart_prob).sample(tf.shape(x))
new_x = tf.where(
tf.equal(restart, 1),
tf.contrib.distributions.Uniform(-1., 1.).sample(tf.shape(x)),
new_x)
new_y = tf.where(
tf.equal(restart, 1),
tf.contrib.distributions.Uniform(-1., 1.).sample(tf.shape(x)),
new_y)
reward = -tf.cast(tf.sqrt((new_x-goal_x)**2 + (new_y-goal_y)**2) > self.reward_radius, tf.float32)
new_registers = self.rb.wrap(
x=new_x, y=new_y, goal_x=goal_x, goal_y=goal_y,
dx=delta_x, dy=delta_y, r=reward)
return tf.fill((tf.shape(r)[0], 1), 0.0), reward, new_registers
class RoomAngular(Room):
action_names = ['delta_x', 'delta_y', 'mag']
def _process_actions(self, a):
delta_x, delta_y, mag = tf.split(a, 3, axis=1)
norm = tf.sqrt(delta_x**2 + delta_y**2)
        norm = tf.where(norm > 1e-6, norm, tf.ones_like(norm))  # avoid division by zero
delta_x = mag * delta_x / norm
delta_y = mag * delta_y / norm
return delta_x, delta_y
|
def start_master_application(essid, ap, password):
'''Start the master application on the network that you specify.
ARGS:
@essid -- the name of the network to put it on.
@ap -- the mac address of the ap.
RETURNS:
None
'''
from batman_sockets.TCPServerSocket import TCPServerSocket
from network_management.join_batman_network import join_batman_network
from web_app_communication.create_app_network import create_app_network
    print('Starting Network Master Application')
ip = '168.192.2.1'
try:
session = create_app_network(essid, password, ap, ip)
    except Exception:
print('Could Not Connect To The Internet.\n' +
'The network will be started in Isolation mode.\n' +
'Note: The network will be changed to Universal mode if the ' +
'master or any subservient node gains internet access.')
join_batman_network(essid, ap, ip)
socket = TCPServerSocket('168.192.2.1')
socket.start_server()
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Documents"),
"icon": "icon-star",
"items": [
{
"type": "doctype",
"name": "Checklist Requisition",
"description": _("Run Checklist")
},
{
"type": "doctype",
"name": "Checklist Task",
"description": _("Task Details")
}
# {
# "type": "doctype",
# "name": "Checklist Time Log",
# "description": _("Time Log Details"),
# }
]
},
{
"label": _("Masters"),
"icon": "icon-star",
"items": [
{
"type": "doctype",
"name": "Checklist",
"description": _("Checklist Details"),
}
]
},
{
"label": _("Standard Reports"),
"icon": "icon-star",
"items": [
{
"type": "report",
"is_query_report": True,
"name": "Checklist Requisition Analysis",
"description": _("Process/Checklist Report"),
"doctype": "Checklist Requisition",
},
{
"type": "report",
"is_query_report": True,
"name": "Task Wise Analysis",
"description": _("Tasks Report"),
"doctype": "Checklist Task",
},
{
"type": "page",
"name": "checklist-report",
"icon": "icon-sitemap",
"label": _("Graphical Reports"),
"description": _("Graphical Report for Checklist"),
},
]
},
]
|