hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dc6d90a32212d727753439c0b78323060b68b9c6 | 925 | py | Python | tests/tokenizer_spacy.py | tevnpowers/thesis | af55bb47db34d90fcef1de073d16a6a0dff43161 | [
"MIT"
] | 2 | 2020-03-13T22:57:23.000Z | 2020-03-13T22:57:46.000Z | tests/tokenizer_spacy.py | tevnpowers/thesis | af55bb47db34d90fcef1de073d16a6a0dff43161 | [
"MIT"
] | null | null | null | tests/tokenizer_spacy.py | tevnpowers/thesis | af55bb47db34d90fcef1de073d16a6a0dff43161 | [
"MIT"
] | null | null | null | import string
import spacy
from text_studio.utils.timer import timer
from text_studio.transformer import Transformer
class SpacyTokenizer(Transformer):
    """Transformer that lemmatizes text with spaCy, dropping stop words and
    punctuation tokens.

    NOTE(review): `setup` accepts `stopwords` and `strip` but never uses
    them, and `self.lower` is stored but never applied in
    `process_instance` -- confirm whether these options were meant to take
    effect.
    """

    def setup(self, stopwords=None, punct=None, lower=True, strip=True):
        # Downloads the model on every setup call; requires network access.
        spacy.cli.download("en_core_web_sm")
        # Only tokenization/lemmatization is needed, so the heavy pipeline
        # components are disabled for speed.
        self.nlp = spacy.load(
            "en_core_web_sm", disable=["parser", "tagger", "ner"]
        )
        self.lower = lower
        self.punct = punct or set(string.punctuation)

    def process_batch(self, X):
        # nlp.pipe streams documents through the pipeline efficiently.
        docs = list(self.nlp.pipe(X))
        return [list(self.process_instance(doc)) for doc in docs]

    def process_single(self, document):
        # Returns a generator of lemmas (see process_instance).
        return self.process_instance(document)

    def process_instance(self, document):
        # Yield the lemma of every token that is neither a stop word nor
        # punctuation.
        for token in document:
            lexeme = self.nlp.vocab[token.text]
            if lexeme.is_stop or (token.text in self.punct):
                continue
            yield token.lemma_
| 30.833333 | 72 | 0.650811 | 804 | 0.869189 | 233 | 0.251892 | 0 | 0 | 0 | 0 | 53 | 0.057297 |
dc6dedca034cca1e39d2cc1447e7675a569bfa52 | 25,336 | py | Python | lib/ndk/extypes.py | clayne/syringe-1 | 4a431aa65c371a2018fca95145a3952ba802a609 | [
"BSD-2-Clause"
] | null | null | null | lib/ndk/extypes.py | clayne/syringe-1 | 4a431aa65c371a2018fca95145a3952ba802a609 | [
"BSD-2-Clause"
] | null | null | null | lib/ndk/extypes.py | clayne/syringe-1 | 4a431aa65c371a2018fca95145a3952ba802a609 | [
"BSD-2-Clause"
] | null | null | null | import ptypes
from ptypes import *
from . import umtypes, ketypes, mmtypes
from .datatypes import *
class SYSTEM_INFORMATION_CLASS(pint.enum):
    """Enumeration of the SYSTEM_INFORMATION_CLASS values consumed by
    NtQuerySystemInformation / NtSetSystemInformation.

    The table is written as (value, name) pairs for readability and swapped
    into the (name, value) order that pint.enum expects.
    """
    _values_ = [(n, v) for v, n in [
        (0, 'SystemBasicInformation'),
        (1, 'SystemProcessorInformation'),
        (2, 'SystemPerformanceInformation'),
        (3, 'SystemTimeOfDayInformation'),
        (4, 'SystemPathInformation'),
        (5, 'SystemProcessInformation'),
        (6, 'SystemCallCountInformation'),
        (7, 'SystemDeviceInformation'),
        (8, 'SystemProcessorPerformanceInformation'),
        (9, 'SystemFlagsInformation'),
        (10, 'SystemCallTimeInformation'),
        (11, 'SystemModuleInformation'),
        (12, 'SystemLocksInformation'),
        (13, 'SystemStackTraceInformation'),
        (14, 'SystemPagedPoolInformation'),
        (15, 'SystemNonPagedPoolInformation'),
        (16, 'SystemHandleInformation'),
        (17, 'SystemObjectInformation'),
        (18, 'SystemPageFileInformation'),
        (19, 'SystemVdmInstemulInformation'),
        (20, 'SystemVdmBopInformation'),
        (21, 'SystemFileCacheInformation'),
        (22, 'SystemPoolTagInformation'),
        (23, 'SystemInterruptInformation'),
        (24, 'SystemDpcBehaviorInformation'),
        (25, 'SystemFullMemoryInformation'),
        (26, 'SystemLoadGdiDriverInformation'),
        (27, 'SystemUnloadGdiDriverInformation'),
        (28, 'SystemTimeAdjustmentInformation'),
        (29, 'SystemSummaryMemoryInformation'),
        (30, 'SystemMirrorMemoryInformation'),
        (31, 'SystemPerformanceTraceInformation'),
        (32, 'SystemObsolete0'),
        (33, 'SystemExceptionInformation'),
        (34, 'SystemCrashDumpStateInformation'),
        (35, 'SystemKernelDebuggerInformation'),
        (36, 'SystemContextSwitchInformation'),
        (37, 'SystemRegistryQuotaInformation'),
        (38, 'SystemExtendServiceTableInformation'),
        (39, 'SystemPrioritySeperation'),
        (40, 'SystemVerifierAddDriverInformation'),
        (41, 'SystemVerifierRemoveDriverInformation'),
        (42, 'SystemProcessorIdleInformation'),
        (43, 'SystemLegacyDriverInformation'),
        (44, 'SystemCurrentTimeZoneInformation'),
        (45, 'SystemLookasideInformation'),
        (46, 'SystemTimeSlipNotification'),
        (47, 'SystemSessionCreate'),
        (48, 'SystemSessionDetach'),
        (49, 'SystemSessionInformation'),
        (50, 'SystemRangeStartInformation'),
        (51, 'SystemVerifierInformation'),
        (52, 'SystemVerifierThunkExtend'),
        (53, 'SystemSessionProcessInformation'),
        (54, 'SystemLoadGdiDriverInSystemSpace'),
        (55, 'SystemNumaProcessorMap'),
        (56, 'SystemPrefetcherInformation'),
        (57, 'SystemExtendedProcessInformation'),
        (58, 'SystemRecommendedSharedDataAlignment'),
        (59, 'SystemComPlusPackage'),
        (60, 'SystemNumaAvailableMemory'),
        (61, 'SystemProcessorPowerInformation'),
        (62, 'SystemEmulationBasicInformation'),
        (63, 'SystemEmulationProcessorInformation'),
        (64, 'SystemExtendedHandleInformation'),
        (65, 'SystemLostDelayedWriteInformation'),
        (66, 'SystemBigPoolInformation'),
        (67, 'SystemSessionPoolTagInformation'),
        (68, 'SystemSessionMappedViewInformation'),
        (69, 'SystemHotpatchInformation'),
        (70, 'SystemObjectSecurityMode'),
        (71, 'SystemWatchdogTimerHandler'),
        (72, 'SystemWatchdogTimerInformation'),
        (73, 'SystemLogicalProcessorInformation'),
        (74, 'SystemWow64SharedInformationObsolete'),
        (75, 'SystemRegisterFirmwareTableInformationHandler'),
        (76, 'SystemFirmwareTableInformation'),
        (77, 'SystemModuleInformationEx'),
        (78, 'SystemVerifierTriageInformation'),
        (79, 'SystemSuperfetchInformation'),
        (80, 'SystemMemoryListInformation'),
        (81, 'SystemFileCacheInformationEx'),
        (82, 'SystemThreadPriorityClientIdInformation'),
        (83, 'SystemProcessorIdleCycleTimeInformation'),
        (84, 'SystemVerifierCancellationInformation'),
        (85, 'SystemProcessorPowerInformationEx'),
        (86, 'SystemRefTraceInformation'),
        (87, 'SystemSpecialPoolInformation'),
        (88, 'SystemProcessIdInformation'),
        (89, 'SystemErrorPortInformation'),
        (90, 'SystemBootEnvironmentInformation'),
        (91, 'SystemHypervisorInformation'),
        (92, 'SystemVerifierInformationEx'),
        (93, 'SystemTimeZoneInformation'),
        (94, 'SystemImageFileExecutionOptionsInformation'),
        (95, 'SystemCoverageInformation'),
        (96, 'SystemPrefetchPatchInformation'),
        (97, 'SystemVerifierFaultsInformation'),
        (98, 'SystemSystemPartitionInformation'),
        (99, 'SystemSystemDiskInformation'),
        (100, 'SystemProcessorPerformanceDistribution'),
        (101, 'SystemNumaProximityNodeInformation'),
        (102, 'SystemDynamicTimeZoneInformation'),
        (103, 'SystemCodeIntegrityInformation'),
        (104, 'SystemProcessorMicrocodeUpdateInformation'),
        (105, 'SystemProcessorBrandString'),
        (106, 'SystemVirtualAddressInformation'),
        (107, 'SystemLogicalProcessorAndGroupInformation'),
        (108, 'SystemProcessorCycleTimeInformation'),
        (109, 'SystemStoreInformation'),
        (110, 'SystemRegistryAppendString'),
        (111, 'SystemAitSamplingValue'),
        (112, 'SystemVhdBootInformation'),
        (113, 'SystemCpuQuotaInformation'),
        (114, 'SystemNativeBasicInformation'),
        (115, 'SystemErrorPortTimeouts'),
        (116, 'SystemLowPriorityIoInformation'),
        (117, 'SystemTpmBootEntropyInformation'),
        (118, 'SystemVerifierCountersInformation'),
        (119, 'SystemPagedPoolInformationEx'),
        (120, 'SystemSystemPtesInformationEx'),
        (121, 'SystemNodeDistanceInformation'),
        (122, 'SystemAcpiAuditInformation'),
        (123, 'SystemBasicPerformanceInformation'),
        (124, 'SystemQueryPerformanceCounterInformation'),
        (125, 'SystemSessionBigPoolInformation'),
        (126, 'SystemBootGraphicsInformation'),
        (127, 'SystemScrubPhysicalMemoryInformation'),
        (128, 'SystemBadPageInformation'),
        (129, 'SystemProcessorProfileControlArea'),
        (130, 'SystemCombinePhysicalMemoryInformation'),
        (131, 'SystemEntropyInterruptTimingCallback'),
        (132, 'SystemConsoleInformation'),
        (133, 'SystemPlatformBinaryInformation'),
        (134, 'SystemThrottleNotificationInformation'),
        (135, 'SystemHypervisorProcessorCountInformation'),
        (136, 'SystemDeviceDataInformation'),
        (137, 'SystemDeviceDataEnumerationInformation'),
        (138, 'SystemMemoryTopologyInformation'),
        (139, 'SystemMemoryChannelInformation'),
        (140, 'SystemBootLogoInformation'),
        (141, 'SystemProcessorPerformanceInformationEx'),
        (142, 'SystemCriticalProcessErrorLogInformation'),
        (143, 'SystemSecureBootPolicyInformation'),
        (144, 'SystemPageFileInformationEx'),
        (145, 'SystemSecureBootInformation'),
        (146, 'SystemEntropyInterruptTimingRawInformation'),
        (147, 'SystemPortableWorkspaceEfiLauncherInformation'),
        (148, 'SystemFullProcessInformation'),
        (149, 'SystemKernelDebuggerInformationEx'),
        (150, 'SystemBootMetadataInformation'),
        (151, 'SystemSoftRebootInformation'),
        (152, 'SystemElamCertificateInformation'),
        (153, 'SystemOfflineDumpConfigInformation'),
        (154, 'SystemProcessorFeaturesInformation'),
        (155, 'SystemRegistryReconciliationInformation'),
        (156, 'SystemEdidInformation'),
        (157, 'SystemManufacturingInformation'),
        (158, 'SystemEnergyEstimationConfigInformation'),
        (159, 'SystemHypervisorDetailInformation'),
        (160, 'SystemProcessorCycleStatsInformation'),
        (161, 'SystemVmGenerationCountInformation'),
        (162, 'SystemTrustedPlatformModuleInformation'),
        (163, 'SystemKernelDebuggerFlags'),
        (164, 'SystemCodeIntegrityPolicyInformation'),
        (165, 'SystemIsolatedUserModeInformation'),
        (166, 'SystemHardwareSecurityTestInterfaceResultsInformation'),
        (167, 'SystemSingleModuleInformation'),
        (168, 'SystemAllowedCpuSetsInformation'),
        (169, 'SystemDmaProtectionInformation'),
        (170, 'SystemInterruptCpuSetsInformation'),
        (171, 'SystemSecureBootPolicyFullInformation'),
        (172, 'SystemCodeIntegrityPolicyFullInformation'),
        (173, 'SystemAffinitizedInterruptProcessorInformation'),
        (174, 'SystemRootSiloInformation'),
        (175, 'SystemCpuSetInformation'),
        (176, 'SystemCpuSetTagInformation'),
        (177, 'SystemWin32WerStartCallout'),
        (178, 'SystemSecureKernelProfileInformation'),
        (179, 'SystemCodeIntegrityPlatformManifestInformation'),
        (180, 'SystemInterruptSteeringInformation'),
        (181, 'SystemSuppportedProcessorArchitectures'),
        (182, 'SystemMemoryUsageInformation'),
        (183, 'SystemCodeIntegrityCertificateInformation'),
        (184, 'SystemPhysicalMemoryInformation'),
        (185, 'SystemControlFlowTransition'),
        (186, 'SystemKernelDebuggingAllowed'),
        (187, 'SystemActivityModerationExeState'),
        (188, 'SystemActivityModerationUserSettings'),
        (189, 'SystemCodeIntegrityPoliciesFullInformation'),
        (190, 'SystemCodeIntegrityUnlockInformation'),
        (191, 'SystemIntegrityQuotaInformation'),
        (192, 'SystemFlushInformation'),
        (193, 'SystemProcessorIdleMaskInformation'),
        (194, 'SystemSecureDumpEncryptionInformation'),
        (195, 'SystemWriteConstraintInformation'),
        (196, 'SystemKernelVaShadowInformation'),
        (197, 'SystemHypervisorSharedPageInformation'),
        (198, 'SystemFirmwareBootPerformanceInformation'),
        (199, 'SystemCodeIntegrityVerificationInformation'),
        (200, 'SystemFirmwarePartitionInformation'),
        (201, 'SystemSpeculationControlInformation'),
        (202, 'SystemDmaGuardPolicyInformation'),
        (203, 'SystemEnclaveLaunchControlInformation'),
        (204, 'SystemWorkloadAllowedCpuSetsInformation'),
        (205, 'SystemCodeIntegrityUnlockModeInformation'),
        (206, 'SystemLeapSecondInformation'),
        (207, 'SystemFlags2Information'),
        (208, 'SystemSecurityModelInformation'),
        (209, 'SystemCodeIntegritySyntheticCacheInformation'),
    ]]
class SYSTEM_MANUFACTURING_INFORMATION(pstruct.type):
    # Payload for the SystemManufacturingInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemManufacturingInformation')
    _fields_ = [
        (ULONG, 'Options'),
        (umtypes.UNICODE_STRING, 'ProfileName'),
    ]

class SYSTEM_ENERGY_ESTIMATION_CONFIG_INFORMATION(pstruct.type):
    # Payload for the SystemEnergyEstimationConfigInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemEnergyEstimationConfigInformation')
    _fields_ = [
        (UCHAR, 'Enabled'),
    ]

class HV_DETAILS(parray.type):
    # Four 32-bit values (one hypervisor CPUID-style leaf).
    _object_, length = ULONG, 4
class SYSTEM_HYPERVISOR_DETAIL_INFORMATION(pstruct.type):
    """Hypervisor detail block: seven HV_DETAILS leaves.

    Bug fix: the original declared 'HvFeatures' twice; the fifth field is
    'HwFeatures' in the Windows SYSTEM_HYPERVISOR_DETAIL_INFORMATION layout,
    and a duplicate field name makes one of the two entries unreachable by
    name.

    NOTE(review): the class name says "detail" but it is registered under
    'SystemHypervisorInformation' rather than
    'SystemHypervisorDetailInformation' -- confirm which class value this
    structure is actually used with before changing the registration.
    """
    type = SYSTEM_INFORMATION_CLASS.byname('SystemHypervisorInformation')
    _fields_ = [
        (HV_DETAILS, 'HvVendorAndMaxFunction'),
        (HV_DETAILS, 'HypervisorInterface'),
        (HV_DETAILS, 'HypervisorVersion'),
        (HV_DETAILS, 'HvFeatures'),
        (HV_DETAILS, 'HwFeatures'),        # was a duplicate 'HvFeatures'
        (HV_DETAILS, 'EnlightenmentInfo'),
        (HV_DETAILS, 'ImplementationLimits'),
    ]
class SYSTEM_PROCESSOR_CYCLE_STATS_INFORMATION(pstruct.type):
    # Payload for the SystemProcessorCycleStatsInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemProcessorCycleStatsInformation')
    _fields_ = [
        # 4 buckets of [2] 64-bit cycle counters.
        (dyn.array(dyn.array(ULONGLONG, 2), 4), 'Cycles'),
    ]

class SYSTEM_KERNEL_DEBUGGER_FLAGS(pstruct.type):
    # Payload for the SystemKernelDebuggerFlags query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemKernelDebuggerFlags')
    _fields_ = [
        (UCHAR, 'KernelDebuggerIgnoreUmExceptions'),
    ]

class SYSTEM_CODEINTEGRITYPOLICY_INFORMATION(pstruct.type):
    # Payload for the SystemCodeIntegrityPolicyInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemCodeIntegrityPolicyInformation')
    _fields_ = [
        (ULONG, 'Options'),
        (ULONG, 'HVCIOptions'),
        (ULONGLONG, 'Version'),
        (GUID, 'PolicyGuid'),
    ]

class SYSTEM_ISOLATED_USER_MODE_INFORMATION(pstruct.type):
    # Payload for the SystemIsolatedUserModeInformation query (VBS/HVCI state).
    type = SYSTEM_INFORMATION_CLASS.byname('SystemIsolatedUserModeInformation')
    _fields_ = [
        (UCHAR, 'SecureKernelRunning'),
        (UCHAR, 'HvciEnabled'),
        (UCHAR, 'HvciStrictMode'),
        (UCHAR, 'DebugEnabled'),
        (UCHAR, 'SpareFlags'),
        (UCHAR, 'TrustletRunning'),
        (UCHAR, 'SpareFlags2'),
        (dyn.array(UCHAR, 6), 'Spare0'),
        (ULONGLONG, 'Spare'),
    ]
class SYSTEM_SINGLE_MODULE_INFORMATION(pstruct.type):
    """Payload for the SystemSingleModuleInformation query."""
    type = SYSTEM_INFORMATION_CLASS.byname('SystemSingleModuleInformation')

    def __ExInfo(self):
        # Imported lazily to avoid a circular import with rtltypes.
        # Bug fix: the original used the bare `import rtltypes`, which fails
        # under Python 3 because rtltypes is a sibling module of this
        # package (cf. `from . import umtypes, ketypes, mmtypes` above),
        # not a top-level module on sys.path.
        from . import rtltypes
        return rtltypes.RTL_PROCESS_MODULE_INFORMATION

    _fields_ = [
        (PVOID, 'TargetModuleAddress'),
        (__ExInfo, 'ExInfo'),
    ]
class SYSTEM_DMA_PROTECTION_INFORMATION(pstruct.type):
    # Payload for the SystemDmaProtectionInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemDmaProtectionInformation')
    _fields_ = [
        (UCHAR, 'DmaProtectionsAvailable'),
        (UCHAR, 'DmaProtectionsInUse'),
    ]

class SYSTEM_INTERRUPT_CPU_SET_INFORMATION(pstruct.type):
    # Payload for the SystemInterruptCpuSetsInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemInterruptCpuSetsInformation')
    _fields_ = [
        (ULONG, 'Gsiv'),
        (USHORT, 'Group'),
        (ULONGLONG, 'CpuSets'),
    ]

class SYSTEM_SECUREBOOT_POLICY_INFORMATION(pstruct.type):
    # Payload for the SystemSecureBootPolicyInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemSecureBootPolicyInformation')
    _fields_ = [
        (GUID, 'PolicyPublisher'),
        (ULONG, 'PolicyVersion'),
        (ULONG, 'PolicyOptions'),
    ]

class SYSTEM_SECUREBOOT_POLICY_FULL_INFORMATION(pstruct.type):
    # Payload for the SystemSecureBootPolicyFullInformation query.
    # NOTE(review): 'Policy' is declared as a single UCHAR although
    # 'PolicySize' suggests a variable-length blob follows -- confirm.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemSecureBootPolicyFullInformation')
    _fields_ = [
        (SYSTEM_SECUREBOOT_POLICY_INFORMATION, 'PolicyInformation'),
        (ULONG, 'PolicySize'),
        (UCHAR, 'Policy'),
    ]

class SYSTEM_ROOT_SILO_INFORMATION(pstruct.type):
    # Payload for the SystemRootSiloInformation query.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemRootSiloInformation')
    _fields_ = [
        (ULONG, 'NumberOfSilos'),
        (PVOID, 'SiloList'),
    ]
class SUPERFETCH_INFORMATION_CLASS(pint.enum):
    """Sub-information classes used inside a SUPERFETCH_INFORMATION request.

    Written as (value, name) pairs and swapped into pint.enum's
    (name, value) order.
    """
    _values_ = [(n, v) for v, n in [
        (0x1, 'SuperfetchRetrieveTrace'),
        (0x2, 'SuperfetchSystemParameters'),
        (0x3, 'SuperfetchLogEvent'),
        (0x4, 'SuperfetchGenerateTrace'),
        (0x5, 'SuperfetchPrefetch'),
        (0x6, 'SuperfetchPfnQuery'),
        (0x7, 'SuperfetchPfnSetPriority'),
        (0x8, 'SuperfetchPrivSourceQuery'),
        (0x9, 'SuperfetchSequenceNumberQuery'),
        (0xA, 'SuperfetchScenarioPhase'),
        (0xB, 'SuperfetchWorkerPriority'),
        (0xC, 'SuperfetchScenarioQuery'),
        (0xD, 'SuperfetchScenarioPrefetch'),
        (0xE, 'SuperfetchRobustnessControl'),
        (0xF, 'SuperfetchTimeControl'),
        (0x10, 'SuperfetchMemoryListQuery'),
        (0x11, 'SuperfetchMemoryRangesQuery'),
        (0x12, 'SuperfetchTracingControl'),
        (0x13, 'SuperfetchTrimWhileAgingControl'),
        (0x14, 'SuperfetchInformationMax'),
    ]]
class SUPERFETCH_INFORMATION(pstruct.type):
    # Envelope for SystemSuperfetchInformation queries; 'InfoClass' selects
    # the meaning of the buffer referenced by 'Data'/'Length'.
    type = SYSTEM_INFORMATION_CLASS.byname('SystemSuperfetchInformation')

    class _InfoClass(SUPERFETCH_INFORMATION_CLASS, ULONG):
        pass

    _fields_ = [
        (ULONG, 'Version'),
        (ULONG, 'Magic'),
        (_InfoClass, 'InfoClass'),
        (PVOID, 'Data'),
        (ULONG, 'Length'),
    ]

class PFS_PRIVATE_PAGE_SOURCE_TYPE(pint.enum):
    # Which kind of owner a private-page source belongs to.
    _values_ = [(n, v) for v, n in [
        (0x0, 'PfsPrivateSourceKernel'),
        (0x1, 'PfsPrivateSourceSession'),
        (0x2, 'PfsPrivateSourceProcess'),
        (0x3, 'PrfsPrivateSourceMax'),
    ]]

class PFS_PRIVATE_PAGE_SOURCE(pstruct.type):
    # Identifies the origin of a set of private pages.
    class _SourceId(dynamic.union):
        # Either a session id or a process id depending on 'Type'.
        _fields_ = [
            (DWORD, 'SessionId'),
            (DWORD, 'ProcessId'),
        ]
    _fields_ = [
        (PFS_PRIVATE_PAGE_SOURCE_TYPE, 'Type'),
        (_SourceId, 'SourceId'),
        (dyn.array(DWORD, 2), 'SpareDwords'),
        (ULONG, 'ImagePathHash'),
        (ULONG, 'UniqueProcessHash'),
    ]
class PF_PRIVSOURCE_INFO_V3(pstruct.type):
    # Per-source record for version-3 private-source query responses.
    class _Owner(dynamic.union):
        # Either a kernel EPROCESS pointer or a global virtual address.
        _fields_ = [
            (ULONG_PTR, 'EProcess'),
            (ULONG_PTR, 'GlobalVA'),
        ]
    _fields_ = [
        (PFS_PRIVATE_PAGE_SOURCE, 'DbInfo'),
        (_Owner, 'Owner'),
        (ULONG, 'WsPrivatePages'),
        (ULONG, 'TotalPrivatePages'),
        (ULONG, 'SessionID'),
        (dyn.array(CHAR, 16), 'ImageName'),
        (dyn.array(BYTE, 12), 'SpareBytes'),
    ]

class PF_PRIVSOURCE_INFO_V3PLUS(pstruct.type):
    # Per-source record for post-version-3 responses; most of the layout is
    # undecoded ('data2'/'data3'/'data' are opaque).
    _fields_ = [
        (dyn.array(BYTE, 8), 'data2'),
        (DWORD, 'ProcessId'),
        (dyn.array(BYTE, 16), 'data3'),
        (ULONG_PTR, 'EProcess'),
        (dyn.array(BYTE, 60), 'data'),
    ]
class PF_PRIVSOURCE_QUERY_REQUEST(pstruct.type):
    """Versioned request/response for SuperfetchPrivSourceQuery; the record
    layout after 'Version' depends on the version number."""

    class _sv3(pstruct.type):
        # Version-3 body: a counted array of PF_PRIVSOURCE_INFO_V3 records.
        _fields_ = [
            (ULONG, 'InfoCount'),
            (lambda self: dyn.array(PF_PRIVSOURCE_INFO_V3, self['InfoCount'].li.int()), 'InfoArrayV3'),
        ]

    class _sv3plus(pstruct.type):
        # Post-version-3 body: an extra 'Type' field precedes the count.
        _fields_ = [
            (ULONG, 'Type'),
            (ULONG, 'InfoCount'),
            (lambda self: dyn.array(PF_PRIVSOURCE_INFO_V3PLUS, self['InfoCount'].li.int()), 'InfoArrayV3Plus'),
        ]

    def __Info(self):
        # Select the body layout from the already-decoded 'Version' field;
        # versions below 3 are not handled.
        version = self['Version'].li.int()
        if version == 3:
            return PF_PRIVSOURCE_QUERY_REQUEST._sv3
        elif version > 3:
            return PF_PRIVSOURCE_QUERY_REQUEST._sv3plus
        raise NotImplementedError(version)

    _fields_ = [
        (ULONG, 'Version'),
        (__Info, 'Info'),
        (dyn.align(4), 'alignment(Info)'),
    ]
class SYSTEM_POOLTAG(pstruct.type):
    # One per-tag record of the pool-tag accounting table.
    _fields_ = [
        (dyn.clone(pstr.string, length=4), 'Tag'),
        (ULONG, 'PagedAllocs'),
        (ULONG, 'PagedFrees'),
        (SIZE_T, 'PagedUsed'),
        (ULONG, 'NonPagedAllocs'),
        (ULONG, 'NonPagedFrees'),
        (SIZE_T, 'NonPagedUsed'),
    ]

class SYSTEM_POOLTAG_INFORMATION(pstruct.type):
    # Counted array of SYSTEM_POOLTAG records (SystemPoolTagInformation).
    _fields_ = [
        (ULONG, 'Count'),
        (lambda self: dyn.array(SYSTEM_POOLTAG, self['Count'].li.int()), 'TagInfo'),
    ]
# Opaque thread identifier used by executive resources.
class ERESOURCE_THREAD(ULONG_PTR): pass

class OWNER_ENTRY(pstruct.type, versioned):
    # One owner slot of an ERESOURCE's owner table.
    _fields_ = [
        (ERESOURCE_THREAD, 'OwnerThread'),
        (ULONG, 'TableSize'),
        # Pad the entry out to pointer alignment (8 on WIN64, else 4).
        (lambda self: dyn.padding(8 if getattr(self, 'WIN64', False) else 4), 'padding(TableSize)'),
    ]

class ERESOURCE(pstruct.type, versioned):
    # Executive resource (reader/writer lock) as laid out in kernel memory.
    # NOTE(review): 'ExclusiveWatiers' is a typo for 'ExclusiveWaiters';
    # the misspelled key is kept because callers may already index by it.
    _fields_ = [
        (LIST_ENTRY, 'SystemResourcesList'),
        (P(OWNER_ENTRY), 'OwnerTable'),
        (SHORT, 'ActiveCount'),
        (USHORT, 'Flag'),
        (lambda self: dyn.align(8 if getattr(self, 'WIN64', False) else 4), 'align(SharedWaiters)'), # FIXME: this might not be right
        (P(ketypes.KSEMAPHORE), 'SharedWaiters'),
        (P(ketypes.KEVENT), 'ExclusiveWatiers'),
        (OWNER_ENTRY, 'OwnerEntry'),
        (ULONG, 'ActiveEntries'),
        (ULONG, 'ContentionCount'),
        (ULONG, 'NumberOfSharedWaiters'),
        (ULONG, 'NumberOfExclusiveWaiters'),
        # Pointer-sized on WIN64, absent (zero-width) on 32-bit.
        (lambda self: PVOID if getattr(self, 'WIN64', False) else pint.uint_t, 'Reserved2'),
        (PVOID, 'Address'),
        (ketypes.KSPIN_LOCK, 'SpinLock'),
    ]
class POOL_HEADER(pstruct.type, versioned):
    """Header preceding every kernel pool allocation.

    The first ULONG is a packed bitfield whose layout differs between
    32-bit and 64-bit kernels and between paged and non-paged pools; the
    variant is chosen dynamically by __Ulong1.

    Bug fixes relative to the original:
      * __Ulong1 referenced an undefined name `pooltype` on the 32-bit
        path (NameError); it now uses `pooltype32`.
      * summary() called `sys.getdefaultencoding()` although `sys` is never
        imported in this module; since `unicode_escape` output is pure
        ASCII, the bytes are decoded with an explicit 'ascii' codec, which
        yields the identical string.
    """

    class _Ulong(pbinary.struct):
        # Shared pretty-printer for all four bitfield variants.
        def summary(self):
            res = []
            res.append("Type={:s}({:d})".format(self.item('PoolType').str(), self.item('PoolType').int()))
            res.append("Index={:d}".format(self['PoolIndex']))
            res.append("PreviousSize={:#x}".format(self['PreviousSize']))
            res.append("BlockSize={:#x}".format(self['BlockSize']))
            return ' '.join(res)

    class _Ulong1_PagedPool(_Ulong):
        # 32-bit layout, paged pool: 7+9+7+9 = 32 bits.
        class _PagedPoolType(mmtypes._POOL_TYPE_PagedPool):
            length = 7
        _fields_ = [
            (_PagedPoolType, 'PoolType'),
            (9, 'BlockSize'),
            (7, 'PoolIndex'),
            (9, 'PreviousSize'),
        ]

    class _Ulong1_NonPagedPool(_Ulong):
        # 32-bit layout, non-paged pool.
        class _NonPagedPoolType(mmtypes._POOL_TYPE_NonPagedPool):
            length = 7
        _fields_ = [
            (_NonPagedPoolType, 'PoolType'),
            (9, 'BlockSize'),
            (7, 'PoolIndex'),
            (9, 'PreviousSize'),
        ]

    class _Ulong164_PagedPool(_Ulong):
        # 64-bit layout, paged pool: 8+8+8+8 = 32 bits.
        class _PagedPoolType(mmtypes._POOL_TYPE_PagedPool):
            length = 8
        _fields_ = [
            (_PagedPoolType, 'PoolType'),
            (8, 'BlockSize'),
            (8, 'PoolIndex'),
            (8, 'PreviousSize'),
        ]

    class _Ulong164_NonPagedPool(_Ulong):
        # 64-bit layout, non-paged pool.
        class _NonPagedPoolType(mmtypes._POOL_TYPE_NonPagedPool):
            length = 8
        _fields_ = [
            (_NonPagedPoolType, 'PoolType'),
            (8, 'BlockSize'),
            (8, 'PoolIndex'),
            (8, 'PreviousSize'),
        ]

    def __Ulong1(self):
        # Pick the non-paged variant if a NONPAGED attribute says so, or if
        # a PAGED attribute says we are NOT paged; default to paged.
        nonpaged_attribute = self.NONPAGED if hasattr(self, 'NONPAGED') else not self.PAGED if hasattr(self, 'PAGED') else False
        if nonpaged_attribute:
            pooltype32, pooltype64 = (POOL_HEADER._Ulong1_NonPagedPool, POOL_HEADER._Ulong164_NonPagedPool)
        else:
            pooltype32, pooltype64 = (POOL_HEADER._Ulong1_PagedPool, POOL_HEADER._Ulong164_PagedPool)
        # Bug fix: the 32-bit branch previously referenced the undefined
        # name `pooltype` and raised NameError at decode time.
        res = pooltype64 if getattr(self, 'WIN64', False) else pooltype32
        return pbinary.littleendian(res)

    _fields_ = [
        (__Ulong1, 'Ulong1'),
        (dyn.clone(pstr.string, length=4), 'PoolTag'),
        # Only present on 64-bit kernels; zero-width integer otherwise.
        (lambda self: PVOID if getattr(self, 'WIN64', False) else pint.uint_t, 'ProcessBilled'),
    ]

    def summary(self):
        res, tag = self['Ulong1'], self['PoolTag'].str()
        # unicode_escape always produces ASCII bytes, so decoding with
        # 'ascii' is exact (and removes the undeclared `sys` dependency).
        encoded = tag.encode('unicode_escape')
        return "\"{:s}\" {:s}".format(encoded.decode('ascii').replace('"', '\\"'), res.summary())
# Forward declaration: POOL_FREE_CHUNK and POOL_FREE_CHUNK_LIST_ENTRY
# reference each other, so the fields are assigned after both exist.
class POOL_FREE_CHUNK(pstruct.type, versioned): pass

class POOL_FREE_CHUNK_LIST_ENTRY(LIST_ENTRY):
    # Doubly-linked list whose entries are embedded inside POOL_FREE_CHUNK
    # at the 'ListEntry' member.
    _object_ = fpointer(POOL_FREE_CHUNK, 'ListEntry')
    _path_ = ['ListEntry']

POOL_FREE_CHUNK._fields_ = [
    (POOL_HEADER, 'Header'),
    (POOL_FREE_CHUNK_LIST_ENTRY, 'ListEntry'),
]
class GENERAL_LOOKASIDE(pstruct.type):
    """Kernel lookaside list header (GENERAL_LOOKASIDE).

    Bug fix: __PoolType referenced `nonpaged_Attribute` (capital A) while
    the local variable is `nonpaged_attribute`, so the method always raised
    NameError; the casing is now consistent.
    """

    @pbinary.littleendian
    class _PagedPoolType(mmtypes._POOL_TYPE_PagedPool):
        length = 32

    @pbinary.littleendian
    class _NonPagedPoolType(mmtypes._POOL_TYPE_NonPagedPool):
        length = 32

    def __PoolType(self):
        # Non-paged if a NONPAGED attribute says so, or if a PAGED
        # attribute says we are NOT paged; default to paged.
        nonpaged_attribute = self.NONPAGED if hasattr(self, 'NONPAGED') else not self.PAGED if hasattr(self, 'PAGED') else False
        return GENERAL_LOOKASIDE._NonPagedPoolType if nonpaged_attribute else GENERAL_LOOKASIDE._PagedPoolType

    _fields_ = [
        (dyn.clone(SLIST_HEADER, _object_=POOL_FREE_CHUNK, _path_=['ListEntry']), 'ListHead'),
        (UINT16, 'Depth'),
        (UINT16, 'MaximumDepth'),
        (ULONG, 'TotalAllocates'),
        (ULONG, 'AllocateMissesOrHits'),
        (ULONG, 'TotalFrees'),
        (ULONG, 'FreeMissesOrHits'),
        (__PoolType, 'Type'),
        (dyn.clone(pstr.string, length=4), 'Tag'),
        (ULONG, 'Size'),
        (PVOID, 'Allocate'),
        (PVOID, 'Free'),
        (LIST_ENTRY, 'ListEntry'),
        (ULONG, 'LastTotalAllocates'),
        (ULONG, 'LastAllocateMissesOrHits'),
        (dyn.array(ULONG, 2), 'Future'),
    ]
class PP_LOOKASIDE_LIST(pstruct.type):
    # Pair of lookaside pointers: per-processor ('P') and system-wide ('L').
    _fields_ = [
        (P(GENERAL_LOOKASIDE), 'P'),
        (P(GENERAL_LOOKASIDE), 'L'),
    ]
class POOL_DESCRIPTOR(pstruct.type, versioned):
    """Kernel pool descriptor (POOL_DESCRIPTOR).

    Bug fix: __PoolType referenced `nonpaged_Attribute` (capital A) while
    the local variable is `nonpaged_attribute`, so the method always raised
    NameError; the casing is now consistent.
    """

    @pbinary.littleendian
    class _PagedPoolType(mmtypes._POOL_TYPE_PagedPool):
        length = 32

    @pbinary.littleendian
    class _NonPagedPoolType(mmtypes._POOL_TYPE_NonPagedPool):
        length = 32

    def __PoolType(self):
        # Same NONPAGED/PAGED attribute convention as GENERAL_LOOKASIDE.
        nonpaged_attribute = self.NONPAGED if hasattr(self, 'NONPAGED') else not self.PAGED if hasattr(self, 'PAGED') else False
        return POOL_DESCRIPTOR._NonPagedPoolType if nonpaged_attribute else POOL_DESCRIPTOR._PagedPoolType

    def __ListHeads(self):
        # One free-list head per pool block size within a 4 KiB pool page
        # (block granularity is 16 bytes on WIN64, 8 otherwise).
        POOL_PAGE_SIZE = pow(2, 12)
        POOL_BLOCK_SIZE = 16 if getattr(self, 'WIN64', False) else 8
        POOL_LISTS_PER_PAGE = POOL_PAGE_SIZE // POOL_BLOCK_SIZE
        return dyn.array(POOL_FREE_CHUNK_LIST_ENTRY, POOL_LISTS_PER_PAGE)

    _fields_ = [
        (__PoolType, 'PoolType'),
        (ULONG, 'PoolIndex'),
        (ULONG, 'RunningAllocs'),
        (ULONG, 'RunningDeAllocs'),
        (ULONG, 'TotalPages'),
        (ULONG, 'TotalBigPages'),
        (ULONG, 'Threshold'),
        (PVOID, 'LockAddress'),
        (PVOID, 'PendingFrees'),
        (LONG, 'ThreadsProcessingDeferrals'),
        (LONG, 'PendingFreeDepth'),
        (ULONG, 'TotalBytes'),
        (ULONG, 'Spare0'),
        (__ListHeads, 'ListHeads'),
    ]
class POOL_TRACKER_TABLE(pstruct.type):
    # Per-tag allocation accounting entry (PoolTrackTable).
    _fields_ = [
        (ULONG, 'Key'),
        (ULONG, 'NonPagedAllocs'),
        (ULONG, 'NonPagedFrees'),
        (SIZE_T, 'NonPagedBytes'),
        (ULONG, 'PagedAllocs'),
        (ULONG, 'PagedFrees'),
        (SIZE_T, 'PagedBytes'),
    ]

class POOL_TRACKER_BIG_PAGES(pstruct.type):
    # Accounting entry for large ("big page") pool allocations.
    _fields_ = [
        (PVOID, 'Va'),
        (ULONG, 'Key'),
        (ULONG, 'NumberOfPages'),
        (ULONG, 'QuotaObject'),
    ]
| 38.446131 | 135 | 0.64363 | 25,057 | 0.988988 | 0 | 0 | 400 | 0.015788 | 0 | 0 | 10,384 | 0.409852 |
dc6f33b488d5c21f4b47a7970e947ab1c0093889 | 3,829 | py | Python | spyke/enginePreview.py | m4reQ/spyke | 4b1aa4c01c5c33e050f03eb9e8d396f9716f9e93 | [
"Unlicense"
] | null | null | null | spyke/enginePreview.py | m4reQ/spyke | 4b1aa4c01c5c33e050f03eb9e8d396f9716f9e93 | [
"Unlicense"
] | 4 | 2022-01-24T01:35:28.000Z | 2022-02-10T19:48:35.000Z | spyke/enginePreview.py | m4reQ/spyke | 4b1aa4c01c5c33e050f03eb9e8d396f9716f9e93 | [
"Unlicense"
] | null | null | null | from OpenGL import GL
from PIL import Image
from pathlib import Path
import numpy as np
import gc
import os
import ctypes
# DXT1 compressed-texture format token (not exposed by the GL module).
GL_COMPRESSED_RGBA_S3TC_DXT1_EXT = 0x83F1

# GL object handles, populated by the __Setup* helpers in RenderPreview.
VBO = None
VAO = None
TEXTURE = None
SHADER = None

# Fullscreen quad as two triangles: x, y, z, u, v per vertex.
vertexData = [
    -1.0, -1.0, 0.0, 0.0, 1.0,
    -1.0, 1.0, 0.0, 0.0, 0.0,
    1.0, 1.0, 0.0, 1.0, 0.0,
    1.0, 1.0, 0.0, 1.0, 0.0,
    1.0, -1.0, 0.0, 1.0, 1.0,
    -1.0, -1.0, 0.0, 0.0, 1.0]

_filepath = os.path.join(
    Path(__file__).parent.parent.parent, "branding/spykeLogo.dds")

# NOTE(review): `dds_loader` is used here but never imported in this file --
# confirm where it is supposed to come from and add the import.
tex = dds_loader.DDSTexture()
tex.load(_filepath)
# Bug fix: np.fromstring on binary data is deprecated/removed in modern
# NumPy; np.frombuffer is the documented equivalent (read-only view, which
# is fine since the data is only uploaded to GL).
texData = np.frombuffer(tex.data, dtype=np.uint8)
texImageSize = tex.real_size

vertSource = """
#version 450 core
layout(location = 0) in vec3 aPosition;
layout(location = 1) in vec2 aTexCoord;
out vec2 vTexCoord;
void main() {
vTexCoord = aTexCoord;
gl_Position = vec4(aPosition, 1.0f);
}
"""

fragSource = """
#version 450 core
in vec2 vTexCoord;
uniform sampler2D uTexture;
out vec4 Color;
void main() {
Color = texture(uTexture, vTexCoord);
}
"""
def __SetupShader():
    """Compile and link the preview shader program into the global SHADER.

    Raises RuntimeError with the GL info log if a stage fails to compile --
    the original silently ignored compile errors.
    """
    global SHADER

    def _compile(stage, source):
        # Compile one shader stage; the two stages previously duplicated
        # this sequence inline.
        shader = GL.glCreateShader(stage)
        GL.glShaderSource(shader, source)
        GL.glCompileShader(shader)
        if not GL.glGetShaderiv(shader, GL.GL_COMPILE_STATUS):
            raise RuntimeError(GL.glGetShaderInfoLog(shader))
        return shader

    SHADER = GL.glCreateProgram()
    vert = _compile(GL.GL_VERTEX_SHADER, vertSource)
    frag = _compile(GL.GL_FRAGMENT_SHADER, fragSource)
    GL.glAttachShader(SHADER, vert)
    GL.glAttachShader(SHADER, frag)

    GL.glLinkProgram(SHADER)
    GL.glValidateProgram(SHADER)

    # The stage objects are no longer needed once linked into the program.
    GL.glDetachShader(SHADER, vert)
    GL.glDetachShader(SHADER, frag)
    GL.glDeleteShader(vert)
    GL.glDeleteShader(frag)
def __SetupVbo():
    """Create the vertex buffer and upload the quad's vertex data."""
    global VBO

    VBO = GL.glGenBuffers(1)
    GL.glBindBuffer(GL.GL_ARRAY_BUFFER, VBO)

    byteSize = len(vertexData) * ctypes.sizeof(ctypes.c_float)
    buffer = np.asarray(vertexData, dtype=np.float32)
    GL.glBufferData(GL.GL_ARRAY_BUFFER, byteSize, buffer, GL.GL_STATIC_DRAW)
def __SetupVao():
    """Create the vertex array object describing the interleaved layout."""
    global VAO

    floatSize = ctypes.sizeof(ctypes.c_float)
    stride = (3 + 2) * floatSize  # vec3 position + vec2 texcoord

    VAO = GL.glGenVertexArrays(1)
    GL.glBindVertexArray(VAO)
    GL.glBindBuffer(GL.GL_ARRAY_BUFFER, VBO)

    # attribute 0: position (vec3) at the start of each vertex
    GL.glVertexAttribPointer(0, 3, GL.GL_FLOAT, False, stride, ctypes.c_void_p(0))
    GL.glEnableVertexAttribArray(0)

    # attribute 1: texture coordinate (vec2) right after the position
    GL.glVertexAttribPointer(1, 2, GL.GL_FLOAT, False, stride, ctypes.c_void_p(3 * floatSize))
    GL.glEnableVertexAttribArray(1)
def __SetupTexture():
    """Upload the DXT1-compressed logo into the global TEXTURE object."""
    global TEXTURE

    TEXTURE = GL.glGenTextures(1)
    GL.glBindTexture(GL.GL_TEXTURE_2D, TEXTURE)

    # 1024x1024 base level, pre-compressed data uploaded as-is.
    GL.glCompressedTexImage2D(
        GL.GL_TEXTURE_2D, 0, GL_COMPRESSED_RGBA_S3TC_DXT1_EXT, 1024, 1024, texImageSize, texData)

    for param in (GL.GL_TEXTURE_MAG_FILTER, GL.GL_TEXTURE_MIN_FILTER):
        GL.glTexParameter(GL.GL_TEXTURE_2D, param, GL.GL_LINEAR)
def CleanupPreview():
    """Delete every GL object created for the preview and free module data."""
    global vertexData, texData, vertSource, fragSource

    GL.glClear(GL.GL_COLOR_BUFFER_BIT)

    GL.glDeleteProgram(SHADER)
    GL.glDeleteBuffers(1, [VBO])
    GL.glDeleteVertexArrays(1, [VAO])
    GL.glDeleteTextures(1, [TEXTURE])

    # Drain the GL error queue so stale errors do not leak to later code.
    while GL.glGetError() != GL.GL_NO_ERROR:
        pass

    # Drop the module-level payloads; they are not needed after the preview.
    del vertexData, texData, vertSource, fragSource
    gc.collect()
def RenderPreview():
    """Create all GL resources and draw the preview quad once."""
    global VBO, VAO, TEXTURE, SHADER

    # Resource creation order matters: the VAO records the bound VBO.
    __SetupShader()
    __SetupVbo()
    __SetupVao()
    __SetupTexture()

    GL.glEnable(GL.GL_BLEND)
    GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA)

    GL.glUseProgram(SHADER)
    GL.glBindVertexArray(VAO)
    GL.glBindTexture(GL.GL_TEXTURE_2D, TEXTURE)

    GL.glClear(GL.GL_COLOR_BUFFER_BIT)
    GL.glDrawArrays(GL.GL_TRIANGLES, 0, 6)
    GL.glBindTexture(GL.GL_TEXTURE_2D, 0)

    # Drain the GL error queue after drawing.
    while GL.glGetError() != GL.GL_NO_ERROR:
        pass
| 22.523529 | 97 | 0.687908 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 384 | 0.100287 |
dc6f435009ba534907e0424d4bc520d7d078dd5e | 329 | py | Python | cses-problem-set/1629 Movie Festival.py | jaredliw/python-question-bank | 9c8c246623d8d171f875700b57772df0afcbdcdf | [
"MIT"
] | 1 | 2021-04-08T07:49:15.000Z | 2021-04-08T07:49:15.000Z | cses-problem-set/1629 Movie Festival.py | jaredliw/leetcode-solutions | 9c8c246623d8d171f875700b57772df0afcbdcdf | [
"MIT"
] | null | null | null | cses-problem-set/1629 Movie Festival.py | jaredliw/leetcode-solutions | 9c8c246623d8d171f875700b57772df0afcbdcdf | [
"MIT"
] | 1 | 2022-01-23T02:12:24.000Z | 2022-01-23T02:12:24.000Z | # Time: 0.72 s
# CSES 1629 Movie Festival: greedy interval scheduling -- always take the
# film that finishes earliest among those that start after we are free.
count = int(input())
festival = [tuple(map(int, input().split())) for _ in range(count)]
festival.sort(key=lambda interval: interval[1])

watched = 0
free_at = 0
for begins, finishes in festival:
    if begins >= free_at:
        free_at = finishes
        watched += 1
print(watched)
| 23.5 | 51 | 0.659574 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.042553 |
dc72a0d962a7a0e4845a4c13d164512dff7b1695 | 1,487 | py | Python | YOLOv1/config.py | SkyLord2/Yolo-v1-by-keras | 8db7426db9d4de8a0d0f81a505567ae4b3500e09 | [
"MIT"
] | null | null | null | YOLOv1/config.py | SkyLord2/Yolo-v1-by-keras | 8db7426db9d4de8a0d0f81a505567ae4b3500e09 | [
"MIT"
] | null | null | null | YOLOv1/config.py | SkyLord2/Yolo-v1-by-keras | 8db7426db9d4de8a0d0f81a505567ae4b3500e09 | [
"MIT"
] | null | null | null | import os
'''
path and dataset parameter
配置文件
'''
DATA_PATH = 'data'
PASCAL_PATH = os.path.join(DATA_PATH, 'pascal_voc')
CACHE_PATH = os.path.join(PASCAL_PATH, 'cache')
OUTPUT_DIR = os.path.join(PASCAL_PATH, 'output') # where output files are stored: data/pascal_voc/output
WEIGHTS_DIR = os.path.join(PASCAL_PATH, 'weights') # weights directory: data/pascal_voc/weights
WEIGHTS_FILE = None # weights file
# WEIGHTS_FILE = os.path.join(DATA_PATH, 'weights', 'YOLO_small.ckpt')
# the 20 object classes of the PASCAL VOC dataset
CLASSES = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
           'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
           'motorbike', 'person', 'pottedplant', 'sheep', 'sofa',
           'train', 'tvmonitor']
FLIPPED = True

"""
model parameter
"""
IMAGE_SIZE = 448 # input image size
CELL_SIZE = 7 # grid size (the image is divided into cell_size * cell_size cells)
BOXES_PER_CELL = 2 # each cell predicts two bounding boxes
ALPHA = 0.1 # negative slope of the Leaky ReLU
DISP_CONSOLE = False

"""
下面这几个是论文中涉及的参数
"""
# Loss-weighting coefficients taken from the YOLOv1 paper.
OBJECT_SCALE = 1.0
NOOBJECT_SCALE = 1.0
CLASS_SCALE = 2.0
COORD_SCALE = 5.0

"""
hyper-parameter
"""
GPU = ''
LEARNING_RATE = 0.0001 # learning rate
DECAY_STEPS = 30000
DECAY_RATE = 0.1
STAIRCASE = True
BATCH_SIZE = 64 # batch size
MAX_ITER = 135 # number of iterations; the paper uses 135, adjustable
SUMMARY_ITER = 10
SAVE_ITER = 1000
MOMENTUM = 0.9 # optimizer momentum

"""
test parameter
"""
THRESHOLD = 0.2
IOU_THRESHOLD = 0.5 # IOU阈值0.5 | 24.783333 | 97 | 0.627438 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 836 | 0.5015 |
dc73b72fc6848a6e17daae8fe15f8aad5356d498 | 707 | py | Python | sarna/auxiliary/user_helpers.py | rsrdesarrollo/sarna | 0c1f44e06a932520b70e505585a5469b77f6302e | [
"MIT"
] | 25 | 2019-03-11T22:42:52.000Z | 2022-03-15T09:49:15.000Z | sarna/auxiliary/user_helpers.py | hackingmess/sarna | 0c1f44e06a932520b70e505585a5469b77f6302e | [
"MIT"
] | 14 | 2019-01-08T08:35:51.000Z | 2022-03-11T23:30:28.000Z | sarna/auxiliary/user_helpers.py | hackingmess/sarna | 0c1f44e06a932520b70e505585a5469b77f6302e | [
"MIT"
] | 12 | 2019-07-26T05:38:32.000Z | 2022-03-29T09:54:49.000Z | from typing import List
from wtforms import ValidationError
from sarna.core.roles import valid_auditors, valid_managers
from sarna.model import User
def users_are_managers(_, field):
    """WTForms validator: every selected user must hold a manager role.

    ``field.data`` may be a single ``User`` or a list of users; raises
    ``ValidationError`` for the first user whose type is not a manager type.
    """
    users: List[User] = field.data
    # isinstance (instead of `type(...) != list`) also accepts list subclasses.
    if not isinstance(users, list):
        users = [users]
    for user in users:
        if user.user_type not in valid_managers:
            raise ValidationError('user {} is not a manager'.format(user.name))
def user_is_auditor(_, field):
    """WTForms validator: every selected user must hold an auditor role.

    ``field.data`` may be a single ``User`` or a list of users; raises
    ``ValidationError`` for the first user whose type is not an auditor type.
    """
    users: List[User] = field.data
    if not isinstance(users, list):
        # BUG FIX: the original used list(users), which tries to *iterate*
        # a single User object (TypeError) instead of wrapping it in a list
        # (cf. users_are_managers above, which wraps correctly).
        users = [users]
    for user in users:
        if user.user_type not in valid_auditors:
            raise ValidationError('user {} is not an auditor'.format(user.name))
| 24.37931 | 80 | 0.673267 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 53 | 0.074965 |
dc744794d57c761d617d1d83c8575d25c369489b | 16,133 | py | Python | geo/bms/old/models.py | Tamlyn78/geo | dd63372acdd1fe8b744c05eca5ad23836e6a1604 | [
"MIT"
] | null | null | null | geo/bms/old/models.py | Tamlyn78/geo | dd63372acdd1fe8b744c05eca5ad23836e6a1604 | [
"MIT"
] | null | null | null | geo/bms/old/models.py | Tamlyn78/geo | dd63372acdd1fe8b744c05eca5ad23836e6a1604 | [
"MIT"
] | null | null | null | from os.path import join, splitext
from uuid import uuid4
import datetime
from django.db import models
#from django.utils.encoding import python_2_unicode_compatible
from django.utils import timezone
from django.urls import reverse
from django.contrib.auth.models import User
# Create your models here.
#@python_2_unicode_compatible # only if you need to support Python 2
class Organisation(models.Model):
    """An organisation that clients may belong to."""
    name = models.TextField(max_length = None, null = True)
    abbreviation = models.TextField(max_length = None, null = True)
    address = models.TextField(max_length = None, blank = True, null = True)
    notes = models.TextField(max_length = None, blank = True, null = True)
    # id of the corresponding record in the legacy system
    old_id = models.IntegerField(default = 0)
    class Meta:
        ordering = ('abbreviation',)
    def __str__(self):
        # BUG FIX: the fallback referenced self.organisation, an attribute
        # this model does not define (AttributeError at runtime when the
        # abbreviation is empty); fall back to the name field instead.
        output = self.abbreviation if self.abbreviation else self.name
        return(str(output))
#@python_2_unicode_compatible # only if you need to support Python 2
class Client(models.Model):
    """A contact person, optionally attached to an Organisation."""
    organisation = models.ForeignKey(Organisation, to_field = 'id', on_delete = models.CASCADE, null = True)
    firstname = models.TextField(max_length = None, null = True)
    lastname = models.TextField(max_length = None, null = True)
    # active flag (default: active)
    status = models.BooleanField(default=1)
    notes = models.TextField(max_length = None, blank = True, null = True)
    # id of the corresponding record in the legacy system
    old_id = models.IntegerField(default = 0)
    class Meta:
        ordering = ('firstname',)
    def __str__(self):
        # "First Last" plus " at <org>" when an organisation is attached
        fullname = str(self.firstname) + ' ' + str(self.lastname)
        if self.organisation:
            fullname += ' at ' + str(self.organisation)
        return(fullname)
#@python_2_unicode_compatible # only if you need to support Python 2
class Location(models.Model):
    """A free-text description of a place a job relates to."""
    description = models.TextField(max_length = None)
    notes = models.TextField(max_length = None, blank = True, null = True)
    # id of the corresponding record in the legacy system
    old_id = models.IntegerField(default = 0)
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        return(self.description)
#@python_2_unicode_compatible # only if you need to support Python 2
class Job(models.Model):
    """A piece of work undertaken for a Client, optionally at a Location."""
    location = models.ForeignKey(Location, to_field = 'id', on_delete = models.CASCADE, blank = True, null = True)
    client = models.ForeignKey(Client, to_field = 'id', on_delete = models.CASCADE)
    description = models.TextField(max_length = None)
    # date the job was opened (NOTE: field name shadows the builtin `open`
    # only inside the class namespace; harmless for a Django model field)
    open = models.DateField()
    notes = models.TextField(max_length = None, blank = True, null = True)
    # id of the corresponding record in the legacy system
    old_id = models.IntegerField(default = 0)
    class Meta:
        ordering = ('-open',)
    def __str__(self):
        output = str(self.id) + ' ' + str(self.client) + ' ' + str(self.description)
        return(output)
    def get_absolute_url(self):
        # canonical detail view for this job in the `old` app
        return reverse('old:job_detail', args=[self.id])
#@python_2_unicode_compatible # only if you need to support Python 2
class JobStatus(models.Model):
    """A dated open/closed status change recorded against a Job."""
    job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
    date = models.DateField()
    status = models.BooleanField()
    notes = models.TextField(max_length = None, blank = True, null = True)
    class Meta:
        ordering = ('-date',)
    def __str__(self):
        # BUG FIX: the original read the bare name `job` (NameError at
        # runtime); reference the instance's foreign key instead.
        output = str(self.job.id)
        return(output)
#@python_2_unicode_compatible
#class Manager(models.Model):
# job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
# person = models.ForeignKey(User, to_field = 'id', on_delete = models.CASCADE, blank = True, null = True, related_name='person')
# notes = models.TextField(max_length = None, blank = True, null = True)
#
# class Meta:
# ordering = ('-job',)
#
# def __str__(self):
# output = str(self.person)
# return(output)
#@python_2_unicode_compatible
class Closure(models.Model):
    """All closed jobs: records the date a Job was closed."""
    job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
    date = models.DateField(null = True, max_length = None, blank = True)
    notes = models.TextField(max_length = None, blank = True, null = True)
    class Meta:
        ordering = ('-date',)
    def __str__(self):
        output = str(self.job)
        return(output)
#@python_2_unicode_compatible # only if you need to support Python 2
class Invoice(models.Model):
    """An invoice issued against a Job, with an optional payment date."""
    job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
    date = models.DateField(null = True, max_length = None, blank = True)
    # invoiced amount (currency units; up to 13 digits, 2 decimal places)
    value = models.DecimalField(decimal_places=2, max_digits=13)
    # date payment was received; null while unpaid
    payment_date = models.DateField(null = True, max_length = None, blank = True)
    notes = models.TextField(max_length = None, blank = True, null = True)
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        output = str(self.id) + ' ' + str(self.date)
        return(output)
#@python_2_unicode_compatible # only if you need to support Python 2
class Quote(models.Model):
    """A quotation, optionally linked to a Job once accepted."""
    job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE, blank = True, null = True)
    date = models.DateField(null = True, max_length = None, blank = True)
    description = models.TextField(max_length = None, blank = True, null = True)
    # None = pending, True/False = accepted/rejected -- presumably; confirm with callers
    status = models.NullBooleanField(default = None, blank = True, null = True)
    notes = models.TextField(max_length = None, blank = True, null = True)
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        output = str(self.id) + ' ' + str(self.date)
        return(output)
#@python_2_unicode_compatible # only if you need to support Python 2
class Factor(models.Model):
    """A labelled variable/grouping attached to a Job."""
    job = models.ForeignKey(Job, to_field = 'id', on_delete = models.CASCADE)
    label = models.TextField(max_length = None)
    notes = models.TextField(null = True, max_length = None, blank = True)
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        # The original assigned a job-prefixed string first and immediately
        # overwrote it; the dead assignment is removed (behaviour unchanged).
        output = str(self.label)
        return(output)
class Element(models.Model):
    """A value of a Factor (one level of a job variable)."""
    factor = models.ForeignKey('Factor', to_field = 'id', on_delete = models.CASCADE)
    value = models.TextField(max_length = None)
    notes = models.TextField(null = True, max_length = None, blank = True)
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        # NOTE(review): factor and value are concatenated with no separator
        return(str(self.factor) + str(self.value))
class Rank(models.Model):
    """A parent/child ordering relation between two Elements."""
    parent = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE, related_name='parent_element')
    child = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE, related_name='child_element')
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        return(str(self.parent) + str(self.child))
#@python_2_unicode_compatible # only if you need to support Python 2
class ASC(models.Model):
    """An Australian Soil Classification field description for one Element.

    Field names follow standard soil-profile terminology (horizon, Munsell
    colour hue/value/chroma, texture, structure, coarse fragments, voids,
    roots, segregations, lower boundary).  All descriptive fields are free
    text and optional.
    """
    element = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE)
    label = models.TextField(max_length = None, blank = True, null = True)
    # position of this unit in the profile
    unit_order = models.IntegerField(blank = True, null = True)
    horizon_prefix = models.IntegerField(blank = True, null = True)
    horizon = models.TextField(null = True, max_length = None, blank = True)
    horizon_suffix = models.IntegerField(blank = True, null = True)
    horizon_suffix2 = models.IntegerField(blank = True, null = True)
    upper_depth = models.FloatField(blank = True, null = True)
    lower_depth = models.FloatField(blank = True, null = True)
    # Munsell colour descriptions (dry and moist readings)
    colour = models.TextField(null = True, max_length = None, blank = True)
    hue_dry = models.TextField(null = True, max_length = None, blank = True)
    value_dry = models.TextField(null = True, max_length = None, blank = True)
    chroma_dry = models.TextField(null = True, max_length = None, blank = True)
    hue_moist = models.TextField(null = True, max_length = None, blank = True)
    value_moist = models.TextField(null = True, max_length = None, blank = True)
    chroma_moist = models.TextField(null = True, max_length = None, blank = True)
    field_texture = models.TextField(null = True, max_length = None, blank = True)
    texture_qualifier = models.TextField(null = True, max_length = None, blank = True)
    sand_size = models.TextField(null = True, max_length = None, blank = True)
    sand_sorting = models.TextField(null = True, max_length = None, blank = True)
    moisture = models.TextField(null = True, max_length = None, blank = True)
    strength = models.TextField(null = True, max_length = None, blank = True)
    structure_type = models.TextField(null = True, max_length = None, blank = True)
    structure_grade = models.TextField(null = True, max_length = None, blank = True)
    structure_size = models.TextField(null = True, max_length = None, blank = True)
    # first population of coarse fragments
    coarse_frags_distribution = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags_abundance = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags_size = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags_roundness = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags_sphericity = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags_type = models.TextField(null = True, max_length = None, blank = True)
    # second population of coarse fragments
    coarse_frags2_distribution = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags2_abundance = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags2_size = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags2_roundness = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags2_sphericity = models.TextField(null = True, max_length = None, blank = True)
    coarse_frags2_type = models.TextField(null = True, max_length = None, blank = True)
    voids_cracks = models.TextField(null = True, max_length = None, blank = True)
    voids_pore_size = models.TextField(null = True, max_length = None, blank = True)
    voids_pore_abundance = models.TextField(null = True, max_length = None, blank = True)
    roots1_size = models.TextField(null = True, max_length = None, blank = True)
    roots1_abundance = models.TextField(null = True, max_length = None, blank = True)
    roots2_size = models.TextField(null = True, max_length = None, blank = True)
    roots2_abundance = models.TextField(null = True, max_length = None, blank = True)
    segregations1_colour = models.TextField(null = True, max_length = None, blank = True)
    segregations1_abundance = models.TextField(null = True, max_length = None, blank = True)
    segregations1_size = models.TextField(null = True, max_length = None, blank = True)
    segregations1_form = models.TextField(null = True, max_length = None, blank = True)
    segregations2_colour = models.TextField(null = True, max_length = None, blank = True)
    segregations2_abundance = models.TextField(null = True, max_length = None, blank = True)
    segregations2_size = models.TextField(null = True, max_length = None, blank = True)
    segregations2_form = models.TextField(null = True, max_length = None, blank = True)
    lower_bound_dist = models.TextField(null = True, max_length = None, blank = True)
    lower_bound_shape = models.TextField(null = True, max_length = None, blank = True)
    notes = models.TextField(null = True, max_length = None, blank = True)
    data_entry_notes = models.TextField(null = True, max_length = None, blank = True)
    def __str__(self):
        return(str(self.element))
#@python_2_unicode_compatible # only if you need to support Python 2
class Sample(models.Model):
    """A physical soil sample taken from an Element, with position and weights."""
    element = models.ForeignKey(Element, to_field = 'id', on_delete = models.CASCADE)
    # date field here which would represent prep date
    field_label = models.TextField(null = True, max_length = None, blank = True)
    # sample position in centimetres (coordinate frame defined by the field survey)
    x_cm = models.FloatField(null = True, default = None, blank = True)
    y_cm = models.FloatField(null = True, default = None, blank = True)
    z_cm = models.FloatField(null = True, blank = True)
    upper_depth_cm = models.FloatField(null = True, blank = True)
    lower_depth_cm = models.FloatField(null = True, blank = True)
    # weights in grams; gravel is recorded separately
    sample_and_vessel_g = models.FloatField(null = True, blank = True)
    vessel_g = models.FloatField(null = True, blank = True)
    gravel_g = models.FloatField(null = True, blank = True)
    notes = models.TextField(null = True, max_length = None, blank = True)
    def __str__(self):
        return(str(self.element))
#@python_2_unicode_compatible # only if you need to support Python 2
class PSA(models.Model):
    """A particle-size analysis run performed on a Sample."""
    lab_id = models.IntegerField()
    sample = models.ForeignKey(Sample, to_field = 'id', on_delete = models.CASCADE)
    date = models.DateField(null = True, max_length = None, blank = True)
    notes = models.TextField(null = True, max_length = None, blank = True)
    def __str__(self):
        # BUG FIX: __str__ must return a str; the original returned the
        # related Sample instance itself, which raises TypeError.
        return(str(self.sample))
def rename_receipt(instance, filename):
    """Build a unique upload path of the form YYYY/MM/DD/<uuid-hex><ext>.

    The directory is derived from the receipt's date and the file name is
    replaced with a random hex token, keeping the original extension.
    """
    year, month, day = instance.date.isoformat().split('-')
    _, extension = splitext(filename)
    unique_name = '{}{}'.format(uuid4().hex, extension)
    return join(year + '/' + month + '/' + day, unique_name)
class Receipt(models.Model):
    """A ledger of receipts. Experience uploading receipts has shown that multiple documents may be relevant for a single transaction; for example, an invoice from the University of Gloucestershire for OSL dating, and, a bankwest statement documenting the transaction. This probably argues for a singular document ledger with a field linking to one or multiple documents"""
    # scanned receipt document; stored under a date-based unique path
    upload = models.FileField(upload_to=rename_receipt)
    date = models.DateField()
    value = models.DecimalField(max_digits=9, decimal_places=2)
    currency = models.TextField(default='AUD')
    # fixed expense categories; stored value -> human-readable label
    RECEIPT_CHOICE = (
        ("asset", "Asset"),
        ("computer_part", "Computer Part"),
        ("computer_software", "Computer Software"),
        ("equipment_hire", "Equipment Hire"),
        ("equipment_repair", "Equipment Repair"), # this should be changed to 'equipment maintenance' to include repair, maintenance, and license fees (trailer rego)
        ("field_supplies", "Field Supplies"),
        ("hardware", "Hardware"),
        ("household", "Household"), # renovation and maintenance of home office property
        ("insurance", "Insurance"),
        ("it_service", "IT Service"),
        ("laboratory_chemicals", "Laboratory Chemicals"),
        ("laboratory_hardware", "Laboratory Hardware"),
        ("laboratory_services", "Laboratory Services"),
        ("laboratory_supplies", "Laboratory Supplies"),
        ("meals_and_accommodation", "Meals and Accommodation"),
        ("office_supplies", "Office Supplies"),
        ("phone", "Phone"),
        ("post", "Post"),
        ("professional_development", "Professional Development"),
        ("reference_material", "Reference Material"),
        ("travel", "Travel"),
        ("vehicle_accessories", "Vehicle Accessories"),
        ("vehicle_fuel", "Vehicle Fuel"),
        ("vehicle_insurance", "Vehicle Insurance"),
        ("vehicle_maintenance", "Vehicle Maintenance"),
        ("vehicle_registration", "Vehicle Registration"),
        ("wages_salary", "Wages/Salary"),
    )
    category = models.TextField(choices=RECEIPT_CHOICE, max_length=None, blank=True, null=True)
    description = models.TextField(max_length=None, blank=True, null=True)
    note = models.TextField(max_length=None, blank=True, null=True)
    class Meta:
        ordering = ('-id',)
    def __str__(self):
        output = str(self.description)
        return(output)
| 49.185976 | 374 | 0.663299 | 14,045 | 0.870576 | 0 | 0 | 0 | 0 | 0 | 0 | 3,105 | 0.192463 |
dc752daa633f83bb21deb535da2d6656216758db | 2,370 | py | Python | 2020_April_Leetcode_30_days_challenge/Week_1_Happy Number/by_cycle_detection.py | coderMaruf/leetcode-1 | 20ffe26e43999e44c8acf9800acb371a49bb5853 | [
"MIT"
] | 32 | 2020-01-05T13:37:16.000Z | 2022-03-26T07:27:09.000Z | 2020_April_Leetcode_30_days_challenge/Week_1_Happy Number/by_cycle_detection.py | coderMaruf/leetcode-1 | 20ffe26e43999e44c8acf9800acb371a49bb5853 | [
"MIT"
] | null | null | null | 2020_April_Leetcode_30_days_challenge/Week_1_Happy Number/by_cycle_detection.py | coderMaruf/leetcode-1 | 20ffe26e43999e44c8acf9800acb371a49bb5853 | [
"MIT"
] | 8 | 2020-06-18T16:17:27.000Z | 2022-03-15T23:58:18.000Z | '''
Description:
Write an algorithm to determine if a number n is "happy".
A happy number is a number defined by the following process: Starting with any positive integer, replace the number by the sum of the squares of its digits, and repeat the process until the number equals 1 (where it will stay), or it loops endlessly in a cycle which does not include 1. Those numbers for which this process ends in 1 are happy numbers.
Return True if n is a happy number, and False if not.
Example:
Input: 19
Output: true
Explanation:
1^2 + 9^2 = 82
8^2 + 2^2 = 68
6^2 + 8^2 = 100
1^2 + 0^2 + 0^2 = 1
'''
class Solution:
    def isHappy(self, n: int) -> bool:
        """Return True if n is a happy number.

        Repeatedly replaces the number with the sum of the squares of its
        decimal digits; happy numbers eventually reach 1, unhappy ones fall
        into a cycle.  The cycle is detected with Floyd's tortoise-and-hare,
        so no unbounded visited-set is needed.
        """
        # memoization of digit-square sums, keyed by the original argument
        self.table = {}

        def digit_sum(m: int) -> int:
            """Sum of the squares of m's decimal digits (memoized)."""
            if m in self.table:
                return self.table[m]
            total = 0
            k = m
            while k:
                total += (k % 10) ** 2
                k //= 10
            # BUG FIX: the original stored the result under `n` *after* the
            # while-loop had reduced it to 0, so every entry landed on key 0
            # and the cache never produced a hit; store under the original
            # argument instead.
            self.table[m] = total
            return total

        slow, fast = n, n
        while True:
            slow = digit_sum(slow)              # tortoise: one step
            fast = digit_sum(digit_sum(fast))   # hare: two steps
            if slow == 1 or fast == 1:
                return True
            if slow == fast:  # met inside a cycle that never reaches 1
                return False
# n : the number of input value
## Space Complexity: O( 1 )
#
# The overhead in space is the storage for loop index and temporary variable, which is of O( 1 ).
from collections import namedtuple
TestEntry = namedtuple('TestEntry', 'n')
def test_bench():
    """Print isHappy() for a fixed range of inputs (19..25)."""
    cases = [TestEntry(n=value) for value in (19, 20, 21, 22, 23, 24, 25)]
    # expected output:
    '''
    True
    False
    False
    False
    True
    False
    False
    '''
    for entry in cases:
        print(Solution().isHappy(n=entry.n))
    return


if __name__ == '__main__':
    test_bench()
dc76b772fbdfd251ee31a56e201773cd07f09136 | 1,195 | py | Python | waimai/libs/shopping.py | xucheng11/test | 2987d34823825798bffac3cfb30cadab42dae998 | [
"MulanPSL-1.0"
] | null | null | null | waimai/libs/shopping.py | xucheng11/test | 2987d34823825798bffac3cfb30cadab42dae998 | [
"MulanPSL-1.0"
] | null | null | null | waimai/libs/shopping.py | xucheng11/test | 2987d34823825798bffac3cfb30cadab42dae998 | [
"MulanPSL-1.0"
] | null | null | null | """
-------------------------------------------------
# @Project :外卖系统
# @File :shopping
# @Date :2021/8/8 10:21
# @Author :小成
# @Email :1224069978
# @Software :PyCharm
-------------------------------------------------
"""
import requests,os
from conf.host import *
from libs.login import *
from libs.shop import *
class shopping:
    """Helper for the shopping-cart API, bound to one auth token."""

    def __init__(self, inToken):
        # every request carries the caller's token in the Authorization header
        self.header = {"Authorization": inToken}

    def add_shopping_king(self, indata, id):
        """POST a new food category; the placeholder shop id "3269" is
        replaced by the real shop id passed in."""
        if indata["restaurant_id"] == "3269":
            indata["restaurant_id"] = id
        response = requests.post(f"{Host}/shopping/addcategory", data=indata, headers=self.header)
        return response.json()

    def shopping_list(self, indata):
        """GET the current food list for the authenticated shop."""
        response = requests.get(f"{Host}/shopping/v2/myFoods", data=indata, headers=self.header)
        return response.json()
if __name__ == '__main__':
    # smoke test: log in, take the first shop's id, then create a category
    token = Login().login(user,getToken=True)
    shop = Shop(token).shop_list({"page":1,"limit":1})['data']['records'][0]['id']
    food = shopping(token).add_shopping_king({"restaurant_id":""},shop)
    print(food)
    # food_init.list_food({"page":1,"limit":1})['data']['records'][0]['restaurant_id']
dc77b30ee4cc565f7a11d7b978f1ef142afe6bf3 | 1,039 | py | Python | winsniffer/gui/frame_formatting.py | netaneld122/winsniffer | 9cbd50652861dafe0f76367f9b146ecca8e62b95 | [
"MIT"
] | null | null | null | winsniffer/gui/frame_formatting.py | netaneld122/winsniffer | 9cbd50652861dafe0f76367f9b146ecca8e62b95 | [
"MIT"
] | null | null | null | winsniffer/gui/frame_formatting.py | netaneld122/winsniffer | 9cbd50652861dafe0f76367f9b146ecca8e62b95 | [
"MIT"
] | null | null | null | import binascii
from winsniffer.gui.parsing.default_parser import DefaultParser
def prettify_mac_address(mac_address):
    """Render raw MAC-address bytes as colon-separated lowercase hex.

    BUG FIX for Python 3: the original joined the *bytes* objects returned
    by binascii.hexlify with a str separator (TypeError), and iterating a
    bytes value yields ints that hexlify cannot accept.  Formatting each
    octet directly works for both bytes and bytearray input.
    """
    return ':'.join('{:02x}'.format(octet) for octet in bytearray(mac_address))
def get_protocol_stack(frame):
    """Collect the class names along the frame's nested ``data`` chain.

    Walks ``frame.data.data...`` until an object without a ``data``
    attribute (the raw payload) is reached.
    """
    stack = []
    current = frame
    while hasattr(current, 'data'):
        stack.append(type(current).__name__)
        current = current.data
    return stack
def find_parser(frame, data, parsers):
    """Return the first parser whose condition matches the frame's protocol
    stack and payload, falling back to a DefaultParser."""
    stack = set(get_protocol_stack(frame))
    for candidate in parsers:
        if candidate.condition(stack, data):
            return candidate
    return DefaultParser()
while not isinstance(frame, str):
frame = frame.data
return frame
def get_frame_data_preview(frame, parsers):
data = get_unparsed_frame_data(frame)
parser = find_parser(frame, data, parsers)
try:
parsed_data = parser.parse(frame, data)
except Exception as e:
import traceback
parsed_data = traceback.format_exc()
return len(data), parsed_data
| 24.738095 | 63 | 0.705486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 | 0.008662 |
dc78f1958822bfe1bb3123057ba41a3b7fb2318b | 772 | py | Python | matury/2020pr/zad42.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | [
"MIT"
] | 2 | 2021-03-06T22:09:44.000Z | 2021-03-14T14:41:03.000Z | matury/2020pr/zad42.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | [
"MIT"
] | 1 | 2020-03-25T15:42:47.000Z | 2020-10-06T21:41:14.000Z | matury/2020pr/zad42.py | bartekpacia/informatyka-frycz | 6fdbbdea0c6b6a710378f22e90d467c9f91e64aa | [
"MIT"
] | null | null | null | from typing import List
from reader import read_nums
nums = read_nums()
# longest run of consecutive numbers with a constant absolute gap found so far
longest_reg_fragment: List[int] = []
# run currently being extended
reg_fragment: List[int] = []
# gap between the first two numbers seeds the current difference
current_gap = nums[1] - nums[0]
for i in range(1, len(nums)):
    num1 = nums[i - 1]
    num2 = nums[i]
    gap = abs(num1 - num2)
    # start a fresh run with the left element of the pair
    if not reg_fragment:
        reg_fragment.append(num1)
        continue
    # gap unchanged: the run continues
    if gap == current_gap:
        reg_fragment.append(num1)
        continue
    # gap changed: close the run (including num1) and start a new one at num1
    if gap != current_gap:
        reg_fragment.append(num1)
        if len(reg_fragment) > len(longest_reg_fragment):
            longest_reg_fragment = reg_fragment.copy()
        current_gap = gap
        reg_fragment = [num1]
# NOTE(review): the run still open after the loop is never compared against
# longest_reg_fragment, so a longest run ending at the final number would be
# missed -- confirm against the task specification / expected answer.
first = longest_reg_fragment[0]
last = longest_reg_fragment[-1]
print(first, last, len(longest_reg_fragment))
| 22.057143 | 57 | 0.650259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
dc7b03e8a3e926b78480c08bddc17a6d3fba7ee0 | 1,586 | py | Python | satchmo/apps/satchmo_store/shop/satchmo_settings.py | predatell/satchmo | 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | [
"BSD-3-Clause"
] | 16 | 2015-03-06T14:42:27.000Z | 2019-12-23T21:37:01.000Z | satchmo/apps/satchmo_store/shop/satchmo_settings.py | predatell/satchmo | 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | [
"BSD-3-Clause"
] | null | null | null | satchmo/apps/satchmo_store/shop/satchmo_settings.py | predatell/satchmo | 6ced1f845aadec240c7e433c3cbf4caca96e0d92 | [
"BSD-3-Clause"
] | 8 | 2015-01-28T16:02:37.000Z | 2022-03-03T21:29:40.000Z | """A central mechanism for shop-wide settings which have defaults.
Repurposed from Sphene Community Tools: http://sct.sphene.net
"""
from django.conf import settings
# Shop-wide default values; apps register their own via add_setting_defaults().
satchmo_settings_defaults = {
    # Only settings for core `satchmo` applications are defined here,
    # (or global settings) -- all other defaults should be added using
    # the add_setting_defaults method !
    'SHOP_URLS' : [],
    'SHOP_BASE' : '/shop',
    'MULTISHOP' : False ,
    'CUSTOM_NEWSLETTER_MODULES' : [],
    'CUSTOM_SHIPPING_MODULES' : [],
    'CUSTOM_PRODUCT_MODULES' : [],
    'CUSTOM_TAX_MODULES' : [],
    'ALLOW_PRODUCT_TRANSLATIONS' : True,
    'COOKIE_MAX_SECONDS' : 60*60*24*30, #one month
    'CATEGORY_SLUG': 'category', # Used for the category url
    'PRODUCT_SLUG' : 'product', # Used for the product url
    'SSL' : False, # Used for checkout pages
}
def add_setting_defaults(newdefaults):
    """
    This method can be used by other applications to define their
    default values.

    newdefaults has to be a dictionary containing name -> value of
    the settings.  Existing keys are overwritten by the new values.
    """
    satchmo_settings_defaults.update(newdefaults)
def set_satchmo_setting(name, value):
    """Store *value* under *name* in settings.SATCHMO_SETTINGS, creating
    the dictionary on first use."""
    if not hasattr(settings, 'SATCHMO_SETTINGS'):
        settings.SATCHMO_SETTINGS = {}
    settings.SATCHMO_SETTINGS[name] = value
def get_satchmo_setting(name, default_value = None):
    """Look up *name* in settings.SATCHMO_SETTINGS, falling back to the
    registered defaults and finally to *default_value*."""
    fallback = satchmo_settings_defaults.get(name, default_value)
    if not hasattr(settings, 'SATCHMO_SETTINGS'):
        return fallback
    return settings.SATCHMO_SETTINGS.get(name, fallback)
| 31.098039 | 98 | 0.70681 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 843 | 0.531526 |
dc7d2a5bf7773798fe0cb25e532c00e8fdc1e5bb | 148 | py | Python | utils/postprocessing/__init__.py | bdvllrs/misinformation-detection-tensor-embeddings | eb43e55fb7d4317f4ca0d5c1db8191be3d543716 | [
"MIT"
] | 7 | 2020-03-18T03:40:48.000Z | 2021-12-29T11:04:53.000Z | utils/postprocessing/__init__.py | bdvllrs/misinformation-detection-tensor-embeddings | eb43e55fb7d4317f4ca0d5c1db8191be3d543716 | [
"MIT"
] | null | null | null | utils/postprocessing/__init__.py | bdvllrs/misinformation-detection-tensor-embeddings | eb43e55fb7d4317f4ca0d5c1db8191be3d543716 | [
"MIT"
] | 3 | 2019-09-30T05:41:59.000Z | 2020-12-03T19:49:10.000Z | from utils.postprocessing.PostProcessing import PostProcessing
from utils.postprocessing.SelectLabelsPostprocessor import SelectLabelsPostprocessor
| 49.333333 | 84 | 0.918919 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
dc7db988b842558ad78f3ddf6acf90c07a5a88a6 | 1,227 | py | Python | api/needley/models.py | kino-ma/needley | ae0463d24aed64f37385385415c766a43fa7d1d4 | [
"MIT"
] | null | null | null | api/needley/models.py | kino-ma/needley | ae0463d24aed64f37385385415c766a43fa7d1d4 | [
"MIT"
] | 4 | 2021-01-28T02:35:40.000Z | 2021-02-17T10:51:31.000Z | api/needley/models.py | kino-ma/python-portfolio | ae0463d24aed64f37385385415c766a43fa7d1d4 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils import timezone
from django.core.validators import MinLengthValidator
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
    """Custom auth user with a unique e-mail, a display name and an avatar URL."""
    email = models.EmailField(unique=True)
    # Nickname is the display name shown to other users
    nickname = models.CharField(
        validators=[MinLengthValidator(1)], max_length=20)
    # Avatar: URL of the user's icon image
    avatar = models.URLField(
        validators=[MinLengthValidator(1)], max_length=200, null=True)
    def __str__(self):
        return "@%s" % self.username
class Article(models.Model):
    """An article written by a User."""
    # The author of this article. This field can be referenced by `article.author`
    author = models.ForeignKey(
        User,
        related_name="author",
        on_delete=models.CASCADE
    )
    # The title of this article
    title = models.CharField(
        validators=[MinLengthValidator(1)], max_length=100)
    # Actual content of this article
    content = models.TextField()
    # Date when data were created/updated
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    def __str__(self):
        # BUG FIX: User (defined above in this file) has no `profile`
        # attribute, so self.author.profile raised AttributeError; the
        # User model's own __str__ already renders "@username".
        return "\"%s\" by %s" % (self.title, self.author)
dc7e4cc1fad0e53f405f8e31d1dce74d002b131c | 12,465 | py | Python | app.py | tomachalek/riki | 4c15985501ef249695833850b4fa26b715454edc | [
"Apache-2.0"
] | 1 | 2021-11-29T21:23:37.000Z | 2021-11-29T21:23:37.000Z | app.py | tomachalek/riki | 4c15985501ef249695833850b4fa26b715454edc | [
"Apache-2.0"
] | 1 | 2021-12-10T19:56:20.000Z | 2021-12-10T19:56:20.000Z | app.py | tomachalek/riki | 4c15985501ef249695833850b4fa26b715454edc | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Tomas Machalek <tomas.machalek@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import logging
from logging import handlers
from typing import List, Tuple, Optional
from dataclasses import asdict, dataclass
from dataclasses_json import dataclass_json, LetterCase
from aiohttp.web import View, Application, run_app
from aiohttp import web
import markdown
from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache
import pymdownx.emoji
import files
import pictures
import search
import appconf
# Configuration path can be overridden via the RIKI_CONF_PATH environment
# variable; otherwise config.json next to this file is used.
if 'RIKI_CONF_PATH' in os.environ:
    conf_path = os.environ['RIKI_CONF_PATH']
else:
    conf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), 'config.json'))
conf = appconf.load_conf(conf_path)
APP_NAME = conf.app_name
APP_PATH = conf.app_path
# root logger; handlers are attached by setup_logger() below
logger = logging.getLogger('')
def setup_logger(path, debug=False):
    """
    Attach a handler to the module-wide logger and set its level.

    Arguments:
    path -- log file location; the special values '#stderr' / '#stdout'
            select stream logging instead of a rotating file
    debug -- debug mode on/off (bool)
    """
    if path == '#stderr':
        handler = logging.StreamHandler(sys.stderr)
    elif path == '#stdout':
        handler = logging.StreamHandler(sys.stdout)
    else:
        # rotate at 8 MiB, keeping up to 50 backups
        handler = handlers.RotatingFileHandler(path, maxBytes=(1 << 23), backupCount=50)
    handler.setFormatter(logging.Formatter('%(asctime)s [%(name)s] %(levelname)s: %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
setup_logger(str(conf.log_path))
logging.getLogger(__name__).info(f'using Riki configuration {conf_path}')
# extension-specific configuration passed to markdown.markdown() in load_markdown()
markdown_config = {
    'pymdownx.emoji': {
        'emoji_index': pymdownx.emoji.twemoji,
        'emoji_generator': pymdownx.emoji.to_svg,
        'options': {
            # base URL of the emoji image CDN (configured per deployment)
            'image_path': conf.emoji_cdn_url
        }
    },
    'pymdownx.arithmatex': {
        'generic': True,
        'preview': False
    }
}
def path_dir_elms(path: str) -> List[Tuple[str, str]]:
    """Split a wiki path into (segment, cumulative-path) pairs.

    E.g. 'a/b/c' -> [('a', 'a'), ('b', 'a/b'), ('c', 'a/b/c')].
    Empty segments (leading/trailing/double slashes) are skipped.
    """
    elms: List[Tuple[str, str]] = []
    prefix: List[str] = []
    for seg in path.split('/'):
        if not seg:
            continue
        prefix.append(seg)
        elms.append((seg, '/'.join(prefix)))
    return elms
def load_markdown(path: str) -> str:
    """
    Loads a markdown file and returns an HTML code

    arguments:
    path -- path to a markdown file

    returns:
    a string containing output HTML
    """
    # Read explicitly as UTF-8: the default locale encoding is
    # platform-dependent and would mis-decode non-ASCII wiki pages.
    with open(path, encoding='utf-8') as page_file:
        return markdown.markdown(
            page_file.read(),
            extensions=conf.markdown_extensions,
            extension_configs=markdown_config)
routes = web.RouteTableDef()
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class DirMetadata:
    # Per-directory settings loaded from metadata.json (camelCase keys).
    # directory_type: 'page' (default) or 'gallery' (see Page.get)
    directory_type: Optional[str] = 'page'
    # optional human-readable description of the directory
    description: Optional[str] = None
class ActionHelper:
    """Shared per-application services: template rendering, file responses
    and a cache of per-directory metadata."""

    def __init__(self, conf: appconf.Conf, assets_url: str):
        # cache of DirMetadata keyed by absolute directory path
        self._dir_metadata = {}
        self._cache = FileSystemBytecodeCache(conf.template_cache_dir) if conf.template_cache_dir else None
        self._assets_url = assets_url
        self._template_env: Environment = Environment(
            loader=FileSystemLoader(os.path.realpath(os.path.join(os.path.dirname(__file__), 'templates'))),
            bytecode_cache=self._cache,
            trim_blocks=True,
            lstrip_blocks=True)

    def response_html(self, template, data):
        """Render `template` with `data` (plus app-wide values) as an HTML response."""
        values = dict(
            app_name=APP_NAME,
            app_path=APP_PATH,
            enable_search=True)  # TODO: drive this from configuration
        values.update(data)
        template_object = self._template_env.get_template(template)
        return web.Response(text=template_object.render(values), content_type='text/html')

    def response_file(self, path: str):
        """Stream the file at `path` to the client."""
        return web.FileResponse(path)

    def dir_metadata(self, page_fs_path: str) -> DirMetadata:
        """Return (and cache) the DirMetadata of the directory owning `page_fs_path`.

        A missing or unreadable metadata.json yields the defaults.
        """
        try:
            dir_path = page_fs_path if files.page_is_dir(page_fs_path) else os.path.dirname(page_fs_path)
        except FileNotFoundError as ex:
            if os.path.basename(page_fs_path) == 'index':  # 'index' is an acceptable virtual page
                dir_path = os.path.dirname(page_fs_path)
            else:
                raise ex
        if dir_path not in self._dir_metadata:
            try:
                with open(os.path.join(dir_path, 'metadata.json'), 'rb') as fr:
                    self._dir_metadata[dir_path] = DirMetadata.from_json(fr.read())
            except IOError:
                self._dir_metadata[dir_path] = DirMetadata()
        return self._dir_metadata[dir_path]
class BaseAction(View):
    """Base aiohttp class-based view with shortcuts to the shared ActionHelper."""

    @property
    def _ctx(self) -> ActionHelper:
        # the helper instance is installed on the application at startup
        return self.request.app['helper']

    def response_html(self, template, data):
        return self._ctx.response_html(template, data)

    def response_file(self, path: str):
        return self._ctx.response_file(path)

    @property
    def riki_path(self):
        # the {path} segment captured by the route definition
        return self.request.match_info['path']

    def url_arg(self, k):
        """Return query-string argument `k` or None."""
        return self.request.rel_url.query.get(k)
class Action(BaseAction):
    """Base view for actions that operate on the wiki data directory."""
    @property
    def data_dir(self):
        return conf.data_dir
    @staticmethod
    def get_current_dirname(path):
        # Display name of the current directory; for a '.../index' path the
        # parent directory's name is used instead of the literal 'index'.
        ans = os.path.basename(path)
        if ans == 'index':
            ans = os.path.basename(os.path.dirname(path))
        return ans
    def generate_page_list(self, curr_dir_fs):
        # Build (riki_path, display_name, is_dir) triples for the entries of
        # curr_dir_fs, used for the sidebar page listing.
        page_list = files.list_files(curr_dir_fs, None, recursive=False, include_dirs=True)
        return [(
            files.strip_prefix(x, self.data_dir),
            os.path.basename(files.strip_prefix(x, self.data_dir)),
            os.path.isdir(x)
        ) for x in page_list]
    @property
    def dir_metadata(self) -> DirMetadata:
        return self._ctx.dir_metadata(os.path.join(self.data_dir, self.riki_path))
@routes.view('/')
class Index(Action):
    """
    Homepage
    """
    async def get(self):
        # The homepage simply redirects to the 'index' wiki page.
        raise web.HTTPSeeOther(f'{APP_PATH}page/index')
@routes.view('/page')
class PageNoSpec(View):
    # '/page' without a concrete path redirects to the index page.
    async def get(self):
        raise web.HTTPSeeOther(f'{APP_PATH}page/index')
# FIX: route pattern made a raw string -- '\.' inside a plain string literal is
# an invalid escape sequence (SyntaxWarning on modern Python, error in future).
@routes.view(r'/page/{path:.+\.(txt|pdf|json|xml|yml|yaml)}')
class Plain(Action):
    """Serves raw data files (txt/pdf/json/xml/yml/yaml) without rendering."""
    async def get(self):
        return self.response_file(os.path.join(self.data_dir, self.riki_path))
# FIX: route pattern made a raw string -- '\.' inside a plain string literal is
# an invalid escape sequence (SyntaxWarning on modern Python, error in future).
@routes.view(r'/page/{path:.+\.(jpg|JPG|jpeg|JPEG|png|PNG|gif|GIF)}')
class Picture(Action):
    """
    Provides access to images
    """
    async def get(self):
        fs_path = os.path.join(self.data_dir, self.riki_path)
        # Optional query args: ?width=... serves a resized (cached) variant,
        # ?normalize=1 toggles normalization in the resize helper.
        width = self.request.rel_url.query.get('width')
        normalize = bool(int(self.request.rel_url.query.get('normalize', '0')))
        if width is not None:
            fs_path = pictures.get_resized_image(
                cache_dir=conf.picture_cache_dir,
                path=fs_path,
                width=width,
                normalize=normalize)
        return self.response_file(fs_path)
@routes.view('/page/{path:.*}')
class Page(Action):
    """
    A riki page: renders a markdown page, redirects directories/galleries,
    or serves configured raw file types directly.
    """
    async def get(self):
        if not self.riki_path:
            raise web.HTTPSeeOther(f'{APP_PATH}page/index')
        page_fs_path = os.path.join(self.data_dir, self.riki_path)
        pelms = page_fs_path.rsplit('.', 1)
        page_suff = None if len(pelms) < 2 else pelms[-1]
        if self.dir_metadata.directory_type == 'gallery':
            raise web.HTTPSeeOther(f'{APP_PATH}gallery/{self.riki_path}/index')
        elif files.page_is_dir(page_fs_path):
            if self.dir_metadata.directory_type == 'page':
                raise web.HTTPSeeOther(f'{APP_PATH}page/{self.riki_path}/index')
            else:
                # FIX: web.HTTPServerError is aiohttp's abstract 5xx base and
                # its constructor arguments are keyword-only; use the concrete
                # 500 exception with text=.
                raise web.HTTPInternalServerError(text='Unknown page type')
        elif page_suff and page_suff in appconf.RAW_FILES:
            # FIX: aiohttp has no web.header() and handlers must return a
            # Response object, not raw bytes; build a proper Response with the
            # configured content type.
            with open(page_fs_path, 'rb') as fr:
                return web.Response(body=fr.read(), content_type=appconf.RAW_FILES[page_suff])
        else:
            page_fs_path = f'{page_fs_path}.md'
        curr_dir = os.path.dirname(self.riki_path)
        page_name = os.path.basename(self.riki_path)
        # setup the directory information
        if curr_dir:
            path_elms = path_dir_elms(curr_dir)
            curr_dir_fs = os.path.join(self.data_dir, curr_dir)
        else:
            curr_dir = ''
            path_elms = []
            curr_dir_fs = self.data_dir
        # transform the page (render markdown if it exists, dummy page otherwise)
        if files.page_exists(page_fs_path):
            page_info = files.get_version_info(
                self.data_dir, page_fs_path, info_encoding=conf.hg_info_encoding)
            inner_html = load_markdown(page_fs_path)
            page_template = 'page.html'
        else:
            inner_html = ''
            page_info = files.RevisionInfo()
            page_template = 'dummy_page.html'
        data = dict(
            html=inner_html,
            page_list=self.generate_page_list(curr_dir_fs),
            path_elms=path_elms,
            page_info=page_info,
            page_name=page_name,
            curr_dir_name=self.get_current_dirname(curr_dir))
        return self.response_html(page_template, data)
@routes.view('/_images')
class Images(Action):
    """
    A page displaying list of all images
    """
    async def get(self):
        all_images = files.list_files(self.data_dir, files.file_is_image, recursive=True)
        # Collect per-file info records for the template.
        infos = [files.get_file_info(path, path_prefix=self.data_dir)
                 for path in all_images]
        return self.response_html('files.html', dict(files=infos))
@routes.view('/gallery/{path:.*}')
class Gallery(Action):
    """
    A gallery page: lists the image files of a directory with their metadata.
    """
    async def get_num_files(self, path: str):
        return len(os.listdir(path)) - 1 # minus metadata.json which is required for a gallery page
    async def get(self):
        gallery_fs_dir = os.path.join(self.data_dir, self.riki_path)
        if files.page_is_dir(gallery_fs_dir):
            if self.dir_metadata.directory_type == 'page':
                raise web.HTTPSeeOther(f'{APP_PATH}page/{self.riki_path}/index')
            elif self.dir_metadata.directory_type == 'gallery':
                raise web.HTTPSeeOther(f'{APP_PATH}gallery/{self.riki_path}/index')
            else:
                # FIX: web.HTTPServerError is aiohttp's abstract 5xx base and
                # its constructor arguments are keyword-only; use the concrete
                # 500 exception with text=.
                raise web.HTTPInternalServerError(text='Unknown page type')
        elif os.path.isfile(gallery_fs_dir):
            # FIX: aiohttp HTTP exceptions take keyword-only arguments; a
            # positional message raised TypeError instead of a 500 response.
            raise web.HTTPInternalServerError(text='Gallery directory malformed')
        elif os.path.basename(gallery_fs_dir) == 'index':
            gallery_fs_dir = os.path.dirname(gallery_fs_dir)
        else:
            raise web.HTTPNotFound()
        try:
            images = files.list_files(gallery_fs_dir, files.file_is_image, recursive=False)
        except FileNotFoundError:
            raise web.HTTPNotFound()
        extended: List[files.FileInfo] = []
        for img in images:
            info = files.get_file_info(img, path_prefix=self.data_dir)
            info.metadata = pictures.get_metadata(img)
            extended.append(info)
        values = dict(
            files=extended,
            page_list=[],
            path_elms=path_dir_elms(self.riki_path),
            curr_dir_name=self.get_current_dirname(self.riki_path),
            num_files=await self.get_num_files(gallery_fs_dir),
            description=self.dir_metadata.description)
        return self.response_html('gallery.html', values)
@routes.view('/_search')
class Search(Action):
    """
    Fulltext search results page.
    """
    async def get(self):
        query = self.url_arg('query')
        searcher = search.FulltextSearcher(conf.search_index_dir, conf.data_dir)
        hits = searcher.search(query)
        return self.response_html('search.html', dict(query=query, rows=hits))
app = Application()
app.add_routes(routes)
async def setup_runtime(app):
    # Create the shared ActionHelper once at startup; assets_url unused yet.
    app['helper'] = ActionHelper(conf, assets_url=None) # TODO
app.on_startup.append(setup_runtime)
async def factory():
    # Application factory entry point (e.g. for 'python -m aiohttp.web').
    return app
if __name__ == '__main__':
    # Expose the config values on the Application mapping and start serving.
    app.update(asdict(conf))
    # FIX: run_app's documented port type is int; the string '8080' only works
    # incidentally via getaddrinfo's service-string handling.
    run_app(app, port=8080)
| 31.717557 | 108 | 0.64268 | 8,665 | 0.695146 | 0 | 0 | 6,572 | 0.527236 | 5,278 | 0.423426 | 2,304 | 0.184838 |
dc7f12fe57697e7793ad98c74901e55f8e3f8e91 | 1,960 | py | Python | app/src/modeling/predict_model.py | joaofracasso/banknoteBrazil | 28e4d194adf5b32f3e83a4eafce23ac1ada151c6 | [
"MIT"
] | 8 | 2018-11-04T15:52:30.000Z | 2022-02-02T23:51:13.000Z | app/src/modeling/predict_model.py | joaofracasso/banknoteBrazil | 28e4d194adf5b32f3e83a4eafce23ac1ada151c6 | [
"MIT"
] | 5 | 2021-03-03T11:33:00.000Z | 2021-03-29T12:58:08.000Z | app/src/modeling/predict_model.py | joaofracasso/banknoteBrazil | 28e4d194adf5b32f3e83a4eafce23ac1ada151c6 | [
"MIT"
] | 1 | 2020-08-26T01:28:43.000Z | 2020-08-26T01:28:43.000Z | import io
import torchvision.transforms as transforms
from PIL import Image
import onnxruntime as ort
import numpy as np
class_map = {
0: "10 Reais Frente",
1: "10 Reais Verso",
2: "20 Reais Frente",
3: "20 Reais Verso",
4: "2 Reais Frente",
5: "2 Reais Verso",
6: "50 Reais Frente",
7: "50 Reais Verso",
8: "5 Reais Frente",
9: "5 Reais Verso"
}
def transform_image(image_bytes):
    """Decode raw image bytes into a preprocessed 1x3x224x224 tensor.

    Applies the standard resize/normalize pipeline and prepends a batch
    dimension so the result can be fed straight to the model.
    """
    pipeline = transforms.Compose([
        transforms.Resize([224, 224]),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
    ])
    img = Image.open(io.BytesIO(image_bytes))
    return pipeline(img).unsqueeze_(0)
def get_prediction(image_bytes, inference_session):
    """Run the ONNX session on one image and return its class label string."""
    batch = transform_image(image_bytes=image_bytes)
    logits = inference_session.run(None, {'input.1': batch.numpy()})
    predicted_index = np.argmax(logits[0], axis=1)[0]
    return class_map[predicted_index]
if __name__ == "__main__":
ort_session = ort.InferenceSession('app/models/banknote_best.onnx')
filename = [
"data/validation/2reaisFrente/compressed_0_1835891.jpeg",
'data/validation/2reaisVerso/compressed_0_3752849.jpeg',
"data/validation/5reaisFrente/compressed_0_1986857.jpeg",
"data/validation/5reaisVerso/compressed_0_4651610.jpeg",
"data/validation/10reaisFrente/compressed_0_2854543.jpeg",
"data/validation/10reaisVerso/compressed_0_2175135.jpeg",
'data/validation/20reaisFrente/compressed_0_1516768.jpeg',
'data/validation/20reaisVerso/compressed_0_3080811.jpeg',
'data/validation/50reaisFrente/compressed_0_1478513.jpeg',
'data/validation/50reaisVerso/compressed_0_3923784.jpeg']
for img in filename:
with open(img, 'rb') as f:
image_bytes = f.read()
tensor = get_prediction(image_bytes, ort_session)
print(tensor)
| 33.220339 | 78 | 0.673469 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 776 | 0.395918 |
dc7fef98ee8b4350186c803b3e6a489a9a3f0111 | 150 | py | Python | arcpy_ListDatasets.py | geocot/Python_ArcGIS_Desktop | aef5d855d8ce3f564dd4fba80599be32b89fcb5b | [
"Apache-2.0"
] | null | null | null | arcpy_ListDatasets.py | geocot/Python_ArcGIS_Desktop | aef5d855d8ce3f564dd4fba80599be32b89fcb5b | [
"Apache-2.0"
] | null | null | null | arcpy_ListDatasets.py | geocot/Python_ArcGIS_Desktop | aef5d855d8ce3f564dd4fba80599be32b89fcb5b | [
"Apache-2.0"
] | null | null | null | import arcpy
# Point arcpy at the target file geodatabase and allow overwriting outputs.
arcpy.env.workspace = "c:/temp/Donnees.gdb"
arcpy.env.overwriteOutput = True
# Print the name of every dataset found in the workspace.
listes = arcpy.ListDatasets()
for d in listes:
    print(d)
dc8290a1517bfe32adbe47391e23854b9f8fd7e1 | 5,421 | py | Python | src/server.py | ForgedSnow/Frontiersman | c564238b120bd9a526a2ebd6b79ed5b021be2a6e | [
"MIT"
] | null | null | null | src/server.py | ForgedSnow/Frontiersman | c564238b120bd9a526a2ebd6b79ed5b021be2a6e | [
"MIT"
] | null | null | null | src/server.py | ForgedSnow/Frontiersman | c564238b120bd9a526a2ebd6b79ed5b021be2a6e | [
"MIT"
] | null | null | null | import random
import socket
import time
class client:
    """A connected player: display name, socket address, live socket and the
    board color assigned by the server."""
    def __init__(self, name, address, socket, color):
        self.name = name
        self.address = address
        self.socket = socket
        self.color = color
sep = '\n'
def dice_roll():
    """Roll two six-sided dice and return them as a 'd1,d2' string."""
    first = random.randint(1, 6)
    second = random.randint(1, 6)
    return '{},{}'.format(first, second)
def readfromBuffer(sock):
    """Read one separator-terminated message from *sock*.

    Pulls a single byte at a time so no data belonging to the next message is
    consumed; the trailing separator is stripped from the returned text.
    """
    buffered = ""
    while True:
        if sep in buffered:
            break
        buffered += sock.recv(1).decode('utf-8')
    return buffered[:-1]
clients = []
# Shuffled pool of board colors handed out first-come-first-served.
Colors = ['red', 'cyan', 'orange', 'blue', 'green', 'pink', 'yellow']
random.shuffle(Colors)
ServerSocket = socket.socket()
host = '127.0.0.1'
port = 1233
ThreadCount = 0
try:
    ServerSocket.bind((host, port))
except socket.error as e:
    # Bind failure (e.g. port already in use) is only reported, not fatal here.
    print(str(e))
def sendToAll(message):
    """Broadcast *message* (newline-terminated) to every connected client."""
    payload = str.encode(message + "\n")
    for recipient in clients:
        recipient.socket.send(payload)
def sendToAllButOne(message, cli2):
    """Broadcast *message* to every connected client except *cli2*."""
    payload = str.encode(message + "\n")
    for recipient in clients:
        if recipient != cli2:
            recipient.socket.send(payload)
print('Waitiing for a Connection..')
ServerSocket.listen(5)
Client, address = ServerSocket.accept()
res = readfromBuffer(Client)
name = res
print(name)
Client.send(str.encode("host\n"))
clients.append(client(name, address, Client, Colors[0]))
Colors.pop(0)
numplayers = readfromBuffer(Client)
print(int(numplayers))
for x in range(int(numplayers) - 1):
Client, address = ServerSocket.accept()
res = readfromBuffer(Client)
name = res
print(name)
# for cli in clients:
# i =0
# cli.socket.send(str.encode(name+" connected\n"))
clients.append(client(name, address, Client, Colors[0]))
Colors.pop(0)
random.shuffle(clients)
for cli in clients:
sendToAllButOne("enemy," + cli.name + "," + cli.color, cli)
for cli in clients:
sendstring = "color,"
sendstring = sendstring + cli.color + '\n'
cli.socket.send(str.encode(sendstring))
# board randomizer
resource_list = ['Wheat'] * 4 + \
['Sheep'] * 4 + \
['Ore'] * 3 + \
['Brick'] * 3 + \
['Wood'] * 4 + \
['Desert'] * 1
number_list = [2, 12]
for index in range(3, 12):
if index == 7:
pass
else:
number_list.append(index)
number_list.append(index)
port_list = ['Wheat'] + \
['Sheep'] + \
['Ore'] + \
['Brick'] + \
['Wood'] + \
['None'] * 4
developmentDeck = ['knight'] * 15 + \
['roadBuilding'] * 2 + \
['yearOfPlenty'] * 2 + \
['monopoly'] * 2 + \
['victoryPoint'] * 5
random.shuffle(developmentDeck)
random.shuffle(number_list)
random.shuffle(resource_list)
random.shuffle(port_list)
numberstring = 'board|' + ','.join([str(elem) for elem in number_list]) + '|' + ','.join(
resource_list) + '|' + ','.join(port_list)
sendToAll(numberstring)
winner = False
# setup
for cli in clients:
cli.socket.send(str.encode("set\n"))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("set," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
cli.socket.send(str.encode('startroad\n'))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("road," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
for cli in reversed(clients):
cli.socket.send(str.encode("set\n"))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("set," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
cli.socket.send(str.encode('startroad\n'))
coordinates = readfromBuffer(cli.socket)
sendToAllButOne("road," + coordinates + "," + cli.color, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
for cli in clients:
cli.socket.send(str.encode('getstart\n'))
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
while (not winner):
for cli in clients:
dice = dice_roll()
sendToAll('dice,' + dice)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
message = readfromBuffer(cli.socket)
sendToAllButOne(message, cli)
if (int(dice.split(',')[0]) + int(dice.split(',')[1]) == 7):
print("afjgsadkfjsad")
cli.socket.send(str.encode('robber\n'))
else:
cli.socket.send(str.encode('turn\n'))
while True:
message = readfromBuffer(cli.socket)
print(message)
if (message == "end"):
break
if (message.split(',')[0] == "winner"):
print(message)
winner=True
sendToAll(message)
break
elif (message == "dev"):
card = developmentDeck.pop(0)
cli.socket.send(str.encode(card + '\n'))
else:
sendToAllButOne(message, cli)
print("here")
cli.socket.send(str.encode('notturn\n'))
# message=readfromBuffer(cli.socket)
# print(message)
# sendToAllButOne(message, cli)
time.sleep(10)
# game loop
for cli in clients:
cli.socket.send(str.encode("quit\n"))
ServerSocket.close()
| 29.461957 | 89 | 0.594724 | 179 | 0.03302 | 0 | 0 | 0 | 0 | 0 | 0 | 690 | 0.127283 |
dc82be9729f135d53e5a6812431dbe49d50879b1 | 849 | py | Python | adminCustom/templatetags/sort_app.py | Kiri23/DECE-Backend-Project | f488277bf294a4421c86efa512927e6d0f3255d6 | [
"MIT"
] | null | null | null | adminCustom/templatetags/sort_app.py | Kiri23/DECE-Backend-Project | f488277bf294a4421c86efa512927e6d0f3255d6 | [
"MIT"
] | 13 | 2019-05-24T21:13:59.000Z | 2022-03-11T23:45:53.000Z | adminCustom/templatetags/sort_app.py | Kiri23/DECE-Backend-Project | f488277bf294a4421c86efa512927e6d0f3255d6 | [
"MIT"
] | null | null | null | from django import template
from django.conf import settings
register = template.Library()
@register.filter
def sort_apps(apps):
    """Order the admin app list in-place according to settings.APP_ORDER.

    Apps not listed in APP_ORDER are pushed to the end; the stable sort keeps
    their relative order unchanged.
    """
    total = len(apps)
    print(f'count del index admin page: {total}')

    def position(app):
        # Rank by APP_ORDER index; unknown apps sort after all known ones.
        label = app['app_label']
        if label in settings.APP_ORDER:
            return settings.APP_ORDER.index(label)
        return total

    apps.sort(key=position)
    return apps
@register.filter
def sort_models(models):
    """Order the admin model list in-place according to settings.MODELS_ORDER.

    Models not listed in MODELS_ORDER are pushed to the end.
    """
    count = len(models)
    print(f'count del index admin page models: {count}')
    # Removed the old 'name = models[0][...]' debug line, which crashed on an
    # empty model list.
    models.sort(
        key=lambda x:
        settings.MODELS_ORDER.index(x['object_name'])
        # BUG FIX: the membership test previously checked APP_ORDER, so any
        # model present only in MODELS_ORDER raised ValueError from .index()
        # (and models absent from APP_ORDER were never ordered at all).
        if x['object_name'] in settings.MODELS_ORDER
        else count
    )
    return models
| 25.727273 | 61 | 0.630153 | 0 | 0 | 0 | 0 | 752 | 0.885748 | 0 | 0 | 234 | 0.275618 |
dc86eee1b0eb92b3d7f43c645a1d6d938f71dc2e | 1,147 | py | Python | A_MIA_R3_Core/Graphproc/Graphmod.py | nao0423/A_MIA_R3 | 49dd9849e082fd5cee98a9919e8e4962a8376e59 | [
"MIT"
] | null | null | null | A_MIA_R3_Core/Graphproc/Graphmod.py | nao0423/A_MIA_R3 | 49dd9849e082fd5cee98a9919e8e4962a8376e59 | [
"MIT"
] | null | null | null | A_MIA_R3_Core/Graphproc/Graphmod.py | nao0423/A_MIA_R3 | 49dd9849e082fd5cee98a9919e8e4962a8376e59 | [
"MIT"
] | null | null | null | import os
import numpy as np
from matplotlib import pyplot as plt
class DrawGraphs:
    """Renders per-frame emotion curves and returns the plot as an RGBA array."""

    def __init__(self, path_ONLY):
        # path_ONLY is only used to label the graph title.
        self.path_ONLY = path_ONLY
        # Ensure the output directory exists for callers saving the image.
        if not os.path.exists("./MakeGraph/graphs/"):
            os.makedirs("./MakeGraph/graphs/")

    def DrawEmotion(self, emotiondataarray):
        """Plot the five emotion channels over frames and return the image.

        emotiondataarray: per-frame rows of 5 values, one per emotion channel
        (angry, happy, neutral, sad, surprise -- per the legend order).
        Returns the rendered figure as a numpy RGBA array.
        """
        colors = ["#ff0000", "#ffff00", "#000000", "#0000ff", "#00ff00"]
        ylist = [[], [], [], [], []]
        for i in range(5):
            for j in range(len(emotiondataarray)):
                ylist[i].append(emotiondataarray[j][i])
        x = list(range(len(emotiondataarray)))
        # Removed stray debug 'print(x)'.
        fig = plt.figure()
        ax = fig.add_subplot(1, 1, 1)
        linetype = '-'
        title = 'detected emotions (Face only) ' + self.path_ONLY
        for i in range(5):
            ax.plot(x, ylist[i], linetype, c=colors[i], linewidth=1)
        # Generic chart elements (grid, axis labels, title, legend).
        ax.grid(True)
        ax.set_xlabel('frame [?]')
        ax.set_ylabel('exist rate')
        ax.set_title(title)
        ax.legend(['angry', 'happy', 'neutral', 'sad', 'surprise'])
        fig.canvas.draw()
        im = np.array(fig.canvas.renderer.buffer_rgba())
        # BUG FIX: matplotlib keeps every figure alive until closed; without
        # this, repeated calls (one per video) leak memory.
        plt.close(fig)
        return im
dc8808beaf5010b2226dd480feb0ba48ae8b88fd | 926 | py | Python | zkay/transaction/int_casts.py | nibau/zkay | da04760088767e05214aac2d2beee4cbf8ed77f6 | [
"MIT"
] | null | null | null | zkay/transaction/int_casts.py | nibau/zkay | da04760088767e05214aac2d2beee4cbf8ed77f6 | [
"MIT"
] | null | null | null | zkay/transaction/int_casts.py | nibau/zkay | da04760088767e05214aac2d2beee4cbf8ed77f6 | [
"MIT"
] | null | null | null | from enum import IntEnum
from typing import Optional, Any
from zkay.compiler.privacy.library_contracts import bn128_scalar_field
from zkay.transaction.types import AddressValue
def __convert(val: Any, nbits: Optional[int], signed: bool) -> int:
    """Coerce *val* to an int and truncate it to the requested bit width.

    IntEnum members and AddressValue wrappers are unwrapped first.  With
    nbits=None the value is reduced modulo the BN128 scalar field; otherwise
    the low *nbits* bits are kept, interpreted as two's complement if signed.
    """
    if isinstance(val, IntEnum):
        val = val.value
    elif isinstance(val, AddressValue):
        val = int.from_bytes(val.val, byteorder='big')
    if nbits is None:
        # No explicit width: reduce modulo the field prime.
        return val % bn128_scalar_field
    truncated = val & ((1 << nbits) - 1)  # unsigned representation
    if signed and truncated & (1 << (nbits - 1)):
        truncated -= (1 << nbits)  # two's-complement sign extension
    return truncated
# Register width-specific cast helpers (int8..int256 / uint8..uint256) plus the
# field-sized 'uint' cast in the module namespace. The 'i=i' default binds the
# loop variable at definition time (avoids the late-binding closure pitfall).
for i in range(8, 257, 8):
    globals()[f'int{i}'] = lambda x, i=i: __convert(x, i, True)
    globals()[f'uint{i}'] = lambda x, i=i: __convert(x, i, False)
# FIX: f-string without any placeholder replaced by a plain literal.
globals()['uint'] = lambda x: __convert(x, None, False)
| 34.296296 | 71 | 0.654428 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 99 | 0.106911 |
dc88339f42e2b1a9b7ab7afdaa2a87808aaca155 | 170 | py | Python | reid/evaluation_metrics/__init__.py | xueping187/weakly-supervised-person-re-id | 3cfe98264dcdb667c132727a57ab80da5a9e6a8f | [
"Apache-2.0"
] | 2 | 2021-09-14T03:39:43.000Z | 2021-09-14T03:41:04.000Z | reid/evaluation_metrics/__init__.py | xueping187/weakly-supervised-person-re-id | 3cfe98264dcdb667c132727a57ab80da5a9e6a8f | [
"Apache-2.0"
] | null | null | null | reid/evaluation_metrics/__init__.py | xueping187/weakly-supervised-person-re-id | 3cfe98264dcdb667c132727a57ab80da5a9e6a8f | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from .classification import accuracy
from .ranking_1 import cmc, mean_ap
__all__ = [
'accuracy',
'cmc',
'mean_ap',
]
| 15.454545 | 38 | 0.711765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 24 | 0.141176 |
dc88577e170bf6ccee3242427067dd649b90ce5d | 661 | py | Python | Modules/secreat-message.py | cclauss/pythonCodes | feee6fda9fcab90b6eda7b97e1c50af21861df4e | [
"MIT"
] | null | null | null | Modules/secreat-message.py | cclauss/pythonCodes | feee6fda9fcab90b6eda7b97e1c50af21861df4e | [
"MIT"
] | null | null | null | Modules/secreat-message.py | cclauss/pythonCodes | feee6fda9fcab90b6eda7b97e1c50af21861df4e | [
"MIT"
] | null | null | null | #This file contain examples for os module
#What is os module?
#is a module using for list files in folder, we can get name of current working directory
#rename files , write on files
import os
def rename_files():
# (1) get file names from a folder
file_list = os.listdir(r"C:\Users\user\Desktop\python\pythonCodes\Modules\images")
# r (row path) mean take the string as it's and don't interpreter
saved_path = os.getcwd()
print saved_path
saved_path = os.chdir(r"C:\Users\user\Desktop\python\pythonCodes\Modules\images")
for file_name in file_list:
os.rename(file_name , file_name.translate(None , "0123456789"))
print saved_path
rename_files() | 31.47619 | 89 | 0.75643 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 407 | 0.615734 |
dc88ab8f3f255c6120421288555f6c5b8861989f | 279 | py | Python | backend/database/db_result.py | Mancid/mancid_project | 4923264af324439658ad256444f3af6a4963e44f | [
"Unlicense"
] | 2 | 2021-05-12T14:10:16.000Z | 2021-05-16T22:05:41.000Z | backend/database/db_result.py | Mancid/mancid_project | 4923264af324439658ad256444f3af6a4963e44f | [
"Unlicense"
] | 18 | 2021-05-11T14:24:05.000Z | 2021-06-10T10:42:42.000Z | backend/database/db_result.py | Mancid/mancid_project | 4923264af324439658ad256444f3af6a4963e44f | [
"Unlicense"
] | 7 | 2021-05-01T17:50:54.000Z | 2021-06-09T12:04:11.000Z | import logging
def result_db(database):
    """Return every document of *database* as a list.

    The Mongo ``_id`` field is projected out of each returned document.

    :returns: all rows in the collection
    :rtype: list
    """
    logging.info("The result of filter")
    cursor = database.find({}, {"_id": 0})
    return [document for document in cursor]
| 21.461538 | 52 | 0.648746 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.612903 |
dc8a59b30c82608dcdd7060ae1382d398f258c24 | 493 | py | Python | donkeycar/parts/angle_adjust.py | hironorinaka99/donkeycar | e76b5242c45b2e20cf72efd44ca28af09cf285c1 | [
"MIT"
] | null | null | null | donkeycar/parts/angle_adjust.py | hironorinaka99/donkeycar | e76b5242c45b2e20cf72efd44ca28af09cf285c1 | [
"MIT"
] | null | null | null | donkeycar/parts/angle_adjust.py | hironorinaka99/donkeycar | e76b5242c45b2e20cf72efd44ca28af09cf285c1 | [
"MIT"
class angle_adjustclass():  # adjusts the steering angle scale factor
    """Holds a steering-angle scale factor adjustable in 0.05 steps.

    The factor is clamped to [0.5, 2.0] and rounded to two decimals so
    repeated adjustments do not accumulate floating point drift.
    """
    def __init__(self):
        self.angle_adjust = 1.0

    def angleincrease(self):
        # Step up by 0.05, capped at 2.0.
        self.angle_adjust = round(min(2.0, self.angle_adjust + 0.05), 2)
        print("In angle_adjust increase", self.angle_adjust)

    def angledecrease(self):
        # Step down by 0.05, floored at 0.5.
        self.angle_adjust = round(max(0.5, self.angle_adjust - 0.05), 2)
        # BUG FIX: this log line previously said "increase".
        print("In angle_adjust decrease", self.angle_adjust)

    def run(self):
        return self.angle_adjust
dc8be82c0b5d6c554dac1d17d3517255a183251c | 18,351 | py | Python | sem_seg/train_pyramid.py | Hao-FANG-92/3D_PSPNet | 2821a3181cbdb70ee3291c29ebac19af38e6c8dc | [
"MIT"
] | 6 | 2020-06-13T08:34:49.000Z | 2021-11-09T04:09:48.000Z | sem_seg/train_pyramid.py | jtpils/3D_PSPNet | 2821a3181cbdb70ee3291c29ebac19af38e6c8dc | [
"MIT"
] | null | null | null | sem_seg/train_pyramid.py | jtpils/3D_PSPNet | 2821a3181cbdb70ee3291c29ebac19af38e6c8dc | [
"MIT"
] | 4 | 2020-03-08T17:00:19.000Z | 2022-03-18T15:42:14.000Z | import argparse
import math
import h5py
import numpy as np
import tensorflow as tf
import socket
import time
import resource
import os
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(BASE_DIR)
sys.path.append(ROOT_DIR)
sys.path.append(os.path.join(ROOT_DIR, 'utils'))
import provider
import tf_util
from model import *
import pyramid_nets
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=int, default=0, help='GPU to use [default: GPU 0]')
parser.add_argument('--log_dir', default='log', help='Log dir [default: log]')
parser.add_argument('--num_point', type=int, default=4096, help='Point number [default: 4096]')
parser.add_argument('--max_epoch', type=int, default=50, help='Epoch to run [default: 50]')
parser.add_argument('--batch_size', type=int, default=24, help='Batch Size during training [default: 24]')
parser.add_argument('--learning_rate', type=float, default=0.001, help='Initial learning rate [default: 0.001]')
parser.add_argument('--momentum', type=float, default=0.9, help='Initial learning rate [default: 0.9]')
parser.add_argument('--optimizer', default='adam', help='adam or momentum [default: adam]')
parser.add_argument('--decay_step', type=int, default=300000, help='Decay step for lr decay [default: 300000]')
parser.add_argument('--decay_rate', type=float, default=0.5, help='Decay rate for lr decay [default: 0.5]')
parser.add_argument('--test_area', type=int, default=5, help='Which area to use for test, option: 1-6 [default: 6]')
parser.add_argument('--model_path', default='log/model.ckpt', help='model checkpoint file path')
FLAGS = parser.parse_args()
print(FLAGS.gpu)
print(FLAGS.log_dir)
print(FLAGS.test_area)
BATCH_SIZE = FLAGS.batch_size
NUM_POINT = FLAGS.num_point
MAX_EPOCH = FLAGS.max_epoch
NUM_POINT = FLAGS.num_point
BASE_LEARNING_RATE = FLAGS.learning_rate
GPU_INDEX = FLAGS.gpu
MOMENTUM = FLAGS.momentum
OPTIMIZER = FLAGS.optimizer
DECAY_STEP = FLAGS.decay_step
DECAY_RATE = FLAGS.decay_rate
MODEL_PATH = FLAGS.model_path
LOG_DIR = FLAGS.log_dir
if not os.path.exists(LOG_DIR): os.mkdir(LOG_DIR)
os.system('cp model.py %s' % (LOG_DIR)) # bkp of model def
os.system('cp train_pyramid.py %s' % (LOG_DIR)) # bkp of train procedure
LOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'w')
LOG_FOUT.write(str(FLAGS)+'\n')
MAX_NUM_POINT = 4096
NUM_CLASSES = 13
BN_INIT_DECAY = 0.5
BN_DECAY_DECAY_RATE = 0.5
#BN_DECAY_DECAY_STEP = float(DECAY_STEP * 2)
BN_DECAY_DECAY_STEP = float(DECAY_STEP)
BN_DECAY_CLIP = 0.99
# voxel size
num_scale = 4
max_num = 8
HOSTNAME = socket.gethostname()
print("base_dir: " + BASE_DIR)
print("file_list: " + os.path.join(BASE_DIR, 'indoor3d_sem_seg_hdf5_data', 'all_files.txt'))
#ALL_FILES = provider.getDataFiles('sem_seg/indoor3d_sem_seg_hdf5_data/all_files.txt')
#room_filelist = [line.rstrip() for line in open('sem_seg/indoor3d_sem_seg_hdf5_data/room_filelist.txt')]
ALL_FILES = provider.getDataFiles(os.path.join(BASE_DIR, 'indoor3d_sem_seg_hdf5_data', 'all_files.txt'))
room_filelist = [line.rstrip() for line in open(os.path.join(BASE_DIR, 'indoor3d_sem_seg_hdf5_data/room_filelist.txt'))]
def initialize_uninitialized(sess):
    """Run initializers only for variables *sess* has not initialized yet.

    Useful after restoring a partial checkpoint: restored variables keep
    their values while newly added ones get their initializers run.
    """
    global_vars = tf.global_variables()
    init_flags = sess.run([tf.is_variable_initialized(var) for var in global_vars])
    not_initialized_vars = [v for (v, f) in zip(global_vars, init_flags) if not f]
    # FIX: the original printed a Python list of strings with embedded
    # newlines (list repr); print one variable name per line instead.
    for var in not_initialized_vars:  # only for testing
        print(var.name)
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
# Load ALL data
data_batch_list = []
label_batch_list = []
for h5_filename in ALL_FILES:
h5_filename = os.path.join(BASE_DIR, h5_filename)
data_batch, label_batch = provider.loadDataFile(h5_filename)
data_batch_list.append(data_batch)
label_batch_list.append(label_batch)
data_batches = np.concatenate(data_batch_list, 0)
label_batches = np.concatenate(label_batch_list, 0)
print(data_batches.shape)
print(label_batches.shape)
test_area = 'Area_'+str(FLAGS.test_area)
train_idxs = []
test_idxs = []
for i,room_name in enumerate(room_filelist):
if test_area in room_name:
test_idxs.append(i)
else:
train_idxs.append(i)
train_data = data_batches[train_idxs,...]
train_label = label_batches[train_idxs]
test_data = data_batches[test_idxs,...]
test_label = label_batches[test_idxs]
print(train_data.shape, train_label.shape)
print(test_data.shape, test_label.shape)
def log_string(out_str):
    """Append *out_str* to the training log file and echo it to stdout."""
    LOG_FOUT.write('%s\n' % out_str)
    LOG_FOUT.flush()
    print(out_str)
def get_learning_rate(batch):
    """Exponentially decayed learning rate schedule, clipped below at 1e-5."""
    lr = tf.train.exponential_decay(
        BASE_LEARNING_RATE,   # base learning rate
        batch * BATCH_SIZE,   # current sample index into the dataset
        DECAY_STEP,
        DECAY_RATE,
        staircase=True)
    # Never let the rate decay below 1e-5.
    return tf.maximum(lr, 0.00001)
def get_bn_decay(batch):
    """Batch-norm decay schedule: momentum decays, so decay grows toward BN_DECAY_CLIP."""
    momentum = tf.train.exponential_decay(
        BN_INIT_DECAY,
        batch * BATCH_SIZE,
        BN_DECAY_DECAY_STEP,
        BN_DECAY_DECAY_RATE,
        staircase=True)
    return tf.minimum(BN_DECAY_CLIP, 1 - momentum)
def train():
    """Build the pyramid segmentation graph and run the train/eval loop.

    Creates the placeholders, the multi-scale pyramid model, loss/accuracy
    summaries and the optimizer, then alternates one training and one
    evaluation epoch for MAX_EPOCH epochs, checkpointing after every epoch.
    """
    with tf.Graph().as_default():
        with tf.device('/gpu:'+str(GPU_INDEX)):
            pointclouds_pl, labels_pl = placeholder_inputs(BATCH_SIZE, NUM_POINT)
            is_training_pl = tf.placeholder(tf.bool, shape=())
            # Note the global_step=batch parameter to minimize.
            # That tells the optimizer to helpfully increment the 'batch' parameter for you every time it trains.
            batch = tf.Variable(0)
            bn_decay = get_bn_decay(batch)
            tf.summary.scalar('bn_decay', bn_decay)
            # Get model and loss
            #pred = get_model(pointclouds_pl, is_training_pl, bn_decay=bn_decay)
            #loss = get_loss(pred, labels_pl)
            # num_scale x m x n x 3 (voxel coordinates for each point in each scale)
            point_coords_in_voxels = placeholder_inputs_voxel_id(num_scale, BATCH_SIZE, NUM_POINT)
            pred, end_points = get_model_pyramid(pointclouds_pl, point_coords_in_voxels, num_scale, is_training_pl, bn_decay=bn_decay)
            #pred, end_points = get_model_with_pyramid(pointclouds_pl, point_coords_in_voxels, num_scale, is_training_pl, bn_decay=bn_decay)
            #pred, end_points = transfer_learning_with_pyramid(pointclouds_pl, point_coords_in_voxels, num_scale, is_training_pl, bn_decay=bn_decay)
            loss = get_loss_pyramid_with_transform_nets(pred, labels_pl, end_points)
            tf.summary.scalar('loss', loss)
            # Per-point accuracy over the whole batch.
            correct = tf.equal(tf.argmax(pred, 2), tf.to_int64(labels_pl))
            accuracy = tf.reduce_sum(tf.cast(correct, tf.float32)) / float(BATCH_SIZE*NUM_POINT)
            tf.summary.scalar('accuracy', accuracy)
            # Get training operator
            learning_rate = get_learning_rate(batch)
            tf.summary.scalar('learning_rate', learning_rate)
            if OPTIMIZER == 'momentum':
                optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=MOMENTUM)
            elif OPTIMIZER == 'adam':
                optimizer = tf.train.AdamOptimizer(learning_rate)
            train_op = optimizer.minimize(loss, global_step=batch)
            # Add ops to save and restore all the variables.
            saver = tf.train.Saver()
        # Create a session
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = True
        config.allow_soft_placement = True
        config.log_device_placement = True
        sess = tf.Session(config=config)
        # pretrained model
        #saver.restore(sess, MODEL_PATH)
        # Add summary writers
        merged = tf.summary.merge_all()
        train_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'train'),
                                  sess.graph)
        test_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'test'))
        # Init variables
        init = tf.global_variables_initializer()
        sess.run(init, {is_training_pl:True})
        # Handles passed to the per-epoch train/eval helpers.
        ops = {'pointclouds_pl': pointclouds_pl,
               'labels_pl': labels_pl,
               'point_3d_voxel_id' : point_coords_in_voxels,
               'is_training_pl': is_training_pl,
               'pred': pred,
               'loss': loss,
               'train_op': train_op,
               'merged': merged,
               'step': batch}
        for epoch in range(MAX_EPOCH):
            log_string('**** EPOCH %03d ****' % (epoch))
            sys.stdout.flush()
            train_one_epoch(sess, ops, train_writer)
            eval_one_epoch(sess, ops, test_writer)
            # Save the variables to disk.
            if epoch % 1 == 0:
                save_path = saver.save(sess, os.path.join(LOG_DIR, "model.ckpt"))
                log_string("Model saved in file: %s" % save_path)
def train_fine_tuning():
#with tf.Graph().as_default():
with tf.device('/gpu:'+str(GPU_INDEX)):
# Create a session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.allow_soft_placement = True
config.log_device_placement = True
sess = tf.Session(config=config)
# load pretrained model
saver = tf.train.import_meta_graph(MODEL_PATH + '.meta')
saver.restore(sess, MODEL_PATH)
pointnet_graph = tf.get_default_graph()
#pointclouds_pl, labels_pl = placeholder_inputs(BATCH_SIZE, NUM_POINT)
#is_training_pl = tf.placeholder(tf.bool, shape=())
pointclouds_pl = pointnet_graph.get_tensor_by_name("Placeholder:0")
labels_pl = pointnet_graph.get_tensor_by_name("Placeholder_1:0")
is_training_pl = pointnet_graph.get_tensor_by_name("Placeholder_2:0")
# Note the global_step=batch parameter to minimize.
# That tells the optimizer to helpfully increment the 'batch' parameter for you every time it trains.
batch = tf.Variable(0)
bn_decay = get_bn_decay(batch)
tf.summary.scalar('bn_decay', bn_decay)
# Get model and loss
#pred = get_model(pointclouds_pl, is_training_pl, bn_decay=bn_decay)
#loss = get_loss(pred, labels_pl)
# num_scale x m x n x 3 (voxel coordinates for each point in each scale)
#point_coords_in_voxels = placeholder_inputs_voxel_id(num_scale, BATCH_SIZE, NUM_POINT)
point_coords_in_voxels = placeholder_inputs_voxel_id(num_scale, BATCH_SIZE, NUM_POINT)
#from tensorflow.python import pywrap_tensorflow
#reader = pywrap_tensorflow.NewCheckpointReader(MODEL_PATH)
#var_to_shape_map = reader.get_variable_to_shape_map()
#for key in sorted(var_to_shape_map):
# print("tensor_name: ", key)
#op = sess.graph.get_operations()
#for m in op:
# print("operators:", m.values())
#print("test_tensor:", pointnet_graph.get_tensor_by_name("conv2/weights:0"))
#print("points_feat1:", pointnet_graph.get_tensor_by_name("conv5/Relu:0"))
#print("bn_decay:", bn_decay)
#pred, end_points = fine_tuning_with_pyramid_3(pointnet_graph, pointclouds_pl, point_coords_in_voxels, num_scale, is_training_pl, bn_decay=bn_decay)
pred, end_points = fine_tuning_with_pyramid(pointnet_graph, pointclouds_pl, point_coords_in_voxels, num_scale, is_training_pl, bn_decay=bn_decay)
loss = get_loss_pyramid_with_transform_nets(pred, labels_pl, end_points)
tf.summary.scalar('loss', loss)
correct = tf.equal(tf.argmax(pred, 2), tf.to_int64(labels_pl))
accuracy = tf.reduce_sum(tf.cast(correct, tf.float32)) / float(BATCH_SIZE*NUM_POINT)
tf.summary.scalar('accuracy', accuracy)
# Get training operator
learning_rate = get_learning_rate(batch)
tf.summary.scalar('learning_rate', learning_rate)
if OPTIMIZER == 'momentum':
optimizer = tf.train.MomentumOptimizer(learning_rate, momentum=MOMENTUM)
elif OPTIMIZER == 'adam':
optimizer = tf.train.AdamOptimizer(learning_rate, name='fine_tune_adam')
train_op = optimizer.minimize(loss, global_step=batch)
#update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
#with tf.control_dependencies(update_ops):
# train_op = optimizer.minimize(loss, global_step=batch)
# Add ops to save and restore all the variables.
new_saver = tf.train.Saver()
# Add summary writers
merged = tf.summary.merge_all()
train_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'train'),
sess.graph)
test_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'test'))
# Init variables
init = tf.global_variables_initializer()
sess.run(init, {is_training_pl:True})
#initialize_uninitialized(sess)
ops = {'pointclouds_pl': pointclouds_pl,
'labels_pl': labels_pl,
'point_3d_voxel_id' : point_coords_in_voxels,
'is_training_pl': is_training_pl,
'pred': pred,
'loss': loss,
'train_op': train_op,
'merged': merged,
'step': batch}
for epoch in range(MAX_EPOCH):
log_string('**** EPOCH %03d ****' % (epoch))
sys.stdout.flush()
time_start = time.clock()
train_one_epoch(sess, ops, train_writer)
time_elapsed = (time.clock() - time_start)
log_string('total computational time: %f' % (time_elapsed))
log_string('peak computational momory: %f in Kb' % (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss))
eval_one_epoch(sess, ops, test_writer)
# Save the variables to disk.
if epoch % 1 == 0:
save_path = new_saver.save(sess, os.path.join(LOG_DIR, "model.ckpt"))
log_string("Model saved in file: %s" % save_path)
def train_one_epoch(sess, ops, train_writer):
""" ops: dict mapping from string to tf ops """
is_training = True
log_string('----')
current_data, current_label, _ = provider.shuffle_data(train_data[:,0:NUM_POINT,:], train_label)
file_size = current_data.shape[0]
num_batches = file_size // BATCH_SIZE
total_correct = 0
total_seen = 0
loss_sum = 0
for batch_idx in range(num_batches):
if batch_idx % 100 == 0:
print('Current batch/total batch num: %d/%d'%(batch_idx,num_batches))
start_idx = batch_idx * BATCH_SIZE
end_idx = (batch_idx+1) * BATCH_SIZE
# num_scale x m x n x 3
current_point_3d_voxel_id = provider.voxle_3d_id_for_batch_data(current_data[start_idx:end_idx, :, :], max_num, num_scale)
feed_dict = {ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
ops['labels_pl']: current_label[start_idx:end_idx],
ops['point_3d_voxel_id']: current_point_3d_voxel_id,
ops['is_training_pl']: is_training,}
summary, step, _, loss_val, pred_val = sess.run([ops['merged'], ops['step'], ops['train_op'], ops['loss'], ops['pred']],
feed_dict=feed_dict)
train_writer.add_summary(summary, step)
pred_val = np.argmax(pred_val, 2)
correct = np.sum(pred_val == current_label[start_idx:end_idx])
total_correct += correct
total_seen += (BATCH_SIZE*NUM_POINT)
loss_sum += loss_val
log_string('mean loss: %f' % (loss_sum / float(num_batches)))
log_string('accuracy: %f' % (total_correct / float(total_seen)))
def eval_one_epoch(sess, ops, test_writer):
""" ops: dict mapping from string to tf ops """
is_training = False
total_correct = 0
total_seen = 0
loss_sum = 0
total_seen_class = [0 for _ in range(NUM_CLASSES)]
total_correct_class = [0 for _ in range(NUM_CLASSES)]
log_string('----')
current_data = test_data[:,0:NUM_POINT,:]
current_label = np.squeeze(test_label)
file_size = current_data.shape[0]
num_batches = file_size // BATCH_SIZE
for batch_idx in range(num_batches):
start_idx = batch_idx * BATCH_SIZE
end_idx = (batch_idx+1) * BATCH_SIZE
# num_scale x m x n x 3
current_point_3d_voxel_id = provider.voxle_3d_id_for_batch_data(current_data[start_idx:end_idx, :, :], max_num, num_scale)
feed_dict = {ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :],
ops['labels_pl']: current_label[start_idx:end_idx],
ops['point_3d_voxel_id']: current_point_3d_voxel_id,
ops['is_training_pl']: is_training}
summary, step, loss_val, pred_val = sess.run([ops['merged'], ops['step'], ops['loss'], ops['pred']],
feed_dict=feed_dict)
test_writer.add_summary(summary, step)
pred_val = np.argmax(pred_val, 2)
correct = np.sum(pred_val == current_label[start_idx:end_idx])
total_correct += correct
total_seen += (BATCH_SIZE*NUM_POINT)
loss_sum += (loss_val*BATCH_SIZE)
for i in range(start_idx, end_idx):
for j in range(NUM_POINT):
l = current_label[i, j]
total_seen_class[l] += 1
total_correct_class[l] += (pred_val[i-start_idx, j] == l)
log_string('eval mean loss: %f' % (loss_sum / float(total_seen/NUM_POINT)))
log_string('eval accuracy: %f'% (total_correct / float(total_seen)))
log_string('eval avg class acc: %f' % (np.mean(np.array(total_correct_class)/np.array(total_seen_class,dtype=np.float))))
if __name__ == "__main__":
#train()
train_fine_tuning()
LOG_FOUT.close()
| 40.509934 | 156 | 0.65288 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,780 | 0.260476 |
dc8c2777bf74705cf12f2debe29527c9ee43e568 | 1,015 | py | Python | tests/pyre/components/protocol.py | BryanRiel/pyre | 179359634a7091979cced427b6133dd0ec4726ea | [
"BSD-3-Clause"
] | null | null | null | tests/pyre/components/protocol.py | BryanRiel/pyre | 179359634a7091979cced427b6133dd0ec4726ea | [
"BSD-3-Clause"
] | null | null | null | tests/pyre/components/protocol.py | BryanRiel/pyre | 179359634a7091979cced427b6133dd0ec4726ea | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2018 all rights reserved
#
"""
Check that declarations of trivial protocols produce the expected layout
"""
def test():
import pyre
# declare
class protocol(pyre.protocol):
"""a trivial protocol"""
# check the basics
assert protocol.__name__ == "protocol"
assert protocol.__bases__ == (pyre.protocol,)
# did I get a key
assert protocol.pyre_key is None # since i didn't specify my family name
# check the layout
assert protocol.pyre_namemap == {}
assert protocol.pyre_localTraits == ()
assert protocol.pyre_inheritedTraits == ()
assert protocol.pyre_pedigree == (protocol, pyre.protocol)
assert protocol.pyre_internal == False
# exercise the configurable interface
assert list(protocol.pyre_traits()) == []
assert protocol.pyre_isCompatible(protocol)
return protocol
# main
if __name__ == "__main__":
test()
# end of file
| 21.145833 | 76 | 0.675862 | 63 | 0.061947 | 0 | 0 | 0 | 0 | 0 | 0 | 399 | 0.39233 |
dc8d4387b8718ac172644dcb9029d94d050e9c2f | 1,210 | py | Python | Modules/init_logging.py | LoveBootCaptain/WeatherPi | 24132f392080a61cabaa6fe99c3ef3ee684ccd77 | [
"MIT"
] | 7 | 2016-10-23T16:57:16.000Z | 2020-05-18T14:18:12.000Z | Modules/init_logging.py | LoveBootCaptain/WeatherPi | 24132f392080a61cabaa6fe99c3ef3ee684ccd77 | [
"MIT"
] | null | null | null | Modules/init_logging.py | LoveBootCaptain/WeatherPi | 24132f392080a61cabaa6fe99c3ef3ee684ccd77 | [
"MIT"
] | 1 | 2018-08-05T00:44:07.000Z | 2018-08-05T00:44:07.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
# create some logger details
import logging.handlers
# create a weather logger
weather_path = '/home/pi/WeatherPi/logs/Weather_Log_Data.log'
WEATHER_LOG_FILENAME = weather_path
# Set up a specific logger with our desired output level
weather_logger = logging.getLogger('WeatherLogger')
weather_logger.setLevel(logging.INFO)
# Add the log message handler to the logger and make a log-rotation of 100 files with max. 10MB per file
weather_handler = logging.handlers.RotatingFileHandler(WEATHER_LOG_FILENAME, maxBytes=10485760, backupCount=100)
weather_logger.addHandler(weather_handler)
# create a debug logger
debug_path = '/home/pi/WeatherPi/logs/Debug_Log.log'
DEBUG_LOG_FILENAME = debug_path
# Set up a specific logger with our desired output level
debug_logger = logging.getLogger('werkzeug')
debug_logger.setLevel(logging.DEBUG)
# Add the log message handler to the logger and make a log-rotation of 100 files with max. 10MB per file
debug_handler = logging.handlers.RotatingFileHandler(DEBUG_LOG_FILENAME, maxBytes=100000, backupCount=1)
debug_logger.addHandler(debug_handler)
def log_string(string):
print(string)
debug_logger.debug(string)
| 31.025641 | 112 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 546 | 0.45124 |
dc8e1380dde51a4caa0b3df07a69e44c39836128 | 2,358 | py | Python | tests/functional/step_defs/test_predictions.py | carlos10seg/rec-service | 91ffb33c57028e959e16a7fc97a4dafee4f610a0 | [
"MIT"
] | 1 | 2021-04-14T02:33:12.000Z | 2021-04-14T02:33:12.000Z | tests/functional/step_defs/test_predictions.py | carlos10seg/rec-service | 91ffb33c57028e959e16a7fc97a4dafee4f610a0 | [
"MIT"
] | 2 | 2021-08-25T16:14:32.000Z | 2022-02-10T02:51:22.000Z | tests/functional/step_defs/test_predictions.py | carlos10seg/rec-service | 91ffb33c57028e959e16a7fc97a4dafee4f610a0 | [
"MIT"
] | 2 | 2020-12-11T16:17:10.000Z | 2021-03-02T01:38:23.000Z | import pytest
import requests
import logging
from pytest_bdd import scenarios, given, then, parsers
from requests.exceptions import ConnectionError
def is_responsive(url):
try:
response = requests.get(url)
if response.status_code == 404:
return True
except ConnectionError:
return False
@pytest.fixture(scope="session")
def http_service(docker_ip, docker_services):
"""Ensure that HTTP service is up and responsive."""
# `port_for` takes a container port and returns the corresponding host port
port = docker_services.port_for("recserver", 5000)
url = "http://{}:{}/".format(docker_ip, port)
docker_services.wait_until_responsive(
timeout=30.0, pause=0.1, check=lambda: is_responsive(url)
)
return url
# Scenarios
scenarios('../features/predictions.feature', example_converters=dict(user_id=int, num_recs=int))
# Given Steps
@given('a running recommendation server')
def is_server_running(http_service):
response = requests.get(http_service + "status")
assert response.status_code == 200
@given('a trained recommender model')
def get_trained_als_model(http_service):
right_url = 'algorithms/implicitmf/info'
logging.info(http_service + right_url)
response = requests.get(http_service + right_url)
assert len(response.json()['model']) > 0
@given('the predict API is called with <user_id> and <items>')
def predictions_response(http_service, user_id, items):
params = {'user_id': user_id, 'items': items, 'format': 'json'}
right_url = 'algorithms/implicitmf/predictions'
response = requests.get(http_service + right_url, params=params)
return response
# Then Steps
@then('the response returns a list of predictions')
def get_response_list_recs(predictions_response):
recs = predictions_response.json()['predictions']
assert len(recs) > 0
assert recs[0]['score'] > 0
@then('the response returns an empty list')
def get_response_empty_list_recs(predictions_response):
recs = predictions_response.json()['predictions']
assert len(recs) == 0
@then(parsers.parse('the response status code is "{code:d}"'))
def ddg_response_code(predictions_response, code):
if predictions_response.status_code != code:
logging.error(predictions_response.text)
assert predictions_response.status_code == code | 35.19403 | 96 | 0.729432 | 0 | 0 | 0 | 0 | 1,875 | 0.795165 | 0 | 0 | 608 | 0.257846 |
dc8ea639aefe380ccad355b48b1e7babaaca301c | 1,922 | py | Python | meiduo_mall/meiduo_mall/apps/contents/views.py | hgztlmb/meiduo_project | 5eb3b875faa06db844c8ba842cc299eccd976311 | [
"MIT"
] | 2 | 2019-05-27T12:41:53.000Z | 2019-06-11T02:58:00.000Z | meiduo_mall/meiduo_mall/apps/contents/views.py | hgztlmb/meiduo_project | 5eb3b875faa06db844c8ba842cc299eccd976311 | [
"MIT"
] | null | null | null | meiduo_mall/meiduo_mall/apps/contents/views.py | hgztlmb/meiduo_project | 5eb3b875faa06db844c8ba842cc299eccd976311 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.views import View
from goods.models import GoodsChannel
from contents.models import ContentCategory
from .utils import get_categories
class IndexView(View):
def get(self, request):
# 定义一个字典categories包装所有数据
# categories = {}
# # 查询出所有商品频道数据并按照group_id(组),sequence(组内顺序)进行排序
# good_channel_qs = GoodsChannel.objects.order_by('group_id', 'sequence')
# # 遍历查询集(商品频道数据)
# for channel in good_channel_qs:
# # 获取当前商品group_id
# group_id = channel.group_id
# # 判断组别(group_id)是否存在字典中
# if channel.group_id not in categories:
# # 不存在则添加一个新的数据格式:{group_id:{“channels":[],"sub_cats":[]}}
# categories[group_id] = {"channels": [], "sub_cats": []}
# # 通过频道获取一级商品数据模型category
# cat1 = channel.category
# # 把频道的url赋给cat1
# cat1.url = channel.url
# # 将一级商品数据加入字典channels中
# categories[group_id]["channels"].append(cat1)
# # 获取当前一级下二级商品数据查询集
# cat2_qs = cat1.subs.all()
# # 遍历二级数据查询集
# for cat2 in cat2_qs:
# # 获取三级数据查询集
# cat3_qs = cat2.subs.all()
# # 将当前二级数据下的三级数据查询集保存到二级数据的sub_cats属性中
# cat2.sub_cats = cat3_qs
# # 将二级数据加入字典sub_cats中
# categories[group_id]["sub_cats"].append(cat2)
# 首页广告
# 建立字典保存广告数据
contents = {}
# 获取广告类别查询集
contents_qs = ContentCategory.objects.all()
# 遍历广告类型查询集
for cat in contents_qs:
# 构建广告数据格式
contents[cat.key] = cat.content_set.filter(status=True).order_by('sequence')
context = {
"categories": get_categories(),
"contents": contents
}
return render(request, 'index.html', context)
| 35.592593 | 88 | 0.564516 | 2,189 | 0.922072 | 0 | 0 | 0 | 0 | 0 | 0 | 1,524 | 0.641955 |
dc8f73f0528c0d2a14df7cd7036a596e1d08c194 | 615 | py | Python | examples/ticker.py | rstms/txtrader-monitor | 63de1d8ca7c06e9c7301775679a4373a90912426 | [
"MIT"
] | null | null | null | examples/ticker.py | rstms/txtrader-monitor | 63de1d8ca7c06e9c7301775679a4373a90912426 | [
"MIT"
] | null | null | null | examples/ticker.py | rstms/txtrader-monitor | 63de1d8ca7c06e9c7301775679a4373a90912426 | [
"MIT"
] | null | null | null | from txtrader_monitor import Monitor
import json
from pprint import pprint
m = Monitor(log_level='WARNING')
def status(channel, data):
print(f"{channel}: {data}")
if data.startswith('.Authorized'):
pass
return True
def ticker(channel, data):
print(f"{channel}: {data}")
return True
def timer(channel, data):
print(f"{channel}: {data}")
return True
def main():
m.set_callbacks(callbacks={
'*': None,
'TICK': ticker,
'TIME': timer,
'STATUS': status,
})
m.set_tick_interval(5)
m.run()
if __name__ == '__main__':
main()
| 16.184211 | 38 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.186992 |
dc8fe8f74f9c641facf00630552d4d110386f403 | 5,055 | py | Python | code/models/maxEnt.py | trenslow/thesis | fbf757176c0ae61fba993d16b24a43129578b1e5 | [
"BSD-3-Clause"
] | null | null | null | code/models/maxEnt.py | trenslow/thesis | fbf757176c0ae61fba993d16b24a43129578b1e5 | [
"BSD-3-Clause"
] | null | null | null | code/models/maxEnt.py | trenslow/thesis | fbf757176c0ae61fba993d16b24a43129578b1e5 | [
"BSD-3-Clause"
] | null | null | null | import tensorflow as tf
import numpy as np
import itertools
import time
import random
import sys
def chunks(l, n):
# for efficient iteration over whole data set
for i in range(0, len(l), n):
yield l[i:i + n]
def read_file(file):
feats = []
wrds = []
vcb = {'UNK': 0}
with open(file) as inpt:
for line in inpt: # for looping over whole dataset
#for line in itertools.islice(inpt, 49999): # for taking first n lines of dataset (for development)
split = line.strip().split()
feat_vec = [float(val) for val in split[:-1]]
feats.append(feat_vec)
wrd = split[-1]
wrds.append(wrd)
if wrd not in vcb:
vcb[wrd] = len(vcb)
return feats, wrds, vcb
def lookup_numeric_labels(word_list, voc):
# maps each unique word to a unique integer label
return np.array([[voc[word]] if word in voc else voc['UNK'] for word in word_list])
if __name__ == '__main__':
# currently set for swadesh vectors
langs = sys.argv[1:3]
lang1, lang2 = langs[0], langs[1]
print(lang1, lang2)
features, words, vocab = read_file('data/vectors_swadesh.' + lang1 + '.' + lang2)
num_classes = len(vocab)
print('vocab size: ' + str(num_classes))
num_features = len(features[0])
num_tokens = len(features)
batch_size = 256
num_gpus = 4
def parallelize(fn, num_gpus, **kwargs):
# input data comes in as a multiple of num_gpus
input_split = {}
for k, v in kwargs.items():
input_split[k] = []
for i in range(num_gpus):
# slice up the data into equal sized pieces to send to each gpu
shape = tf.shape(v)
size = tf.concat([shape[:1] // num_gpus, shape[1:]], axis=0)
stride = tf.concat([shape[:1] // num_gpus, shape[1:]*0], axis=0)
start = stride * i
input_split[k].append(tf.slice(v, start, size))
output_split = []
for i in range(num_gpus):
with tf.device(tf.DeviceSpec(device_type="GPU", device_index=i)):
with tf.variable_scope(tf.get_variable_scope(), reuse=i > 0):
# send data through model one gpu at a time
output_split.append(fn(**{k : v[i] for k, v in input_split.items()}))
# return all the losses returned from each gpu
return tf.concat(output_split, axis=0)
def maxEnt_model(ex, y_tru):
# draw graph (Multinomial Logistic Regression/Log-Linear model)
W = tf.get_variable('W', [num_features, num_classes], initializer=tf.contrib.layers.xavier_initializer()) # based on Glorot and Bengio (2010)
b = tf.Variable(tf.zeros([num_classes]), name='b')
# calculate log probs based on Beta_0 + Beta_1 * x
y = tf.add(tf.matmul(ex, W), b)
# sparse loss function used to speed up computation
# using unique labels is a lot less to hold in memory than one-hot vectors of vocab size dimensions
loss_func = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.reshape(y_tru, [-1]))
return loss_func
# graph input
x = tf.placeholder(tf.float32, [None, num_features], name='x')
y_true = tf.placeholder(tf.int32, [None, 1], name='y_true')
loss_function = parallelize(maxEnt_model, num_gpus, ex=x, y_tru=y_true)
learning_rate = 0.001
optimizer = tf.train.AdamOptimizer(learning_rate).minimize(loss_function, colocate_gradients_with_ops=True)
# launch session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
with tf.Session(config=config) as sess:
sess.run(tf.global_variables_initializer())
old_PP = 1.0
tol = 0.05
max_iter = 100
for j in range(max_iter):
start = time.time()
combined = list(zip(features, words))
random.shuffle(combined)
total_loss = 0.0
for batch in chunks(combined, batch_size * num_gpus):
batch_x, batch_y_words = zip(*batch) # "unzip"
batch_y_true = lookup_numeric_labels(batch_y_words, vocab)
_, l = sess.run([optimizer, loss_function], feed_dict={x: np.array(batch_x), y_true: batch_y_true})
batch_loss = np.sum(l)
assert not np.isnan(batch_loss), 'Model diverged with loss = NaN'
total_loss += batch_loss
print('after epoch:', j+1)
print('W:', np.mean(sess.run(tf.contrib.framework.get_variables_by_name('W'))[0], axis=1))
avg_loss = total_loss / num_tokens
print('average loss:', avg_loss)
perplexity = np.exp(avg_loss) # TensorFlow's cross entropy is calculated with natural log
print('perplexity:', perplexity)
print('time:', time.time() - start)
if np.abs(1.0 - perplexity / old_PP) < tol:
print('model converged')
break
old_PP = perplexity | 39.80315 | 150 | 0.604154 | 0 | 0 | 126 | 0.024926 | 0 | 0 | 0 | 0 | 1,042 | 0.206133 |
dc8fea87cacc8f38ab0d4cc77ece002769227f5f | 1,840 | py | Python | scripts/boot-sequence.py | kaynarov/commun.contracts | 6e1482acd28cd2b33fc93ee5d6c88c7038bb0d44 | [
"MIT"
] | null | null | null | scripts/boot-sequence.py | kaynarov/commun.contracts | 6e1482acd28cd2b33fc93ee5d6c88c7038bb0d44 | [
"MIT"
] | null | null | null | scripts/boot-sequence.py | kaynarov/commun.contracts | 6e1482acd28cd2b33fc93ee5d6c88c7038bb0d44 | [
"MIT"
] | 1 | 2020-10-25T13:58:12.000Z | 2020-10-25T13:58:12.000Z | #!/usr/bin/env python3
import os
import sys
import subprocess
default_contracts_dir = '/opt/cyberway/bin/data-dir/contracts/'
nodeos_url = os.environ.get('CYBERWAY_URL', 'http://nodeosd:8888')
os.environ['CYBERWAY_URL'] = nodeos_url
os.environ['CLEOS'] = '/opt/cyberway/bin/cleos'
args = {
'basedir': os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
'cleos':'/opt/cyberway/bin/cleos --url=%s ' % nodeos_url,
'public_key':'GLS6MRyAjQq8ud7hVNYcfnVPJqcVpscN5So8BhtHuGYqET5GDW5CV',
'private_key':'5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3',
'creator_auth':'c.com@c.com',
'creator_key':'5JdhhMMJdb1KEyCatAynRLruxVvi7mWPywiSjpLYqKqgsT4qjsN',
'commun_contracts_dir': os.environ.get('COMMUN_CONTRACTS', default_contracts_dir),
}
commun_boot_sequence=('{basedir}/scripts/commun-boot-sequence.py '
'--contracts-dir "{commun_contracts_dir}" '
'--private-key {private_key} ').format(**args)
if subprocess.call(commun_boot_sequence, shell=True):
print('commun-boot-sequence.py exited with error')
sys.exit(1)
community_params = {
'community_name': 'cats',
'maximum_supply': '1000000000.000 CATS',
'reserve_amount': '1000000.0000 CMN',
'cw': 3333,
'fee': 100,
}
community_args = ''
for (key, value) in community_params.items():
community_args += ' --{arg} "{value}"'.format(arg=key.replace('_', '-'), value=value)
community_boot_sequence=('{basedir}/scripts/community-boot-sequence.py '
'--creator-auth {creator_auth} '
'--creator-key {creator_key} '
+ community_args).format(**args)
print(community_boot_sequence)
if subprocess.call(community_boot_sequence, shell=True):
print('community-boot-sequence.py exited with error')
sys.exit(1)
sys.exit(0)
| 36.8 | 89 | 0.686957 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 904 | 0.491304 |
dc90b4ca7f2137cdbc5874453b2ec51c3e2f8b39 | 500 | py | Python | ds-udacity/curso 2/tutorial/exercicio4/mapper_1/reduce.py | tassotirap/data-science | 644bc351740cda90c0d8c907132d9da9630266c9 | [
"Apache-2.0"
] | null | null | null | ds-udacity/curso 2/tutorial/exercicio4/mapper_1/reduce.py | tassotirap/data-science | 644bc351740cda90c0d8c907132d9da9630266c9 | [
"Apache-2.0"
] | null | null | null | ds-udacity/curso 2/tutorial/exercicio4/mapper_1/reduce.py | tassotirap/data-science | 644bc351740cda90c0d8c907132d9da9630266c9 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import sys
import csv
reader = csv.reader(sys.stdin, delimiter='\t')
writer = csv.writer(sys.stdout, delimiter='\t', quotechar='"', quoting=csv.QUOTE_ALL)
userInfo = None
for line in reader:
thisType = line[1]
if thisType == 'A':
userInfo = line
elif thisType == 'B':
if userInfo and userInfo[0] == line[0]:
new_line = []
new_line.extend(userInfo)
new_line.extend(line)
writer.writerow(new_line) | 21.73913 | 85 | 0.596 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 34 | 0.068 |
dc91ea563724becd2551ceddc127da30cb181d3c | 724 | py | Python | node/node.py | abudnik/prun | 643a6bf49249e220f08317b8a4739570faf7b2ae | [
"Apache-2.0"
] | 20 | 2015-05-14T19:44:01.000Z | 2018-04-14T15:25:08.000Z | node/node.py | abudnik/prun | 643a6bf49249e220f08317b8a4739570faf7b2ae | [
"Apache-2.0"
] | 11 | 2015-04-15T19:51:06.000Z | 2017-01-03T14:57:49.000Z | node/node.py | abudnik/prun | 643a6bf49249e220f08317b8a4739570faf7b2ae | [
"Apache-2.0"
] | 7 | 2015-05-08T12:44:38.000Z | 2021-12-10T18:00:01.000Z | import sys
import os
NODE_SCRIPT_EXEC_FAILED = -5
errCode = 0
try:
readFifo = sys.argv[2]
scriptLen = int(sys.argv[3])
taskId = int(sys.argv[4])
numTasks = int(sys.argv[5])
jobId = sys.argv[6]
fifo = os.open(readFifo, os.O_RDONLY)
bytes = bytearray()
while len(bytes) < scriptLen:
bytes += os.read(fifo, scriptLen)
s = bytes.decode("utf-8")
exec(s, {"taskId": taskId, "numTasks": numTasks, "jobId": jobId})
except Exception as e:
errCode = NODE_SCRIPT_EXEC_FAILED
print(e)
try:
writeFifo = sys.argv[1]
fifo = os.open(writeFifo, os.O_WRONLY | os.O_NONBLOCK)
os.write(fifo, str(errCode).encode())
os.close(fifo)
except Exception as e:
print(e)
| 21.939394 | 69 | 0.63674 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 32 | 0.044199 |
dc92728440353e2f2240052acb296267123bd464 | 21,328 | py | Python | dist/Basilisk/fswAlgorithms/rwMotorVoltage/rwMotorVoltage.py | ian-cooke/basilisk_mag | a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14 | [
"0BSD"
] | null | null | null | dist/Basilisk/fswAlgorithms/rwMotorVoltage/rwMotorVoltage.py | ian-cooke/basilisk_mag | a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14 | [
"0BSD"
] | 1 | 2019-03-13T20:52:22.000Z | 2019-03-13T20:52:22.000Z | dist/Basilisk/fswAlgorithms/rwMotorVoltage/rwMotorVoltage.py | ian-cooke/basilisk_mag | a8b1e37c31c1287549d6fd4d71fcaa35b6fc3f14 | [
"0BSD"
] | null | null | null | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
# Locate and load the compiled SWIG extension module `_rwMotorVoltage`.
# The import strategy depends on the running Python version:
#   * Python >= 2.7: use importlib, first trying the package-relative name
#     (e.g. "Basilisk.fswAlgorithms.rwMotorVoltage._rwMotorVoltage"), then
#     falling back to a top-level "_rwMotorVoltage" import.
#   * Python 2.6: use the legacy `imp` module to find/load the shared
#     library sitting next to this file.
#   * Older: plain `import` and hope it is on sys.path.
if _swig_python_version_info >= (2, 7, 0):
    def swig_import_helper():
        import importlib
        # Name of the package this module lives in ('' when top-level).
        pkg = __name__.rpartition('.')[0]
        # lstrip('.') turns ".\_rwMotorVoltage" into "_rwMotorVoltage"
        # when there is no enclosing package.
        mname = '.'.join((pkg, '_rwMotorVoltage')).lstrip('.')
        try:
            return importlib.import_module(mname)
        except ImportError:
            # Fall back to an absolute import of the extension.
            return importlib.import_module('_rwMotorVoltage')
    _rwMotorVoltage = swig_import_helper()
    del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            # Look for the binary module in the same directory as this file.
            fp, pathname, description = imp.find_module('_rwMotorVoltage', [dirname(__file__)])
        except ImportError:
            # Not found next to the wrapper; try a normal import instead.
            import _rwMotorVoltage
            return _rwMotorVoltage
        try:
            _mod = imp.load_module('_rwMotorVoltage', fp, pathname, description)
        finally:
            # find_module opened a file handle; always close it.
            if fp is not None:
                fp.close()
        return _mod
    _rwMotorVoltage = swig_import_helper()
    del swig_import_helper
else:
    import _rwMotorVoltage
# The version tuple is only needed during import selection above.
del _swig_python_version_info
# Alias `property` for use by the generated proxy classes below.
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
# Make the builtins module available under the Python-2 name
# `__builtin__` so the rest of this generated code works on both
# Python 2 and Python 3.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old-/new-style class shim: on modern Pythons proxy classes derive from
# `object` (_newclass = 1); on ancient interpreters where `object` does
# not exist, fall back to a classic class and record that via
# _newclass = 0 (checked in _swig_setattr_nondynamic above).
try:
    _object = object
    _newclass = 1
except __builtin__.Exception:
    class _object:
        pass
    _newclass = 0
# SWIG carrays helpers for raw C ``double`` arrays.  Note that each
# wrapper def below is immediately shadowed by a direct binding to the
# C function of the same name, so Python callers hit the C function.
def new_doubleArray(nelements):
    """Allocate a C ``double`` array of *nelements* entries; returns a pointer."""
    return _rwMotorVoltage.new_doubleArray(nelements)
new_doubleArray = _rwMotorVoltage.new_doubleArray
def delete_doubleArray(ary):
    """Free a C ``double`` array created by new_doubleArray."""
    return _rwMotorVoltage.delete_doubleArray(ary)
delete_doubleArray = _rwMotorVoltage.delete_doubleArray
def doubleArray_getitem(ary, index):
    """Return ``ary[index]`` from a C ``double`` array."""
    return _rwMotorVoltage.doubleArray_getitem(ary, index)
doubleArray_getitem = _rwMotorVoltage.doubleArray_getitem
def doubleArray_setitem(ary, index, value):
    """Set ``ary[index] = value`` in a C ``double`` array."""
    return _rwMotorVoltage.doubleArray_setitem(ary, index, value)
doubleArray_setitem = _rwMotorVoltage.doubleArray_setitem
# SWIG carrays helpers for raw C ``long`` arrays (see the doubleArray
# block above: each def is immediately shadowed by the C binding).
def new_longArray(nelements):
    """Allocate a C ``long`` array of *nelements* entries; returns a pointer."""
    return _rwMotorVoltage.new_longArray(nelements)
new_longArray = _rwMotorVoltage.new_longArray
def delete_longArray(ary):
    """Free a C ``long`` array created by new_longArray."""
    return _rwMotorVoltage.delete_longArray(ary)
delete_longArray = _rwMotorVoltage.delete_longArray
def longArray_getitem(ary, index):
    """Return ``ary[index]`` from a C ``long`` array."""
    return _rwMotorVoltage.longArray_getitem(ary, index)
longArray_getitem = _rwMotorVoltage.longArray_getitem
def longArray_setitem(ary, index, value):
    """Set ``ary[index] = value`` in a C ``long`` array."""
    return _rwMotorVoltage.longArray_setitem(ary, index, value)
longArray_setitem = _rwMotorVoltage.longArray_setitem
# SWIG carrays helpers for raw C ``int`` arrays (each def is immediately
# shadowed by the direct C binding of the same name).
def new_intArray(nelements):
    """Allocate a C ``int`` array of *nelements* entries; returns a pointer."""
    return _rwMotorVoltage.new_intArray(nelements)
new_intArray = _rwMotorVoltage.new_intArray
def delete_intArray(ary):
    """Free a C ``int`` array created by new_intArray."""
    return _rwMotorVoltage.delete_intArray(ary)
delete_intArray = _rwMotorVoltage.delete_intArray
def intArray_getitem(ary, index):
    """Return ``ary[index]`` from a C ``int`` array."""
    return _rwMotorVoltage.intArray_getitem(ary, index)
intArray_getitem = _rwMotorVoltage.intArray_getitem
def intArray_setitem(ary, index, value):
    """Set ``ary[index] = value`` in a C ``int`` array."""
    return _rwMotorVoltage.intArray_setitem(ary, index, value)
intArray_setitem = _rwMotorVoltage.intArray_setitem
# SWIG carrays helpers for raw C ``short`` arrays (each def is
# immediately shadowed by the direct C binding of the same name).
def new_shortArray(nelements):
    """Allocate a C ``short`` array of *nelements* entries; returns a pointer."""
    return _rwMotorVoltage.new_shortArray(nelements)
new_shortArray = _rwMotorVoltage.new_shortArray
def delete_shortArray(ary):
    """Free a C ``short`` array created by new_shortArray."""
    return _rwMotorVoltage.delete_shortArray(ary)
delete_shortArray = _rwMotorVoltage.delete_shortArray
def shortArray_getitem(ary, index):
    """Return ``ary[index]`` from a C ``short`` array."""
    return _rwMotorVoltage.shortArray_getitem(ary, index)
shortArray_getitem = _rwMotorVoltage.shortArray_getitem
def shortArray_setitem(ary, index, value):
    """Set ``ary[index] = value`` in a C ``short`` array."""
    return _rwMotorVoltage.shortArray_setitem(ary, index, value)
shortArray_setitem = _rwMotorVoltage.shortArray_setitem
def getStructSize(self):
    """Return sizeof() of the C struct behind this SWIG proxy object.

    Derives the struct name from repr(self) (e.g. "<module.Type; proxy ...>"
    yields "Type") and evaluates the module-level ``sizeof_Type`` constant
    that the SWIG GEN_SIZEOF macro generates.

    Raises:
        NameError: if the ``sizeof_<Type>`` macro was never generated,
            with a message explaining how to generate it.
    """
    try:
        return eval('sizeof_' + repr(self).split(';')[0].split('.')[-1])
    except (NameError) as e:
        typeString = 'sizeof_' + repr(self).split(';')[0].split('.')[-1]
        # Bug fix: Python 3 exceptions have no ``.message`` attribute, so
        # the original ``e.message`` raised AttributeError and hid this
        # help text; use str(e) instead.
        raise NameError(str(e) + '\nYou tried to get this size macro: ' + typeString +
                        '\n It appears to be undefined. \nYou need to run the SWIG GEN_SIZEOF' +
                        ' SWIG macro against the class/struct in your SWIG file if you want to ' +
                        ' make this call.\n')
def protectSetAttr(self, name, value):
    """Guarded __setattr__ installed on generated classes by protectAllClasses.

    Only attributes that already exist on the object (or the special SWIG
    pointer slot "this") may be assigned; anything else raises ValueError
    so typos do not silently create new attributes.
    """
    if not (hasattr(self, name) or name == 'this'):
        raise ValueError('You tried to add this variable: ' + name + '\n' +
                         'To this class: ' + str(self))
    object.__setattr__(self, name, value)
def protectAllClasses(moduleType):
    """Install attribute protection and getStructSize on every class in
    this module.

    For each class found in the current module, rebinds ``__setattr__``
    to :func:`protectSetAttr` (rejecting unknown attribute names) and
    attaches :func:`getStructSize`.  Classes that do not accept the
    rebinding (e.g. builtin types) are skipped.

    Args:
        moduleType: unused; kept for backward compatibility with callers.
    """
    import inspect
    # Bug fix: this file only does ``from sys import version_info`` (and
    # deletes it), so ``sys`` was never bound at module scope and
    # ``sys.modules`` below raised NameError; import it locally.
    import sys
    clsmembers = inspect.getmembers(sys.modules[__name__], inspect.isclass)
    for member in clsmembers:
        try:
            exec(str(member[0]) + '.__setattr__ = protectSetAttr')
            exec(str(member[0]) + '.getStructSize = getStructSize')
        except (AttributeError, TypeError):
            # Builtin / extension classes may not allow rebinding; skip.
            pass
# Re-export the flat C entry points of the rwMotorVoltage FSW algorithm
# (the Self-Init/Cross-Init/Update/Reset quartet SWIG generated from the
# C module) so they are callable directly from this Python module.
Update_rwMotorVoltage = _rwMotorVoltage.Update_rwMotorVoltage
SelfInit_rwMotorVoltage = _rwMotorVoltage.SelfInit_rwMotorVoltage
CrossInit_rwMotorVoltage = _rwMotorVoltage.CrossInit_rwMotorVoltage
Reset_rwMotorVoltage = _rwMotorVoltage.Reset_rwMotorVoltage
# sizeof_* constants generated by the GEN_SIZEOF macro; consumed by
# getStructSize() above to report C struct sizes for message handling.
sizeof_rwMotorVoltageConfig = _rwMotorVoltage.sizeof_rwMotorVoltageConfig
sizeof_RWArrayTorqueIntMsg = _rwMotorVoltage.sizeof_RWArrayTorqueIntMsg
sizeof_RWAvailabilityFswMsg = _rwMotorVoltage.sizeof_RWAvailabilityFswMsg
sizeof_RWSpeedIntMsg = _rwMotorVoltage.sizeof_RWSpeedIntMsg
sizeof_RWArrayConfigFswMsg = _rwMotorVoltage.sizeof_RWArrayConfigFswMsg
class rwMotorVoltageConfig(_object):
    """SWIG proxy for the C ``rwMotorVoltageConfig`` struct.

    Exposes the module configuration fields (voltage limits, gain,
    message names/IDs and working state) as properties backed by the
    generated C accessor functions.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, rwMotorVoltageConfig, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, rwMotorVoltageConfig, name)
    __repr__ = _swig_repr
    __swig_setmethods__["VMin"] = _rwMotorVoltage.rwMotorVoltageConfig_VMin_set
    __swig_getmethods__["VMin"] = _rwMotorVoltage.rwMotorVoltageConfig_VMin_get
    if _newclass:
        VMin = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_VMin_get, _rwMotorVoltage.rwMotorVoltageConfig_VMin_set)
    __swig_setmethods__["VMax"] = _rwMotorVoltage.rwMotorVoltageConfig_VMax_set
    __swig_getmethods__["VMax"] = _rwMotorVoltage.rwMotorVoltageConfig_VMax_get
    if _newclass:
        VMax = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_VMax_get, _rwMotorVoltage.rwMotorVoltageConfig_VMax_set)
    __swig_setmethods__["K"] = _rwMotorVoltage.rwMotorVoltageConfig_K_set
    __swig_getmethods__["K"] = _rwMotorVoltage.rwMotorVoltageConfig_K_get
    if _newclass:
        K = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_K_get, _rwMotorVoltage.rwMotorVoltageConfig_K_set)
    # internal working state carried between update calls
    __swig_setmethods__["rwSpeedOld"] = _rwMotorVoltage.rwMotorVoltageConfig_rwSpeedOld_set
    __swig_getmethods__["rwSpeedOld"] = _rwMotorVoltage.rwMotorVoltageConfig_rwSpeedOld_get
    if _newclass:
        rwSpeedOld = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_rwSpeedOld_get, _rwMotorVoltage.rwMotorVoltageConfig_rwSpeedOld_set)
    __swig_setmethods__["priorTime"] = _rwMotorVoltage.rwMotorVoltageConfig_priorTime_set
    __swig_getmethods__["priorTime"] = _rwMotorVoltage.rwMotorVoltageConfig_priorTime_get
    if _newclass:
        priorTime = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_priorTime_get, _rwMotorVoltage.rwMotorVoltageConfig_priorTime_set)
    __swig_setmethods__["resetFlag"] = _rwMotorVoltage.rwMotorVoltageConfig_resetFlag_set
    __swig_getmethods__["resetFlag"] = _rwMotorVoltage.rwMotorVoltageConfig_resetFlag_get
    if _newclass:
        resetFlag = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_resetFlag_get, _rwMotorVoltage.rwMotorVoltageConfig_resetFlag_set)
    # message interface: output voltage message plus the input messages
    # (torque command, RW parameters, RW speeds, RW availability)
    __swig_setmethods__["voltageOutMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgName_set
    __swig_getmethods__["voltageOutMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgName_get
    if _newclass:
        voltageOutMsgName = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgName_get, _rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgName_set)
    __swig_setmethods__["voltageOutMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgID_set
    __swig_getmethods__["voltageOutMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgID_get
    if _newclass:
        voltageOutMsgID = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgID_get, _rwMotorVoltage.rwMotorVoltageConfig_voltageOutMsgID_set)
    __swig_setmethods__["torqueInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgName_set
    __swig_getmethods__["torqueInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgName_get
    if _newclass:
        torqueInMsgName = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgName_get, _rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgName_set)
    __swig_setmethods__["torqueInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgID_set
    __swig_getmethods__["torqueInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgID_get
    if _newclass:
        torqueInMsgID = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgID_get, _rwMotorVoltage.rwMotorVoltageConfig_torqueInMsgID_set)
    __swig_setmethods__["rwParamsInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgName_set
    __swig_getmethods__["rwParamsInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgName_get
    if _newclass:
        rwParamsInMsgName = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgName_get, _rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgName_set)
    __swig_setmethods__["rwParamsInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgID_set
    __swig_getmethods__["rwParamsInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgID_get
    if _newclass:
        rwParamsInMsgID = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgID_get, _rwMotorVoltage.rwMotorVoltageConfig_rwParamsInMsgID_set)
    __swig_setmethods__["inputRWSpeedsInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgName_set
    __swig_getmethods__["inputRWSpeedsInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgName_get
    if _newclass:
        inputRWSpeedsInMsgName = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgName_get, _rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgName_set)
    __swig_setmethods__["inputRWSpeedsInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgID_set
    __swig_getmethods__["inputRWSpeedsInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgID_get
    if _newclass:
        inputRWSpeedsInMsgID = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgID_get, _rwMotorVoltage.rwMotorVoltageConfig_inputRWSpeedsInMsgID_set)
    __swig_setmethods__["rwAvailInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgName_set
    __swig_getmethods__["rwAvailInMsgName"] = _rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgName_get
    if _newclass:
        rwAvailInMsgName = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgName_get, _rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgName_set)
    __swig_setmethods__["rwAvailInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgID_set
    __swig_getmethods__["rwAvailInMsgID"] = _rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgID_get
    if _newclass:
        rwAvailInMsgID = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgID_get, _rwMotorVoltage.rwMotorVoltageConfig_rwAvailInMsgID_set)
    __swig_setmethods__["rwConfigParams"] = _rwMotorVoltage.rwMotorVoltageConfig_rwConfigParams_set
    __swig_getmethods__["rwConfigParams"] = _rwMotorVoltage.rwMotorVoltageConfig_rwConfigParams_get
    if _newclass:
        rwConfigParams = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_rwConfigParams_get, _rwMotorVoltage.rwMotorVoltageConfig_rwConfigParams_set)
    __swig_setmethods__["voltageOut"] = _rwMotorVoltage.rwMotorVoltageConfig_voltageOut_set
    __swig_getmethods__["voltageOut"] = _rwMotorVoltage.rwMotorVoltageConfig_voltageOut_get
    if _newclass:
        voltageOut = _swig_property(_rwMotorVoltage.rwMotorVoltageConfig_voltageOut_get, _rwMotorVoltage.rwMotorVoltageConfig_voltageOut_set)

    def __init__(self):
        # allocate the backing C struct; SWIG proxies track it via self.this
        this = _rwMotorVoltage.new_rwMotorVoltageConfig()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _rwMotorVoltage.delete_rwMotorVoltageConfig
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime.
rwMotorVoltageConfig_swigregister = _rwMotorVoltage.rwMotorVoltageConfig_swigregister
rwMotorVoltageConfig_swigregister(rwMotorVoltageConfig)
class RWSpeedIntMsg(_object):
    """SWIG proxy for the C ``RWSpeedIntMsg`` struct
    (wheelSpeeds / wheelThetas arrays)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, RWSpeedIntMsg, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, RWSpeedIntMsg, name)
    __repr__ = _swig_repr
    __swig_setmethods__["wheelSpeeds"] = _rwMotorVoltage.RWSpeedIntMsg_wheelSpeeds_set
    __swig_getmethods__["wheelSpeeds"] = _rwMotorVoltage.RWSpeedIntMsg_wheelSpeeds_get
    if _newclass:
        wheelSpeeds = _swig_property(_rwMotorVoltage.RWSpeedIntMsg_wheelSpeeds_get, _rwMotorVoltage.RWSpeedIntMsg_wheelSpeeds_set)
    __swig_setmethods__["wheelThetas"] = _rwMotorVoltage.RWSpeedIntMsg_wheelThetas_set
    __swig_getmethods__["wheelThetas"] = _rwMotorVoltage.RWSpeedIntMsg_wheelThetas_get
    if _newclass:
        wheelThetas = _swig_property(_rwMotorVoltage.RWSpeedIntMsg_wheelThetas_get, _rwMotorVoltage.RWSpeedIntMsg_wheelThetas_set)

    def __init__(self):
        # allocate the backing C struct; SWIG proxies track it via self.this
        this = _rwMotorVoltage.new_RWSpeedIntMsg()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _rwMotorVoltage.delete_RWSpeedIntMsg
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime.
RWSpeedIntMsg_swigregister = _rwMotorVoltage.RWSpeedIntMsg_swigregister
RWSpeedIntMsg_swigregister(RWSpeedIntMsg)
# Sizing constants and unit conversion factors re-exported from the C headers.
MAX_EFF_CNT = _rwMotorVoltage.MAX_EFF_CNT
MAX_NUM_CSS_SENSORS = _rwMotorVoltage.MAX_NUM_CSS_SENSORS
MAX_ST_VEH_COUNT = _rwMotorVoltage.MAX_ST_VEH_COUNT
NANO2SEC = _rwMotorVoltage.NANO2SEC
class RWArrayTorqueIntMsg(_object):
    """SWIG proxy for the C ``RWArrayTorqueIntMsg`` struct (motorTorque array)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, RWArrayTorqueIntMsg, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, RWArrayTorqueIntMsg, name)
    __repr__ = _swig_repr
    __swig_setmethods__["motorTorque"] = _rwMotorVoltage.RWArrayTorqueIntMsg_motorTorque_set
    __swig_getmethods__["motorTorque"] = _rwMotorVoltage.RWArrayTorqueIntMsg_motorTorque_get
    if _newclass:
        motorTorque = _swig_property(_rwMotorVoltage.RWArrayTorqueIntMsg_motorTorque_get, _rwMotorVoltage.RWArrayTorqueIntMsg_motorTorque_set)

    def __init__(self):
        # allocate the backing C struct; SWIG proxies track it via self.this
        this = _rwMotorVoltage.new_RWArrayTorqueIntMsg()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _rwMotorVoltage.delete_RWArrayTorqueIntMsg
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime.
RWArrayTorqueIntMsg_swigregister = _rwMotorVoltage.RWArrayTorqueIntMsg_swigregister
RWArrayTorqueIntMsg_swigregister(RWArrayTorqueIntMsg)
class RWArrayVoltageIntMsg(_object):
    """SWIG proxy for the C ``RWArrayVoltageIntMsg`` struct (voltage array)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, RWArrayVoltageIntMsg, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, RWArrayVoltageIntMsg, name)
    __repr__ = _swig_repr
    __swig_setmethods__["voltage"] = _rwMotorVoltage.RWArrayVoltageIntMsg_voltage_set
    __swig_getmethods__["voltage"] = _rwMotorVoltage.RWArrayVoltageIntMsg_voltage_get
    if _newclass:
        voltage = _swig_property(_rwMotorVoltage.RWArrayVoltageIntMsg_voltage_get, _rwMotorVoltage.RWArrayVoltageIntMsg_voltage_set)

    def __init__(self):
        # allocate the backing C struct; SWIG proxies track it via self.this
        this = _rwMotorVoltage.new_RWArrayVoltageIntMsg()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _rwMotorVoltage.delete_RWArrayVoltageIntMsg
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime.
RWArrayVoltageIntMsg_swigregister = _rwMotorVoltage.RWArrayVoltageIntMsg_swigregister
RWArrayVoltageIntMsg_swigregister(RWArrayVoltageIntMsg)
class RWAvailabilityFswMsg(_object):
    """SWIG proxy for the C ``RWAvailabilityFswMsg`` struct
    (wheelAvailability array; see AVAILABLE/UNAVAILABLE below)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, RWAvailabilityFswMsg, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, RWAvailabilityFswMsg, name)
    __repr__ = _swig_repr
    __swig_setmethods__["wheelAvailability"] = _rwMotorVoltage.RWAvailabilityFswMsg_wheelAvailability_set
    __swig_getmethods__["wheelAvailability"] = _rwMotorVoltage.RWAvailabilityFswMsg_wheelAvailability_get
    if _newclass:
        wheelAvailability = _swig_property(_rwMotorVoltage.RWAvailabilityFswMsg_wheelAvailability_get, _rwMotorVoltage.RWAvailabilityFswMsg_wheelAvailability_set)

    def __init__(self):
        # allocate the backing C struct; SWIG proxies track it via self.this
        this = _rwMotorVoltage.new_RWAvailabilityFswMsg()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _rwMotorVoltage.delete_RWAvailabilityFswMsg
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime.
RWAvailabilityFswMsg_swigregister = _rwMotorVoltage.RWAvailabilityFswMsg_swigregister
RWAvailabilityFswMsg_swigregister(RWAvailabilityFswMsg)
# Boolean and reaction-wheel availability enum values from the C headers.
BOOL_FALSE = _rwMotorVoltage.BOOL_FALSE
BOOL_TRUE = _rwMotorVoltage.BOOL_TRUE
AVAILABLE = _rwMotorVoltage.AVAILABLE
UNAVAILABLE = _rwMotorVoltage.UNAVAILABLE
class RWArrayConfigFswMsg(_object):
    """SWIG proxy for the C ``RWArrayConfigFswMsg`` struct
    (GsMatrix_B, JsList, numRW, uMax fields)."""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, RWArrayConfigFswMsg, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, RWArrayConfigFswMsg, name)
    __repr__ = _swig_repr
    __swig_setmethods__["GsMatrix_B"] = _rwMotorVoltage.RWArrayConfigFswMsg_GsMatrix_B_set
    __swig_getmethods__["GsMatrix_B"] = _rwMotorVoltage.RWArrayConfigFswMsg_GsMatrix_B_get
    if _newclass:
        GsMatrix_B = _swig_property(_rwMotorVoltage.RWArrayConfigFswMsg_GsMatrix_B_get, _rwMotorVoltage.RWArrayConfigFswMsg_GsMatrix_B_set)
    __swig_setmethods__["JsList"] = _rwMotorVoltage.RWArrayConfigFswMsg_JsList_set
    __swig_getmethods__["JsList"] = _rwMotorVoltage.RWArrayConfigFswMsg_JsList_get
    if _newclass:
        JsList = _swig_property(_rwMotorVoltage.RWArrayConfigFswMsg_JsList_get, _rwMotorVoltage.RWArrayConfigFswMsg_JsList_set)
    __swig_setmethods__["numRW"] = _rwMotorVoltage.RWArrayConfigFswMsg_numRW_set
    __swig_getmethods__["numRW"] = _rwMotorVoltage.RWArrayConfigFswMsg_numRW_get
    if _newclass:
        numRW = _swig_property(_rwMotorVoltage.RWArrayConfigFswMsg_numRW_get, _rwMotorVoltage.RWArrayConfigFswMsg_numRW_set)
    __swig_setmethods__["uMax"] = _rwMotorVoltage.RWArrayConfigFswMsg_uMax_set
    __swig_getmethods__["uMax"] = _rwMotorVoltage.RWArrayConfigFswMsg_uMax_get
    if _newclass:
        uMax = _swig_property(_rwMotorVoltage.RWArrayConfigFswMsg_uMax_get, _rwMotorVoltage.RWArrayConfigFswMsg_uMax_set)

    def __init__(self):
        # allocate the backing C struct; SWIG proxies track it via self.this
        this = _rwMotorVoltage.new_RWArrayConfigFswMsg()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _rwMotorVoltage.delete_RWArrayConfigFswMsg
    __del__ = lambda self: None
# Register the proxy class with the SWIG runtime.
RWArrayConfigFswMsg_swigregister = _rwMotorVoltage.RWArrayConfigFswMsg_swigregister
RWArrayConfigFswMsg_swigregister(RWArrayConfigFswMsg)
import sys
# Install protectSetAttr/getStructSize on every class defined above.
protectAllClasses(sys.modules[__name__])
# This file is compatible with both classic and new-style classes.
| 49.256351 | 177 | 0.790463 | 13,059 | 0.612294 | 0 | 0 | 0 | 0 | 0 | 0 | 1,590 | 0.07455 |
dc94642e6f6a93fa65fb69c2f410be31e9ee122c | 86 | py | Python | prowl/__main__.py | SoorajModi/PrOwl | b58845d9763d5db80c021adf42fb7e9e55432375 | [
"MIT"
] | null | null | null | prowl/__main__.py | SoorajModi/PrOwl | b58845d9763d5db80c021adf42fb7e9e55432375 | [
"MIT"
] | 11 | 2021-05-19T05:03:15.000Z | 2021-09-08T00:53:01.000Z | prowl/__main__.py | SoorajModi/PyOwl | b58845d9763d5db80c021adf42fb7e9e55432375 | [
"MIT"
] | null | null | null | """Begin PrOwl
"""
from .prowl import watch
# Entry point when the package is executed as `python -m prowl`.
if __name__ == '__main__':
    watch()
| 9.555556 | 26 | 0.616279 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.325581 |
dc9494ac835fbdd17583721bdefffc09c468c9f3 | 248 | py | Python | app/main/__init__.py | sundayliu/flask-tutorial | 8621441b429020ea884b4bf090efa8dd15133af8 | [
"MIT"
] | null | null | null | app/main/__init__.py | sundayliu/flask-tutorial | 8621441b429020ea884b4bf090efa8dd15133af8 | [
"MIT"
] | null | null | null | app/main/__init__.py | sundayliu/flask-tutorial | 8621441b429020ea884b4bf090efa8dd15133af8 | [
"MIT"
] | null | null | null | # -*- coding:utf-8 -*-
from flask import Blueprint
main = Blueprint('main',__name__)
from . import views,errors
from ..models import Permission
@main.app_context_processor
def inject_permissions():
return dict(Permission=Permission) | 20.666667 | 35 | 0.733871 | 0 | 0 | 0 | 0 | 91 | 0.366935 | 0 | 0 | 29 | 0.116935 |
dc975d58db2911f7f63cb23ede835ba47e879bc3 | 954 | py | Python | setup.py | sgykfjsm/flask-logging-decorator | 21552202b0abecda15bb806c0053e568f30db6cd | [
"MIT"
] | 5 | 2018-12-07T01:51:48.000Z | 2019-10-25T15:08:20.000Z | setup.py | sgykfjsm/flask-logging-decorator | 21552202b0abecda15bb806c0053e568f30db6cd | [
"MIT"
] | null | null | null | setup.py | sgykfjsm/flask-logging-decorator | 21552202b0abecda15bb806c0053e568f30db6cd | [
"MIT"
] | 1 | 2020-02-19T22:19:43.000Z | 2020-02-19T22:19:43.000Z | #!/usr/bin/env python
from setuptools import setup
from os.path import abspath, dirname, join
from codecs import open

here = abspath(dirname(__file__))

# Use the README next to this file as the PyPI long description.
long_description = ''
with open(join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='flask_logging_decorator',
    version='0.0.5',
    description='Simple logging decorator for Flask.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/sgykfjsm/flask-logging-decorator',
    author='Shigeyuki Fujishima',
    author_email='shigeyuki.fujishima@gmail.com',
    # Python 3.5+ only; all 2.x and early 3.x releases excluded.
    python_requires=">=3.5, !=2.*.*, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
    keywords='flask logging decorator',
    py_modules=('flask-logging-decorator',),
    packages=['flask_logging_decorator']
)
| 31.8 | 73 | 0.674004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 436 | 0.457023 |
dc994acdfddbe96baef204ffa8e2df2f54505606 | 656 | py | Python | contracts/doe_token_contract.py | pettitpeon/doe-nft-contract | 0b1da9323df4c2a045d7979f7fe6f38db5d5406b | [
"MIT"
] | null | null | null | contracts/doe_token_contract.py | pettitpeon/doe-nft-contract | 0b1da9323df4c2a045d7979f7fe6f38db5d5406b | [
"MIT"
] | null | null | null | contracts/doe_token_contract.py | pettitpeon/doe-nft-contract | 0b1da9323df4c2a045d7979f7fe6f38db5d5406b | [
"MIT"
] | null | null | null | from web3 import Web3
import contracts.doe_token_abi as doe_token_abi
def get_main_balance(w3, wallet):
    """Return the wallet's balance for the token contract at
    0xf8E9F10c..., converted from wei to ether units."""
    token_address = "0xf8E9F10c22840b613cdA05A0c5Fdb59A4d6cd7eF"
    token = w3.eth.contract(address=token_address, abi=doe_token_abi.get_abi())
    raw_balance = token.functions.balanceOf(wallet).call()
    return Web3.fromWei(raw_balance, 'ether')
def get_arb_balance(w3, wallet):
    """Return the wallet's balance for the token contract at
    0xE71Db7a9..., converted from wei to ether units."""
    token_address = "0xE71Db7a96daB25cDb9f4cbC7F686da02192B0E88"
    token = w3.eth.contract(address=token_address, abi=doe_token_abi.get_abi())
    raw_balance = token.functions.balanceOf(wallet).call()
    return Web3.fromWei(raw_balance, 'ether')
| 41 | 85 | 0.786585 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 102 | 0.155488 |
dc9b43d49a1297a0cd78d268ff6c9669c5fb1610 | 188 | py | Python | bin/analytic/catalog/_mypath.py | DarkEnergyScienceCollaboration/chroma | 64fc123a065334b307654f29b3bea52885b46ec8 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 3 | 2015-10-22T14:57:27.000Z | 2016-08-25T08:16:02.000Z | bin/publish/highlight_key_figure/_mypath.py | DarkEnergyScienceCollaboration/chroma | 64fc123a065334b307654f29b3bea52885b46ec8 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2017-08-28T14:42:46.000Z | 2017-08-28T16:08:37.000Z | bin/publish/highlight_key_figure/_mypath.py | DarkEnergyScienceCollaboration/chroma | 64fc123a065334b307654f29b3bea52885b46ec8 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | import os, sys
# Make the directory three levels above this file importable so the
# script can be run standalone without installing the package.
thisdir = os.path.dirname(os.path.abspath(__file__))
libdir = os.path.abspath(os.path.join(thisdir, '../../../'))
if libdir not in sys.path:
    sys.path.insert(0, libdir)
| 26.857143 | 60 | 0.680851 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 | 0.058511 |
dc9cb4ba052280cdcbdb81eb3839f173ead5100a | 402 | py | Python | Code_Challenges/array_replace.py | mvkumar14/Practice | c3c7142da6098d950aac528d2def77ef65b3132d | [
"MIT"
] | null | null | null | Code_Challenges/array_replace.py | mvkumar14/Practice | c3c7142da6098d950aac528d2def77ef65b3132d | [
"MIT"
] | null | null | null | Code_Challenges/array_replace.py | mvkumar14/Practice | c3c7142da6098d950aac528d2def77ef65b3132d | [
"MIT"
] | null | null | null | # 072220
# CodeSignal
# https://app.codesignal.com/arcade/intro/level-6/mCkmbxdMsMTjBc3Bm/solutions
def array_replace(inputArray, elemToReplace, substitutionElem):
    """Replace, in place, every element of inputArray equal to
    elemToReplace with substitutionElem, and return the mutated list."""
    for position, current in enumerate(inputArray):
        if current == elemToReplace:
            inputArray[position] = substitutionElem
    return inputArray
| 28.714286 | 77 | 0.718905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 174 | 0.432836 |
dc9cf14d3bafd94c1a7db1cea9146e94b8b3873a | 1,574 | py | Python | sms.py | varunotelli/Gujarat | c5e757026feba3938162eeda21ebf6ff96ff3468 | [
"MIT"
] | 2 | 2019-08-21T15:35:00.000Z | 2021-04-30T14:58:04.000Z | sms.py | varunotelli/E-Seva | c5e757026feba3938162eeda21ebf6ff96ff3468 | [
"MIT"
] | 10 | 2018-03-25T20:32:05.000Z | 2018-04-03T06:33:03.000Z | sms.py | varunotelli/E-Seva | c5e757026feba3938162eeda21ebf6ff96ff3468 | [
"MIT"
] | null | null | null | import urllib.request, urllib.error, urllib.parse
import http.cookiejar
from getpass import getpass
import sys
def send(number,scheme):
    """Send an enrollment confirmation SMS to `number` via way2sms.com.

    Logs into the web service, extracts the session token from the
    cookie jar, posts the message, and returns True.
    """
    # NOTE(review): credentials are hard-coded and committed to source
    # control -- move them to configuration/environment variables.
    username="9791011603"
    passwd="D5222M"
    message="You have successfully been enrolled for "+scheme
    # Dead code preserved as a string literal: interactive credential entry.
    '''
    username = input("Enter Username: ")
    passwd = getpass()
    message = input("Enter Message: ")
    number = input("Enter Mobile number:")
    '''
    # crude URL encoding: spaces become '+'
    message = "+".join(message.split(' '))
    #Logging into the SMS Site
    url = 'http://site24.way2sms.com/Login1.action?'
    data = 'username='+username+'&password='+passwd+'&Submit=Sign+in'
    #For Cookies:
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
    # Adding Header detail:
    opener.addheaders = [('User-Agent','Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36')]
    try:
        usock = opener.open(url, data.encode('utf-8'))
    except IOError:
        # best-effort: a login failure is only reported, execution continues
        print("Error while logging in.")
        #sys.exit(1)
    # Extract the session id from the cookie jar's string form.
    # NOTE(review): parsing str(cj) is fragile -- presumably the cookie
    # value contains '~'; verify against the live service.
    jession_id = str(cj).split('~')[1].split(' ')[0]
    send_sms_url = 'http://site24.way2sms.com/smstoss.action?'
    send_sms_data = 'ssaction=ss&Token='+jession_id+'&mobile='+number+'&message='+message+'&msgLen=136'
    opener.addheaders = [('Referer', 'http://site25.way2sms.com/sendSMS?Token='+jession_id)]
    try:
        sms_sent_page = opener.open(send_sms_url,send_sms_data.encode('utf-8'))
    except IOError:
        print("Error while sending message")
        #sys.exit(1)
    # reached even if sending failed above -- success is not verified
    print("SMS has been sent.")
    return True
#send("9791011603","hello")
#send("8870173154","piyu") | 30.269231 | 145 | 0.695044 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 793 | 0.503812 |
dc9cf905743fa7cb5e599cce0ef947cd502fe5fb | 1,169 | py | Python | tests/python-playground/tv_1d_0.py | marcocannici/scs | 799a4f7daed4294cd98c73df71676195e6c63de4 | [
"MIT"
] | 25 | 2017-06-30T15:31:33.000Z | 2021-04-21T20:12:18.000Z | tests/python-playground/tv_1d_0.py | marcocannici/scs | 799a4f7daed4294cd98c73df71676195e6c63de4 | [
"MIT"
] | 34 | 2017-06-07T01:18:17.000Z | 2021-04-24T09:44:00.000Z | tests/python-playground/tv_1d_0.py | marcocannici/scs | 799a4f7daed4294cd98c73df71676195e6c63de4 | [
"MIT"
] | 13 | 2017-06-07T01:16:09.000Z | 2021-06-07T09:12:56.000Z | # This is automatically-generated code.
# Uses the jinja2 library for templating.
import cvxpy as cp
import numpy as np
import scipy as sp

# setup
problemID = "tv_1d_0"
prob = None
opt_val = None

# Variable declarations
np.random.seed(0)
n = 100000
k = max(int(np.sqrt(n)/2), 1)
# start from a flat signal and add k random rectangular bumps
x0 = np.ones((n,1))
idxs = np.random.randint(0, n, (k,2))
idxs.sort()
for a, b in idxs:
    x0[a:b] += 10*(np.random.rand()-0.5)
# noisy observation of the piecewise signal
b = x0 + np.random.randn(n, 1)
lam = np.sqrt(n)

# Problem construction: least squares with 1-D total-variation penalty
x = cp.Variable(n)
f = 0.5*cp.sum_squares(x-b) + lam*cp.norm1(x[1:]-x[:-1])
prob = cp.Problem(cp.Minimize(f))

# Problem collection
# Single problem collection
problemDict = {
    "problemID" : problemID,
    "problem" : prob,
    "opt_val" : opt_val
}
problems = [problemDict]

# For debugging individual problems:
if __name__ == "__main__":
    def printResults(problemID = "", problem = None, opt_val = None):
        # solve and report status plus objective vs. known optimum
        print(problemID)
        problem.solve()
        print("\tstatus: {}".format(problem.status))
        print("\toptimal value: {}".format(problem.value))
        print("\ttrue optimal value: {}".format(opt_val))
    printResults(**problems[0])
| 17.712121 | 69 | 0.640719 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 326 | 0.278871 |
dc9cfa85420ac5293533d515f08d8e42f1aa97a1 | 1,071 | py | Python | src/pipeline.py | iyunbo/logz | e529188be8a232827dddf203b0c61bf32730ecff | [
"MIT"
] | null | null | null | src/pipeline.py | iyunbo/logz | e529188be8a232827dddf203b0c61bf32730ecff | [
"MIT"
] | 3 | 2020-03-01T17:43:08.000Z | 2020-03-01T17:50:43.000Z | src/pipeline.py | iyunbo/logx | e529188be8a232827dddf203b0c61bf32730ecff | [
"MIT"
] | null | null | null | """Construction of the master pipeline.
"""
from typing import Dict
from kedro.pipeline import Pipeline
from .data import pipeline as de
from .models import pipeline as ds
###########################################################################
# Here you can find an example pipeline, made of two modular pipelines.
#
# Delete this when you start working on your own Kedro project as
# well as pipelines/data_science AND pipelines/data_engineering
# -------------------------------------------------------------------------
def create_pipelines(**kwargs) -> Dict[str, Pipeline]:
    """Create the project's pipeline.

    Args:
        kwargs: Ignore any additional arguments added in the future.

    Returns:
        A mapping from a pipeline name to a ``Pipeline`` object.
    """
    de_pipeline = de.create_pipeline()
    ds_pipeline = ds.create_pipeline()
    return {
        "de": de_pipeline,
        "ds": ds_pipeline,
        "__default__": de_pipeline + ds_pipeline,
    }
| 27.461538 | 75 | 0.616246 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 615 | 0.57423 |
dc9dbecdd39757aea1bc7e53ba11298fa99e950a | 9,946 | py | Python | aiochclient/types.py | maxifom/aiochclient | 817de89f80da8bbda0a92b33a2471145afd141f1 | [
"MIT"
] | null | null | null | aiochclient/types.py | maxifom/aiochclient | 817de89f80da8bbda0a92b33a2471145afd141f1 | [
"MIT"
] | null | null | null | aiochclient/types.py | maxifom/aiochclient | 817de89f80da8bbda0a92b33a2471145afd141f1 | [
"MIT"
] | null | null | null | import datetime as dt
import re
from abc import ABC, abstractmethod
from decimal import Decimal
from typing import Any, Callable, Generator, Optional
from uuid import UUID
from aiochclient.exceptions import ChClientError
# Prefer the C-accelerated ciso8601 parser when installed; otherwise fall
# back to strptime with the fixed ClickHouse text formats.
try:
    import ciso8601

    datetime_parse = date_parse = ciso8601.parse_datetime
except ImportError:

    def datetime_parse(string):
        # ClickHouse DateTime text format
        return dt.datetime.strptime(string, '%Y-%m-%d %H:%M:%S')

    def date_parse(string):
        # ClickHouse Date text format
        return dt.datetime.strptime(string, '%Y-%m-%d')
__all__ = ["what_py_converter", "rows2ch"]

# Patterns extracting the inner type name(s) from composite ClickHouse types.
RE_TUPLE = re.compile(r"^Tuple\((.*)\)$")
RE_ARRAY = re.compile(r"^Array\((.*)\)$")
RE_NULLABLE = re.compile(r"^Nullable\((.*)\)$")
RE_LOW_CARDINALITY = re.compile(r"^LowCardinality\((.*)\)$")
class BaseType(ABC):
    """Base class for ClickHouse <-> Python value converters.

    Subclasses implement :meth:`p_type` to parse one textual value;
    :meth:`convert` feeds it the unescaped TSV bytes and
    :meth:`unconvert` serializes a Python value back to ClickHouse syntax.
    """

    __slots__ = ("name", "container")

    # Byte following a backslash in ClickHouse TSV escapes -> raw byte.
    ESC_CHR_MAPPING = {
        b"b": b"\b",
        b"N": b"\\N",  # NULL
        b"f": b"\f",
        b"r": b"\r",
        b"n": b"\n",
        b"t": b"\t",
        b"0": b" ",
        b"'": b"'",
        b"\\": b"\\",
    }

    # single-character tokens used by seq_parser
    DQ = "'"
    CM = ","
    TUP_OP = '('
    TUP_CLS = ')'
    ARR_OP = '['
    ARR_CLS = ']'

    def __init__(self, name: str, container: bool = False):
        # name: full ClickHouse type name; container: True when nested
        # inside an Array/Tuple (affects quoting of string elements)
        self.name = name
        self.container = container

    @abstractmethod
    def p_type(self, string):
        """ Function for implementing specific actions for each type """

    @classmethod
    def decode(cls, val: bytes) -> str:
        """
        Converting bytes from clickhouse with
        backslash-escaped special characters
        to pythonic string format
        """
        n = val.find(b"\\")
        if n < 0:
            # fast path: nothing escaped
            return val.decode()
        n += 1
        d = val[:n]
        b = val[n:]
        while b:
            # replace the trailing backslash kept in d with the unescaped byte
            d = d[:-1] + cls.ESC_CHR_MAPPING.get(b[0:1], b[0:1])
            b = b[1:]
            n = b.find(b"\\")
            if n < 0:
                d = d + b
                break
            n += 1
            d = d + b[:n]
            b = b[n:]
        return d.decode()

    @classmethod
    def seq_parser(cls, raw: str) -> Generator[str, None, None]:
        """
        Generator for parsing tuples and arrays.
        Returns elements one by one
        """
        cur = []
        in_str = False
        in_arr = False
        in_tup = False
        if not raw:
            return None
        for sym in raw:
            # split on commas only at the top nesting level
            if not (in_str or in_arr or in_tup):
                if sym == cls.CM:
                    yield "".join(cur)
                    cur = []
                    continue
                elif sym == cls.DQ:
                    in_str = not in_str
                elif sym == cls.ARR_OP:
                    in_arr = True
                elif sym == cls.TUP_OP:
                    in_tup = True
            elif in_str and sym == cls.DQ:
                in_str = not in_str
            elif in_arr and sym == cls.ARR_CLS:
                in_arr = False
            elif in_tup and sym == cls.TUP_CLS:
                in_tup = False
            cur.append(sym)
        yield "".join(cur)

    def convert(self, value: bytes) -> Any:
        # unescape the raw bytes, then delegate to the subclass parser
        return self.p_type(self.decode(value))

    @staticmethod
    def unconvert(value) -> bytes:
        # %a yields an ASCII repr suitable as a ClickHouse literal
        return b"%a" % value
class StrType(BaseType):
    """ClickHouse String / FixedString / Enum values."""

    def p_type(self, string: str):
        # inside Array/Tuple containers the element arrives single-quoted
        if not self.container:
            return string
        return string.strip("'")

    @staticmethod
    def unconvert(value: str) -> bytes:
        escaped = value.replace("\\", "\\\\").replace("'", "\\'")
        return ("'" + escaped + "'").encode()
class IntType(BaseType):
    """ClickHouse integer types (UInt8..UInt64, Int8..Int64)."""
    p_type = int

    def convert(self, value: bytes) -> Any:
        # int() parses ASCII bytes directly; no decode needed
        return self.p_type(value)

    @staticmethod
    def unconvert(value: int) -> bytes:
        return b"%d" % value
class FloatType(IntType):
    """ClickHouse Float32/Float64 values."""
    p_type = float

    @staticmethod
    def unconvert(value: float) -> bytes:
        # %r keeps a round-trippable repr of the float
        return b"%r" % value
class DateType(BaseType):
    """ClickHouse Date values; the zero date maps to None."""

    def p_type(self, string: str):
        stripped = string.strip("'")
        try:
            return date_parse(stripped).date()
        except ValueError:
            # In case of 0000-00-00
            if stripped == "0000-00-00":
                return None
            raise

    def convert(self, value: bytes) -> Optional[dt.date]:
        return self.p_type(value.decode())

    @staticmethod
    def unconvert(value: dt.date) -> bytes:
        return b"%a" % str(value)
class DateTimeType(BaseType):
    """ClickHouse DateTime values; the zero datetime maps to None."""

    def p_type(self, string: str):
        string = string.strip("'")
        try:
            return datetime_parse(string)
        except ValueError:
            # In case of 0000-00-00 00:00:00
            if string == "0000-00-00 00:00:00":
                return None
            raise

    def convert(self, value: bytes) -> Optional[dt.datetime]:
        return self.p_type(value.decode())

    @staticmethod
    def unconvert(value: dt.datetime) -> bytes:
        # microseconds are dropped before serialization
        return b"%a" % str(value.replace(microsecond=0))
class UUIDType(BaseType):
    """ClickHouse UUID values."""

    def p_type(self, string):
        return UUID(string.strip("'"))

    def convert(self, value: bytes) -> UUID:
        return self.p_type(value.decode())

    @staticmethod
    def unconvert(value: UUID) -> bytes:
        return b"%a" % str(value)
class TupleType(BaseType):
    """ClickHouse Tuple(...) values."""

    __slots__ = ("name", "types")

    def __init__(self, name: str, **kwargs):
        super().__init__(name, **kwargs)
        tps = RE_TUPLE.findall(name)[0]
        # one converter per tuple element
        # NOTE(review): the plain split(",") presumably breaks on nested
        # composite element types -- verify whether nested Tuples occur.
        self.types = tuple(what_py_type(tp, container=True) for tp in tps.split(","))

    def p_type(self, string: str) -> tuple:
        return tuple(
            tp.p_type(val)
            for tp, val in zip(self.types, self.seq_parser(string.strip("()")))
        )

    @staticmethod
    def unconvert(value) -> bytes:
        return b"(" + b",".join(py2ch(elem) for elem in value) + b")"
class ArrayType(BaseType):
    """ClickHouse Array(...) values."""

    __slots__ = ("name", "type")

    def __init__(self, name: str, **kwargs):
        super().__init__(name, **kwargs)
        # converter for the element type
        self.type = what_py_type(RE_ARRAY.findall(name)[0], container=True)

    def p_type(self, string: str) -> list:
        # strip the surrounding [ ] and parse element by element
        return [self.type.p_type(val) for val in self.seq_parser(string[1:-1])]

    @staticmethod
    def unconvert(value) -> bytes:
        return b"[" + b",".join(py2ch(elem) for elem in value) + b"]"
class NullableType(BaseType):
    """Nullable(T) wrapper: NULL markers become None, otherwise delegate."""

    __slots__ = ("name", "type")

    # textual NULL markers emitted by ClickHouse
    NULLABLE = {r"\N", "NULL"}

    def __init__(self, name: str, **kwargs):
        super().__init__(name, **kwargs)
        self.type = what_py_type(RE_NULLABLE.findall(name)[0])

    def p_type(self, string: str) -> Any:
        if string in self.NULLABLE:
            return None
        return self.type.p_type(string)

    @staticmethod
    def unconvert(value) -> bytes:
        return b"NULL"
class NothingType(BaseType):
    """ClickHouse Nothing: every value converts to None."""

    def p_type(self, string: str) -> None:
        return None

    def convert(self, value: bytes) -> None:
        return None
class LowCardinalityType(BaseType):
    """LowCardinality(T) wrapper: parsing is delegated to the inner type."""

    __slots__ = ("name", "type")

    def __init__(self, name: str, **kwargs):
        super().__init__(name, **kwargs)
        self.type = what_py_type(RE_LOW_CARDINALITY.findall(name)[0])

    def p_type(self, string: str) -> Any:
        return self.type.p_type(string)
class DecimalType(BaseType):
    """ClickHouse Decimal / Decimal32 / Decimal64 / Decimal128 values."""
    p_type = Decimal

    def convert(self, value: bytes) -> Decimal:
        return self.p_type(value.decode())

    @staticmethod
    def unconvert(value: Decimal) -> bytes:
        return str(value).encode()
# ClickHouse type name (part before any parenthesis) -> converter class.
CH_TYPES_MAPPING = {
    "UInt8": IntType,
    "UInt16": IntType,
    "UInt32": IntType,
    "UInt64": IntType,
    "Int8": IntType,
    "Int16": IntType,
    "Int32": IntType,
    "Int64": IntType,
    "Float32": FloatType,
    "Float64": FloatType,
    "String": StrType,
    "FixedString": StrType,
    "Enum8": StrType,
    "Enum16": StrType,
    "Date": DateType,
    "DateTime": DateTimeType,
    "Tuple": TupleType,
    "Array": ArrayType,
    "Nullable": NullableType,
    "Nothing": NothingType,
    "UUID": UUIDType,
    "LowCardinality": LowCardinalityType,
    "Decimal": DecimalType,
    "Decimal32": DecimalType,
    "Decimal64": DecimalType,
    "Decimal128": DecimalType,
}

# Python value type -> serializer function (exact type match; see py2ch).
PY_TYPES_MAPPING = {
    int: IntType.unconvert,
    float: FloatType.unconvert,
    str: StrType.unconvert,
    dt.date: DateType.unconvert,
    dt.datetime: DateTimeType.unconvert,
    tuple: TupleType.unconvert,
    list: ArrayType.unconvert,
    type(None): NullableType.unconvert,
    UUID: UUIDType.unconvert,
    Decimal: DecimalType.unconvert,
}
def what_py_type(name: str, container: bool = False) -> BaseType:
    """ Returns needed type class from clickhouse type name """
    cleaned = name.strip()
    # dispatch on the outer constructor, i.e. the part before '('
    base = cleaned.split("(")[0]
    try:
        return CH_TYPES_MAPPING[base](cleaned, container=container)
    except KeyError:
        raise ChClientError(f"Unrecognized type name: '{cleaned}'")
def what_py_converter(name: str, container: bool = False) -> Callable:
    """ Returns the bound ``convert`` callable for the given clickhouse type name """
    return what_py_type(name, container).convert
def py2ch(value):
    """Serialise a Python *value* to its ClickHouse byte representation.

    Dispatch is on the exact runtime type — ``type(value)`` — mirroring the
    keys of ``PY_TYPES_MAPPING``; subclasses are deliberately not supported.

    :raises ChClientError: if the value's type has no registered serialiser.
    """
    try:
        return PY_TYPES_MAPPING[type(value)](value)
    except KeyError as exc:
        # Chain the KeyError so the failing type lookup stays visible.
        raise ChClientError(
            f"Unrecognized type: '{type(value)}'. "
            f"The value type should be exactly one of "
            f"int, float, str, dt.date, dt.datetime, tuple, list, uuid.UUID (or None). "
            f"No subclasses yet."
        ) from exc
def rows2ch(*rows):
    """Serialise *rows* (tuples) into a comma-separated ClickHouse VALUES blob."""
    return b",".join(TupleType.unconvert(row) for row in rows)
def json2ch(*records, dumps: Callable[[Any], bytes]):
    """Serialise *records* with *dumps* and strip the enclosing array brackets."""
    payload = dumps(records)
    # Drop the leading "[" and trailing "]" of the serialised array so the
    # records can be streamed as JSONEachRow-style rows.
    return payload[1:-1]
| 26.808625 | 89 | 0.547456 | 6,904 | 0.694148 | 1,049 | 0.10547 | 2,957 | 0.297305 | 0 | 0 | 1,414 | 0.142168 |
dc9ef3b1807ee1a51f159e4adbc4940a7c7a3b65 | 866 | py | Python | pets/meupet/migrations/0016_auto_20160105_2019.py | diogum/pets | cd41dc1046718a0a73fb003828bc2da06bba7003 | [
"MIT"
] | 57 | 2018-01-05T02:50:37.000Z | 2021-07-15T15:30:03.000Z | pets/meupet/migrations/0016_auto_20160105_2019.py | diogum/pets | cd41dc1046718a0a73fb003828bc2da06bba7003 | [
"MIT"
] | 55 | 2018-01-30T12:24:16.000Z | 2021-10-01T16:52:17.000Z | pets/meupet/migrations/0016_auto_20160105_2019.py | diogum/pets | cd41dc1046718a0a73fb003828bc2da06bba7003 | [
"MIT"
] | 34 | 2018-01-20T21:27:24.000Z | 2022-01-27T09:53:01.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Adds ``created``/``modified`` timestamp columns to the Pet model.
    # The fixed datetime defaults below only backfill existing rows
    # (preserve_default=False); new rows use auto_now_add / auto_now.

    dependencies = [("meupet", "0015_pet_published")]

    operations = [
        migrations.AddField(
            model_name="pet",
            name="created",
            field=models.DateTimeField(
                default=datetime.datetime(2016, 1, 5, 20, 19, 6, 548541, tzinfo=utc), auto_now_add=True
            ),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="pet",
            name="modified",
            field=models.DateTimeField(
                default=datetime.datetime(2016, 1, 5, 20, 19, 15, 300296, tzinfo=utc), auto_now=True
            ),
            preserve_default=False,
        ),
    ]
| 27.935484 | 103 | 0.58545 | 702 | 0.810624 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.092379 |
dc9f09cf9ee2ea2c23e057814412e9cccf50d425 | 5,389 | py | Python | space_api/db/update.py | AliabbasMerchant/space-api-python | e5f047d567540d503be7fe72e82f2b198e48b5f9 | [
"Apache-2.0"
] | 8 | 2019-04-02T06:06:45.000Z | 2019-11-12T16:53:26.000Z | space_api/db/update.py | SaiprasadDuduka/space-api-python | 278ad650fa5579089a7ff465dbe74ec5469940ae | [
"Apache-2.0"
] | 28 | 2019-03-25T11:35:07.000Z | 2020-05-11T05:10:00.000Z | space_api/db/update.py | SaiprasadDuduka/space-api-python | 278ad650fa5579089a7ff465dbe74ec5469940ae | [
"Apache-2.0"
] | 4 | 2019-03-22T17:09:22.000Z | 2019-10-24T17:10:43.000Z | from space_api.utils import generate_find, AND
from space_api.transport import Transport
from space_api.response import Response
class Update:
    """Chainable builder for database update requests.

    ::

        from space_api import API, AND, OR, COND
        api = API("My-Project", "localhost:4124")
        db = api.mongo()  # For a MongoDB interface
        response = db.update('posts').where(AND(COND('title', '==', 'Title1'))).set({'title':'Title2'}).apply()

    :param transport: (Transport) The API's transport instance
    :param collection: (str) The collection name
    :param db_type: (str) The database type
    :param operation: (str) The (optional) operation (one/all/upsert) (Defaults to 'all')
    """

    def __init__(self, transport: Transport, collection: str, db_type: str, operation: str = 'all'):
        self.transport = transport
        self.collection = collection
        self.db_type = db_type
        self.operation = operation
        # 'find' holds the query filter; 'update' holds the MongoDB-style
        # update document built up by the chainable methods below.
        self.params = {'find': {}, 'update': {}}

    def _set_operator(self, operator: str, obj) -> 'Update':
        """(Internal) Store *obj* under the given MongoDB update operator."""
        self.params['update'][operator] = obj
        return self

    def _mark_current_date(self, date_type: str, fields) -> 'Update':
        """(Internal) Register *fields* for a $currentDate update of *date_type*."""
        if self.params['update'].get('$currentDate') is None:
            self.params['update']['$currentDate'] = {}
        self.params['update']['$currentDate'].update(
            {x: {'$type': date_type} for x in fields})
        return self

    def where(self, *conditions) -> 'Update':
        """Prepares the find parameters.

        :param conditions: (*) The conditions to find by
        """
        self.params['find'] = generate_find(AND(*conditions))
        return self

    def set(self, obj) -> 'Update':
        """Prepares the updated values.

        :param obj: An object containing the fields to set
        """
        return self._set_operator('$set', obj)

    def push(self, obj) -> 'Update':
        """Adds an item to a list.

        :param obj: An object containing the fields/items to push
        """
        return self._set_operator('$push', obj)

    def remove(self, *fields) -> 'Update':
        """Removes the specified fields from a document.

        :param fields: (*) The fields to be removed
        """
        return self._set_operator('$unset', {x: '' for x in fields})

    def rename(self, obj) -> 'Update':
        """Renames the specified fields.

        :param obj: An object mapping old field names to new ones
        """
        return self._set_operator('$rename', obj)

    def inc(self, obj) -> 'Update':
        """Increments the value of a field by a specified amount.

        :param obj: An object containing the fields to increment, along with the increment value
        """
        return self._set_operator('$inc', obj)

    def mul(self, obj) -> 'Update':
        """Multiplies the value of a field by a specified amount.

        :param obj: An object containing the fields to multiply, along with the multiplier value
        """
        return self._set_operator('$mul', obj)

    def max(self, obj) -> 'Update':
        """Updates the field if the specified value is greater than the existing field value.

        :param obj: An object containing the fields to set
        """
        return self._set_operator('$max', obj)

    def min(self, obj) -> 'Update':
        """Updates the field if the specified value is lesser than the existing field value.

        :param obj: An object containing the fields to set
        """
        return self._set_operator('$min', obj)

    def current_timestamp(self, *values) -> 'Update':
        """Sets the value of the given field(s) to the current timestamp.

        :param values: (*) The fields to set
        """
        return self._mark_current_date('timestamp', values)

    def current_date(self, *values) -> 'Update':
        """Sets the value of the given field(s) to the current date.

        :param values: (*) The fields to set
        """
        return self._mark_current_date('date', values)

    def apply(self) -> Response:
        """Triggers the update request.

        :return: (Response) The response object containing values corresponding to the request
        """
        return self.transport.update(self.params['find'], self.operation,
                                     self.params['update'], self.db_type,
                                     self.collection)
__all__ = ['Update']
| 33.265432 | 111 | 0.566339 | 5,234 | 0.971238 | 0 | 0 | 0 | 0 | 0 | 0 | 3,494 | 0.648358 |
dca02eed4f9a0e7e4aab52531c96d239e6236b81 | 432 | py | Python | OpenCV/Histogramas/h6.py | matewszz/Python | 18b7fc96d3ed294d2002ed484941a0ee8cf18108 | [
"MIT"
] | null | null | null | OpenCV/Histogramas/h6.py | matewszz/Python | 18b7fc96d3ed294d2002ed484941a0ee8cf18108 | [
"MIT"
] | null | null | null | OpenCV/Histogramas/h6.py | matewszz/Python | 18b7fc96d3ed294d2002ed484941a0ee8cf18108 | [
"MIT"
] | null | null | null | import dlib
import cv2

# Load the test image and convert to grayscale — dlib's HOG detector
# works on a single-channel image.
image = cv2.imread("../testeOpenCV.jpg")
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# HOG-based frontal face detector; the second argument (1) upsamples the
# image once so smaller faces can be found.
hogFaceDetector = dlib.get_frontal_face_detector()
faces = hogFaceDetector(gray, 1)

# Draw a green bounding box around every detected face.
for (i, rect) in enumerate(faces):
    x = rect.left()
    y = rect.top()
    w = rect.right() - x
    h = rect.bottom() - y
    cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)

cv2.imshow("Image", image)
k = cv2.waitKey()  # block until a key is pressed
dca034f5cc1099ca680a5daf8638e61d4dde6b17 | 305 | py | Python | preacher/compilation/yaml/tag/argument.py | ymoch/preacher | ae68170d14c72791884e91b20054bd13a79b52d0 | [
"MIT"
] | 3 | 2019-08-01T03:14:49.000Z | 2020-01-31T08:55:22.000Z | preacher/compilation/yaml/tag/argument.py | ymoch/preacher | ae68170d14c72791884e91b20054bd13a79b52d0 | [
"MIT"
] | 353 | 2019-04-14T14:53:28.000Z | 2022-03-11T03:26:08.000Z | preacher/compilation/yaml/tag/argument.py | ymoch/preacher | ae68170d14c72791884e91b20054bd13a79b52d0 | [
"MIT"
] | 1 | 2020-08-01T06:23:08.000Z | 2020-08-01T06:23:08.000Z | from yaml import Node
from yamlen import Tag, TagContext
from preacher.compilation.argument import Argument
class ArgumentTag(Tag):
    """YAML tag that resolves a scalar node into an ``Argument`` placeholder."""

    def construct(self, node: Node, context: TagContext) -> object:
        # The scalar text becomes the argument's lookup key.
        key = context.constructor.construct_scalar(node)  # type: ignore
        return Argument(key)
| 27.727273 | 72 | 0.740984 | 193 | 0.632787 | 0 | 0 | 0 | 0 | 0 | 0 | 14 | 0.045902 |
dca244a29e158a60507829eefc5aceb50205134c | 1,341 | py | Python | Drivers/PS-228xS/PS228xS_Python_Sockets_Driver/PS228xS_Example.py | 398786172/keithley | f78c5220841775a45ae60645c774e8b443b02ec3 | [
"BSD-Source-Code"
] | 31 | 2019-04-11T14:25:39.000Z | 2022-03-18T15:09:33.000Z | Drivers/PS-228xS/PS228xS_Python_Sockets_Driver/PS228xS_Example.py | 398786172/keithley | f78c5220841775a45ae60645c774e8b443b02ec3 | [
"BSD-Source-Code"
] | 27 | 2019-04-10T20:21:52.000Z | 2021-12-09T01:59:32.000Z | Drivers/PS-228xS/PS228xS_Python_Sockets_Driver/PS228xS_Example.py | 398786172/keithley | f78c5220841775a45ae60645c774e8b443b02ec3 | [
"BSD-Source-Code"
] | 30 | 2019-06-08T09:38:20.000Z | 2022-03-18T15:10:37.000Z | #!/usr/bin/python
import socket
import struct
import math
import time
import Keithley_PS228xS_Sockets_Driver as ps

# When truthy, each SCPI command is echoed to the console as it is sent.
echoCmd = 1

#===== MAIN PROGRAM STARTS HERE =====
# Candidate instrument addresses; only ipAddress1 is used below.
ipAddress1 = "134.63.78.214"
ipAddress2 = "134.63.74.152"
ipAddress3 = "134.63.78.214"
port = 5025       # standard raw-socket SCPI port
timeout = 20.0    # socket timeout in seconds

t1 = time.time()  # start of the timed run

#ps.instrConnect(s1, ipAddress1, port, timeout, 0, 0)
s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s1, idStr = ps.PowerSupply_Connect(s1, ipAddress1, port, timeout, echoCmd, 1, 1)
print(idStr)

# Configure the output: 10 V / 1.5 A with over-voltage (33 V) and
# over-current (2 A) protection limits, then read the settings back.
ps.PowerSupply_SetVoltage(s1, 10.0)
ps.PowerSupply_SetCurrent(s1, 1.5)
ps.PowerSupply_SetVoltageProtection(s1, 33.0)
ps.PowerSupply_SetCurrentProtection(s1, 2.0)
print(ps.PowerSupply_GetVoltage(s1))
print(ps.PowerSupply_GetCurrent(s1))

ps.PowerSupply_SetDataFormat(s1, 1, 0, 0)

# Power the DUT, let it settle, then read back live measurements.
ps.PowerSupply_SetOutputState(s1, 1)
ps.PowerSupply_SetDisplayText(s1, "Powering On DUT...")
print(ps.PowerSupply_GetOutputState(s1))
time.sleep(3.0)
print(ps.PowerSupply_MeasureCurrent(s1))
print(ps.PowerSupply_MeasureVoltage(s1))
time.sleep(1.0)

# Power down and disconnect from the instrument.
ps.PowerSupply_SetOutputState(s1, 0)
ps.PowerSupply_SetDisplayText(s1, "Powering Off DUT...")
ps.PowerSupply_Disconnect(s1)

t2 = time.time()
# Notify the user of completion and the test time achieved.
print("done")
print("{0:.6f} s".format(t2-t1))
input("Press Enter to continue...")
exit()
exit()  # NOTE(review): unreachable duplicate call — safe to remove
| 22.35 | 80 | 0.756898 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 298 | 0.222222 |
dca297c71e432213d85dd239f69fc7b854c190ae | 736 | py | Python | saveimage.py | NaviRice/HeadTracking | 8227cc247425ecacd3e789dbbac11d3e5103d3e2 | [
"MIT"
] | 1 | 2019-10-24T14:29:00.000Z | 2019-10-24T14:29:00.000Z | saveimage.py | NaviRice/HeadTracking | 8227cc247425ecacd3e789dbbac11d3e5103d3e2 | [
"MIT"
] | 7 | 2017-11-28T23:58:40.000Z | 2022-03-11T23:12:12.000Z | saveimage.py | NaviRice/HeadTracking | 8227cc247425ecacd3e789dbbac11d3e5103d3e2 | [
"MIT"
] | null | null | null | import OpenEXR
from navirice_get_image import KinectClient
from navirice_helpers import navirice_image_to_np

# Kinect streaming service endpoint.
DEFAULT_HOST = 'navirice'
DEFAULT_PORT = 29000

# Request IR + depth frames only (no RGB) from the Kinect server.
kin = KinectClient(DEFAULT_HOST, DEFAULT_PORT)
kin.navirice_capture_settings(rgb=False, ir=True, depth=True)

last_count = 0
img_set, last_count = kin.navirice_get_next_image()

# Raw (unscaled) depth values as a numpy array.
np_depth_image = navirice_image_to_np(img_set.Depth, scale=False)

# Build an EXR header sized to the depth frame.
hdr = OpenEXR.Header(img_set.Depth.width, img_set.Depth.height)
print(hdr)
#hdr['channels'] = {'R': FLOAT(1,1)}
#hdr['channels'] = {'R': hdr['channels']['R']}
print(hdr)
print()
print(img_set.Depth.data_type)

# Write the single depth map into all three RGB channels of out.exr.
exr = OpenEXR.OutputFile("out.exr", hdr)
exr.writePixels({
    'R': np_depth_image,
    'G': np_depth_image,
    'B': np_depth_image,
})
| 22.30303 | 65 | 0.767663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 110 | 0.149457 |
dca357411b72a921aa801ee5af30b257b6b3d771 | 2,386 | py | Python | nonebot_plugin_arcaea/crud/crud_user.py | iyume/nonebot-plugin-arcaea | cd2110b11ae41707396a0db21f228474cc3e35b4 | [
"MIT"
] | 35 | 2021-02-08T09:55:51.000Z | 2022-03-19T08:22:07.000Z | nonebot_plugin_arcaea/crud/crud_user.py | iyume/nonebot-plugin-arcaea | cd2110b11ae41707396a0db21f228474cc3e35b4 | [
"MIT"
] | 10 | 2021-02-22T04:15:58.000Z | 2022-03-28T02:55:02.000Z | nonebot_plugin_arcaea/crud/crud_user.py | iyume/nonebot-plugin-arcaea | cd2110b11ae41707396a0db21f228474cc3e35b4 | [
"MIT"
] | 1 | 2022-01-18T03:57:07.000Z | 2022-01-18T03:57:07.000Z | from typing import Optional
from datetime import datetime
from sqlite3.dbapi2 import Cursor
from ..config import config
from .. import schema
class CRUDUser():
    """CRUD operations for rows of the ``accounts`` table.

    Every method takes an open sqlite3 :class:`Cursor`; committing the
    transaction is left to the caller.
    """

    model = schema.User

    def create(
        self,
        db: Cursor,
        qq: int,
        code: str
    ) -> None:
        """Insert a new account binding QQ number *qq* to Arcaea friend *code*."""
        user_dict = {
            "qq": qq,
            "code": code,
            "created_time": f"{datetime.now():%F %X}",
            "is_active": True,
            "recent_type": config.DEFAULT_RECENT_TYPE,
            "b30_type": config.DEFAULT_BEST30_TYPE
        }
        # Column names come from the trusted dict above; the values are
        # bound through "?" placeholders (stringified, matching the schema).
        db_arg_columns = list(user_dict)
        db_arg_values = [str(i) for i in user_dict.values()]
        db.execute(
            f"""INSERT INTO accounts ({','.join(db_arg_columns)})
            VALUES ({','.join('?' * len(user_dict))})""",
            db_arg_values)

    def get_by_qq(
        self,
        db: Cursor,
        qq: int
    ) -> Optional[schema.User]:
        """Return the user bound to *qq*, or ``None`` if no row matches."""
        user = db.execute(
            "SELECT * FROM accounts WHERE qq=?",
            (qq,)
        ).fetchone()
        if not user:
            return None
        return self.model(**user)

    def get_by_code(
        self,
        db: Cursor,
        code: str
    ) -> Optional[schema.User]:
        """Return the user bound to friend *code*, or ``None`` if no row matches."""
        user = db.execute(
            "SELECT * FROM accounts WHERE code=?",
            (code,)
        ).fetchone()
        if not user:
            return None
        return self.model(**user)

    def update(
        self,
        db: Cursor,
        qq: int,
        code: Optional[str] = None,
        is_active: Optional[bool] = None,
        best30_type: Optional[str] = None,
        recent_type: Optional[str] = None
    ) -> None:
        """Update only the supplied (non-``None``) fields of *qq*'s account."""
        update_dict = {
            "code": code,
            "is_active": is_active,
            "recent_type": recent_type,
            "b30_type": best30_type
        }
        # Build "col=?" fragments for the supplied fields only.  Using
        # ``is not None`` (not ``!= None``) so falsy values like False
        # are still written.
        db_arg_columns, db_arg_values = zip(
            *[(f"{i}=?", val) for i, val in update_dict.items() if val is not None])
        db.execute(
            f"UPDATE accounts SET {','.join(db_arg_columns)} WHERE qq=?",
            (*db_arg_values, qq))

    def delete(
        self,
        db: Cursor,
        qq: int
    ) -> None:
        """Unbind *qq*'s account by clearing its friend code."""
        # unbind method, not really delete user record
        db.execute(
            "UPDATE accounts SET code=NULL WHERE qq=?",
            (qq,)
        )
user = CRUDUser()
| 25.655914 | 80 | 0.50461 | 2,220 | 0.930427 | 0 | 0 | 0 | 0 | 0 | 0 | 467 | 0.195725 |
dca3d2cab2990fa6bda7c9d0fc5f20898f5d6657 | 2,424 | py | Python | baseline/eval_sent.py | parallelcrawl/DataCollection | 4308473e6b53779159a15c1416bff3f2291dd1f2 | [
"Apache-2.0"
] | 8 | 2018-02-08T16:03:00.000Z | 2022-01-19T11:41:38.000Z | baseline/eval_sent.py | christianbuck/CorpusMining | f9248c3528a415a1e5af2c5a54a60c16cd79ff1d | [
"Apache-2.0"
] | 3 | 2017-08-08T10:53:29.000Z | 2017-08-08T10:58:51.000Z | baseline/eval_sent.py | parallelcrawl/DataCollection | 4308473e6b53779159a15c1416bff3f2291dd1f2 | [
"Apache-2.0"
] | 4 | 2018-06-09T21:53:09.000Z | 2022-01-19T11:41:48.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from strip_language_from_uri import LanguageStripper
import urlparse
correct, wrong = [], []
def strip_uri(uri, language_stripper):
    """Return ``(language, stripped_uri)`` for *uri*.

    The language marker is looked up in the path first, then in the query
    string; the returned URI has the marker stripped from both parts.
    """
    parts = urlparse.urlparse(uri)
    language = language_stripper.match(parts.path) or \
        language_stripper.match(parts.query)
    assert language
    rebuilt = urlparse.ParseResult(
        parts.scheme,
        parts.netloc,
        language_stripper.strip(parts.path),
        parts.params,
        language_stripper.strip(parts.query),
        parts.fragment)
    return language, rebuilt.geturl()
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
'infile', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument(
'--outfile', type=argparse.FileType('w'))
parser.add_argument('-filter', action="store_true")
args = parser.parse_args()
stripper = LanguageStripper()
source_uris, target_uris = set(), set()
for line in args.infile:
source_uri, target_uri, source, target, score = line.split("\t")
source_lang, stripped_source_uri = strip_uri(source_uri, stripper)
target_lang, stripped_target_uri = strip_uri(target_uri, stripper)
source_uris.add(source_uri)
target_uris.add(target_uri)
if stripped_source_uri != stripped_target_uri:
wrong.append((stripped_source_uri, stripped_target_uri))
else:
if args.outfile:
args.outfile.write(line)
correct.append((stripped_source_uri, stripped_target_uri))
print "found %s source and %s target uris" % (len(source_uris), len(target_uris))
total = len(wrong) + len(correct)
total_unique = len(set(wrong).union(set(correct)))
if wrong:
print "Wrong: ", len(wrong), len(set(wrong))
if correct:
print "Correct", len(correct), len(set(correct))
if total > 0:
print "Acc1", float(len(wrong)) / total
print "Acc2", float(len(set(wrong))) / total_unique
| 36.727273 | 85 | 0.634076 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 170 | 0.070132 |
dca3fb284135452a106049556780c22fb651e4d7 | 4,276 | py | Python | dz/dz-05/src/integrators/implicit.py | Yalfoosh/AIPR | cd89e562682e7f868d56db2414a92c2d14d8ad8d | [
"Apache-2.0"
] | null | null | null | dz/dz-05/src/integrators/implicit.py | Yalfoosh/AIPR | cd89e562682e7f868d56db2414a92c2d14d8ad8d | [
"Apache-2.0"
] | null | null | null | dz/dz-05/src/integrators/implicit.py | Yalfoosh/AIPR | cd89e562682e7f868d56db2414a92c2d14d8ad8d | [
"Apache-2.0"
] | null | null | null | import copy
from typing import Callable
import numpy as np
from .explicit import ExplicitIntegrator
from .integrator import Integrator
class ImplicitIntegrator(Integrator):
    """Base class for implicit integrators, which additionally provide a
    corrector function for predictor-corrector stepping."""

    def generate_correct_function(
        self, *args, **kwargs
    ) -> Callable[[np.ndarray, np.ndarray, float], np.ndarray]:
        # Abstract: subclasses return f(x, prediction, t) -> corrected state.
        raise NotImplementedError
class InverseEulerIntegrator(ImplicitIntegrator):
    """Backward (inverse) Euler integrator for the linear system
    x' = A x + B r(t)."""

    @staticmethod
    def __check_generate_function_arguments(**kwargs):
        # Validate that system matrices "a" and "b" are supplied as numpy
        # arrays; return defensive deep copies.
        if "a" not in kwargs:
            raise KeyError('Argument kwargs must contain an entry with key "a"!')
        if "b" not in kwargs:
            raise KeyError('Argument kwargs must contain an entry with key "b"!')
        a, b = kwargs["a"], kwargs["b"]
        if not isinstance(a, np.ndarray):
            raise TypeError(
                f'Expected kwargs["a"] to be a np.ndarray, instead it is {type(a)}.'
            )
        if not isinstance(b, np.ndarray):
            raise TypeError(
                f'Expected kwargs["b"] to be a np.ndarray, instead it is {type(b)}.'
            )
        return copy.deepcopy(a), copy.deepcopy(b)

    @staticmethod
    def __check_generate_correct_function_arguments(**kwargs):
        # Same validation as for the prediction function.
        return InverseEulerIntegrator.__check_generate_function_arguments(**kwargs)

    def generate_function(
        self, *args, **kwargs
    ) -> Callable[[np.ndarray, float], np.ndarray]:
        # Precompute P = (I - h*A)^-1 and Q = P @ (h*B); the returned step
        # is x_{k+1} = P x_k + Q r(t + h).  Also requires kwargs
        # "time_function" (forcing r) and "step" (h).
        a, b = self.__check_generate_function_arguments(**kwargs)
        r = copy.deepcopy(kwargs["time_function"])
        step = copy.deepcopy(kwargs["step"])
        p = np.linalg.inv(np.eye(a.shape[0]) - step * a)
        q = p @ (step * b)

        def __f(x: np.ndarray, t: float):
            return p @ x + q @ r(t + step)

        return __f

    def generate_correct_function(
        self, *args, **kwargs
    ) -> Callable[[np.ndarray, np.ndarray, float], np.ndarray]:
        # Corrector: x + h * (A @ prediction + B @ r(t)), used in
        # predictor-corrector mode.
        a, b = self.__check_generate_correct_function_arguments(**kwargs)
        r = copy.deepcopy(kwargs["time_function"])
        step = copy.deepcopy(kwargs["step"])

        def __f(x: np.ndarray, prediction: np.ndarray, t: float):
            return x + step * (a @ prediction + b @ r(t))

        return __f
class TrapezoidalIntegrator(ImplicitIntegrator):
    """Trapezoidal-rule integrator for the linear system x' = A x + B r(t)."""

    @staticmethod
    def __check_generate_function_arguments(**kwargs):
        # Validate that system matrices "a" and "b" are supplied as numpy
        # arrays; return defensive deep copies.
        if "a" not in kwargs:
            raise KeyError('Argument kwargs must contain an entry with key "a"!')
        if "b" not in kwargs:
            raise KeyError('Argument kwargs must contain an entry with key "b"!')
        a, b = kwargs["a"], kwargs["b"]
        if not isinstance(a, np.ndarray):
            raise TypeError(
                f'Expected kwargs["a"] to be a np.ndarray, instead it is {type(a)}.'
            )
        if not isinstance(b, np.ndarray):
            raise TypeError(
                f'Expected kwargs["b"] to be a np.ndarray, instead it is {type(b)}.'
            )
        return copy.deepcopy(a), copy.deepcopy(b)

    @staticmethod
    def __check_generate_correct_function_arguments(**kwargs):
        # Same validation as for the prediction function.
        return TrapezoidalIntegrator.__check_generate_function_arguments(**kwargs)

    def generate_function(
        self, *args, **kwargs
    ) -> Callable[[np.ndarray, float], np.ndarray]:
        # Precompute R = (I - h/2*A)^-1 (I + h/2*A) and
        # S = (I - h/2*A)^-1 (h/2*B); the returned step is
        # x_{k+1} = R x_k + S (r(t) + r(t + h)).
        a, b = self.__check_generate_function_arguments(**kwargs)
        r_function = copy.deepcopy(kwargs["time_function"])
        step = copy.deepcopy(kwargs["step"])
        half_step = step / 2.0
        _inv = np.linalg.inv(np.eye(a.shape[0]) - half_step * a)
        _ninv = np.eye(a.shape[0]) + half_step * a
        r = _inv @ _ninv
        s = _inv @ (half_step * b)

        def __f(x: np.ndarray, t: float):
            return r @ x + s @ (r_function(t) + r_function(t + step))

        return __f

    def generate_correct_function(
        self, *args, **kwargs
    ) -> Callable[[np.ndarray, np.ndarray, float], np.ndarray]:
        # Corrector: x + h/2 * (A @ (x + prediction) + B @ (r(t) + r(t + h))).
        a, b = self.__check_generate_correct_function_arguments(**kwargs)
        r = copy.deepcopy(kwargs["time_function"])
        step = copy.deepcopy(kwargs["step"])
        half_step = step / 2.0

        def __f(x: np.ndarray, prediction: np.ndarray, t: float):
            return x + half_step * (a @ (x + prediction) + b @ (r(t) + r(t + step)))

        return __f
| 31.910448 | 84 | 0.606174 | 4,130 | 0.965856 | 0 | 0 | 1,773 | 0.41464 | 0 | 0 | 592 | 0.138447 |
dca4f33a579f46818e1b7025c4102006baa718f4 | 3,714 | py | Python | result_service_gui/services/result_adapter.py | abdulfahad66/result-service-gui | 214342dd6d00f1173bfe90f8429c7d6c9947783b | [
"Apache-2.0"
] | null | null | null | result_service_gui/services/result_adapter.py | abdulfahad66/result-service-gui | 214342dd6d00f1173bfe90f8429c7d6c9947783b | [
"Apache-2.0"
] | null | null | null | result_service_gui/services/result_adapter.py | abdulfahad66/result-service-gui | 214342dd6d00f1173bfe90f8429c7d6c9947783b | [
"Apache-2.0"
] | null | null | null | """Module for results adapter."""
import logging
import os
from typing import List
from aiohttp import ClientSession
from aiohttp import hdrs
from aiohttp import web
from multidict import MultiDict
RACE_HOST_SERVER = os.getenv("RACE_HOST_SERVER", "localhost")
RACE_HOST_PORT = os.getenv("RACE_HOST_PORT", "8088")
RACE_SERVICE_URL = f"http://{RACE_HOST_SERVER}:{RACE_HOST_PORT}"
class ResultAdapter:
    """Class representing result."""

    async def get_all_results(self, token: str, event_id: str) -> List:
        """Get all results - lap time or heat place function."""
        # NOTE(review): event_id is not used in the request below — confirm
        # whether the service should filter results by event.
        headers = MultiDict(
            [
                (hdrs.AUTHORIZATION, f"Bearer {token}"),
            ]
        )
        results = []
        async with ClientSession() as session:
            async with session.get(
                f"{RACE_SERVICE_URL}/results", headers=headers
            ) as resp:
                logging.debug(f"get_all_results - got response {resp.status}")
                if resp.status == 200:
                    results = await resp.json()
                elif resp.status == 401:
                    raise Exception(f"Login expired: {resp}")
                else:
                    servicename = "get_all_results"
                    body = await resp.json()
                    logging.error(f"{servicename} failed - {resp.status} - {body}")
                    raise web.HTTPBadRequest(
                        reason=f"Error - {resp.status}: {body['detail']}."
                    )
        # NOTE(review): the service response fetched above is discarded —
        # ``results`` is unconditionally overwritten with the hardcoded data
        # below. Presumably temporary mock data for development; confirm
        # before relying on live results.
        results = [
            {
                "bib": "414",  # start number
                "name": "Taiyo Fuseya Skjærven",  # full name
                "club": "Rustad IL",
                "race": "G11KA1",  # race id
                "point": "Mål",  # point in race where result where registered
                "rank": "1",  # optional for interval start
                "time": "10:01:30",  # optional for sprint competition
                "next_race": "SA1-1",  # optional, only for sprint competition
            },
            {
                "race": "G11KA1",
                "bib": "415",
                "name": "Aksel Lied-Storstenvik",
                "club": "Kjelsås IL",
                "rank": "2",
                "time": "10:01:30",
                "next_race": "SA1-3",
            },
            {
                "race": "G11KA1",
                "bib": "416",
                "name": "Andreas Heggelund Dahl",
                "club": "Bækkelagets SK",
                "rank": "3",
                "time": "10:01:30",
                "next_race": "SA1-5",
            },
            {
                "race": "G11KA1",
                "bib": "417",
                "name": "Theodor Owe",
                "club": "Kjelsås IL",
                "rank": "4",
                "time": "10:01:30",
                "next_race": "SA1-7",
            },
            {
                "race": "G11KA1",
                "bib": "418",
                "name": "Erik Skjellevik Innselset",
                "club": "Kjelsås IL",
                "rank": "5",
                "time": "10:01:30",
                "next_race": "SC1-1",
            },
            {
                "race": "G11KA1",
                "bib": "419",
                "name": "Aleksander Tronsmo-Oraug",
                "club": "Kjelsås IL",
                "rank": "6",
                "time": "10:01:30",
                "next_race": "SC1-3",
            },
        ]
        return results
| 36.772277 | 83 | 0.407377 | 3,338 | 0.897071 | 0 | 0 | 0 | 0 | 3,275 | 0.88014 | 1,327 | 0.356625 |
dca5acdd95599a8793bd8263beb9c92d163998fa | 786 | py | Python | todo/search.py | ruslan-ok/ServerApps | 541aa12f1933054a12f590ce78544178be374669 | [
"MIT"
] | 1 | 2021-06-07T02:14:13.000Z | 2021-06-07T02:14:13.000Z | todo/search.py | ruslan-ok/ServerApps | 541aa12f1933054a12f590ce78544178be374669 | [
"MIT"
] | 9 | 2021-08-14T07:53:47.000Z | 2022-03-18T19:07:22.000Z | todo/search.py | ruslan-ok/ServerApps | 541aa12f1933054a12f590ce78544178be374669 | [
"MIT"
] | null | null | null | from django.db.models import Q
from hier.search import SearchResult
from hier.grp_lst import search as hier_search
from hier.params import get_search_mode
from .models import app_name, Task
def search(user, query):
result = SearchResult(query)
search_mode = get_search_mode(query)
lookups = None
if (search_mode == 1):
lookups = Q(name__icontains=query) | Q(info__icontains=query) | Q(url__icontains=query)
elif (search_mode == 2):
lookups = Q(categories__icontains=query[1:])
items = Task.objects.filter(user = user.id).filter(lookups).distinct()
for item in items:
result.add(app_name, 'task', item.id, item.created.date(), item.name, item.info)
result.items += hier_search(user, app_name, query)
return result.items
| 35.727273 | 95 | 0.709924 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 | 0.007634 |
dca66a7ca7f299b52d7325456eda52e50d368e26 | 4,217 | py | Python | usrobj_src/pyembroideryGH_AddStitchblock.py | fstwn/pyembroideryGH | 5153e7e5692f9bd158276baecff5e3e53a9363f4 | [
"MIT"
] | 4 | 2020-08-15T13:24:14.000Z | 2021-11-16T12:34:29.000Z | usrobj_src/pyembroideryGH_AddStitchblock.py | fstwn/pyembroideryGH | 5153e7e5692f9bd158276baecff5e3e53a9363f4 | [
"MIT"
] | 1 | 2020-08-12T18:45:12.000Z | 2021-11-29T10:32:38.000Z | usrobj_src/pyembroideryGH_AddStitchblock.py | fstwn/pyembroideryGH | 5153e7e5692f9bd158276baecff5e3e53a9363f4 | [
"MIT"
] | null | null | null | """
Adds one or many StitchBlocks to an embroidery pattern supplied as
pyembroidery.EmbPattern instance
Inputs:
Pattern: The pattern to be modified as pyembroidery.EmbPattern
instance.
{item, EmbPattern}
StitchBlock: The stitchblock(s) to add to the pattern.
{list, StitchBlock}
Output:
Pattern: The modified pattern with the newly added stitchblock(s).
{item/list/tree, EmbPattern}
Remarks:
Author: Max Eschenbach
License: MIT License
Version: 201030
"""
# PYTHON STANDARD LIBRARY IMPORTS
from __future__ import division
# GHPYTHON SDK IMPORTS
from ghpythonlib.componentbase import executingcomponent as component
import Grasshopper, GhPython
import System
import Rhino
import rhinoscriptsyntax as rs
# GHENV COMPONENT SETTINGS
ghenv.Component.Name = "AddStitchBlock"
ghenv.Component.NickName = "ASB"
ghenv.Component.Category = "pyembroideryGH"
ghenv.Component.SubCategory = "3 Pattern Creation"
# LOCAL MODULE IMPORTS
try:
import pyembroidery
except ImportError:
errMsg = ("The pyembroidery python module seems to be not correctly " +
"installed! Please make sure the module is in you search " +
"path, see README for instructions!.")
raise ImportError(errMsg)
class StitchBlock(object):
    """Pairs a list of stitches with the EmbThread used to sew them."""

    def __init__(self, stitches, thread):
        self._set_stitches(stitches)
        self._set_thread(thread)

    def __getitem__(self, item):
        # Tuple-style access: [0] -> stitches, [1] -> thread.
        return (self.stitches, self.thread)[item]

    def get_stitches_iter(self):
        """Yield the stitches of this block one at a time."""
        for stitch in self._stitches:
            yield stitch

    def _get_stitches(self):
        return self._stitches

    def _set_stitches(self, stitches):
        # Accept a list as-is, convert a tuple, reject everything else.
        if isinstance(stitches, tuple):
            stitches = list(stitches)
        if not isinstance(stitches, list):
            raise ValueError("Supplied data for stitches is not a valid list " +
                             "of stitches!")
        self._stitches = stitches

    stitches = property(_get_stitches, _set_stitches, None,
                        "The stitches of this StitchBlock")

    def _get_thread(self):
        return self._thread

    def _set_thread(self, thread):
        if not isinstance(thread, pyembroidery.EmbThread):
            raise ValueError("Supplied thread is not a valid EmbThread " +
                             "instance!")
        self._thread = thread

    thread = property(_get_thread, _set_thread, None,
                      "The thread of this StitchBlock")

    def ToString(self):
        """Human-readable description shown by Grasshopper."""
        return "StitchBlock ({} Stitches, EmbThread {})".format(
            len(self.stitches), self.thread.hex_color())
class AddStitchBlock(component):
    """GhPython component: append StitchBlock(s) to a copy of an EmbPattern."""

    def RunScript(self, pattern_in, stitchblock):
        # initialize outputs
        Pattern = Grasshopper.DataTree[object]()
        if pattern_in is not None and stitchblock:
            # copy the input pattern to avoid modification on the original object
            if isinstance(pattern_in, pyembroidery.EmbPattern):
                pattern_in = pattern_in.copy()
            else:
                raise TypeError("Supplied pattern is no valid " +
                                "pyembroidery.EmbPattern instance! " +
                                "Please check your inputs and try again.")
            # loop over all stitchblocks and add to pattern
            for i, sb in enumerate(stitchblock):
                pattern_in.add_stitchblock(sb)
            # add pattern to output tree
            Pattern.Add(pattern_in)
        else:
            # Missing inputs: surface Grasshopper warnings instead of raising.
            rml = self.RuntimeMessageLevel.Warning
            if pattern_in is None:
                errMsg = ("Input Pattern failed to collect data!")
                self.AddRuntimeMessage(rml, errMsg)
            if not stitchblock:
                errMsg = ("Input StitchBlock failed to collect data!")
                self.AddRuntimeMessage(rml, errMsg)
        # return outputs if you have them; here I try it for you:
        return Pattern
| 34.284553 | 81 | 0.611572 | 2,873 | 0.68129 | 81 | 0.019208 | 0 | 0 | 0 | 0 | 1,537 | 0.364477 |
dca7a1a09a3b992422e57d631b27fdd4b7429114 | 3,920 | py | Python | api.py | brannonvann/neato-driver-python | 84d0ff4c1ff89c12b340712890e901e7c8b77526 | [
"BSD-3-Clause"
] | 1 | 2021-10-02T21:47:00.000Z | 2021-10-02T21:47:00.000Z | api.py | brannonvann/neato-driver-python | 84d0ff4c1ff89c12b340712890e901e7c8b77526 | [
"BSD-3-Clause"
] | null | null | null | api.py | brannonvann/neato-driver-python | 84d0ff4c1ff89c12b340712890e901e7c8b77526 | [
"BSD-3-Clause"
] | null | null | null | # Script used to read all help text from Neato.
# Simply connect Neato, update your port
# name ('/dev/neato') and run this script.
# All help markdown is written to a file in the
# same directory called neato_help.md
# Author: Brannon Vann brannon.vann@gmail.com
# License: MIT
# Run this script: python api.py
# Note: This script does not save your serial numbers. #Prevent Serial Write parts below prevent the write out serial numbers.
import neato_driver as robot
robot.init('/dev/tty.usbmodem14601')
# Command names parsed from the robot's Help output.
commands = []
# Markdown table-of-contents accumulated as sections are added.
toc = "\n## Table of Contents\n"


def add_section(level, title):
    """Append *title* to the global TOC and return its markdown heading.

    *level* is a heading marker such as "##"; TOC entries are indented
    two spaces for each "#" beyond the second.
    """
    global toc
    anchor = "-".join(title.lower().split())
    indent = (len(level) - 2) * " "
    toc += "\n{}- [{}](#{})".format(indent, title, anchor)
    return "\n\n{} {}".format(level, title)
def print_lines(text=None):
    """Wrap *text* in a fenced markdown code block.

    Blank lines and "Help Strlen ..." status lines are dropped; with no
    text, an empty fenced block is returned.
    """
    markdown = "\n\n```"
    if text:
        for line in text.split('\n'):
            # Skip blank lines and the "Help Strlen" length banner.
            if line and not line.startswith("Help Strlen"):
                markdown += "\n" + line
    return markdown + "\n```"
# print help output
help = robot.Help()
main = ""
# Collect command names: every "Name - description" line of the help text.
for line in help.split('\n')[1:]:
    if line.find(' - ') != -1:
        parts = line.split(" - ")
        commands.append(parts[0])

# iterate help output to request command specific output
for command in commands:
    # command
    if command == "SetMotor":
        main += add_section(
            "##", "SetMotorBrush, SetMotorVacuum, SetMotorWheelsEnable, and SetMotorWheels")
    else:
        main += add_section("##", command)
    # command help
    desc = "\n\nThe below description from the Neato vacuum maps to the `neato_driver." + \
        command + "()` function"
    if command == "SetMotor":
        desc = "\n\nThe below description from the Neato vacuum maps to the `neato_driver.`SetMotorBrush()`, `neato_driver.SetMotorVacuum()`, `neato_driver.SetMotorWheelsEnable()`, and `neato_driver.SetMotorWheels()` functions. These were divided to make it easier to integrate to the Neato."
    if command == "SetIEC":
        desc = "\n\nThe SetIEC function is not supported by this driver."
    if command == "GetSysLog":
        desc = "\n\nThe GetSysLog function was not implemented in the test Neato. The raw results are returned."
    if command == "Upload":
        desc = "\n\nThe Upload function is not supported by this driver."
    main += desc
    main += print_lines(robot.Help(command))
    # command example
    if command.startswith('Get') or command.startswith('Help'):
        fn = getattr(robot, command)
        result = fn()
        if type(result) is dict:
            # Prevent Serial Write: mask serial-number entries so they are
            # not published in the generated documentation.
            for key in result:
                if str(key).find("Serial") > -1:
                    result[key] = "SERIAL-EXCLUDED"
        example = str(result)
        main += "\n\nReturns: " + "`" + str(type(result)) + "`"
        main += "\n\n**Data Example:**"
        main += print_lines(example)

header = "# API\n"
header += '\n'
header += "This describes the `neato_driver.py` API. The documentation is ordered and grouped according to the Neato API which matches the `neato_driver.py` API in all but a few cases. Any differences are noted.\n\n"
header += "Each of the `neato_driver` functions are described below along with the Neato vacuum supplied help description that describes the function and if the function returns data, the data type returned and an example is provided.\n\n"
header += "This was generated using the `api.py` script. To produce the documentation, adjust the serial port to match your Neato's and run the script.\n"

# write out file, overwrites any existing file
helpResponseProcessor = open("api.md", "w")
helpResponseProcessor.write(header+main)  # +toc
helpResponseProcessor.close()
print("Done creating api.md document")
| 34.385965 | 292 | 0.640051 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,250 | 0.57398 |
dca7b5998d3f52978888cf82288345897a3e20b1 | 2,162 | py | Python | UseImportJumpCommand.py | tinwatchman/Sublime-UseImport | a08770c4d1a32be947033a05e640afdf69fd95f1 | [
"MIT"
] | null | null | null | UseImportJumpCommand.py | tinwatchman/Sublime-UseImport | a08770c4d1a32be947033a05e640afdf69fd95f1 | [
"MIT"
] | null | null | null | UseImportJumpCommand.py | tinwatchman/Sublime-UseImport | a08770c4d1a32be947033a05e640afdf69fd95f1 | [
"MIT"
] | null | null | null | import sublime, sublime_plugin
import json
import useutil
class UseImportJumpCommand(sublime_plugin.TextCommand):
def description(self):
return 'Jump to File (Use-Import)'
def is_enabled(self):
return self.is_javascript_view()
def is_visible(self):
return self.is_javascript_view() and self.is_use_import_name()
def run(self, edit):
if self.is_javascript_view():
name = self.find_use_import_name()
if (name != False):
data = self.get_config()
if name in data:
relpath = data.get(name)
configpath = self.view.settings().get('UseImport_use_json_path')
abspath = useutil.get_abs_filepath(relpath, configpath)
if abspath != False:
self.view.window().open_file(abspath)
def is_javascript_view(self):
file_syntax = self.view.settings().get('syntax')
return useutil.is_javascript_syntax(file_syntax)
def is_use_import_name(self):
sels = self.view.sel()
for sel in sels:
curline = self.view.substr(self.view.line(sel))
m = useutil.parse_use_import_name(curline)
if (m != False):
return True
return False
def find_use_import_name(self):
sels = self.view.sel()
for sel in sels:
curline = self.view.substr(self.view.line(sel))
m = useutil.parse_use_import_name(curline)
if (m != False):
return m
return False
def get_config(self):
if self.view.settings().has('UseImport_use_json_path'):
filepath = self.view.settings().get('UseImport_use_json_path')
else:
filepath = useutil.search(self.view.file_name())
self.view.settings().set('UseImport_use_json_path', filepath)
if filepath != False:
return self.load_file(filepath)
return False
def load_file(self, filepath):
with open(filepath, 'r') as myfile:
rawdata = myfile.read()
return json.loads(rawdata)
| 34.31746 | 84 | 0.592044 | 2,102 | 0.972248 | 0 | 0 | 0 | 0 | 0 | 0 | 138 | 0.06383 |
dca8bfe06cd9c25c611a5f5c53620c138ed89415 | 221 | py | Python | ExerciciosPython/67- Tabuada.py | lucadomingues/Python | b129c03cb95dac2a0baf59461eef7667373dc52f | [
"MIT"
] | null | null | null | ExerciciosPython/67- Tabuada.py | lucadomingues/Python | b129c03cb95dac2a0baf59461eef7667373dc52f | [
"MIT"
] | null | null | null | ExerciciosPython/67- Tabuada.py | lucadomingues/Python | b129c03cb95dac2a0baf59461eef7667373dc52f | [
"MIT"
] | null | null | null | while True:
print('\n--- MULTIPLICATION TABLE ---')
num = int(input('Type a number integer: '))
if num < 0:
break
for c in range(1, 11):
print(f'{c} X {num} = {c*num}')
print('END PROGRAM') | 27.625 | 47 | 0.533937 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 94 | 0.425339 |
dca8e81c52f411e5fd1e8a901f0fe89f256a3a88 | 423 | py | Python | 154/main.py | pauvrepetit/leetcode | 6ad093cf543addc4dfa52d72a8e3c0d05a23b771 | [
"MIT"
] | null | null | null | 154/main.py | pauvrepetit/leetcode | 6ad093cf543addc4dfa52d72a8e3c0d05a23b771 | [
"MIT"
] | null | null | null | 154/main.py | pauvrepetit/leetcode | 6ad093cf543addc4dfa52d72a8e3c0d05a23b771 | [
"MIT"
] | null | null | null | # 154. 寻找旋转排序数组中的最小值 II
# 剑指 Offer 11. 旋转数组的最小数字
#
# 20200722
# huao
# 这个其实还真是不好做呀
# O(n)的算法自然是非常简单的,直接扫一遍就完了
# 但是这个list本身是由两段排好序的list组合而成的,这个条件实在是不太好用上啊
from typing import List
class Solution:
def findMin(self, nums: List[int]) -> int:
minNum = nums[0]
for i in range(len(nums)):
minNum = min(minNum, nums[i])
return minNum
sol = Solution()
print(sol.findMin([1,3,3]))
| 18.391304 | 46 | 0.640662 | 186 | 0.310518 | 0 | 0 | 0 | 0 | 0 | 0 | 322 | 0.537563 |
dca914a8d792e00fc88ce27a1110cee5515a0008 | 1,202 | py | Python | lume_model/tests/keras/test_layers.py | slaclab/lume-model | 7602a71b23b53488321c62eb7064728c99023bac | [
"BSD-3-Clause-LBNL"
] | 2 | 2020-08-21T05:14:38.000Z | 2022-03-02T17:45:07.000Z | lume_model/tests/keras/test_layers.py | jacquelinegarrahan/lume-model | d66e8031a7ff378626cf35d50e82479ce41fddd5 | [
"BSD-3-Clause-LBNL"
] | 3 | 2020-11-06T07:08:40.000Z | 2022-03-03T19:36:23.000Z | lume_model/tests/keras/test_layers.py | jacquelinegarrahan/lume-model | d66e8031a7ff378626cf35d50e82479ce41fddd5 | [
"BSD-3-Clause-LBNL"
] | 4 | 2020-06-12T21:38:29.000Z | 2021-07-21T01:41:09.000Z | import pytest
import sys
# test value and failed initialization with characters
@pytest.mark.parametrize(
"offset,scale,lower,upper",
[
(1, 2, 0, 1),
(5, 4, -1, 1),
pytest.param("t", "e", "s", "t", marks=pytest.mark.xfail),
],
)
def test_scale_layer(offset, scale, lower, upper):
layers = pytest.importorskip("lume_model.keras.layers")
scale_layer = layers.ScaleLayer(offset, scale, lower, upper)
# test value and failed initialization with characters
@pytest.mark.parametrize(
"offset,scale,lower,upper",
[
(1, 2, 0, 1),
(5, 4, -1, 1),
pytest.param("t", "e", "s", "t", marks=pytest.mark.xfail),
],
)
def test_unscale_layer(offset, scale, lower, upper):
layers = pytest.importorskip("lume_model.keras.layers")
unscale_layer = layers.UnscaleLayer(offset, scale, lower, upper)
# test value and failed initialization with characters
@pytest.mark.parametrize(
"offset,scale", [(1, 2), (5, 4), pytest.param("t", "e", marks=pytest.mark.xfail),],
)
def test_unscale_image_layer(offset, scale):
layers = pytest.importorskip("lume_model.keras.layers")
unscale_layer = layers.UnscaleImgLayer(offset, scale)
| 30.820513 | 87 | 0.663894 | 0 | 0 | 0 | 0 | 1,004 | 0.835275 | 0 | 0 | 333 | 0.277038 |
dca992f619b9c214f0d6426b004d8c1ba8badeba | 17,548 | py | Python | pyswrve/export_api.py | badanin-dmitry-playrix/pyswrve | 8eb005ba6aad0293af7b2615d37ff319a2d372f0 | [
"MIT"
] | 4 | 2016-08-03T13:00:48.000Z | 2019-06-01T09:16:15.000Z | pyswrve/export_api.py | badanin-dmitry-playrix/pyswrve | 8eb005ba6aad0293af7b2615d37ff319a2d372f0 | [
"MIT"
] | 2 | 2016-08-02T19:39:09.000Z | 2018-01-10T11:35:03.000Z | pyswrve/export_api.py | badanin-dmitry-playrix/pyswrve | 8eb005ba6aad0293af7b2615d37ff319a2d372f0 | [
"MIT"
] | 2 | 2016-08-02T19:32:14.000Z | 2017-02-08T08:55:10.000Z | # -*- coding: utf-8 -*-
from urllib.parse import urljoin
from datetime import datetime, timedelta
from .api import SwrveApi
class SwrveExportApi(SwrveApi):
""" Class for requesting stats with Swrve Export API
https://docs.swrve.com/swrves-apis/non-client-apis/swrve-export-api-guide
"""
kpi_factors = {'dau', 'mau', 'dau_mau', 'new_users', 'dpu', 'conversion',
'dollar_revenue', 'currency_spent', 'currency_spent_dau',
'currency_purchased', 'currency_purchased_dau',
'currency_given', 'items_purchased', 'items_purchased_dau',
'session_count', 'avg_session_length', 'arpu_daily',
'arppu_daily', 'arpu_monthly', 'arppu_monthly',
'avg_playtime', 'day30_retention'}
for i in (1, 3, 7):
kpi_factors.add('day%s_reengagement' % i)
kpi_factors.add('day%s_retention' % i)
kpi_taxable = {'dollar_revenue', 'arpu_daily', 'arppu_daily',
'arpu_monthly', 'arppu_monthly'}
period_lens = {'day': 1, 'week': 7, 'month': 30, 'year': 360}
date_formats = {
'DH-': '%Y-%m-%d-%H',
'H-': '%Y-%m-%d-%H',
'MD-': '%Y-%m-%d',
'D-': '%Y-%m-%d',
'M-': '%Y-%m',
'Y-': '%Y'
}
def __init__(self, region='us', api_key=None, personal_key=None,
section=None, conf_path=None):
""" __init__
:param region: [:class:`str`] us or eu region, it defines domain
in urls - dashboard.swrve.com or eu-dashboard.swrve.com
:param api_key: [:class:`str`] API Key from Swrve Dashboard -
Setup - Integration Settings - App Information
:param personal_key: [:class:`str`] Your personal key from
Swrve Dashboard Setup - Integration Settings
:param section: [:class:`str`] section in pyswrve config, you
are able to store keys for different projects in different
config sections
:param conf_path: [:class:`str`] arg overrides default path to
config file with entered
"""
super().__init__(region, api_key, personal_key, section, conf_path)
self._api_url = urljoin(self._api_url, 'exporter/')
def set_dates(self, start=None, stop=None, period=None, period_len=None):
""" Set start and stop or history params
:param start: period's first date
:type start: datetime, str
:param stop: period's last date
:type stop: datetime, str
:param period: [:class:`str`] day, week, month or year
:period_len: [:class:`int`] count of days (weeks, etc) in period
"""
if period:
if period_len is None:
period_len = 1
stop = datetime.today()
days = period_len * self.period_lens(period)
start = stop - timedelta(days=days)
if isinstance(start, datetime):
start = start.strftime('%Y-%m-%d')
if isinstance(stop, datetime):
stop = stop.strftime('%Y-%m-%d')
self.set_param('start', start)
self.set_param('stop', stop)
def to_datetime(self, date_str):
""" Create `datetime` object from string with specified format
:param date_str: [:class:`str`] string with date
:return: `datetime` object
"""
for fmt_key in self.date_formats:
if date_str.startswith(fmt_key):
fmt = self.date_formats[fmt_key]
date_part = date_str.split(fmt_key)[1]
break
return datetime.strptime(date_part, fmt)
def get_kpi(self, kpi, with_date=True, as_datetime=False, currency=None,
segment=None, multiplier=None, **kwargs):
""" Request the kpi stats
:param kpi: [:class:`str`] the kpi's name, one from
`SwrveExportApi.kpi_factors`
:param with_date: [`bool`] by default swrve return every element
as [['D-2015-01-31', 126.0], ['D-2015-01-31', 116.0]] so
the result is a list of lists, if `with_date` setted to `True`
the original result is modifing to list of values like
[126.0, 116.0]
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param currency: [:class:`str`] in-project currency, used for kpis
like currency_given
:param segment: [:class:`str`] request stats for specified segment
:param multiplier: [:class:`float`] revenue multiplier like in Swrve
Dashboard - Setup - Report Settings - Reporting Revenue,
it applies to revenue, arpu and arppu
:return: [:class:`list`] a list of lists with dates and values or
a list of values, it depends on with_date arg
"""
url = urljoin(self._api_url, 'kpi/%s.json' % kpi)
data = self.send_api_request(url, currency=currency, segment=segment,
**kwargs)
results = data[0]['data']
if multiplier is not None and kpi in self.kpi_taxable:
results = [[i[0], i[1]*multiplier] for i in results]
if not with_date:
results = [i[1] for i in results]
elif as_datetime:
results = [[self.to_datetime(i[0]), i[1]] for i in results]
return results
def get_kpi_dau(self, kpi, with_date=True, as_datetime=False,
currency=None, segment=None, multiplier=None, **kwargs):
"""" Request the kpi stats and divide every value with DAU
:param kpi: [:class:`str`] the kpi's name, one from
`SwrveExportApi.kpi_factors`
:param with_date: [`bool`] by default swrve return every element
as [['D-2015-01-31', 126.0], ['D-2015-01-31', 116.0]] so
the result is a list of lists, if `with_date` setted to `True`
the original result is modifing to list of values like
[126.0, 116.0]
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param currency: [:class:`str`] in-project currency, used for kpis
like currency_given
:param segment: [:class:`str`] request stats for specified segment
:param multiplier: [:class:`float`] revenue multiplier like in Swrve
Dashboard - Setup - Report Settings - Reporting Revenue,
it applies to revenue, arpu and arppu
:return: [:class:`list`] a list of lists with dates and values or
a list of values, it depends on with_date arg
"""
data = {}
for k in ('dau', kpi):
data[k] = self.get_kpi(k, with_date, as_datetime, currency,
segment, multiplier, **kwargs)
results = []
for idx in range(len(data['dau'])):
_dau = data['dau'][idx]
_kpi = data[kpi][idx]
if _dau == 0:
res = 0
elif isinstance(_dau, list) and _dau[1] == 0:
res = [0]
elif isinstance(_dau, list):
res = [_dau[0], _kpi[1] / _dau[1]]
else:
res = _kpi / _dau
results.append([res])
return results
def get_evt(self, evt_name, with_date=True, as_datetime=False,
segment=None, **kwargs):
""" Request event stats
:param evt_name: [:class:`str`] the event name
:param with_date: [`bool`] by default swrve return every element
as [['D-2015-01-31', 126.0], ['D-2015-01-31', 116.0]] so
the result is a list of lists, if `with_date` setted to `True`
the original result is modifing to list of values like
[126.0, 116.0]
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param segment: [:class:`str`] request stats for specified segment
:return: [:class:`list`] a list of lists with dates and values or
a list of values, it depends on with_date arg
"""
url = urljoin(self._api_url, 'event/count')
data = self.send_api_request(url, name=evt_name, segment=segment,
**kwargs)
results = data[0]['data']
if not with_date:
results = [i[1] for i in results]
elif as_datetime:
results = [[self.to_datetime(i[0]), i[1]] for i in results]
return results
def get_evt_dau(self, evt_name, with_date=True, as_datetime=False,
segment=None, **kwargs):
""" Request event stats and divide every value with DAU
:param evt_name: [:class:`str`] the event name
:param with_date: [`bool`] by default swrve return every element
as [['D-2015-01-31', 126.0], ['D-2015-01-31', 116.0]] so
the result is a list of lists, if `with_date` setted to `True`
the original result is modifing to list of values like
[126.0, 116.0]
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param segment: [:class:`str`] request stats for specified segment
:return: [:class:`list`] a list of lists with dates and values or
a list of values, it depends on with_date arg
"""
data = {
'dau': self.get_kpi('dau', with_date, as_datetime, segment,
**kwargs),
evt_name: self.get_evt(evt_name, with_date, as_datetime, segment,
**kwargs)
}
results = []
for idx in range(len(data['dau'])):
_dau = data['dau'][idx]
_evt = data[evt_name][idx]
if _dau == 0:
res = 0
elif isinstance(_dau, list) and _dau[1] == 0:
res = [0]
elif isinstance(_dau, list):
res = [_dau[0], _evt[1] / _dau[1]]
else:
res = _evt / _dau
results.append([res])
return results
def get_evt_lst(self):
""" Request project events list
:return: [:class:`list`] a list with events
"""
url = urljoin(self._api_url, 'event/list')
results = self.send_api_request(url)
return results
def get_payload(self, evt_name, payload_key, with_date=True,
as_datetime=False, default_struct=False):
""" Request stats for the event with specified payload key
:param evt_name: [:class:`str`] the event name
:param payload_key: [:class:`str`] the payload key
:param with_date: [`bool`] by default swrve return every element
as [['D-2015-01-31', 126.0], ['D-2015-01-31', 116.0]] so
the result is a list of lists, if `with_date` setted to `True`
the original result is modifing to list of values like
[126.0, 116.0]
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param default_struct: [`bool`] default response data structure are
`[{'data': [['D-2017-01-01', 160], ['D-2018-01-02', 116]],
'event_name': 'levelup',
'name': 'levelup/level/1',
'payload_key': 'level',
'payload_value': '1},
{'data': [['D-2017-01-01', 260], ['D-2018-01-02', 216]],
'event_name': 'levelup',
'name': 'levelup/level/2',
'payload_key': 'level',
'payload_value': '2'}]`
by setting default_struct = False the structure are transforming in
`[{'timeline': 'D-2018-01-01', '1': 116, '2': 260},
{'timeline': 'D-2018-01-02', '1': 116, '2': 216}]`
:return: [:class:`list`] a list of dicts with stats for
payload key in event
"""
url = urljoin(self._api_url, 'event/payload')
data = self.send_api_request(url, name=evt_name,
payload_key=payload_key)
if not with_date:
for dct in data:
dct['data'] = [i[1] for i in dct['data']]
elif as_datetime:
for dct in data:
dct['data'] = [
[self.to_datetime(i[0]), i[1]] for i in dct['data']
]
if default_struct:
return data
results = {}
for dct in data:
paylod_value = dct['payload_value']
for idx in range(len(dct['data'])):
if isinstance(dct['data'][idx], list):
results_key, value = dct['data'][idx]
else:
results_key = idx
value = dct['data'][idx]
if results_key not in results:
results[results_key] = {'timeline': results_key}
results[results_key][paylod_value] = value
results = sorted(results.values(), key=lambda x: x['timeline'])
return results
def get_payload_lst(self, evt_name):
""" Request event payloads list
:param evt_name: [:class:`str`] the event name
:return: [:class:`list`] a list with payloads
"""
url = urljoin(self._api_url, 'event/payloads')
results = self.send_api_request(url, name=evt_name)
return results
def get_user_cohorts(self, cohort_type='retention', as_datetime=False,
segment=None):
""" Request user cohorts data
:param cohort_type: [:class:`str`] the type of cohort data to be
requested: retention, avg_sessions, avg_playtime, avg_revenue or
total_revenue
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param segment: [:class:`str`] request stats for specified segment
:return: [:class:`dict`] a dict where keys are where cohorts dates
and values are dicts with cohort info
"""
url = urljoin(self._api_url, 'cohorts/daily')
data = self.send_api_request(url, cohort_type=cohort_type,
segment=segment)
results = data[0]['data']
if as_datetime:
results = {
datetime.strptime(k, '%Y-%m-%d'): results[k] for k in results
}
return results
def get_item_sales(self, uid=None, tag=None, as_datetime=False,
currency=None, segment=None, **kwargs):
""" Request the sales (count) of the item(s). If no uid or tag is
specified, requests all items.
:param uid: [:class:`str`] uid of the item
:param tag: [:class:`str`] tag of the items
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param currency: [:class:`str`] if currency is None requests for all
:param segment: request stats for specified segment
:return: [:class:`list`] a list of dicts, one dict - one currency
"""
url = urljoin(self._api_url, 'item/sales')
results = self.send_api_request(url, uid=uid, tag=tag,
currency=currency, segment=segment,
**kwargs)
if as_datetime:
for dct in results:
dct['data'] = [
[self.to_datetime(i[0]), i[1]] for i in dct['data']
]
return results
def get_item_revenue(self, uid=None, tag=None, as_datetime=False,
currency=None, segment=None, **kwargs):
""" Request revenue (count * price) from the item(s). If no uid or
tag is specified, requests all items.
:param uid: [:class:`str`] uid of the item
:param tag: [:class:`str`] tag of the items
:param as_datetime: [`bool`] if True convert strings with dates
to `datetime` object, default value is False
:param currency: [:class:`str`] if currency is None requests for all
:param segment: request stats for specified segment
:return: [:class:`list`] a list of dicts, one dict - one currency
"""
url = urljoin(self._api_url, 'item/revenue')
results = self.send_api_request(url, uid=uid, tag=tag,
currency=currency, segment=segment,
**kwargs)
if as_datetime:
for dct in results:
dct['data'] = [
[self.to_datetime(i[0]), i[1]] for i in dct['data']
]
return results
def get_item_tag(self, tag):
""" Request uids of all items that are associated with the tag
:param tag: [:class:`str`] resources tag
:return: [:class:`list`] list of dicts with uids and names
"""
url = urljoin(self._api_url, 'item/tag')
results = self.send_api_request(url, tag=tag)
return results
def get_segment_lst(self):
""" Request prject segments list
:return: [:class:`list`] a list with segments
"""
url = urljoin(self._api_url, 'segment/list')
results = self.send_api_request(url)
return results
| 39.169643 | 79 | 0.557727 | 17,419 | 0.992649 | 0 | 0 | 0 | 0 | 0 | 0 | 9,636 | 0.549122 |
dcaafd00e0a89f91725c38ea88b87276b0daa613 | 605 | py | Python | makerHello/OpenCV-Face-detection/database/ImportLog.py | lingdantiancai/face-FD-FR | 48f1acafb4a4fc767c8d389a28e4b4e73246a7ea | [
"MIT"
] | null | null | null | makerHello/OpenCV-Face-detection/database/ImportLog.py | lingdantiancai/face-FD-FR | 48f1acafb4a4fc767c8d389a28e4b4e73246a7ea | [
"MIT"
] | null | null | null | makerHello/OpenCV-Face-detection/database/ImportLog.py | lingdantiancai/face-FD-FR | 48f1acafb4a4fc767c8d389a28e4b4e73246a7ea | [
"MIT"
] | null | null | null | #创建数据库并把txt文件的数据存进数据库
import sqlite3 #导入sqlite3
cx = sqlite3.connect('FaceRes.db') #创建数据库,如果数据库已经存在,则链接数据库;如果数据库不存在,则先创建数据库,再链接该数据库。
cu = cx.cursor() #定义一个游标,以便获得查询对象。
#cu.execute('create table if not exists train4 (id integer primary key,name text)') #创建表
fr = open('log.txt') #打开要读取的txt文件
for line in fr.readlines(): #将数据按行插入数据库的表VisitRecord中。
line_list = line.split(" ")
time = line_list[0]+" "+line_list[1]
name = line_list[2]
print(time)
print(name)
cu.execute('insert into VisitRecord values(?,?)',(time,name))
cu.close() #关闭游标
cx.commit() #事务提交
cx.close() #关闭数据库 | 35.588235 | 89 | 0.694215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 546 | 0.644628 |
dcabba91c3341fdf7c41e1e8c9b5445c26d6bc30 | 2,801 | py | Python | plot_gen.py | XanaduAI/kerr-squeezing | 633db9d39f0943d8a8b7431c8d87a9d1147ba32d | [
"Apache-2.0"
] | 1 | 2021-01-13T02:02:43.000Z | 2021-01-13T02:02:43.000Z | plot_gen.py | XanaduAI/kerr-squeezing | 633db9d39f0943d8a8b7431c8d87a9d1147ba32d | [
"Apache-2.0"
] | null | null | null | plot_gen.py | XanaduAI/kerr-squeezing | 633db9d39f0943d8a8b7431c8d87a9d1147ba32d | [
"Apache-2.0"
] | 5 | 2020-02-27T00:58:05.000Z | 2022-02-10T15:10:19.000Z | import numpy as np
import matplotlib.pyplot as plt
import glob
from strawberryfields.decompositions import takagi
def jsa_from_m(m):
"""Given a phase sensitive moment m returns the joint spectral amplitude associated with it.
Args:
m (array): phase sentive moment
Returns:
(array): joint spectral amplitude
"""
ls, u = takagi(m)
return u @ np.diag(0.5 * np.arcsinh(2 * ls)) @ u.T
n = 1501 # Size of discretization in k
files = glob.glob("*" + str(n) + "*.npy")
files.sort()
l = 2 # Number of parameter settings
mfiles = files[2 * l : 3 * l]
nfiles = files[3 * l : 4 * l]
meanfiles = files[1 * l : 2 * l]
ks = np.load(files[0])
totp = l
localms = [np.load(mfiles[i]) for i in range(totp)]
localns = [np.load(nfiles[i]) for i in range(totp)]
N = np.empty([totp])
## Generating plot for the Schmidt number occupations
fig, ax = plt.subplots(totp, 1, sharey=False, sharex=False, figsize=(4, 3))
for i in range(totp):
y = 5
ns = np.linalg.eigvalsh(localns[i])[::-1]
K = (np.sum(ns) ** 2) / np.sum(ns ** 2)
N[i] = np.sum(ns)
ax[i].bar(
np.arange(y),
ns[0:y],
label=r"$K$=" + str(np.round(K, 4)) + r", $\langle n \rangle=$" + str(np.round(N[i], 4)),
)
ax[i].legend()
if i == 0:
ax[i].set_xlabel(r"Schmidt mode $j$")
ax[i].set_ylabel(r"$\langle n_j \rangle $")
fig.savefig("schmidt_occ.pdf")
plt.close()
## Generating plot for the energy density in the fluctuations
for i in range(totp):
x = np.load(nfiles[i])
plt.semilogy(
ks,
np.diag(x).real / (ks[1] - ks[0]),
label=r"$\langle n \rangle =$ " + str(np.round(N[i], 4)),
)
plt.xlim([-18, 18])
plt.xlabel(r"$(k-k_p)/\Delta k$")
plt.title(r"$\langle \delta b(k)^\dagger \delta b(k) \rangle$")
plt.legend()
plt.savefig("squeezed_energydensity.pdf")
plt.close()
## Generating plot for the energy density in the mean
for i in range(totp):
x = np.load(meanfiles[i])
plt.plot(ks, np.abs(x) ** 2, label=r"$\langle n \rangle =$ " + str(np.round(N[i], 4)))
plt.xlim([-10, 10])
plt.xlabel(r"$(k-k_p)/\Delta k$")
plt.ylabel(r"arb. units")
plt.title(r"$|\langle b(k) \rangle|^2$")
plt.legend()
plt.savefig("mean_energydensity.pdf")
plt.close()
## Generating plot for the joint spectral amplitude
fig, ax = plt.subplots(1, totp, sharex=False, sharey=True, figsize=(12, 12))
for i in range(totp):
localm = np.load(mfiles[i])
ax[i].contour(ks, ks, np.abs(jsa_from_m(localm)), origin="lower", cmap="Greens")
ax[i].set_aspect(aspect=1)
ax[i].set_xlim([-10, 10])
ax[i].set_ylim([-10, 10])
ax[i].set_xlabel(r"$(k-k_p)/\Delta k$")
if i == 0:
ax[i].set_ylabel(r"$(k'-k_p)/\Delta k$")
ax[i].set_title(r"$J(k,k')$")
fig.savefig("jsas.pdf")
plt.close()
| 27.194175 | 97 | 0.602642 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 906 | 0.323456 |
dcac2f5e5657abf2e5b9b2720e085b03ae852841 | 361 | py | Python | src/2/2997.py | youngdaLee/Baekjoon | 7d858d557dbbde6603fe4e8af2891c2b0e1940c0 | [
"MIT"
] | 11 | 2020-09-20T15:17:11.000Z | 2022-03-17T12:43:33.000Z | src/2/2997.py | youngdaLee/Baekjoon | 7d858d557dbbde6603fe4e8af2891c2b0e1940c0 | [
"MIT"
] | 3 | 2021-10-30T07:51:36.000Z | 2022-03-09T05:19:23.000Z | src/2/2997.py | youngdaLee/Baekjoon | 7d858d557dbbde6603fe4e8af2891c2b0e1940c0 | [
"MIT"
] | 13 | 2021-01-21T03:19:08.000Z | 2022-03-28T10:44:58.000Z | """
2997. 네 번째 수
작성자: xCrypt0r
언어: Python 3
사용 메모리: 29,380 KB
소요 시간: 68 ms
해결 날짜: 2020년 9월 26일
"""
def main():
num = sorted(map(int, input().split()))
d1 = num[1] - num[0]
d2 = num[2] - num[1]
if d1 == d2: res = num[2] + d1
elif d1 > d2: res = num[0] + d2
else: res = num[1] + d1
print(res)
if __name__ == '__main__':
main() | 15.695652 | 43 | 0.523546 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 159 | 0.386861 |
dcadba06d3d1dd54ece65a8004a3986299c7345c | 679 | py | Python | python/convertSVGs.py | JustgeekDE/imdb-visualizations | ebadb8b06b956f0a07a344370457926ad496434e | [
"Unlicense"
] | null | null | null | python/convertSVGs.py | JustgeekDE/imdb-visualizations | ebadb8b06b956f0a07a344370457926ad496434e | [
"Unlicense"
] | null | null | null | python/convertSVGs.py | JustgeekDE/imdb-visualizations | ebadb8b06b956f0a07a344370457926ad496434e | [
"Unlicense"
] | null | null | null | '''
Created on 10.08.2014
@author: Philip Peter <philip.peter@justgeek.de>
As long as you retain this notice you can do whatever you want with this stuff.
If we meet some day, and you think this stuff is worth it, you can buy me a
beer in return
Philip Peter
'''
import os
if __name__ == '__main__':
pass
inputDir = '../plots/svg/'
outputDir = '../plots/'
width = 1800
heigth = 1200
for item in os.listdir(inputDir):
split = item.split(".")
if split[-1] == "svg":
filename = '.'.join(split[:-1])
print "Converting "+filename
os.system("inkscape.exe -z -e "+outputDir+filename+".png -w " + str(width) + " -h " + str(heigth) + " "+inputDir+item)
| 23.413793 | 122 | 0.648012 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 365 | 0.537555 |
dcae5cad19658a7b02f9f861817e9967cf77531c | 4,127 | py | Python | game/management/commands/player-video.py | atadams/bbstuff | 79a40425e140034e4d22c485a8427b1d74a9f4cc | [
"MIT"
] | null | null | null | game/management/commands/player-video.py | atadams/bbstuff | 79a40425e140034e4d22c485a8427b1d74a9f4cc | [
"MIT"
] | null | null | null | game/management/commands/player-video.py | atadams/bbstuff | 79a40425e140034e4d22c485a8427b1d74a9f4cc | [
"MIT"
] | null | null | null | from decimal import Decimal
from pathlib import Path
import requests
from django.core.management import BaseCommand
from django.db.models import F
from django.db.models.aggregates import Max, Min
from moviepy.video.VideoClip import ColorClip, TextClip
from moviepy.video.compositing.CompositeVideoClip import CompositeVideoClip, clips_array
from moviepy.video.compositing.concatenate import concatenate_videoclips
from moviepy.video.fx.crop import crop
from moviepy.video.fx.resize import resize
from moviepy.video.io.VideoFileClip import VideoFileClip
from config.settings.base import PLAY_VIDEO_ROOT
from game.models import AtBat, Pitch, Player
PITCH_TYPE_COLORS = {
'CU': 'LightGreen',
'KC': 'LightGoldenrod',
'SC': 'gray',
'SL': 'LightSeaGreen',
'CH': 'Pink',
'KN': 'LightGreen',
'EP': 'LightGreen',
'FC': 'LightCoral',
'FF': 'LightBlue',
'FS': 'LightSalmon',
'FT': 'LightSkyBlue',
'SI': 'LightSteelBlue',
'FO': 'gray',
'PO': 'gray',
'IN': 'gray',
'UN': 'gray',
'AB': 'gray',
'FA': 'gray',
}
class Command(BaseCommand):
def handle(self, *args, **options):
download_videos = True
headers = {'referer': 'https://www.mlb.com/video/', }
player = Player.objects.get(name_first_last='Kyle Tucker')
at_bats = player.batter.filter(game__date__gt='2020-07-27', game__date__lt='2020-08-14',
game__home_team__abbreviation='HOU').order_by('game__date', 'ab_number')
print(f'At-bats: {len(at_bats)}')
video_array = []
if download_videos:
for at_bat in at_bats:
path_name = f'{PLAY_VIDEO_ROOT}{at_bat.game.path_name_with_id}'
Path(path_name).mkdir(parents=True, exist_ok=True)
for pitch in at_bat.pitches.all().order_by('row_id'):
if not pitch.video_exists:
myfile = requests.get(pitch.mlb_video_url_astros, headers=headers)
if not myfile.ok:
myfile = requests.get(pitch.mlb_video_url_home, headers=headers)
if not myfile.ok:
myfile = requests.get(pitch.mlb_video_url_away, headers=headers)
if not myfile.ok:
myfile = requests.get(pitch.mlb_video_url_network, headers=headers)
if myfile.ok:
open(pitch.video_filepath, 'wb').write(myfile.content)
print(
f'Success: {pitch.at_bat.game.game_description_full_date} {pitch.at_bat.inning_string} {pitch.row_id}')
else:
print(
f'FAILED: {pitch.at_bat.game.game_description_full_date} {pitch.at_bat.inning_string} {pitch.row_id}')
print('Downloads Complete!')
for at_bat in at_bats:
for pitch in at_bat.pitches.all().order_by('row_id'):
if pitch.video_exists:
print(f'{pitch.at_bat.game.game_description_full_date} {pitch.at_bat.inning_string} {pitch.row_id}')
video_clip = VideoFileClip(pitch.video_filepath, audio=False, fps_source='fps')
txt = TextClip(
f'{pitch.at_bat.game.game_description_full_date}',
font='Helvetica-Bold', color='white', fontsize=32)
composite_clip = CompositeVideoClip([
video_clip,
txt.set_position((10, 10)).set_duration(video_clip.duration),
])
video_array.append(composite_clip)
final_clip = concatenate_videoclips(video_array)
final_clip.write_videofile(
f'/Users/aadams/Downloads/plays/{player.clean_name}.mp4', fps=59.94)
def scale_xy(x, y, scale):
return int(x * scale), int(y * scale)
def even_number(num):
int_num = int(num)
if int_num % 2 == 0:
return int_num
else:
return int_num + 1
| 37.18018 | 135 | 0.590502 | 2,850 | 0.690574 | 0 | 0 | 0 | 0 | 0 | 0 | 902 | 0.218561 |
dcaf65a98d3f441b5263eb9d163c8eb4c1855439 | 9,097 | py | Python | nlabel/importers/server.py | poke1024/nlabel | 359972d2f71ba7ed2ee097fffcdc750e225d3784 | [
"MIT"
] | 1 | 2022-02-03T11:28:24.000Z | 2022-02-03T11:28:24.000Z | nlabel/importers/server.py | poke1024/nlabel | 359972d2f71ba7ed2ee097fffcdc750e225d3784 | [
"MIT"
] | null | null | null | nlabel/importers/server.py | poke1024/nlabel | 359972d2f71ba7ed2ee097fffcdc750e225d3784 | [
"MIT"
] | null | null | null | from wsgiref.simple_server import make_server
from nlabel.io.carenero.schema import create_session_factory, \
Text, ResultStatus, Result, Tagger, Vector, Vectors
from nlabel.io.carenero.common import ExternalKey
from nlabel.io.common import ArchiveInfo, text_hash_code
from nlabel.io.carenero.common import json_to_result
from nlabel.io.guid import text_guid, tagger_guid
from sqlalchemy.orm import load_only, lazyload
from falcon_auth2 import AuthMiddleware
from falcon_auth2.backends import BasicAuthBackend
import falcon
import click
import json
import functools
import nlabel.version
def user_loader(attributes, user, password, config):
    """Basic-auth credential check used by falcon_auth2's BasicAuthBackend.

    Args:
        attributes: auth attributes passed by falcon_auth2 (unused here)
        user: the username supplied by the client
        password: the password supplied by the client
        config: dict with the single accepted ``user``/``password`` pair

    Returns:
        True iff both the username and password match the configured pair.
    """
    # Return the boolean comparison directly instead of an if/else that
    # spells out True/False.
    return user == config['user'] and password == config['password']
class PingResource:
    """Health-check endpoint: reports the running nlabel version."""

    def on_get(self, req, resp):
        """GET /ping — respond 200 with a JSON version payload."""
        payload = {'version': nlabel.version.__version__}
        resp.text = json.dumps(payload)
        resp.status = falcon.HTTP_200
resp.status = falcon.HTTP_200
class TaggersByIdResource:
    """GET /taggers/{tagger_id}: return the stored signature of a tagger."""

    def __init__(self, new_session):
        # Factory producing a fresh SQLAlchemy session per request.
        self._new_session = new_session

    def on_get(self, req, resp, tagger_id):
        """Respond 200 with the tagger's signature JSON, or 204 if unknown."""
        session = self._new_session()
        try:
            tagger = session.query(Tagger).filter(
                Tagger.id == tagger_id).first()
            if tagger is None:
                # Unknown tagger id: empty 204 response.
                resp.status = falcon.HTTP_204
            else:
                resp.status = falcon.HTTP_200
                resp.text = json.dumps(tagger.signature)
        finally:
            session.close()
        # BUG FIX: the original unconditionally reset resp.status to 200
        # after the try/finally, clobbering the 204 set for missing taggers.
class TaggersResource:
    """POST /taggers: get-or-create a tagger record from its signature."""

    def __init__(self, new_session):
        # Factory producing a fresh SQLAlchemy session per request.
        self._new_session = new_session

    def on_post(self, req, resp):
        """Canonicalize the posted signature and return the tagger's id."""
        # sort_keys makes the JSON form canonical so identical signatures
        # always serialize to the same string.
        signature = json.dumps(req.media, sort_keys=True)
        session = self._new_session()
        try:
            # Reuse an existing tagger with an identical signature, if any.
            tagger = session.query(Tagger).filter_by(
                signature=signature).first()
            if tagger is None:
                # First time this signature is seen: persist a new tagger.
                tagger = Tagger(
                    guid=tagger_guid(),
                    signature=signature)
                session.add(tagger)
                session.commit()
                session.refresh(tagger)
            resp.status = falcon.HTTP_200
            resp.text = json.dumps({
                'id': tagger.id
            })
        finally:
            session.close()
class TextsResource:
    """POST /texts: get-or-create a text record.

    Accepts a JSON body with optional ``external_key``, ``text`` and ``meta``
    keys; looks up an existing matching Text row (by external key, else by
    text content hash) and creates one if necessary. Responds with the row id.
    """

    def __init__(self, new_session):
        # Factory producing a fresh SQLAlchemy session per request.
        self._new_session = new_session

    def on_post(self, req, resp):
        """Get-or-create the posted text and respond with its id."""
        text_data = req.media

        # Reject unexpected top-level keys outright.
        invalid_keys = set(text_data.keys()) - {
            'external_key', 'text', 'meta'}
        if invalid_keys:
            raise falcon.HTTPInvalidParam(
                "media", str(invalid_keys))

        external_key = ExternalKey.from_value(
            text_data.get('external_key'))
        text_key = text_data.get('text')

        # Canonicalize meta to a sorted JSON string ('' when absent) so it
        # can be compared byte-for-byte against the stored column.
        meta_key = text_data.get('meta')
        if meta_key is None:
            meta_key = ''
        else:
            meta_key = json.dumps(meta_key, sort_keys=True)

        session = self._new_session()
        try:
            text_query = session.query(Text)

            if external_key is not None:
                # Lookup by external key; verify stored text/meta agree.
                text = text_query.filter(
                    Text.external_key == external_key.str,
                    Text.external_key_type == external_key.type).options(
                    lazyload('results'),
                    load_only('id', 'text', 'meta')).first()

                if text is not None:
                    if text.text != text_key:
                        raise falcon.HTTPConflict(
                            f"mismatch in stored text data for external key '{external_key.raw}'")
                    if text.meta != meta_key:
                        raise falcon.HTTPConflict(
                            f"mismatch in stored meta data for external key '{external_key.raw}'")

            elif text_key is not None:
                # Lookup by content: cheap hash filter first, then exact
                # text + meta comparison.
                # BUG FIX: the original called `.first().first()`, invoking
                # `.first()` on the Text/None result and crashing.
                text = text_query.filter(
                    Text.text_hash_code == text_hash_code(text_key)).filter(
                    Text.text == text_key, Text.meta == meta_key).options(
                    load_only('id')).first()
            else:
                # Neither an external key nor text content was supplied.
                resp.status = falcon.HTTP_422
                return

            if text is None:
                new_text_guid = text_guid()

                # NOTE(review): when no external key was posted, the guid is
                # used in its place; this assumes text_guid() yields an object
                # exposing `.str`/`.type` like ExternalKey — TODO confirm.
                if external_key is None:
                    external_key = new_text_guid

                if text_key is None:
                    raise falcon.HTTPInvalidParam(
                        "media", "missing text")

                text = Text(
                    guid=new_text_guid,
                    external_key=external_key.str,
                    external_key_type=external_key.type,
                    text=text_key,
                    text_hash_code=text_hash_code(text_key),
                    meta=meta_key)

                session.add(text)
                session.commit()
                session.refresh(text)

            resp.status = falcon.HTTP_200
            resp.text = json.dumps({
                'id': text.id
            })
        finally:
            session.close()
class ResultsResource:
    """REST resource for tagger results attached to (tagger, text) pairs."""

    def __init__(self, new_session):
        # Factory producing a fresh SQLAlchemy session per request.
        self._new_session = new_session

    def on_get(self, req, resp, tagger_id, text_id):
        """Return the stored result for (tagger_id, text_id).

        The optional ``fields`` query parameter is a comma-separated subset
        of id/status/data/time_created; unknown names yield a 400. Responds
        404 when no result exists.
        """
        fields = req.params.get("fields")

        session = self._new_session()
        try:
            result = session.query(Result).filter(
                Result.tagger_id == tagger_id, Result.text_id == text_id).first()

            if result is None:
                resp.status = falcon.HTTP_404
                return

            # Lazily-evaluated accessors: only the requested fields are
            # actually read from the ORM object.
            data_acc = {
                'id': lambda: result.id,
                'status': lambda: result.status.name,
                'data': lambda: result.data,
                'time_created': lambda: result.time_created.isoformat()
            }

            if fields is not None:
                data = {}
                for f in fields.split(","):
                    k = f.strip()
                    if k not in data_acc:
                        raise falcon.HTTPInvalidParam(
                            "fields", f"illegal field {k}")
                    data[k] = data_acc[k]()
            else:
                # No filter: return every available field.
                data = dict((k, data_acc[k]()) for k in data_acc.keys())

            resp.status = falcon.HTTP_200
            resp.text = json.dumps(data)
        finally:
            session.close()

    def on_post(self, req, resp, tagger_id, text_id):
        """Store a new result (and optional vectors) for (tagger_id, text_id).

        Responds 409 when a result already exists for the pair; otherwise
        persists the result and replies with its id.
        """
        result_data = req.media

        session = self._new_session()
        try:
            if session.query(Result).filter(
                    Result.tagger_id == tagger_id, Result.text_id == text_id).count() > 0:
                raise falcon.HTTPConflict(
                    f"Result for tagger {tagger_id}, text {text_id} is already in db.")

            tagger = session.query(Tagger).filter(Tagger.id == tagger_id).first()
            text = session.query(Text).filter(Text.id == text_id).first()

            result = json_to_result(
                tagger=tagger,
                text=text,
                status=ResultStatus[result_data['status']],
                json_data=result_data['data'])

            # Optional embedding vectors: each named group is a list of
            # hex-encoded blobs sharing one dtype.
            vectors = result_data.get('vectors')
            if vectors is not None:
                dtype = vectors['dtype']
                for k, v in vectors['data'].items():
                    x_vectors = [Vector(index=i, data=bytes.fromhex(x)) for i, x in enumerate(v)]
                    result.vectors.append(Vectors(name=k, dtype=dtype, vectors=x_vectors))

            session.add(result)
            session.commit()
            session.refresh(result)

            resp.status = falcon.HTTP_200
            resp.text = json.dumps({'id': result.id})
        finally:
            session.close()
@click.command()
@click.argument('path', type=click.Path(exists=False))
@click.option('--port', default=8000, help='Port to serve on.')
@click.option('--user', default="user", help='Username for basic auth.')
@click.option('--password', required=True, help='Password for basic auth.')
def run(path, port, user, password):
    """Run a server on the given carenero archive."""
    # Resolve the archive location; sessions are created per request from
    # this factory.
    info = ArchiveInfo(path, engine='carenero')
    new_session = create_session_factory(info.base_path)

    # All routes are protected by HTTP basic auth with the single
    # credential pair taken from the command line.
    auth_backend = BasicAuthBackend(functools.partial(user_loader, config={
        'user': user,
        'password': password
    }))
    auth_middleware = AuthMiddleware(auth_backend)

    app = falcon.App(middleware=[auth_middleware])
    app.add_route('/ping', PingResource())
    app.add_route('/taggers', TaggersResource(new_session))
    app.add_route('/taggers/{tagger_id:int}', TaggersByIdResource(new_session))
    app.add_route('/texts', TextsResource(new_session))
    app.add_route('/taggers/{tagger_id:int}/texts/{text_id:int}/results', ResultsResource(new_session))

    # wsgiref's simple server is single-threaded; fine for this tool.
    with make_server('', port, app) as httpd:
        print(f'Serving on port {port}...')

        # Serve until process is killed
        httpd.serve_forever()


if __name__ == '__main__':
    run()
| 32.489286 | 103 | 0.559415 | 7,047 | 0.774651 | 0 | 0 | 1,225 | 0.13466 | 0 | 0 | 821 | 0.09025 |
dcaf9832cd110af6ffc1b7cb72b87b06926220e1 | 993 | py | Python | tests/integration/test_eden_unmount.py | tiguchi/watchman | 5bcece7661e33bec33faf60437805df535e2f8af | [
"Apache-2.0"
] | 2 | 2021-02-06T00:24:06.000Z | 2021-08-12T02:47:30.000Z | tests/integration/test_eden_unmount.py | tiguchi/watchman | 5bcece7661e33bec33faf60437805df535e2f8af | [
"Apache-2.0"
] | 9 | 2021-04-09T18:17:13.000Z | 2022-02-26T10:55:07.000Z | tests/integration/test_eden_unmount.py | Varulv1997/watchman | cca53233f252cf851fc49ac3b6c48e3eaf81be26 | [
"Apache-2.0"
] | 2 | 2021-04-07T08:09:28.000Z | 2021-06-06T12:26:22.000Z | # vim:ts=4:sw=4:et:
# Copyright 2012-present Facebook, Inc.
# Licensed under the Apache License, Version 2.0
# no unicode literals
from __future__ import absolute_import, division, print_function
import os
import pywatchman
import WatchmanEdenTestCase
class TestEdenUnmount(WatchmanEdenTestCase.WatchmanEdenTestCase):
    def test_eden_unmount(self):
        """Querying a watched root after its eden mount is gone must fail."""

        def seed(repo):
            # Minimal repo: a watchman config plus one tracked file.
            repo.write_file(".watchmanconfig", '{"ignore_dirs":[".buckd"]}')
            repo.write_file("hello", "hola\n")
            repo.commit("initial commit.")

        root = self.makeEdenMount(seed)
        self.watchmanCommand("watch", root)
        clock = self.watchmanCommand("clock", root)
        self.touchRelative(root, "newfile")

        # Tear the mount down out from under watchman.
        self.eden.unmount(root)

        query = {"fields": ["name"], "since": clock}
        with self.assertRaises(pywatchman.CommandError) as ctx:
            self.watchmanCommand("query", root, query)
        self.assertRegex(str(ctx.exception), "unable to resolve root")
dcb2185b1d562b118d9365afacb1c2b0f388cace | 144 | py | Python | python/testData/resolve/SuperPy3k.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/resolve/SuperPy3k.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/resolve/SuperPy3k.py | truthiswill/intellij-community | fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | class A(object):
def foo(self):
print "foo"
class B(A):
    def foo(self):
        # Delegates to A.foo via argument-less super() (Python 3 form).
        super().foo()
# The <ref> marker below is consumed by the IDE resolve test; it targets
# the call on the following line.
# <ref>
B().foo()
| 13.090909 | 22 | 0.430556 | 107 | 0.743056 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.1875 |
dcb289353c9e5c52f181bfcc7061e81910138776 | 13,742 | py | Python | python/displaywidget.py | karlssonper/gpuip | ce9a62ea9ced3f167a2477bf58944281dac6d45b | [
"MIT"
] | 10 | 2015-10-24T01:06:06.000Z | 2019-12-27T10:48:36.000Z | python/displaywidget.py | karlssonper/gpuip | ce9a62ea9ced3f167a2477bf58944281dac6d45b | [
"MIT"
] | null | null | null | python/displaywidget.py | karlssonper/gpuip | ce9a62ea9ced3f167a2477bf58944281dac6d45b | [
"MIT"
] | 3 | 2017-10-23T23:20:09.000Z | 2020-07-14T13:07:26.000Z | from PySide import QtGui, QtOpenGL, QtCore
from OpenGL import GL
from OpenGL import GL
from OpenGL.GL import shaders
from OpenGL.arrays import vbo
from OpenGL.GL.ARB import texture_rg
from OpenGL.GL.ARB import half_float_vertex
from ctypes import c_void_p
import numpy
import math
vert_src = """#version 120
attribute vec2 positionIn;
attribute vec2 texIn;
varying vec2 texcoord;
void main()
{
gl_Position= vec4(positionIn * 2.0 - vec2(1),0,1);
texcoord = texIn;
}
"""
frag_src = """#version 120
uniform sampler2D texture;
uniform int hdr_mode;
uniform float g;
uniform float m;
uniform float s;
varying vec2 texcoord;
float convert(float x)
{
return clamp(pow(x*m,g) *s, 0.0, 1.0);
}
void main()
{
vec2 coords = vec2(texcoord.x, 1.0 - texcoord.y);
vec3 tex = texture2D(texture, coords).xyz;
if (hdr_mode == 1) {
gl_FragColor = vec4(convert(tex.x), convert(tex.y), convert(tex.z), 1);
} else {
gl_FragColor = vec4(tex,1);
}
}
"""
class DisplayWidget(QtGui.QWidget):
    """Image display panel: a GL viewport plus buffer selector and exposure
    slider (Python 2 / PySide code — note the xrange and list-sort usage)."""

    def __init__(self, parent):
        super(DisplayWidget, self).__init__(parent)
        # Mapping of buffer name -> object exposing a numpy `.data` array;
        # set later via setBuffers().
        self.buffers = None
        self.glWidget = GLWidget(self)
        self.bufferComboBox = QtGui.QComboBox(self)
        policy = QtGui.QSizePolicy()
        policy.setHorizontalPolicy(QtGui.QSizePolicy.Expanding)
        self.bufferComboBox.setSizePolicy(policy)
        label = QtGui.QLabel("Buffers:")
        label.setBuddy(self.bufferComboBox)
        self.bufferComboBox.currentIndexChanged["QString"].connect(
            self.onBufferSelectChange)
        self.interactiveCheckBox = QtGui.QCheckBox("Interactive", self)
        midLayout = QtGui.QHBoxLayout()
        midLayout.addWidget(label)
        midLayout.addWidget(self.bufferComboBox)
        midLayout.addWidget(self.interactiveCheckBox)
        # Exposure slider maps the integer range [-100, 100] to
        # [-10.0, 10.0] stops (see onExposureChange).
        self.label = QtGui.QLabel("Exposure: 0", self)
        self.slider = QtGui.QSlider(QtCore.Qt.Horizontal, self)
        self.slider.setRange(-100,100)
        self.slider.setValue(0)
        self.slider.valueChanged.connect(self.onExposureChange)
        bottomLayout = QtGui.QHBoxLayout()
        bottomLayout.addWidget(self.label)
        bottomLayout.addWidget(self.slider)
        bottomLayout.setSizeConstraint(QtGui.QLayout.SetMinimumSize)
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.glWidget)
        layout.addLayout(midLayout)
        layout.addLayout(bottomLayout)
        self.setLayout(layout)

    def setBuffers(self, buffers):
        """Replace the buffer dict and repopulate the combo box (sorted)."""
        # Remove items one by one from the front; count() shrinks as we go,
        # hence the fixed iteration count taken up-front.
        for i in xrange(self.bufferComboBox.count()):
            self.bufferComboBox.removeItem(0)
        self.buffers = buffers
        buffersList = buffers.keys()
        buffersList.sort()
        self.bufferComboBox.addItems(buffersList)

    def setActiveBuffer(self, bufferName):
        """Select *bufferName*; force a redraw if it is already current."""
        idx = self.bufferComboBox.findText(bufferName)
        if idx == self.bufferComboBox.currentIndex():
            # Selecting the same index emits no signal, so refresh manually.
            self.refreshDisplay()
        else:
            self.bufferComboBox.setCurrentIndex(idx)

    def onBufferSelectChange(self, value):
        """Upload the newly selected buffer to the GL texture and redraw."""
        if str(value) in self.buffers:
            ndarray = self.buffers[str(value)].data
            self.glWidget.copyDataToTexture(ndarray)
            # Exposure only applies to floating-point (HDR) buffers.
            if ndarray.dtype == numpy.float32 or ndarray.dtype == numpy.float16:
                self.slider.setEnabled(True)
            else:
                self.slider.setEnabled(False)
        self.glWidget.glDraw()

    def onExposureChange(self):
        """Propagate the slider value (in 0.1-stop steps) to the GL widget."""
        value = 0.1 * self.slider.value()
        self.glWidget.exposure = value
        self.label.setText("Exposure: " + str(value))
        self.glWidget.glDraw()

    def refreshDisplay(self):
        """Re-run the selection handler for the currently selected buffer."""
        self.onBufferSelectChange(self.bufferComboBox.currentText())

    def sizeHint(self):
        # Preferred initial size of the whole panel.
        return QtCore.QSize(400,400)
class GLWidget(QtOpenGL.QGLWidget):
    """OpenGL canvas that displays a numpy image as a textured quad with
    pan/zoom and an HDR exposure/gamma shader, plus a right-click pixel
    inspector overlay."""

    def __init__(self, parent):
        super(GLWidget, self).__init__(parent)
        # Image dimensions (updated when a texture is uploaded).
        self.w = 440
        self.h = 440
        self.rightBtnDown = False
        # Lazily created GL objects (texture, shader program, vertex buffer).
        self.texture = None
        self.texturedata = None
        self.shader = None
        # 1 when the current texture is floating point (exposure applies).
        self.hdr_mode = 0
        self.vbo = None
        # View state: half-extent of the quad, zoom step count, and center.
        self.scale = 0.5
        self.steps = 0
        self.cx = 0.5
        self.cy = 0.5
        self.gamma = 1.0/2.2
        self.exposure = 0
        self.zoomFactor = 1.35
        self.panFactor = 0.002

    def initializeGL(self):
        # All GL objects are created lazily on first use instead.
        pass

    def copyDataToTexture(self, ndarray):
        """Upload *ndarray* (HxW or HxWxC, uint8/float16/float32) to the
        display texture, picking matching GL formats."""
        # Update dimensions of widget
        self.texturedata = ndarray
        self.w = ndarray.shape[0]
        self.h = ndarray.shape[1]
        self.updateGeometry()

        # Generate new texture
        if not self.texture:
            try:
                self.texture = GL.glGenTextures(1)
            except Exception:
                # No GL context yet; bail out and retry on a later upload.
                return
        target = GL.GL_TEXTURE_2D
        GL.glBindTexture(target, self.texture)
        GL.glTexParameterf(target, GL.GL_TEXTURE_MAG_FILTER, GL.GL_NEAREST)
        GL.glTexParameterf(target, GL.GL_TEXTURE_MIN_FILTER, GL.GL_NEAREST)
        GL.glTexParameterf(target, GL.GL_TEXTURE_WRAP_S, GL.GL_CLAMP_TO_EDGE)
        GL.glTexParameterf(target, GL.GL_TEXTURE_WRAP_T, GL.GL_CLAMP_TO_EDGE)
        GL.glTexParameteri(target, GL.GL_GENERATE_MIPMAP, GL.GL_FALSE);

        # Get texture format
        channels = ndarray.shape[2] if ndarray.ndim == 3 else 1
        if channels == 1:
            glFormat = GL.GL_RED
        elif channels == 2:
            glFormat = GL.GL_RG
        elif channels == 3:
            glFormat = GL.GL_RGB
        elif channels == 4:
            glFormat = GL.GL_RGBA
        glInternalFormat = glFormat

        # Get texture type
        if ndarray.dtype == numpy.float32:
            glType = GL.GL_FLOAT

            # Need to use the exposure shader if floating point
            self.hdr_mode = 1

            # The internal format changes with floating point textures
            if channels == 1:
                glInternalFormat = texture_rg.GL_R32F
            elif channels == 2:
                glInternalFormat = texture_rg.GL_RG32F
            elif channels == 3:
                glInternalFormat = GL.GL_RGB32F
            elif channels == 4:
                glInternalFormat = GL.GL_RGBA32F
        elif ndarray.dtype == numpy.float16:
            glType = GL.GL_FLOAT

            # Need to use the exposure shader if floating point
            self.hdr_mode = 1

            # The internal format changes with floating point textures
            if channels == 1:
                glInternalFormat = texture_rg.GL_R16F
            elif channels == 2:
                glInternalFormat = texture_rg.GL_RG16F
            elif channels == 3:
                glInternalFormat = GL.GL_RGB16F
            elif channels == 4:
                glInternalFormat = GL.GL_RGBA16F
        else:
            glType = GL.GL_UNSIGNED_BYTE
            self.hdr_mode = 0

        # Copy data to texture
        GL.glTexImage2D(target, 0, glInternalFormat, self.w, self.h,
                        0, glFormat, glType, ndarray)
        GL.glBindTexture(target, 0)

    def resizeGL(self, width, height):
        # Orthographic projection over the unit square.
        GL.glViewport(0,0,width,height)
        GL.glMatrixMode(GL.GL_PROJECTION)
        GL.glLoadIdentity()
        GL.glOrtho(0,1,0,1,0,1)
        GL.glMatrixMode(GL.GL_MODELVIEW)

    def compileShaders(self):
        """Compile and link the display shader program (module-level GLSL)."""
        # Build shaders
        vert_shader = shaders.compileShader(vert_src, GL.GL_VERTEX_SHADER)
        frag_shader = shaders.compileShader(frag_src, GL.GL_FRAGMENT_SHADER)
        self.shader = shaders.compileProgram(vert_shader, frag_shader)

    def paintGL(self):
        """Draw the textured quad (and pixel-info overlay when active)."""
        # 33305 == 0x8219, presumably GL_FRAMEBUFFER_UNDEFINED — skip
        # drawing when the framebuffer is not ready. TODO confirm constant.
        if GL.glCheckFramebufferStatus(GL.GL_FRAMEBUFFER) == 33305:
            return

        GL.glClear(GL.GL_COLOR_BUFFER_BIT)

        if not self.texture:
            return

        if not self.shader:
            self.compileShaders()

        if not self.vbo:
            self.vbo = GL.glGenBuffers(1)

        shaders.glUseProgram(self.shader)

        # Interleaved buffer: 4 xy positions (quad corners around the
        # current center/scale) followed by 4 xy texture coordinates.
        GL.glBindBuffer(GL.GL_ARRAY_BUFFER, self.vbo)
        vertices = numpy.array(
            [-self.scale + self.cx, -self.scale + self.cy ,
              self.scale + self.cx, -self.scale + self.cy,
              self.scale + self.cx, self.scale + self.cy,
             -self.scale + self.cx, self.scale + self.cy,
             0,0,1,0,1,1,0,1], dtype = numpy.float32)
        # 64 bytes = 16 float32 values.
        GL.glBufferData(GL.GL_ARRAY_BUFFER, 64, vertices, GL.GL_STATIC_DRAW)

        loc = GL.glGetAttribLocation(self.shader, "positionIn")
        GL.glEnableVertexAttribArray(loc)
        GL.glVertexAttribPointer(loc, 2, GL.GL_FLOAT, 0, 8, c_void_p(0))

        # Texcoords start at byte offset 32 (after the 8 position floats).
        loc = GL.glGetAttribLocation(self.shader, "texIn")
        GL.glEnableVertexAttribArray(loc)
        GL.glVertexAttribPointer(loc, 2, GL.GL_FLOAT, 0, 8, c_void_p(32))

        def _uniformLoc(name):
            return GL.glGetUniformLocation(self.shader,name)
        # Shader uniforms: gamma, exposure multiplier and scale (see frag_src).
        GL.glUniform1f(_uniformLoc("g"), self.gamma);
        GL.glUniform1f(_uniformLoc("m"), math.pow(2, self.exposure + 2.47393))
        GL.glUniform1f(_uniformLoc("s"), math.pow(2, -3.5 * self.gamma))
        GL.glUniform1i(_uniformLoc("hdr_mode"), self.hdr_mode);
        GL.glUniform1i(_uniformLoc("texture"), 0);

        GL.glActiveTexture(GL.GL_TEXTURE0);
        GL.glBindTexture(GL.GL_TEXTURE_2D, self.texture)

        GL.glDrawArrays(GL.GL_QUADS, 0, 4);

        GL.glBindTexture(GL.GL_TEXTURE_2D, 0)

        loc = GL.glGetAttribLocation(self.shader, "positionIn")
        GL.glDisableVertexAttribArray(loc)

        loc = GL.glGetAttribLocation(self.shader, "texIn")
        GL.glDisableVertexAttribArray(loc)

        GL.glBindBuffer(GL.GL_ARRAY_BUFFER, 0)
        shaders.glUseProgram(0)

        if self.rightBtnDown:
            self.renderPixelInfo()

    def mousePressEvent(self, event):
        # Right button toggles the pixel-info overlay while held.
        self.lastPos = event.pos()
        if event.button()== QtCore.Qt.RightButton:
            self.rightBtnDown = True
        self.glDraw()

    def mouseReleaseEvent(self, event):
        if event.button() == QtCore.Qt.RightButton:
            self.rightBtnDown = False
        self.glDraw()

    def mouseMoveEvent(self, event):
        # Left-drag pans the view; y is inverted (screen vs. GL coords).
        dx = event.x() - self.lastPos.x()
        dy = event.y() - self.lastPos.y()
        if event.buttons() & QtCore.Qt.LeftButton:
            self.cx += self.panFactor*dx
            self.cy -= self.panFactor*dy
            self.correctCenterCoordinates()
        self.lastPos = event.pos()
        self.glDraw()

    def wheelEvent(self, event):
        # Wheel zooms in discrete steps of zoomFactor.
        if event.delta() > 0:
            self.steps += 1
        else:
            self.steps -= 1

        # Only allow inital zoom (not smaller)
        if self.steps < 0:
            self.steps = 0

        self.scale = 0.5 * math.pow(self.zoomFactor, self.steps)
        self.correctCenterCoordinates()
        self.glDraw()

    def correctCenterCoordinates(self):
        """Clamp the view center so the quad always covers the viewport."""
        if -self.scale + self.cx > 0:
            self.cx = self.scale
        if self.scale + self.cx < 1:
            self.cx = 1 - self.scale
        if -self.scale + self.cy > 0:
            self.cy = self.scale
        if self.scale + self.cy < 1:
            self.cy = 1 - self.scale

    def sizeHint(self):
        # Prefer a 1:1 mapping between widget pixels and image pixels.
        return QtCore.QSize(self.w,self.h)

    def renderPixelInfo(self):
        """Draw an overlay box with coordinates and RGB values of the pixel
        under the last cursor position."""
        # Get pixel positions px and py
        size = 2.0*(self.scale)
        offx = self.w * (self.scale - self.cx) / size
        offy = self.h * (self.scale - self.cy) / size
        px = int(offx + (self.lastPos.x() * self.w) / (self.width() * size))
        py = int(offy + (self.lastPos.y() * self.h) / (self.height()* size))
        py = self.h - py
        px = min(max(px,0), self.w - 1)
        py = min(max(py,0), self.h - 1)

        val = [None, None, None, None]
        for i in xrange(self.texturedata.shape[2]):
            val[i] = self.texturedata[px][py][i]
        # NOTE: the conditional binds as ("R:%f" % val[0]) if val[0] else
        # "n/a" — zero-valued channels also display as "n/a".
        texts = ["x:%i y:%i" % (px,py),
                 "R:%f" % val[0] if val[0] else "n/a",
                 "G:%f" % val[1] if val[1] else "n/a",
                 "B:%f" % val[2] if val[2] else "n/a"]

        font = QtGui.QFont()
        font.setFamily("Monospace")
        #font.setFixedPitch(True);
        metrics = QtGui.QFontMetrics(font)
        sx = 20 # spacing variable

        w,h = metrics.width(texts[1]), metrics.height()
        # NOTE(review): this bare call discards its result — likely leftover.
        metrics.width(" ")
        x,y = self.lastPos.x(), self.height() - self.lastPos.y() - sx
        dx,dy = 1.0/self.width(), 1.0/self.height()

        # Calculate pixel info position
        # Swap position if outside screen
        if x + 1.5*sx + w < self.width():
            x0 = x + 0.75*sx
            x1 = x + 1.5*sx + w + 10
            tx = x + sx
        else:
            x0 = x - 0.75*sx
            x1 = x - 1.5*sx - w
            tx = x - sx - w
        if y + sx - 5 * h > 0:
            y0 = y + sx
            y1 = y + sx - 5 * h
            ty = self.height()-y
        else:
            y0 = y - sx + 3 * h
            y1 = y - sx + 8 * h
            ty = self.height()-y - 5 * h - 0.5*sx

        # Draw transparent quad
        GL.glBlendFunc(GL.GL_SRC_ALPHA, GL.GL_ONE_MINUS_SRC_ALPHA);
        GL.glEnable(GL.GL_BLEND)
        GL.glBegin(GL.GL_QUADS)
        GL.glColor4f(0,0,0,0.8)
        for x,y in zip([x0,x1,x1,x0],[y0,y0,y1,y1]):
            GL.glVertex2f(x * dx, y * dy)
        GL.glEnd()
        GL.glDisable(GL.GL_BLEND)

        # Render text
        GL.glColor4f(1,1,1,1)
        for i,text in enumerate(texts):
            self.renderText(tx, ty + i*h, text, font)
| 33.681373 | 80 | 0.575608 | 12,735 | 0.926721 | 0 | 0 | 0 | 0 | 0 | 0 | 1,413 | 0.102823 |
dcb2d7ea31ccb76c2c9e3aa749bbceb3bfb6beec | 12,867 | py | Python | fiftyone/utils/eval/coco.py | vinayya/fiftyone | cadb54ba38e0db59abb6f9fb7ee630a41a517bef | [
"Apache-2.0"
] | 1 | 2020-08-26T20:41:10.000Z | 2020-08-26T20:41:10.000Z | fiftyone/utils/eval/coco.py | vinayya/fiftyone | cadb54ba38e0db59abb6f9fb7ee630a41a517bef | [
"Apache-2.0"
] | null | null | null | fiftyone/utils/eval/coco.py | vinayya/fiftyone | cadb54ba38e0db59abb6f9fb7ee630a41a517bef | [
"Apache-2.0"
] | null | null | null | """
COCO-style detection evaluation using
`pycocotools <https://github.com/cocodataset/cocoapi/tree/master/PythonAPI/pycocotools>`_.
| Copyright 2017-2020, Voxel51, Inc.
| `voxel51.com <https://voxel51.com/>`_
|
"""
# pragma pylint: disable=redefined-builtin
# pragma pylint: disable=unused-wildcard-import
# pragma pylint: disable=wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from builtins import *
# pragma pylint: enable=redefined-builtin
# pragma pylint: enable=unused-wildcard-import
# pragma pylint: enable=wildcard-import
import logging
import numpy as np
from pycocotools import mask as mask_utils
import fiftyone.core.utils as fou
logger = logging.getLogger(__name__)
# The ten COCO-style IoU thresholds: [0.5, 0.55, ..., 0.95].
IOU_THRESHOLDS = [round(0.5 + 0.05 * i, 2) for i in range(10)]
# Threshold strings with "." replaced by "_" (e.g. "0_5"), usable as
# document field keys.
_IOU_THRESHOLD_STRS = [str(iou).replace(".", "_") for iou in IOU_THRESHOLDS]
def evaluate_detections(
    samples, pred_field, gt_field="ground_truth", save_iou=0.75
):
    """Evaluates the predicted detections in the given samples with respect to
    the specified ground truth detections for each of the following
    Intersection over Union (IoU) thresholds::

        [0.50, 0.55, 0.60, 0.65, 0.70, 0.75, 0.80, 0.85, 0.90, 0.95]

    It should be noted that if a :class:`fiftyone.core.labels.Detection` in the
    ground truth field has a boolean attribute called `iscrowd`, then this
    detection will be matched to multiple predictions and result in them all
    being true positives. This follows evaluation performed in using the COCO
    dataset in pycocotools.

    Dictionaries are added to each predicted/ground truth
    :class:`fiftyone.core.labels.Detections` instance in the fields listed
    below; these fields tabulate the true positive (TP), false positive (FP),
    and false negative (FN) counts for the sample at each IoU::

        Ground truth: detections.<pred_field>_eval
        Predictions:  detections.<gt_field>_eval

    Dictionaries are also added to each individual
    :class:`fiftyone.core.labels.Detection` instance in the fields listed
    below; these fields tabulate the IDs of the matching ground
    truth/prediction for the detection at each IoU::

        Ground truth: detection.<pred_field>_eval
        Predictions:  detection.<gt_field>_eval

    In addition, true positive (TP), false positive (FP), and false negative
    (FN) counts at the specified ``save_iou`` are saved in the following
    top-level fields of each sample::

        TP: sample.tp_iou_<save_iou>
        FP: sample.fp_iou_<save_iou>
        FN: sample.fn_iou_<save_iou>

    where ``<save_iou> = str(save_iou).replace(".", "_")``.

    Args:
        samples: an iterable of :class:`fiftyone.core.sample.Sample` instances.
            For example, this may be a :class:`fiftyone.core.dataset.Dataset`
            or a :class:`fiftyone.core.view.DatasetView`
        pred_field: the name of the field containing the predicted
            :class:`fiftyone.core.labels.Detections` to evaluate
        gt_field ("ground_truth"): the name of the field containing the ground
            truth :class:`fiftyone.core.labels.Detections`
        save_iou (0.75): an IoU value for which to save per-sample TP/FP/FN
            counts as top-level sample fields
    """
    gt_key = "%s_eval" % pred_field
    pred_key = "%s_eval" % gt_field
    eval_id = 0

    try:
        save_iou_ind = IOU_THRESHOLDS.index(save_iou)
        save_iou_str = _IOU_THRESHOLD_STRS[save_iou_ind]
    except ValueError:
        logger.info(
            "IoU %f is not in the list of available IoU thresholds: %s",
            save_iou,
            IOU_THRESHOLDS,
        )
        save_iou_str = None

    logger.info("Evaluating detections...")
    with fou.ProgressBar() as pb:
        for sample in pb(samples):
            preds = sample[pred_field]
            gts = sample[gt_field]

            # Sort preds and gt detections by category label
            sample_cats = {}
            for det in preds.detections:
                det[pred_key] = {}
                det[pred_key]["ious"] = {}
                det[pred_key]["matches"] = {
                    iou_str: {"gt_id": -1, "iou": -1}
                    for iou_str in _IOU_THRESHOLD_STRS
                }
                det[pred_key]["pred_id"] = eval_id
                eval_id += 1
                if det.label not in sample_cats:
                    sample_cats[det.label] = {}
                    sample_cats[det.label]["preds"] = []
                    sample_cats[det.label]["gts"] = []
                sample_cats[det.label]["preds"].append(det)

            for det in gts.detections:
                det[gt_key] = {}
                det[gt_key]["matches"] = {
                    iou_str: {"pred_id": -1, "iou": -1}
                    for iou_str in _IOU_THRESHOLD_STRS
                }
                det[gt_key]["gt_id"] = eval_id
                eval_id += 1
                if det.label not in sample_cats:
                    sample_cats[det.label] = {}
                    sample_cats[det.label]["preds"] = []
                    sample_cats[det.label]["gts"] = []
                sample_cats[det.label]["gts"].append(det)

            # Compute IoU for every detection and gt
            for cat, dets in sample_cats.items():
                gts = dets["gts"]
                preds = dets["preds"]

                # Sort predictions by descending confidence (stable sort so
                # ties keep their original order)
                inds = np.argsort(
                    [-(p.confidence or 0.0) for p in preds], kind="mergesort"
                )
                preds = [preds[i] for i in inds]
                sample_cats[cat]["preds"] = preds

                gt_ids = [g[gt_key]["gt_id"] for g in gts]

                gt_boxes = [list(g.bounding_box) for g in gts]
                pred_boxes = [list(p.bounding_box) for p in preds]
                iscrowd = [False] * len(gt_boxes)
                for gind, g in enumerate(gts):
                    if "iscrowd" in g.attributes:
                        iscrowd[gind] = bool(g.attributes["iscrowd"].value)

                # Get the IoU of every prediction with every ground truth
                # shape = [num_preds, num_gts]
                ious = mask_utils.iou(pred_boxes, gt_boxes, iscrowd)

                for pind, gt_ious in enumerate(ious):
                    preds[pind][pred_key]["ious"][cat] = list(
                        zip(gt_ids, gt_ious)
                    )

            #
            # Starting with highest confidence prediction, match all with gts
            # Store true and false positives
            #
            # Reference implementation:
            # https://github.com/cocodataset/cocoapi/blob/8c9bcc3cf640524c4c20a9c40e89cb6a2f2fa0e9/PythonAPI/pycocotools/cocoeval.py#L273
            #
            result_dict = {
                "true_positives": {},
                "false_positives": {},
                "false_negatives": {},
            }

            for iou_ind, iou_thresh in enumerate(IOU_THRESHOLDS):
                iou_str = _IOU_THRESHOLD_STRS[iou_ind]
                true_positives = 0
                false_positives = 0
                for cat, dets in sample_cats.items():
                    gt_by_id = {g[gt_key]["gt_id"]: g for g in dets["gts"]}

                    # Note: predictions were sorted by confidence in the
                    # previous step
                    preds = dets["preds"]

                    # Match each prediction to the highest IoU ground truth
                    # available
                    for pred in preds:
                        if cat in pred[pred_key]["ious"]:
                            best_match = -1
                            best_match_iou = min([iou_thresh, 1 - 1e-10])
                            for gt_id, iou in pred[pred_key]["ious"][cat]:
                                gt = gt_by_id[gt_id]
                                curr_gt_match = gt[gt_key]["matches"][iou_str][
                                    "pred_id"
                                ]

                                if "iscrowd" in gt.attributes:
                                    iscrowd = bool(
                                        gt.attributes["iscrowd"].value
                                    )
                                else:
                                    iscrowd = False

                                # Cannot match two preds to the same gt unless
                                # the gt is a crowd
                                if curr_gt_match > -1 and not iscrowd:
                                    continue

                                # Ignore gts with an IoU lower than what was
                                # already found
                                if iou < best_match_iou:
                                    continue

                                best_match_iou = iou
                                best_match = gt_id

                            if best_match > -1:
                                # If the prediction was matched, store the eval
                                # id of the pred in the gt and of the gt in the
                                # pred
                                gt_to_store = gt_by_id[best_match][gt_key]
                                gt_to_store["matches"][iou_str] = {
                                    "pred_id": pred[pred_key]["pred_id"],
                                    "iou": best_match_iou,
                                }
                                pred[pred_key]["matches"][iou_str] = {
                                    "gt_id": best_match,
                                    "iou": best_match_iou,
                                }
                                true_positives += 1
                            else:
                                false_positives += 1

                        elif pred.label == cat:
                            false_positives += 1

                result_dict["true_positives"][iou_str] = true_positives
                result_dict["false_positives"][iou_str] = false_positives

                # BUG FIX: the original read the leaked loop variable `dets`
                # here, so false negatives were counted for the *last*
                # category only (and a NameError occurred when the sample had
                # no detections). Count unmatched gts across all categories.
                false_negatives = sum(
                    len(
                        [
                            g
                            for g in cat_dets["gts"]
                            if g[gt_key]["matches"][iou_str]["pred_id"] == -1
                        ]
                    )
                    for cat_dets in sample_cats.values()
                )

                result_dict["false_negatives"][iou_str] = false_negatives

                if iou_str == save_iou_str:
                    sample["tp_iou_%s" % save_iou_str] = true_positives
                    sample["fp_iou_%s" % save_iou_str] = false_positives
                    sample["fn_iou_%s" % save_iou_str] = false_negatives

            sample[pred_field][pred_key] = result_dict

            # @todo compute sample-wise AP

            sample.save()
def save_tp_fp_fn_counts(samples, pred_field, gt_field, iou):
    """Copies the true positive (TP), false positive (FP), and false negative
    (FN) counts at the given IoU level into top-level fields of each sample::

        TP: sample.tp_iou_<iou>
        FP: sample.fp_iou_<iou>
        FN: sample.fn_iou_<iou>

    where ``<iou> = str(iou).replace(".", "_")``. The samples must have been
    previously evaluated via :meth:`evaluate_detections`.

    Args:
        samples: an iterable of :class:`fiftyone.core.sample.Sample` instances
            (e.g., a :class:`fiftyone.core.dataset.Dataset` or a
            :class:`fiftyone.core.view.DatasetView`)
        pred_field: the name of the field containing the evaluated predicted
            :class:`fiftyone.core.labels.Detections`
        gt_field: the name of the field containing the ground truth
            :class:`fiftyone.core.labels.Detections`
        iou: the IoU value for which to save the TP/FP/FN counts
    """
    pred_key = "%s_eval" % gt_field
    save_iou_str = str(iou).replace(".", "_")

    # Bail out early if the requested IoU was never evaluated
    if iou not in IOU_THRESHOLDS:
        logger.info(
            "IoU %f is not an available IoU threshold: %s", iou, IOU_THRESHOLDS
        )
        return

    iou_str = _IOU_THRESHOLD_STRS[IOU_THRESHOLDS.index(iou)]

    logger.info("Saving TP/FP/FN counts for IoU %f...", iou)
    with fou.ProgressBar() as pb:
        for sample in pb(samples):
            counts = sample[pred_field][pred_key]
            for prefix, count_key in (
                ("tp", "true_positives"),
                ("fp", "false_positives"),
                ("fn", "false_negatives"),
            ):
                field_name = "%s_iou_%s" % (prefix, save_iou_str)
                sample[field_name] = counts[count_key][iou_str]

            sample.save()
| 39.959627 | 137 | 0.53952 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,355 | 0.416181 |
dcb32a1306f972cdb189ca03489b60c332bbdfc7 | 1,038 | py | Python | python/NSTEPS.py | feliposz/spoj-solutions | 8e51566d311dd953fac4a9058051c4c663b3bdca | [
"MIT"
] | null | null | null | python/NSTEPS.py | feliposz/spoj-solutions | 8e51566d311dd953fac4a9058051c4c663b3bdca | [
"MIT"
] | null | null | null | python/NSTEPS.py | feliposz/spoj-solutions | 8e51566d311dd953fac4a9058051c4c663b3bdca | [
"MIT"
] | null | null | null | def plot(width, height):
for j in range(height):
y = height - j - 1
print("y = {0:3} |".format(y), end="")
for x in range(width):
print("{0:3}".format(f(x,y)), end="")
print()
print(" +", end="")
for x in range(width):
print("---", end="")
print()
print(" ", end="")
for x in range(width):
print("{0:3}".format(x), end="")
print()
def f(x, y):
    """Return the number at grid position (x, y), or "" if none exists.

    Numbers only occupy the diagonals y == x and y == x - 2. On both, the
    value is x + y for even x and x + y - 1 for odd x, which replaces the
    original O(x) recursion (f(x, x) = 1 + f(x-1, x-1) for odd x, and
    f(x, x-2) = f(x, x) - 2) with an O(1) closed form.
    """
    if x == y or x - 2 == y:
        return x + y if x % 2 == 0 else x + y - 1
    return ""
def test():
    """Self-check f() against the known values from the problem statement."""
    cases = [((4, 2), 6), ((6, 6), 12), ((3, 4), "")]
    for index, (args, expected) in enumerate(cases, start=1):
        assert f(*args) == expected, "Test %d failed" % index
    print("All tests passed")
if __name__ == '__main__':
    # Read the number of queries, then one "x y" pair per line; print the
    # grid value or "No Number" when the position holds none.
    lines = int(input())
    for i in range(lines):
        x, y = input().split()
        n = f(int(x), int(y))
        if n == "":
            print("No Number")
        else:
            print(n)
    #plot(20, 20)
| 22.565217 | 49 | 0.423892 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 169 | 0.162813 |
dcb3881e553a1a429ad11815f3af4731fd6525a0 | 5,692 | py | Python | examples/add_annotation_links.py | Cytomine-ULiege/Cytomine-python-client | 166f9c1ede99ce041cbcaa2feb9246b82bd7eaa4 | [
"Apache-2.0"
] | 8 | 2018-07-17T11:04:22.000Z | 2021-11-15T02:46:15.000Z | examples/add_annotation_links.py | Cytomine-ULiege/Cytomine-python-client | 166f9c1ede99ce041cbcaa2feb9246b82bd7eaa4 | [
"Apache-2.0"
] | 22 | 2018-04-30T11:46:46.000Z | 2022-03-11T23:34:46.000Z | examples/add_annotation_links.py | Cytomine-ULiege/Cytomine-python-client | 166f9c1ede99ce041cbcaa2feb9246b82bd7eaa4 | [
"Apache-2.0"
] | 6 | 2018-04-26T12:01:41.000Z | 2021-11-24T10:24:22.000Z | # -*- coding: utf-8 -*-
# * Copyright (c) 2009-2018. Authors: see NOTICE file.
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
import sys
from argparse import ArgumentParser
from shapely.geometry import Point, box
from cytomine import Cytomine
from cytomine.models import Annotation, AnnotationCollection, \
ImageGroupImageInstanceCollection, AnnotationGroup, AnnotationLink
# Make the Cytomine client's own log output visible at INFO level.
logging.basicConfig()
logger = logging.getLogger("cytomine.client")
logger.setLevel(logging.INFO)
if __name__ == '__main__':
    parser = ArgumentParser(prog="Cytomine Python client example")

    # Cytomine connection / target arguments.
    parser.add_argument('--cytomine_host', dest='host',
                        default='demo.cytomine.be', help="The Cytomine host")
    parser.add_argument('--cytomine_public_key', dest='public_key',
                        help="The Cytomine public key")
    parser.add_argument('--cytomine_private_key', dest='private_key',
                        help="The Cytomine private key")
    parser.add_argument('--cytomine_id_project', dest='id_project',
                        help="The project from which we want the images")
    parser.add_argument('--cytomine_id_image_instance1', dest='id_image_instance1',
                        help="The image to which the annotation will be added")
    parser.add_argument('--cytomine_id_image_instance2', dest='id_image_instance2',
                        help="The image to which the linked annotation will be added")
    params, other = parser.parse_known_args(sys.argv[1:])

    with Cytomine(host=params.host, public_key=params.public_key, private_key=params.private_key) as cytomine:
        # Sanity check: the 2 images must be in the same image group,
        # otherwise their annotations cannot be linked.
        igii1 = ImageGroupImageInstanceCollection().fetch_with_filter("imageinstance", params.id_image_instance1)
        igii2 = ImageGroupImageInstanceCollection().fetch_with_filter("imageinstance", params.id_image_instance2)
        if len(igii1) != 1 or len(igii2) != 1 or igii1[0].group != igii2[0].group:
            raise ValueError("Images are not in the same image group !")
        id_image_group = igii1[0].group

        # We first add a point in (10,10) in both images.
        point = Point(10, 10)
        annotation_point1 = Annotation(location=point.wkt, id_image=params.id_image_instance1).save()
        annotation_point2 = Annotation(location=point.wkt, id_image=params.id_image_instance2).save()

        # Now we will link them.
        # 1) First I need to create an annotation group
        annotation_group = AnnotationGroup(id_project=params.id_project, id_image_group=id_image_group).save()
        print(annotation_group)

        # 2) I add the 2 annotations into the group to create links
        al1 = AnnotationLink(id_annotation=annotation_point1.id, id_annotation_group=annotation_group.id).save()
        print(al1)
        al2 = AnnotationLink(id_annotation=annotation_point2.id, id_annotation_group=annotation_group.id).save()
        print(al2)

        # List all annotations in that annotation group:
        annots = AnnotationCollection()
        # FIX: use the project given on the command line instead of the
        # hard-coded project id 682669 left over from manual testing.
        annots.project = params.id_project
        annots.showLink = True
        annots.group = annotation_group.id
        annots.fetch()
        print(annots)
        for annot in annots:
            n_links = len(annot.annotationLink)
            # n_links will be 2 as it contains links al1->annotation_group and al2->annotation_group
            linked_annot_ids = [al['annotation'] for al in annot.annotationLink]
            print("Annotation {} in image {} has {} links (annotations: {})"
                  .format(annot.id, annot.image, n_links, linked_annot_ids))

        # ---------------
        # How to speed up the process when we have more data ?
        # We will create points (5, 5) in every image and link them
        # We will create rectangle (20, 20, 100, 100) in every image and link them
        point = Point(5, 5)
        rectangle = box(20, 20, 100, 100)

        # I need 2 annotation groups:
        annot_group_ids = []
        for i in range(2):
            ag = AnnotationGroup(id_project=params.id_project, id_image_group=id_image_group).save()
            annot_group_ids.append(ag.id)

        # We will create all annotations in one request.
        annotations = AnnotationCollection()
        image_ids = [params.id_image_instance1, params.id_image_instance2]
        for image_id in image_ids:
            for i, geometry in enumerate([point, rectangle]):
                annotations.append(
                    Annotation(location=geometry.wkt, id_project=params.id_project, id_image=image_id,
                               id_group=annot_group_ids[i])
                )
        annotations.save()

        # In the end, we have:
        # - a point in image 1, linked to a point in image 2
        # - a rectangle in image 1, linked to a rectangle in image 2
        # - a point in image 2, linked to a point in image 1
        # - a rectangle in image 2, linked to a rectangle in image 1
| 45.903226 | 113 | 0.673577 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,217 | 0.389494 |
dcb97c38d946b815896e5f38420a778cdd1546ea | 53,155 | py | Python | monasca_api/tests/test_a_repository.py | zhangjianweibj/monasca-api | 26133aefe413546f91aaa13c981fe93a69dfc2eb | [
"Apache-2.0"
] | 50 | 2015-10-18T02:54:52.000Z | 2021-12-05T07:54:08.000Z | monasca_api/tests/test_a_repository.py | zhangjianweibj/monasca-api | 26133aefe413546f91aaa13c981fe93a69dfc2eb | [
"Apache-2.0"
] | 13 | 2015-10-29T12:54:07.000Z | 2021-09-02T06:17:42.000Z | monasca_api/tests/test_a_repository.py | zhangjianweibj/monasca-api | 26133aefe413546f91aaa13c981fe93a69dfc2eb | [
"Apache-2.0"
] | 81 | 2015-10-21T07:43:30.000Z | 2022-01-07T03:35:05.000Z | # Copyright 2015 Cray
# Copyright 2016 FUJITSU LIMITED
# Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import fixtures
from oslo_config import cfg
from oslo_db.sqlalchemy.engines import create_engine
from sqlalchemy import delete, MetaData, insert, bindparam
from monasca_api.common.repositories.sqla import models
from monasca_api.tests import base
CONF = cfg.CONF
class TestAlarmRepoDB(base.BaseTestCase):
@classmethod
def setUpClass(cls):
engine = create_engine('sqlite://')
qry = open('monasca_api/tests/sqlite_alarm.sql', 'r').read()
sconn = engine.raw_connection()
c = sconn.cursor()
c.executescript(qry)
sconn.commit()
c.close()
cls.engine = engine
def _fake_engine_from_config(*args, **kw):
return cls.engine
cls.fixture = fixtures.MonkeyPatch(
'sqlalchemy.create_engine', _fake_engine_from_config)
cls.fixture.setUp()
metadata = MetaData()
cls.aa = models.create_aa_model(metadata)
cls._delete_aa_query = delete(cls.aa)
cls._insert_aa_query = (insert(cls.aa)
.values(
alarm_definition_id=bindparam('alarm_definition_id'),
alarm_state=bindparam('alarm_state'),
action_id=bindparam('action_id')))
cls.ad = models.create_ad_model(metadata)
cls._delete_ad_query = delete(cls.ad)
cls._insert_ad_query = (insert(cls.ad)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
severity=bindparam('severity'),
expression=bindparam('expression'),
match_by=bindparam('match_by'),
actions_enabled=bindparam('actions_enabled'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
deleted_at=bindparam('deleted_at')))
cls.sad = models.create_sad_model(metadata)
cls._delete_sad_query = delete(cls.sad)
cls._insert_sad_query = (insert(cls.sad)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
function=bindparam('function'),
metric_name=bindparam('metric_name'),
operator=bindparam('operator'),
threshold=bindparam('threshold'),
period=bindparam('period'),
periods=bindparam('periods'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.sadd = models.create_sadd_model(metadata)
cls._delete_sadd_query = delete(cls.sadd)
cls._insert_sadd_query = (insert(cls.sadd)
.values(
sub_alarm_definition_id=bindparam('sub_alarm_definition_id'),
dimension_name=bindparam('dimension_name'),
value=bindparam('value')))
cls.nm = models.create_nm_model(metadata)
cls._delete_nm_query = delete(cls.nm)
cls._insert_nm_query = (insert(cls.nm)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
type=bindparam('type'),
address=bindparam('address'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.a = models.create_a_model(metadata)
cls._delete_a_query = delete(cls.a)
cls._insert_a_query = (insert(cls.a)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
state=bindparam('state'),
lifecycle_state=bindparam('lifecycle_state'),
link=bindparam('link'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
state_updated_at=bindparam('state_updated_at')))
cls.sa = models.create_sa_model(metadata)
cls._delete_sa_query = delete(cls.sa)
cls._insert_sa_query = (insert(cls.sa)
.values(
id=bindparam('id'),
sub_expression_id=bindparam('sub_expression_id'),
alarm_id=bindparam('alarm_id'),
expression=bindparam('expression'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.am = models.create_am_model(metadata)
cls._delete_am_query = delete(cls.am)
cls._insert_am_query = (insert(cls.am)
.values(
alarm_id=bindparam('alarm_id'),
metric_definition_dimensions_id=bindparam(
'metric_definition_dimensions_id')))
cls.md = models.create_md_model(metadata)
cls._delete_md_query = delete(cls.md)
cls._insert_md_query = (insert(cls.md)
.values(
dimension_set_id=bindparam('dimension_set_id'),
name=bindparam('name'),
value=bindparam('value')))
cls.mdd = models.create_mdd_model(metadata)
cls._delete_mdd_query = delete(cls.mdd)
cls._insert_mdd_query = (insert(cls.mdd)
.values(
id=bindparam('id'),
metric_definition_id=bindparam('metric_definition_id'),
metric_dimension_set_id=bindparam('metric_dimension_set_id')))
cls.mde = models.create_mde_model(metadata)
cls._delete_mde_query = delete(cls.mde)
cls._insert_mde_query = (insert(cls.mde)
.values(
id=bindparam('id'),
name=bindparam('name'),
tenant_id=bindparam('tenant_id'),
region=bindparam('region')))
@classmethod
def tearDownClass(cls):
cls.fixture.cleanUp()
if hasattr(CONF, 'sql_engine'):
delattr(CONF, 'sql_engine')
    def setUp(self):
        """Create the repository under test and (re)load fixture rows.

        Builds plain-dict fixture rows for every table the alarms
        repository touches, plus the expected API-shaped alarm dicts
        (``self.alarm1`` .. ``self.alarm3``, ``self.alarm_compound``)
        that the query tests compare against, then wipes and repopulates
        the shared in-memory SQLite database.
        """
        super(TestAlarmRepoDB, self).setUp()
        self.conf_override(connection='sqlite://', group='database')
        from monasca_api.common.repositories.sqla import alarms_repository as ar
        self.repo = ar.AlarmsRepository()
        # Fixed timestamps so the expected ISO strings below are stable.
        timestamp1 = datetime.datetime(2015, 3, 14, 9, 26, 53)
        timestamp2 = datetime.datetime(2015, 3, 14, 9, 26, 54)
        timestamp3 = datetime.datetime(2015, 3, 14, 9, 26, 55)
        timestamp4 = datetime.datetime(2015, 3, 15, 9, 26, 53)
        # Alarm rows: three alarms for definition '1', one for '234'.
        self.default_as = [{'id': '1',
                            'alarm_definition_id': '1',
                            'state': 'OK',
                            'lifecycle_state': 'OPEN',
                            'link': 'http://somesite.com/this-alarm-info',
                            'created_at': timestamp1,
                            'updated_at': timestamp1,
                            'state_updated_at': timestamp1},
                           {'id': '2',
                            'alarm_definition_id': '1',
                            'state': 'UNDETERMINED',
                            'lifecycle_state': 'OPEN',
                            'link': 'http://somesite.com/this-alarm-info',
                            'created_at': timestamp2,
                            'updated_at': timestamp2,
                            'state_updated_at': timestamp2},
                           {'id': '3',
                            'alarm_definition_id': '1',
                            'state': 'ALARM',
                            'lifecycle_state': None,
                            'link': 'http://somesite.com/this-alarm-info',
                            'created_at': timestamp3,
                            'updated_at': timestamp3,
                            'state_updated_at': timestamp3},
                           {'id': '234111',
                            'alarm_definition_id': '234',
                            'state': 'UNDETERMINED',
                            'lifecycle_state': None,
                            'link': None,
                            'created_at': timestamp4,
                            'updated_at': timestamp4,
                            'state_updated_at': timestamp4}]
        # Alarm definitions: a simple one ('1') and a compound one ('234').
        self.default_ads = [{'id': '1',
                             'tenant_id': 'bob',
                             'name': '90% CPU',
                             'severity': 'LOW',
                             'expression': 'AVG(cpu.idle_perc{flavor_id=777,'
                                           ' image_id=888, device=1}) > 10',
                             'match_by': 'flavor_id,image_id',
                             'actions_enabled': False,
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now(),
                             'deleted_at': None},
                            {'id': '234',
                             'tenant_id': 'bob',
                             'name': '50% CPU',
                             'severity': 'CRITICAL',
                             'expression': 'AVG(cpu.sys_mem'
                                           '{service=monitoring})'
                                           ' > 20 and AVG(cpu.idle_perc'
                                           '{service=monitoring}) < 10',
                             'match_by': 'hostname,region',
                             'actions_enabled': False,
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now(),
                             'deleted_at': None}]
        # Sub-alarm definition dimensions.
        self.default_sadds = [{'sub_alarm_definition_id': '111',
                               'dimension_name': 'flavor_id',
                               'value': '777'},
                              {'sub_alarm_definition_id': '111',
                               'dimension_name': 'image_id',
                               'value': '888'},
                              {'sub_alarm_definition_id': '111',
                               'dimension_name': 'metric_name',
                               'value': 'cpu'},
                              {'sub_alarm_definition_id': '111',
                               'dimension_name': 'device',
                               'value': '1'},
                              {'sub_alarm_definition_id': '222',
                               'dimension_name': 'flavor_id',
                               'value': '777'},
                              {'sub_alarm_definition_id': '222',
                               'dimension_name': 'image_id',
                               'value': '888'},
                              {'sub_alarm_definition_id': '222',
                               'dimension_name': 'metric_name',
                               'value': 'mem'}]
        # Notification methods.
        self.default_nms = [{'id': '29387234',
                             'tenant_id': 'alarm-test',
                             'name': 'MyEmail',
                             'type': 'EMAIL',
                             'address': 'a@b',
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now()},
                            {'id': '77778687',
                             'tenant_id': 'alarm-test',
                             'name': 'OtherEmail',
                             'type': 'EMAIL',
                             'address': 'a@b',
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now()}]
        # Alarm actions wiring definitions to notification methods.
        self.default_aas = [{'alarm_definition_id': '123',
                             'alarm_state': 'ALARM',
                             'action_id': '29387234'},
                            {'alarm_definition_id': '123',
                             'alarm_state': 'ALARM',
                             'action_id': '77778687'},
                            {'alarm_definition_id': '234',
                             'alarm_state': 'ALARM',
                             'action_id': '29387234'},
                            {'alarm_definition_id': '234',
                             'alarm_state': 'ALARM',
                             'action_id': '77778687'}]
        # Sub-alarm definitions for the compound definition '234'.
        self.default_sads = [{'id': '43',
                              'alarm_definition_id': '234',
                              'function': 'f_43',
                              'metric_name': 'm_43',
                              'operator': 'GT',
                              'threshold': 0,
                              'period': 1,
                              'periods': 2,
                              'created_at': datetime.datetime.now(),
                              'updated_at': datetime.datetime.now()},
                             {'id': '45',
                              'alarm_definition_id': '234',
                              'function': 'f_45',
                              'metric_name': 'm_45',
                              'operator': 'GT',
                              'threshold': 0,
                              'period': 1,
                              'periods': 2,
                              'created_at': datetime.datetime.now(),
                              'updated_at': datetime.datetime.now()},
                             {'id': '47',
                              'alarm_definition_id': '234',
                              'function': 'f_47',
                              'metric_name': 'm_47',
                              'operator': 'GT',
                              'threshold': 0,
                              'period': 1,
                              'periods': 2,
                              'created_at': datetime.datetime.now(),
                              'updated_at': datetime.datetime.now()},
                             {'id': '8484',
                              'alarm_definition_id': '234',
                              'function': 'f_49',
                              'metric_name': 'm_49',
                              'operator': 'GT',
                              'threshold': 0,
                              'period': 1,
                              'periods': 2,
                              'created_at': datetime.datetime.now(),
                              'updated_at': datetime.datetime.now()},
                             {'id': '8686',
                              'alarm_definition_id': '234',
                              'function': 'f_51',
                              'metric_name': 'm_51',
                              'operator': 'GT',
                              'threshold': 0,
                              'period': 1,
                              'periods': 2,
                              'created_at': datetime.datetime.now(),
                              'updated_at': datetime.datetime.now()}]
        # Sub-alarms, one per simple alarm.
        self.default_sas = [{'sub_expression_id': '43',
                             'id': '42',
                             'alarm_id': '1',
                             'expression': 'avg(cpu.idle_perc{flavor_id=777,'
                                           ' image_id=888, device=1}) > 10',
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now()},
                            {'sub_expression_id': '45',
                             'id': '43',
                             'alarm_id': '2',
                             'expression': 'avg(cpu.idle_perc{flavor_id=777,'
                                           ' image_id=888, device=1}) > 10',
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now()},
                            {'sub_expression_id': '47',
                             'id': '44',
                             'alarm_id': '3',
                             'expression': 'avg(cpu.idle_perc{flavor_id=777,'
                                           ' image_id=888, device=1}) > 10',
                             'created_at': datetime.datetime.now(),
                             'updated_at': datetime.datetime.now()}]
        # Alarm -> metric-definition-dimensions associations.
        self.default_ams = [{'alarm_id': '1',
                             'metric_definition_dimensions_id': b'11'},
                            {'alarm_id': '1',
                             'metric_definition_dimensions_id': b'22'},
                            {'alarm_id': '2',
                             'metric_definition_dimensions_id': b'11'},
                            {'alarm_id': '3',
                             'metric_definition_dimensions_id': b'22'},
                            {'alarm_id': '234111',
                             'metric_definition_dimensions_id': b'31'},
                            {'alarm_id': '234111',
                             'metric_definition_dimensions_id': b'32'}]
        # Metric definitions (binary ids match the schema's column type).
        self.default_mdes = [{'id': b'1',
                              'name': 'cpu.idle_perc',
                              'tenant_id': 'bob',
                              'region': 'west'},
                             {'id': b'111',
                              'name': 'cpu.sys_mem',
                              'tenant_id': 'bob',
                              'region': 'west'},
                             {'id': b'112',
                              'name': 'cpu.idle_perc',
                              'tenant_id': 'bob',
                              'region': 'west'}]
        # Metric definition <-> dimension-set associations.
        self.default_mdds = [{'id': b'11',
                              'metric_definition_id': b'1',
                              'metric_dimension_set_id': b'1'},
                             {'id': b'22',
                              'metric_definition_id': b'1',
                              'metric_dimension_set_id': b'2'},
                             {'id': b'31',
                              'metric_definition_id': b'111',
                              'metric_dimension_set_id': b'21'},
                             {'id': b'32',
                              'metric_definition_id': b'112',
                              'metric_dimension_set_id': b'22'}]
        # Metric dimension name/value pairs, keyed by dimension set.
        self.default_mds = [{'dimension_set_id': b'1',
                             'name': 'instance_id',
                             'value': '123'},
                            {'dimension_set_id': b'1',
                             'name': 'service',
                             'value': 'monitoring'},
                            {'dimension_set_id': b'2',
                             'name': 'flavor_id',
                             'value': '222'},
                            {'dimension_set_id': b'22',
                             'name': 'flavor_id',
                             'value': '333'},
                            {'dimension_set_id': b'21',
                             'name': 'service',
                             'value': 'monitoring'},
                            {'dimension_set_id': b'22',
                             'name': 'service',
                             'value': 'monitoring'},
                            {'dimension_set_id': b'21',
                             'name': 'hostname',
                             'value': 'roland'},
                            {'dimension_set_id': b'22',
                             'name': 'hostname',
                             'value': 'roland'},
                            {'dimension_set_id': b'21',
                             'name': 'region',
                             'value': 'colorado'},
                            {'dimension_set_id': b'22',
                             'name': 'region',
                             'value': 'colorado'},
                            {'dimension_set_id': b'22',
                             'name': 'extra',
                             'value': 'vivi'}]
        # Expected API-shaped results matching the rows above.
        self.alarm1 = {'alarm_definition': {'id': '1',
                                            'name': '90% CPU',
                                            'severity': 'LOW'},
                       'created_timestamp': '2015-03-14T09:26:53Z',
                       'id': '1',
                       'lifecycle_state': 'OPEN',
                       'link': 'http://somesite.com/this-alarm-info',
                       'metrics': [{'dimensions': {'instance_id': '123',
                                                   'service': 'monitoring'},
                                    'name': 'cpu.idle_perc'},
                                   {'dimensions': {'flavor_id': '222'},
                                    'name': 'cpu.idle_perc'}],
                       'state': 'OK',
                       'state_updated_timestamp': '2015-03-14T09:26:53Z',
                       'updated_timestamp': '2015-03-14T09:26:53Z'}
        self.alarm2 = {'alarm_definition': {'id': '1',
                                            'name': '90% CPU',
                                            'severity': 'LOW'},
                       'created_timestamp': '2015-03-14T09:26:54Z',
                       'id': '2',
                       'lifecycle_state': 'OPEN',
                       'link': 'http://somesite.com/this-alarm-info',
                       'metrics': [{'dimensions': {'instance_id': '123',
                                                   'service': 'monitoring'},
                                    'name': 'cpu.idle_perc'}],
                       'state': 'UNDETERMINED',
                       'state_updated_timestamp': '2015-03-14T09:26:54Z',
                       'updated_timestamp': '2015-03-14T09:26:54Z'}
        self.alarm_compound = {'alarm_definition': {'id': '234',
                                                    'name': '50% CPU',
                                                    'severity': 'CRITICAL'},
                               'created_timestamp': '2015-03-15T09:26:53Z',
                               'id': '234111',
                               'lifecycle_state': None,
                               'link': None,
                               'metrics': [
                                   {'dimensions': {'hostname': 'roland',
                                                   'region': 'colorado',
                                                   'service': 'monitoring'},
                                    'name': 'cpu.sys_mem'},
                                   {'dimensions': {'extra': 'vivi',
                                                   'flavor_id': '333',
                                                   'hostname': 'roland',
                                                   'region': 'colorado',
                                                   'service': 'monitoring'},
                                    'name': 'cpu.idle_perc'}],
                               'state': 'UNDETERMINED',
                               'state_updated_timestamp':
                                   '2015-03-15T09:26:53Z',
                               'updated_timestamp': '2015-03-15T09:26:53Z'}
        self.alarm3 = {'alarm_definition': {'id': '1',
                                            'name': '90% CPU',
                                            'severity': 'LOW'},
                       'created_timestamp': '2015-03-14T09:26:55Z',
                       'id': '3',
                       'lifecycle_state': None,
                       'link': 'http://somesite.com/this-alarm-info',
                       'metrics': [{'dimensions': {'flavor_id': '222'},
                                    'name': 'cpu.idle_perc'}],
                       'state': 'ALARM',
                       'state_updated_timestamp': '2015-03-14T09:26:55Z',
                       'updated_timestamp': '2015-03-14T09:26:55Z'}
        # Wipe and repopulate every table inside one transaction.
        with self.engine.begin() as conn:
            conn.execute(self._delete_am_query)
            conn.execute(self._insert_am_query, self.default_ams)
            conn.execute(self._delete_md_query)
            conn.execute(self._insert_md_query, self.default_mds)
            conn.execute(self._delete_mdd_query)
            conn.execute(self._insert_mdd_query, self.default_mdds)
            conn.execute(self._delete_a_query)
            conn.execute(self._insert_a_query, self.default_as)
            conn.execute(self._delete_sa_query)
            conn.execute(self._insert_sa_query, self.default_sas)
            conn.execute(self._delete_mde_query)
            conn.execute(self._insert_mde_query, self.default_mdes)
            conn.execute(self._delete_ad_query)
            conn.execute(self._insert_ad_query, self.default_ads)
            conn.execute(self._delete_sad_query)
            conn.execute(self._insert_sad_query, self.default_sads)
            conn.execute(self._delete_sadd_query)
            conn.execute(self._insert_sadd_query, self.default_sadds)
            conn.execute(self._delete_nm_query)
            conn.execute(self._insert_nm_query, self.default_nms)
            conn.execute(self._delete_aa_query)
            conn.execute(self._insert_aa_query, self.default_aas)
def helper_builder_result(self, alarm_rows):
result = []
if not alarm_rows:
return result
# Forward declaration
alarm = {}
prev_alarm_id = None
for alarm_row in alarm_rows:
if prev_alarm_id != alarm_row['alarm_id']:
if prev_alarm_id is not None:
result.append(alarm)
ad = {u'id': alarm_row['alarm_definition_id'],
u'name': alarm_row['alarm_definition_name'],
u'severity': alarm_row['severity'], }
metrics = []
alarm = {u'id': alarm_row['alarm_id'], u'metrics': metrics,
u'state': alarm_row['state'],
u'lifecycle_state': alarm_row['lifecycle_state'],
u'link': alarm_row['link'],
u'state_updated_timestamp':
alarm_row['state_updated_timestamp'].isoformat() +
'Z',
u'updated_timestamp':
alarm_row['updated_timestamp'].isoformat() + 'Z',
u'created_timestamp':
alarm_row['created_timestamp'].isoformat() + 'Z',
u'alarm_definition': ad}
prev_alarm_id = alarm_row['alarm_id']
dimensions = {}
metric = {u'name': alarm_row['metric_name'],
u'dimensions': dimensions}
if alarm_row['metric_dimensions']:
for dimension in alarm_row['metric_dimensions'].split(','):
parsed_dimension = dimension.split('=')
dimensions[parsed_dimension[0]] = parsed_dimension[1]
metrics.append(metric)
result.append(alarm)
return result
def test_should_delete(self):
tenant_id = 'bob'
alarm_id = '1'
alarm1 = self.repo.get_alarm(tenant_id, alarm_id)
alarm1 = self.helper_builder_result(alarm1)
self.assertEqual(alarm1[0], self.alarm1)
self.repo.delete_alarm(tenant_id, alarm_id)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm, tenant_id, alarm_id)
def test_should_throw_exception_on_delete(self):
tenant_id = 'bob'
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.delete_alarm, tenant_id, 'Not an alarm ID')
def test_should_find_alarm_def(self):
tenant_id = 'bob'
alarm_id = '1'
expected = {'actions_enabled': False,
'deleted_at': None,
'description': None,
'expression': 'AVG(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'id': '1',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'severity': 'LOW',
'tenant_id': 'bob'}
alarm_def = self.repo.get_alarm_definition(tenant_id, alarm_id)
expected['created_at'] = alarm_def['created_at']
expected['updated_at'] = alarm_def['updated_at']
self.assertEqual(alarm_def, expected)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm_definition,
tenant_id, 'Not an alarm ID')
def test_should_find(self):
res = self.repo.get_alarms(tenant_id='Not a tenant id', limit=1)
self.assertEqual(res, [])
tenant_id = 'bob'
res = self.repo.get_alarms(tenant_id=tenant_id, limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.sys_mem'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'flavor_id': '222'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_dimensions': {'flavor_id': '333'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_dimensions': {'flavor_id': '222|333'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_dimensions': {'flavor_id': ''}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring',
'hostname': 'roland'}}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm2,
self.alarm_compound]
self.assertEqual(res, expected)
alarm_def_id = self.alarm1['alarm_definition']['id']
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring'},
'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2]
self.assertEqual(res, expected)
alarm_def_id = self.alarm1['alarm_definition']['id']
query_parms = {'metric_name': 'cpu.idle_perc',
'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3]
self.assertEqual(res, expected)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id,
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.sys_mem',
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring'},
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm2,
self.alarm_compound]
self.assertEqual(res, expected)
time_now = datetime.datetime.now().isoformat() + 'Z'
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': {'service': 'monitoring'},
'state': 'UNDETERMINED',
'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = []
self.assertEqual(res, expected)
time_now = '2015-03-15T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
time_now = '2015-03-14T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'state_updated_start_time': time_now,
'link': 'http://google.com',
'lifecycle_state': 'OPEN'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=None,
offset='10')
res = self.helper_builder_result(res)
expected = []
self.assertEqual(res, expected)
query_parms = {'severity': 'LOW'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3]
self.assertEqual(expected, res)
query_parms = {'severity': 'CRITICAL'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(expected, res)
query_parms = {'severity': 'LOW|CRITICAL'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
def test_should_count(self):
tenant_id = 'bob'
res = self.repo.get_alarms_count(tenant_id=tenant_id)
self.assertEqual([{'count': 4}], res)
res = self.repo.get_alarms_count(tenant_id=tenant_id,
limit=1000)
self.assertEqual([{'count': 4}], res)
res = self.repo.get_alarms_count(tenant_id=tenant_id,
limit=1000,
offset=10)
self.assertEqual([], res)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 1}], res)
query_parms = {'metric_name': 'cpu.sys_mem'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 1}], res)
query_parms = {'state': 'UNDETERMINED'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 2}], res)
time_now = '2015-03-15T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 1}], res)
query_parms = {'severity': 'LOW'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 3}], res)
query_parms = {'lifecycle_state': 'OPEN'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 2}], res)
query_parms = {'link': 'http://somesite.com/this-alarm-info'}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 3}], res)
query_parms = {'metric_dimensions': {'flavor_id': '222'}}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 2}], res)
query_parms = {'group_by': ['metric_name']}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [{'count': 4, 'metric_name': 'cpu.idle_perc'},
{'count': 1, 'metric_name': 'cpu.sys_mem'}]
self.assertEqual(expected, res)
query_parms = {'group_by': ['dimension_name']}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [{'count': 1, 'dimension_name': 'extra'},
{'count': 3, 'dimension_name': 'flavor_id'},
{'count': 1, 'dimension_name': 'hostname'},
{'count': 2, 'dimension_name': 'instance_id'},
{'count': 1, 'dimension_name': 'region'},
{'count': 3, 'dimension_name': 'service'}]
self.assertEqual(expected, res)
query_parms = {'group_by': ['dimension_value']}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [{'count': 2, 'dimension_value': '123'},
{'count': 2, 'dimension_value': '222'},
{'count': 1, 'dimension_value': '333'},
{'count': 1, 'dimension_value': 'colorado'},
{'count': 3, 'dimension_value': 'monitoring'},
{'count': 1, 'dimension_value': 'roland'},
{'count': 1, 'dimension_value': 'vivi'}]
self.assertEqual(expected, res)
query_parms = {'group_by': []}
res = self.repo.get_alarms_count(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
self.assertEqual([{'count': 4}], res)
def test_should_sort_and_find(self):
tenant_id = 'bob'
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_definition_id']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3,
self.alarm_compound]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_definition_name']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [self.alarm_compound,
self.alarm1,
self.alarm2,
self.alarm3]
res = self.helper_builder_result(res)
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['severity']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [self.alarm1,
self.alarm2,
self.alarm3,
self.alarm_compound]
res = self.helper_builder_result(res)
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['state']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
res = self.helper_builder_result(res)
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id asc']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id desc']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm3,
self.alarm_compound,
self.alarm2,
self.alarm1]
self.assertEqual(expected, res)
query_parms = {'metric_name': 'cpu.idle_perc',
'sort_by': ['alarm_id nfl']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(expected, res)
    def test_should_update(self):
        """update_alarm must bump both timestamps on a state change, but only
        updated_timestamp when the state is re-asserted unchanged."""
        tenant_id = 'bob'
        alarm_id = '2'
        # Snapshot the alarm before the update.
        alarm = self.repo.get_alarm(tenant_id, alarm_id)
        alarm = self.helper_builder_result(alarm)[0]
        original_state_updated_date = alarm['state_updated_timestamp']
        original_updated_timestamp = alarm['updated_timestamp']
        self.assertEqual(alarm['state'], 'UNDETERMINED')
        # First update changes the state: UNDETERMINED -> OK.
        prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None)
        alarm_new = self.repo.get_alarm(tenant_id, alarm_id)
        alarm_new = self.helper_builder_result(alarm_new)[0]
        new_state_updated_date = alarm_new['state_updated_timestamp']
        new_updated_timestamp = alarm_new['updated_timestamp']
        # A real state change must refresh both timestamps.
        self.assertNotEqual(original_updated_timestamp,
                            new_updated_timestamp,
                            'updated_at did not change')
        self.assertNotEqual(original_state_updated_date,
                            new_state_updated_date,
                            'state_updated_at did not change')
        # update_alarm returns the pre-update (state, link, lifecycle_state).
        alarm_tmp = tuple(alarm[k] for k in ('state', 'link', 'lifecycle_state'))
        self.assertEqual(alarm_tmp, prev_state)
        # Apart from the updated fields, the alarm must be untouched.
        alarm['state_updated_timestamp'] = alarm_new['state_updated_timestamp']
        alarm['updated_timestamp'] = alarm_new['updated_timestamp']
        alarm['state'] = alarm_new['state']
        alarm['link'] = alarm_new['link']
        alarm['lifecycle_state'] = alarm_new['lifecycle_state']
        self.assertEqual(alarm, alarm_new)
        # Sleep so a second update lands on a later timestamp tick.
        time.sleep(1)
        # Second update keeps the state at OK (no state transition).
        prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None)
        alarm_unchanged = self.repo.get_alarm(tenant_id, alarm_id)
        alarm_unchanged = self.helper_builder_result(alarm_unchanged)[0]
        unchanged_state_updated_date = alarm_unchanged['state_updated_timestamp']
        unchanged_updated_timestamp = alarm_unchanged['updated_timestamp']
        # updated_timestamp still moves forward ...
        self.assertNotEqual(unchanged_updated_timestamp,
                            new_updated_timestamp,
                            'updated_at did not change')
        # ... but state_updated_timestamp must stay put without a transition.
        self.assertEqual(unchanged_state_updated_date,
                         new_state_updated_date,
                         'state_updated_at did change')
        alarm_new_tmp = tuple(alarm_new[k] for k in ('state', 'link', 'lifecycle_state'))
        self.assertEqual(alarm_new_tmp, prev_state)
def test_should_throw_exception_on_update(self):
tenant_id = 'bob'
alarm_id = 'Not real alarm id'
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.update_alarm,
tenant_id,
alarm_id,
'UNDETERMINED',
None,
None)
def test_get_alarm_metrics(self):
alarm_id = '2'
alarm_metrics = self.repo.get_alarm_metrics(alarm_id)
expected = [{'alarm_id': '2',
'dimensions': 'instance_id=123,service=monitoring',
'name': 'cpu.idle_perc'}]
self.assertEqual(alarm_metrics, expected)
def test_get_subalarms(self):
tenant_id = 'bob'
alarm_id = '2'
sub_alarms = self.repo.get_sub_alarms(tenant_id, alarm_id)
expected = [{'alarm_definition_id': '1',
'alarm_id': '2',
'expression': 'avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10',
'sub_alarm_id': '43'}]
self.assertEqual(sub_alarms, expected)
| 46.545534 | 100 | 0.456834 | 52,201 | 0.982052 | 0 | 0 | 7,052 | 0.132669 | 0 | 0 | 11,415 | 0.214749 |
dcbb7822ac5edb24535291bc298a197c89feab26 | 7,032 | py | Python | uniflocpy/uTemperature/temp_cable_NS.py | Shabonasar/unifloc | 1f12d6b4110a9ff0e10817560ad99d55c9133954 | [
"MIT"
] | 4 | 2019-02-05T20:02:44.000Z | 2019-08-24T13:18:31.000Z | uniflocpy/uTemperature/temp_cable_NS.py | Shabonasar/unifloc | 1f12d6b4110a9ff0e10817560ad99d55c9133954 | [
"MIT"
] | 32 | 2017-09-29T15:14:59.000Z | 2019-12-02T07:17:22.000Z | uniflocpy/uTemperature/temp_cable_NS.py | Shabonasar/unifloc | 1f12d6b4110a9ff0e10817560ad99d55c9133954 | [
"MIT"
] | 4 | 2020-05-31T16:14:17.000Z | 2020-08-31T06:24:29.000Z |
"""ГОСТ Р 51777-2001 Кабели для установок погружных электронасосов.
Общие технические условия (с Поправкой) """
import math
from scipy.optimize import fsolve
# TODO реализовать нормально ГОСТ, отрефакторить, учитывать разные формы кабеля
# TODO толщины слоев сделать
# TODO рисунок кабеля при инициализации
class Cable():
    """Thermal model of a submersible-pump power cable (GOST R 51777-2001).

    Naming: "STR" below stands for specific thermal resistance.  Surface
    transitions are given in C*cm^2/W, bulk materials in C*cm/W; diameters
    and layer sizes are in mm.  Defaults describe a round cable in oil.
    """
    def __init__(self):
        # T    - maximum continuously permissible conductor temperature, C
        # Tenv - ambient (well) temperature
        # STR  - specific thermal resistance of a heat transition
        self.sigma_liquid__Ccm_V = 104  # STR cable surface -> water -> tubing, C*cm^2/W
        self.sigma_oil = 425  # STR cable surface -> well fluid (oil) -> tubing, C*cm^2/W
        self.sigma_gas = 1100  # STR radiative transfer from cable surface into gas
        self.sigma_polyethylene_Ccm_V = 400  # STR of polyethylene / polypropylene compounds
        self.sigma_thermoplastic_elastomers_Ccm_V = 600  # STR of thermoplastic elastomer
        self.sigma_rubber_Ccm_V = 750  # STR of rubber
        self.sigma_fluorocopolymers_Ccm_V = 1000  # STR of fluoro-copolymers
        self.sigma_braids_ribbons_Ccm_V = 650  # STR of braid/ribbon material (bandages, pillows)
        self.sigma_plumbum_Ccm_V = 3  # STR of lead and its alloys
        self.sigma_1isolation_Ccm_V = self.sigma_polyethylene_Ccm_V  # first insulation layer STR, C*cm/W
        self.sigma_2isolation_Ccm_V = self.sigma_polyethylene_Ccm_V
        self.sigma_shell_Ccm_V = self.sigma_polyethylene_Ccm_V
        self.sigma_bandage_Ccm_V = self.sigma_braids_ribbons_Ccm_V
        self.sigma_pillow_Ccm_V = self.sigma_braids_ribbons_Ccm_V
        self.sigma_o = 750  # shell material
        self.sigma_b = 1000  # bandage over the shell
        self.sigma_p = 3  # pillow under the armour
        self.t_permanently_permissible_c = 120  # continuously permissible core temperature
        self.R = 1.15  # electrical resistance of the conductive core (at 20 C)
        self.d_mm = 4.5  # nominal conductive core diameter, mm
        self.d1_first_isolation_mm = 7.5  # nominal outer diameter of the first insulation layer, mm
        self.d2_second_isolation_mm = 7.5  # nominal outer diameter of the second insulation layer, mm
        self.do_shell_mm = 10  # nominal core shell diameter, mm
        self.db_bandage_mm = 11  # nominal outer diameter of the bandage over the shell
        self.Dc_twist_mm = 20  # nominal diameter over the twisted cores, mm
        self.Dp_pillow_mm = 12  # nominal outer diameter of the pillow under the armour
        self.D_round_cable_mm = 30  # maximum outer diameter of a round cable
        # Maximum outer dimensions of a flat cable
        self.H_flat_cable_mm = 12.5  # thickness
        self.B_flat_cable_mm = 36  # width
        self.di_casing_mm = 120  # inner diameter of the well casing
        self.alpha_1C = 0.0038  # temperature coefficient of the core material's
        # electrical resistance, 1/C
        self.cabel_type = 'Round'  # or 'Flat'
        self.environment_type = 'Oil'  # in oil; 'Water' - in water

    def __thermal_resistance_cable__(self):
        """Thermal resistance of the cable itself (insulation + shell + bandage + pillow)."""
        result = (1 / 6 / math.pi * (self.sigma_1isolation_Ccm_V * math.log(self.d1_first_isolation_mm / self.d_mm) +
                                     self.sigma_shell_Ccm_V * math.log(self.do_shell_mm / self.d1_first_isolation_mm) +
                                     self.sigma_bandage_Ccm_V * math.log(self.db_bandage_mm / self.do_shell_mm)) +
                  self.sigma_pillow_Ccm_V / 2 / math.pi * math.log(self.D_round_cable_mm / self.Dc_twist_mm))
        # TODO: verify that D_round_cable_mm (not Dp_pillow_mm) is the right
        # outer diameter for the pillow term.
        return result

    def __thermal_resistance_environment__(self):
        """Thermal resistance of the environment surrounding the cable."""
        # Per B.2.2.1: resistance into the well fluid of an oil well.
        if self.cabel_type == 'Round' and self.environment_type == 'Oil':
            return (1 / 2 / math.pi * 10 * (self.sigma_oil *
                                            (1 / self.D_round_cable_mm + 1 / self.di_casing_mm) +
                                            self.sigma_gas / self.D_round_cable_mm))
        if self.cabel_type == 'Flat' and self.environment_type == 'Oil':
            return (1 / 2 * 10 * (self.sigma_oil * (1 / (1.14 * self.H_flat_cable_mm + 2 * self.B_flat_cable_mm) +
                                                    1 / math.pi / self.di_casing_mm) +
                                  self.sigma_gas / (1.14 * self.H_flat_cable_mm + 2 * self.B_flat_cable_mm)))
        # Previously this silently fell through and returned None, which made
        # the callers fail later with an opaque TypeError.  Fail loudly instead.
        raise NotImplementedError(
            "Unsupported cable/environment combination: %s / %s"
            % (self.cabel_type, self.environment_type))

    def __electricial_resistance_cable_core__(self, R, t, alpha):
        """Electrical resistance of the conductive core at temperature t (Ohm)."""
        result = R * (1 + alpha * (t - 20))
        return result

    def __calc_i_a__(self, t, t_env, s_c, s_env, rt):
        """Continuously permissible current, A."""
        result = ((t - t_env) * 10 ** 5 / 3 / (s_c + s_env) / rt) ** (1 / 2)
        return result

    def __t_cabel_c__(self, tf_c, i, rt, s_cable, s_env):
        """Cable temperature for current i at fluid temperature tf_c, C."""
        result = (i ** 2) * (s_cable + s_env) * rt * 3 / 10 ** 5 + tf_c
        return result

    def calc_t_max_cable_c(self, tf_c, i_a):
        """
        Compute the self-consistent cable temperature.

        :param tf_c: ambient/fluid temperature, C
        :param i_a: core current, A
        :return: cable temperature, C (as returned by fsolve)
        """
        delta0 = tf_c * 0 + 10  # initial guess, shaped like tf_c

        def calc_temp_cable(val_t_cabel1):
            # Residual: heating computed at the assumed temperature minus the
            # assumed temperature itself (zero at the fixed point).
            s_c_val = self.__thermal_resistance_cable__()
            s_env_val = self.__thermal_resistance_environment__()
            rt_val = self.__electricial_resistance_cable_core__(self.R, val_t_cabel1, self.alpha_1C)
            val_t_cabel2 = self.__t_cabel_c__(tf_c, i_a, rt_val, s_c_val, s_env_val)
            return val_t_cabel2 - val_t_cabel1

        # Finds val_t_cabel1 such that calc_temp_cable(val_t_cabel1) == 0.
        result = fsolve(calc_temp_cable, delta0)
        return result

    def calc_i_max_a(self, t_max_c, t_env_c):
        """
        Maximum continuously permissible current for the cable.

        Note: updates self.t_permanently_permissible_c and self.t_env_c as a
        side effect (kept for backward compatibility).

        :param t_max_c: temperature index of the cable (max core temperature), C
        :param t_env_c: ambient temperature, C
        :return: continuously permissible current, A
        """
        self.t_permanently_permissible_c = t_max_c
        self.t_env_c = t_env_c
        s_c_val = self.__thermal_resistance_cable__()
        s_env_val = self.__thermal_resistance_environment__()
        rt_val = self.__electricial_resistance_cable_core__(self.R, self.t_permanently_permissible_c, self.alpha_1C)
        return self.__calc_i_a__(self.t_permanently_permissible_c, self.t_env_c, s_c_val, s_env_val, rt_val)
| 50.956522 | 143 | 0.665671 | 8,470 | 0.943522 | 0 | 0 | 0 | 0 | 0 | 0 | 4,640 | 0.516876 |
dcbbfc2a1126bf01e035f0cedab6dd794f1ffb72 | 48 | py | Python | user_orders/models/__init__.py | Vitamal/shop | facf04da00b8b674f2d8024aca4dae272a0c3de8 | [
"MIT"
] | null | null | null | user_orders/models/__init__.py | Vitamal/shop | facf04da00b8b674f2d8024aca4dae272a0c3de8 | [
"MIT"
] | null | null | null | user_orders/models/__init__.py | Vitamal/shop | facf04da00b8b674f2d8024aca4dae272a0c3de8 | [
"MIT"
] | null | null | null | from .order import Order
from .user import User
| 16 | 24 | 0.791667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f4915d9213c03b46a6f7201a11146fd617cb2dbb | 800 | py | Python | src/command_modules/azure-cli-monitor/azure/cli/command_modules/monitor/_exception_handler.py | viananth/azure-cli | 4d23492ed03e946cfc11bae23b29acb971fb137d | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-monitor/azure/cli/command_modules/monitor/_exception_handler.py | viananth/azure-cli | 4d23492ed03e946cfc11bae23b29acb971fb137d | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-monitor/azure/cli/command_modules/monitor/_exception_handler.py | viananth/azure-cli | 4d23492ed03e946cfc11bae23b29acb971fb137d | [
"MIT"
] | 1 | 2017-12-28T04:51:44.000Z | 2017-12-28T04:51:44.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.util import CLIError
def monitor_exception_handler(ex):
    """Translate monitor-service failures into CLIError; re-raise the rest.

    An exception carrying a MonitoringService inner exception surfaces only
    its error code; ErrorResponseException is wrapped whole.  Anything else
    is re-raised with the original traceback preserved.
    """
    from azure.mgmt.monitor.models import ErrorResponseException
    is_monitoring_error = (hasattr(ex, 'inner_exception')
                           and 'MonitoringService' in ex.inner_exception.message)
    if is_monitoring_error:
        raise CLIError(ex.inner_exception.code)
    if isinstance(ex, ErrorResponseException):
        raise CLIError(ex)
    import sys
    from six import reraise
    reraise(*sys.exc_info())
| 42.105263 | 94 | 0.56375 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 377 | 0.47125 |
f491909954bbe3100557b534f4855e7a6f095870 | 8,035 | py | Python | examples/EarthInterior/makeplot.py | alex-w/vplanet | e901ac08208f7fd5edb30677f32f36619eb8ca8c | [
"MIT"
] | null | null | null | examples/EarthInterior/makeplot.py | alex-w/vplanet | e901ac08208f7fd5edb30677f32f36619eb8ca8c | [
"MIT"
] | null | null | null | examples/EarthInterior/makeplot.py | alex-w/vplanet | e901ac08208f7fd5edb30677f32f36619eb8ca8c | [
"MIT"
] | null | null | null | import numpy as np
# Plot the thermal/magnetic evolution of the EarthInterior example against
# published present-day constraints.  Usage: python makeplot.py <pdf | png>
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import vplot
import scipy.signal as sig
#plt.rcParams["text.usetex"]=True
#plt.rcParams["text.latex.unicode"]=True
plt.rcParams.update({'font.size':16,'legend.fontsize':15})
import sys

# Check correct number of arguments
if (len(sys.argv) != 2):
    print('ERROR: Incorrect number of arguments.')
    print('Usage: '+sys.argv[0]+' <pdf | png>')
    sys.exit(1)
if (sys.argv[1] != 'pdf' and sys.argv[1] != 'png'):
    print('ERROR: Unknown file format: '+sys.argv[1])
    print('Options are: pdf, png')
    sys.exit(1)

# Load the vplanet simulation output for the 'earth' body.
out = vplot.GetOutput()
# Print final state
#print('Final: t=%.3f TUMan=%f TMan=%f TCMB=%f TCore=%f HflowUMan=%.1f HflowCMB=%.1f RadPowerTotal=%f RadPowerMan=%.1f RadPowerCore=%.1f MagMom=%f RIC=%f'%(out.earth.Time[-1],out.earth.TUMan[-1],out.earth.TMan[-1],out.earth.TCMB[-1],out.earth.TCore[-1],out.earth.HflowUMan[-1],out.earth.HflowCMB[-1],out.earth.RadPowerTotal[-1],out.earth.RadPowerMan[-1],out.earth.RadPowerCore[-1],out.earth.MagMom[-1],out.earth.RIC[-1]))

### Uncertainty ranges (published present-day constraints, [lo, hi])
TUMan_ra = np.array([1280.,1475.])+273. #[K] Jaupart (2015) Table 4.
TCMB_ra = np.array([3800,4200.]) #[K] Hirose (2013) Table 2.
HflowUMan_ra = np.array([35,41.]) #[TW] Jaupart (2015) Table 12.
HflowCMB_ra = np.array([5,17]) #[TW] Jaupart (2015) Table 12.
ViscUMan_ra = np.array([1.5e19,1.5e22])/3300. #[m^2/s] Paulson (2005) Fig 3.
ViscLMan_ra = np.array([3e19,1.5e22])/5200. #[m^2/s] Paulson (2005) Fig 3.
MeltMassFlux_ra = np.array([0.52,4*.52]) #[1e6 kg/s] Cogne (2004) 5-15 km^3/yr. Li (2015) ~20 km^3/yr
FMeltUMan_ra = np.array([0.07,0.15]) # refs?

### Asymmetric error bars: distance from the final model value to each bound.
TUMan_lo = np.abs(TUMan_ra[0]-out.earth.TUMan[-1])
TUMan_hi = np.abs(TUMan_ra[1]-out.earth.TUMan[-1])
TCMB_lo = np.abs(TCMB_ra[0]-out.earth.TCMB[-1])
TCMB_hi = np.abs(TCMB_ra[1]-out.earth.TCMB[-1])
HflowUMan_lo = np.abs(HflowUMan_ra[0]-out.earth.HflowUMan[-1])
HflowUMan_hi = np.abs(HflowUMan_ra[1]-out.earth.HflowUMan[-1])
HflowCMB_lo = np.abs(HflowCMB_ra[0]-out.earth.HflowCMB[-1])
HflowCMB_hi = np.abs(HflowCMB_ra[1]-out.earth.HflowCMB[-1])
ViscUMan_lo = np.abs(ViscUMan_ra[0]-out.earth.ViscUMan[-1])
ViscUMan_hi = np.abs(ViscUMan_ra[1]-out.earth.ViscUMan[-1])
ViscLMan_lo = np.abs(ViscLMan_ra[0]-out.earth.ViscLMan[-1])
ViscLMan_hi = np.abs(ViscLMan_ra[1]-out.earth.ViscLMan[-1])
# MeltMassFluxMan is stored in kg/s; the plot axis uses units of 1e6 kg/s.
MeltMassFlux_lo = np.abs(MeltMassFlux_ra[0]-out.earth.MeltMassFluxMan[-1]*1e-6)
MeltMassFlux_hi = np.abs(MeltMassFlux_ra[1]-out.earth.MeltMassFluxMan[-1]*1e-6)
FMeltUMan_lo = np.abs(FMeltUMan_ra[0]-out.earth.FMeltUMan[-1])
FMeltUMan_hi = np.abs(FMeltUMan_ra[1]-out.earth.FMeltUMan[-1])

# Plots
rows=3
cols=2
# Mantle Figure
nfig=1
fig = plt.figure(nfig, figsize=(10,15))
panel=1
# Panel 1: temperatures of mantle layers, CMB and core.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.TMan,color=vplot.colors.red,linestyle='-',label=r'$T_{M}$')
plt.plot(out.earth.Time,out.earth.TUMan,color=vplot.colors.orange,linestyle='-',label=r'$T_{UM}$')
plt.errorbar(out.earth.Time[-1],out.earth.TUMan[-1],yerr=[[TUMan_lo],[TUMan_hi]],color=vplot.colors.orange,fmt='o')
plt.plot(out.earth.Time,out.earth.TLMan,color=vplot.colors.pale_blue,linestyle='-',label=r'$T_{LM}$')
plt.plot(out.earth.Time,out.earth.TCMB,color=vplot.colors.purple,linestyle='-',label=r'$T_{CMB}$')
plt.errorbar(out.earth.Time[-1],out.earth.TCMB[-1],yerr=[[TCMB_lo],[TCMB_hi]],color=vplot.colors.purple,fmt='-o')
plt.plot(out.earth.Time,out.earth.TCore,'k-',label=r'$T_{C}$')
plt.legend(loc='best',ncol=2,frameon=True,columnspacing=1)
plt.ylabel('Temperature (K)')
plt.xlabel('Time (Gyr)')
plt.ylim(0,10000)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 2: heat flows and radiogenic power.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.HflowUMan,color=vplot.colors.red,linestyle='-',label=r'$Q_{UMan}$')
plt.errorbar(out.earth.Time[-1],out.earth.HflowUMan[-1],yerr=[[HflowUMan_lo],[HflowUMan_hi]],color=vplot.colors.red,fmt='o')
plt.plot(out.earth.Time,out.earth.HflowCMB,color=vplot.colors.orange,linestyle='-',label=r'$Q_{CMB}$')
plt.errorbar(out.earth.Time[-1],out.earth.HflowCMB[-1],yerr=[[HflowCMB_lo],[HflowCMB_hi]],color=vplot.colors.orange,fmt='o')
plt.plot(out.earth.Time,out.earth.RadPowerMan,color=vplot.colors.pale_blue,linestyle='-',label=r'$Q_{Rad,Man}$')
plt.plot(out.earth.Time,out.earth.RadPowerCore,'k-',label=r'$Q_{Rad,Core}$')
plt.legend(loc='best',frameon=True)
plt.ylabel('Heat Flow (TW)')
plt.xlabel('Time (Gyr)')
plt.ylim(0,150)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 3: thermal boundary layer depths.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.BLUMan,label=r'$\delta_{UM}$',color=vplot.colors.dark_blue)
plt.plot(out.earth.Time,out.earth.BLLMan,label=r'$\delta_{LM}$',color=vplot.colors.orange)
plt.legend(loc='best',frameon=True)
plt.ylabel(r'Boundary Layer Depths (km)')
plt.xlabel('Time (Gyr)')
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 4: mantle viscosities (log scale).
plt.subplot(rows,cols,panel)
plt.semilogy(out.earth.Time,out.earth.ViscUMan,label=r'$\nu_{UM}$',color=vplot.colors.dark_blue)
plt.errorbar(out.earth.Time[-1],out.earth.ViscUMan[-1],yerr=[[ViscUMan_lo],[ViscUMan_hi]],color=vplot.colors.dark_blue,fmt='o')
plt.semilogy(out.earth.Time,out.earth.ViscLMan,label=r'$\nu_{LM}$',color=vplot.colors.orange)
plt.errorbar(out.earth.Time[-1],out.earth.ViscLMan[-1],yerr=[[ViscLMan_lo],[ViscLMan_hi]],color=vplot.colors.orange,fmt='o')
plt.legend(loc='best',frameon=True)
plt.ylabel(r'Mantle Viscosity ($m^2s^{-1}$)')
plt.xlabel('Time (Gyr)')
plt.ylim(1e12,1e19)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 5: upper-mantle melt fraction.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.FMeltUMan,color=vplot.colors.dark_blue)
# BUG FIX: the errorbar center was scaled by 1e-6 (copy-paste from the melt
# mass flux panel); FMeltUMan is already a dimensionless fraction.
plt.errorbar(out.earth.Time[-1],out.earth.FMeltUMan[-1],yerr=[[FMeltUMan_lo],[FMeltUMan_hi]],color=vplot.colors.dark_blue,fmt='o')
plt.ylabel(r'Melt Fraction Upper Mantle (n.d.)')
plt.xlabel('Time (Gyr)')
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 6: mantle melt mass flux (plotted in 1e6 kg/s).
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.MeltMassFluxMan*1e-6,color=vplot.colors.dark_blue)
plt.errorbar(out.earth.Time[-1],out.earth.MeltMassFluxMan[-1]*1e-6,yerr=[[MeltMassFlux_lo],[MeltMassFlux_hi]],color=vplot.colors.dark_blue,fmt='o')
plt.ylabel(r'Melt Mass Flux Mantle ($\times 10^6$ kg$/$s)')
plt.xlabel('Time (Gyr)')
plt.ylim(0,100)
plt.xlim(0,4.6)
plt.xticks([0,1,2,3,4])
vplot.make_pretty(fig)
if (sys.argv[1] == 'pdf'):
    plt.savefig('EarthInterior%d.pdf'%nfig)
if (sys.argv[1] == 'png'):
    plt.savefig('EarthInterior%d.png'%nfig)

# Core Plots
rows=2
nfig += 1
fig = plt.figure(nfig, figsize=(10,10))
panel = 1
# Panel 1: inner core radius.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.RIC,label='RIC')
plt.ylim(0,1500)
plt.ylabel(r'Inner Core Radius (km)')
plt.xlabel('Time (Gyr)')
panel += 1
# Panel 2: core buoyancy fluxes.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.CoreBuoyTherm*1e13,label='Thermal')
plt.plot(out.earth.Time,out.earth.CoreBuoyCompo*1e13,label='Compositional')
plt.plot(out.earth.Time,out.earth.CoreBuoyTotal*1e13,label='Total')
plt.legend(loc='best',frameon=True)
plt.ylabel(r'Core Buoyancy Flux ($\times10^{-13}$ m$^2/$s$^3$)')
plt.xlabel('Time (Gyr)')
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 3: magnetic moment (Earth units).
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.MagMom,label='MagMom')
plt.ylim(0,2)
plt.ylabel('Magnetic Moment (E. Units)')
plt.xlabel('Time (Gyr)')
plt.xticks([0,1,2,3,4])
panel += 1
# Panel 4: magnetopause stand-off radius.
plt.subplot(rows,cols,panel)
plt.plot(out.earth.Time,out.earth.MagPauseRad)
plt.ylabel(r'Magnetopause Radius (E. Units)')
plt.xlabel('Time (Gyr)')
plt.xticks([0,1,2,3,4])
#panel += 1
#plt.subplot(rows,cols,panel)
#plt.plot(out.earth.Time,out.earth.ChiOC,label='ChiOC')
#plt.plot(out.earth.Time,out.earth.ChiIC,label='ChiIC')
#plt.ylim(0,0.2)
#plt.ylabel(r'Core Light Element Concentration')
#plt.xlabel('Time (Gyr)')
#plt.legend(loc='best',frameon=False)
vplot.make_pretty(fig)
if (sys.argv[1] == 'pdf'):
    plt.savefig('EarthInterior%d.pdf'%nfig)
if (sys.argv[1] == 'png'):
    plt.savefig('EarthInterior%d.png'%nfig)
plt.close()
| 44.148352 | 421 | 0.705663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,153 | 0.267953 |
f4935401f20bb33cbe67a8c775b7c94f08f7eeec | 417 | py | Python | numcodecs/tests/test_compat.py | rabernat/numcodecs | 568fc5ad286a91d6b96b7a35a0031eb5e4ee5e97 | [
"MIT"
] | null | null | null | numcodecs/tests/test_compat.py | rabernat/numcodecs | 568fc5ad286a91d6b96b7a35a0031eb5e4ee5e97 | [
"MIT"
] | 2 | 2018-10-20T02:13:53.000Z | 2018-10-30T23:29:35.000Z | numcodecs/tests/test_compat.py | funkey/numcodecs | 870610403baac803856cc43c302ec289cd00eea3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import array
import numpy as np
from numcodecs.compat import buffer_tobytes
def test_buffer_tobytes():
    """buffer_tobytes must return a bytes object for every supported buffer type."""
    samples = (
        b'adsdasdas',
        bytes(20),
        np.arange(100),
        array.array('l', b'qwertyuiqwertyui'),
    )
    for sample in samples:
        assert isinstance(buffer_tobytes(sample), bytes)
| 18.954545 | 64 | 0.63789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 57 | 0.136691 |
f495558b425c19c7f688ca7077b87a76ada1cbf2 | 92 | py | Python | unittest/scripts/py_dev_api_examples/working_with_collections/Working_with_Existing_Collections.py | mueller/mysql-shell | 29bafc5692bd536a12c4e41c54cb587375fe52cf | [
"Apache-2.0"
] | 119 | 2016-04-14T14:16:22.000Z | 2022-03-08T20:24:38.000Z | unittest/scripts/py_dev_api_examples/working_with_collections/Working_with_Existing_Collections.py | mueller/mysql-shell | 29bafc5692bd536a12c4e41c54cb587375fe52cf | [
"Apache-2.0"
] | 9 | 2017-04-26T20:48:42.000Z | 2021-09-07T01:52:44.000Z | unittest/scripts/py_dev_api_examples/working_with_collections/Working_with_Existing_Collections.py | mueller/mysql-shell | 29bafc5692bd536a12c4e41c54cb587375fe52cf | [
"Apache-2.0"
] | 51 | 2016-07-20T05:06:48.000Z | 2022-03-09T01:20:53.000Z | # Get a collection object for 'my_collection'
# 'db' is the session global provided by the MySQL Shell X DevAPI context.
myColl = db.get_collection('my_collection')
| 30.666667 | 46 | 0.771739 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 61 | 0.663043 |
f4968e302d7b44803fe1b1bf20a1663988d063f3 | 2,051 | py | Python | app/decorators/cacheable.py | matrufsc2/matrufsc2 | d8a32c532281cc2a09a26444bd5b8497bc578b18 | [
"RSA-MD"
] | 4 | 2017-07-07T19:04:07.000Z | 2018-07-04T18:03:49.000Z | app/decorators/cacheable.py | matrufsc2/matrufsc2 | d8a32c532281cc2a09a26444bd5b8497bc578b18 | [
"RSA-MD"
] | 6 | 2015-02-27T03:21:02.000Z | 2019-07-30T19:58:35.000Z | app/decorators/cacheable.py | matrufsc2/matrufsc2 | d8a32c532281cc2a09a26444bd5b8497bc578b18 | [
"RSA-MD"
] | null | null | null | from app.cache import get_from_cache, set_into_cache, delete_from_cache
import logging as _logging
import hashlib, json
# Module-level logger; deliberately shadows the stdlib ``logging`` name
# (the real module was imported above as ``_logging``).
logging = _logging.getLogger("matrufsc2_decorators_cacheable")
logging.setLevel(_logging.DEBUG)
__author__ = 'fernando'
# Cache-key template, filled with (function name, SHA1 of the filters dict).
CACHE_CACHEABLE_KEY = "cache/functions/%s/%s"
def cacheable(consider_only=None):
    """Decorator factory caching the result of a one-argument ``fn(filters)``.

    The cache key combines the function name with a SHA1 of the (optionally
    filtered) ``filters`` dict.  The wrapped function accepts control flags
    as keyword arguments:

    - ``persistent`` (default True): use the persistent cache layer.
    - ``overwrite`` with ``update_with``: store ``update_with`` directly,
      provided its type matches what is already cached.
    - ``overwrite`` with ``exclude``: delete the cached entry instead.

    :param consider_only: optional iterable of filter keys that take part in
        the cache key; all other keys are ignored.
    """
    def decorator(fn):
        def dec(filters, **kwargs):
            if consider_only is not None and filters:
                # FIX: was ``filters.iterkeys()``, which is Python 2 only;
                # iterating the dict directly works on Python 2 and 3 alike.
                filters = {k: filters[k] for k in filters if k in consider_only}
            filters_hash = hashlib.sha1(json.dumps(filters, sort_keys=True)).hexdigest()
            cache_key = CACHE_CACHEABLE_KEY % (
                fn.__name__,
                filters_hash
            )
            persistent = kwargs.get("persistent", True)
            if kwargs.get("overwrite"):
                update_with = kwargs.get("update_with")
                if update_with:
                    result = get_from_cache(cache_key, persistent=persistent).get_result()
                    if not result:
                        result = update_with
                    # Exact type match required so an update cannot silently
                    # change the shape of the cached value.
                    if type(result) == type(update_with):
                        logging.debug("Updating cache with passed in value")
                        set_into_cache(cache_key, update_with, persistent=persistent).get_result()
                    else:
                        raise Exception("Types differents: %s != %s" % (str(type(result)), str(type(update_with))))
                elif kwargs.get("exclude"):
                    return delete_from_cache(cache_key, persistent=persistent).get_result()
                else:
                    result = None
            else:
                result = get_from_cache(cache_key, persistent=persistent).get_result()
            if not result:
                # Cache miss (or forced refresh): compute and store.
                result = fn(filters)
                set_into_cache(cache_key, result, persistent=persistent).get_result()
            return result
        # Preserve the wrapped function's identity for introspection.
        dec.__name__ = fn.__name__
        dec.__doc__ = fn.__doc__
        return dec
    return decorator
f4974747ea99b258966fdaf8250ba7738a96d370 | 8,929 | py | Python | falkon/mmv_ops/keops.py | mohamad-amin/falkon | 581c761b4a4cb7bf6a299613700db8414c419a52 | [
"MIT"
] | 130 | 2020-06-18T08:30:30.000Z | 2022-03-21T15:43:17.000Z | falkon/mmv_ops/keops.py | mohamad-amin/falkon | 581c761b4a4cb7bf6a299613700db8414c419a52 | [
"MIT"
] | 32 | 2020-06-26T09:24:45.000Z | 2022-03-20T10:37:36.000Z | falkon/mmv_ops/keops.py | mohamad-amin/falkon | 581c761b4a4cb7bf6a299613700db8414c419a52 | [
"MIT"
] | 17 | 2020-07-13T17:28:02.000Z | 2022-02-15T19:55:40.000Z | import warnings
from dataclasses import dataclass
from typing import List, Optional
import torch
from falkon.utils.stream_utils import sync_current_stream
from falkon.mmv_ops.utils import _get_gpu_info, create_output_mat, _start_wait_processes
from falkon.options import FalkonOptions, BaseOptions
from falkon.utils import decide_cuda
from falkon.utils.helpers import sizeof_dtype, calc_gpu_block_sizes
from pykeops.torch import Genred
@dataclass(frozen=True)
class ArgsFmmv:
X1: torch.Tensor
X2: torch.Tensor
v: torch.Tensor
other_vars: List[torch.Tensor]
out: torch.Tensor
gpu_ram: float
backend: str
function: callable
def _keops_dtype(dtype: torch.dtype) -> str:
"""Returns a string which represents the given data type.
The string representation is necessary for KeOps which doesn't
like type objects.
"""
if dtype == torch.float64:
return 'float64'
elif dtype == torch.float32:
return 'float32'
else:
raise NotImplementedError("Data type %s not recognized." % (dtype))
def _decide_backend(opt: BaseOptions, num_dim: int) -> str:
"""Switch between CPU and GPU backend for KeOps
"""
if not decide_cuda(opt):
return 'CPU'
else:
return 'GPU_1D'
def _estimate_split(N, M, D, T, R, ds):
"""Estimate the splits along dimensions N and M for a MVM to fit in memory
The operations consist of computing the product between a kernel
matrix (from a N*D and a M*D matrix) and a 'vector' of shape M*T
This typically requires storage of the input and output matrices,
which occupies (M + N)*(D + T) memory locations plus some intermediate
buffers to perform computations.
TODO: It is not clear how much intermediate memory KeOps requires;
the only thing that is certain is that it is quadratic in D.
For now we sidestep this issue by using a smaller R than what is
actually available in GPU memory.
This function calculates the split along N and M into blocks of size n*m
so that we can compute the kernel-vector product between such blocks
and still fit in GPU memory.
Parameters
-----------
- N : int
The first dimension of the kernel matrix
- M : int
The second dimension of the kernel matrix
- D : int
The data dimensionality
- T : int
The number of output columns
- R : float
The amount of memory available (in bytes)
- ds : int
The size in bytes of each element in the data matrices
(e.g. 4 if the data is in single precision).
Returns
--------
- n : int
The block size to be used along the first dimension
- m : int
The block size along the second dimension of the kernel
matrix
Raises
-------
RuntimeError
If the available memory `R` is insufficient to store even the smallest
possible input matrices. This may happen if `D` is very large since we
do not perform any splitting along `D`.
Notes
------
We find 'good' values of M, N such that
N*(D+T) + M*(D+T) <= R/ds
"""
R = R / ds
# We have a linear equation in two variables (N, M)
slope = -1
intercept = R / (D + T)
slack_points = 10
# We try to pick a point at the edges such that only one kind of split
# is necessary
if N < intercept - 1:
M = min(M, intercept + slope * N)
elif M < intercept - 1:
N = min(N, intercept + slope * M)
else:
# All points on the slope such that N, M > 0 are possible
N = intercept - slack_points - 1
M = intercept + slope * N
if N <= 0 or M <= 0:
raise RuntimeError(
"Insufficient available GPU "
"memory (available %.2fGB)" % (R * ds / 2 ** 30))
return int(N), int(M)
def _single_gpu_method(proc_idx, queue, device_id):
a: ArgsFmmv = queue.get()
backend = a.backend
X1 = a.X1
X2 = a.X2
v = a.v
oout = a.out
other_vars = a.other_vars
fn = a.function
R = a.gpu_ram
N, D = X1.shape
M = X2.shape[0]
T = v.shape[1]
device = torch.device(f"cuda:{device_id}")
# Second round of subdivision (only if necessary due to RAM constraints)
n, m = _estimate_split(N, M, D, T, R, sizeof_dtype(X1.dtype))
other_vars_dev = [ov.to(device, copy=False) for ov in other_vars]
out_ic = oout.device.index == device_id
# Process the two rounds of splitting with a nested loop.
with torch.cuda.device(device_id):
for mi in range(0, M, m):
ml = min(m, M - mi)
if ml != M and mi > 0: # Then we must create a temporary output array
out = torch.empty_like(oout)
else:
out = oout
cX2 = X2[mi:mi + ml, :].to(device, copy=False)
cv = v[mi:mi + ml, :].to(device, copy=False)
for ni in range(0, N, n):
nl = min(n, N - ni)
cX1 = X1[ni:ni + nl, :].to(device, copy=False)
cout = out[ni: ni + nl, :].to(device, copy=False)
variables = [cX1, cX2, cv] + other_vars_dev
fn(*variables, out=cout, device_id=device_id, backend=backend)
if not out_ic:
out[ni: ni + nl, :].copy_(cout)
if ml != M and mi > 0:
oout.add_(out)
return oout
def run_keops_mmv(X1: torch.Tensor,
X2: torch.Tensor,
v: torch.Tensor,
other_vars: List[torch.Tensor],
out: Optional[torch.Tensor],
formula: str,
aliases: List[str],
axis: int,
reduction: str = 'Sum',
opt: Optional[FalkonOptions] = None) -> torch.Tensor:
if opt is None:
opt = FalkonOptions()
# Choose backend
N, D = X1.shape
T = v.shape[1]
backend = _decide_backend(opt, D)
dtype = _keops_dtype(X1.dtype)
data_devs = [X1.device, X2.device, v.device]
if any([ddev.type == 'cuda' for ddev in data_devs]) and (not backend.startswith("GPU")):
warnings.warn("KeOps backend was chosen to be CPU, but GPU input tensors found. "
"Defaulting to 'GPU_1D' backend. To force usage of the CPU backend, "
"please pass CPU tensors; to avoid this warning if the GPU backend is "
"desired, check your options (i.e. set 'use_cpu=False').")
backend = "GPU_1D"
differentiable = any(
[X1.requires_grad, X2.requires_grad, v.requires_grad] +
[o.requires_grad for o in other_vars]
)
if differentiable:
from falkon.kernels.tiling_red import TilingGenred
fn = TilingGenred(formula, aliases, reduction_op='Sum', axis=1, dtype=dtype,
dtype_acc="auto", sum_scheme="auto", opt=opt)
return fn(X1, X2, v, *other_vars, out=out, backend=backend)
# Define formula wrapper
fn = Genred(formula, aliases,
reduction_op=reduction, axis=axis,
dtype=dtype, dtype_acc=opt.keops_acc_dtype,
sum_scheme=opt.keops_sum_scheme)
comp_dev_type = backend[:3].lower().replace('gpu', 'cuda') # 'cpu' or 'cuda'
out = create_output_mat(out, data_devs, is_sparse=False, shape=(N, T), dtype=X1.dtype,
comp_dev_type=comp_dev_type, other_mat=X1, output_stride="C")
if comp_dev_type == 'cpu' and all([ddev.type == 'cpu' for ddev in data_devs]): # incore CPU
variables = [X1, X2, v] + other_vars
out = fn(*variables, out=out, backend=backend)
elif comp_dev_type == 'cuda' and all([ddev.type == 'cuda' for ddev in data_devs]): # incore CUDA
variables = [X1, X2, v] + other_vars
device = data_devs[0]
with torch.cuda.device(device):
sync_current_stream(device)
out = fn(*variables, out=out, backend=backend)
else: # Out of core
# slack is high due to imprecise memory usage estimates for keops
gpu_info = _get_gpu_info(opt, slack=opt.keops_memory_slack)
block_sizes = calc_gpu_block_sizes(gpu_info, N)
# Create queues
args = [] # Arguments passed to each subprocess
for i, g in enumerate(gpu_info):
# First round of subdivision
bwidth = block_sizes[i + 1] - block_sizes[i]
if bwidth <= 0:
continue
args.append((ArgsFmmv(
X1=X1.narrow(0, block_sizes[i], bwidth),
X2=X2,
v=v,
out=out.narrow(0, block_sizes[i], bwidth),
other_vars=other_vars,
function=fn,
backend=backend,
gpu_ram=g.usable_memory
), g.Id))
_start_wait_processes(_single_gpu_method, args)
return out
| 34.342308 | 101 | 0.598051 | 193 | 0.021615 | 0 | 0 | 217 | 0.024303 | 0 | 0 | 3,122 | 0.349647 |
f49c10df92a95cac9e385825872f6c84fbdad8d8 | 13,791 | py | Python | ancilla/ancilla/foundation/node/api/node.py | frenzylabs/ancilla | 3469272f17e1a5092d033cdc099f86f3052e744f | [
"Apache-2.0"
] | 7 | 2020-03-31T19:52:59.000Z | 2021-05-21T08:38:47.000Z | ancilla/ancilla/foundation/node/api/node.py | frenzylabs/ancilla | 3469272f17e1a5092d033cdc099f86f3052e744f | [
"Apache-2.0"
] | 15 | 2020-04-01T13:52:07.000Z | 2020-04-01T13:52:11.000Z | ancilla/ancilla/foundation/node/api/node.py | frenzylabs/ancilla | 3469272f17e1a5092d033cdc099f86f3052e744f | [
"Apache-2.0"
] | null | null | null | '''
node.py
ancilla
Created by Kevin Musselman (kevin@frenzylabs.com) on 01/14/20
Copyright 2019 FrenzyLabs, LLC.
'''
import time
from .api import Api
from ..events import Event
from ...data.models import Service, Printer, Camera, ServiceAttachment, CameraRecording, Node
from ..response import AncillaError, AncillaResponse
import re
import math
import os
import json
MB = 1 << 20
BUFF_SIZE = 10 * MB
class NodeApi(Api):
    def setup(self):
        """Register every HTTP route exposed by the node API (all under /api)."""
        super().setup("/api")
        # self.service.route('/services', 'GET', self.services)
        # Node identity / discovery
        self.service.route('/api/node', 'GET', self.get_node)
        self.service.route('/api/node', 'PATCH', self.update_node)
        self.service.route('/api/nodes', 'GET', self.discover_nodes)
        self.service.route('/api/services', 'GET', self.services)
        # Camera recordings
        self.service.route('/api/recordings', 'GET', self.recordings)
        self.service.route('/api/recordings/<recording_id>', 'GET', self.get_recording)
        self.service.route('/api/recordings/<recording_id>', 'DELETE', self.delete_recording)
        self.service.route('/api/recordings/<recording_id>/video', 'GET', self.get_video)
        # self.service.route('/services/<service_id>/restart', 'GET', self.restart_service)
        self.service.route('/api/attachments/<attachment_id>', 'PATCH', self.update_attachment)
        # Service lifecycle
        self.service.route('/api/services/<service_id>', 'PATCH', self.update_service_model)
        self.service.route('/api/services/<service_id>', 'DELETE', self.delete_service)
        self.service.route('/api/services/<service_id>/stop', 'GET', self.stop_service)
        # Camera / printer creation and listing
        self.service.route('/api/services/camera', 'GET', self.list_cameras)
        self.service.route('/api/services/camera', 'POST', self.create_camera)
        self.service.route('/api/services/printer', 'POST', self.create_printer)
        self.service.route('/api/services/printer', 'GET', self.list_printers)
        # self.service.route('/services/<service>/<service_id><other:re:.*>', ['GET', 'PUT', 'POST', 'DELETE', 'PATCH'], self.catchUnmountedServices)
        # self.service.route('/services/<name><other:re:.*>', 'GET', self.catchIt)
def get_node(self, request, *args):
model = self.service.model
return {"node": model.json}
    def update_node(self, request, *args):
        """Update mutable fields of the node model from request params.

        ``id``, name bookkeeping and timestamps are frozen; ``name`` is applied
        separately through ``node_name``. Raises AncillaError(400) with the
        model's errors when validation fails.
        """
        model = self.service.model
        frozen_keys = ['id', 'name', 'original_name', 'created_at', 'updated_at']
        newname = request.params.get("name")
        if newname:
            model.node_name = newname
        modelkeys = model.__data__.keys() - frozen_keys
        for k in modelkeys:
            kval = request.params.get(k)
            # NOTE(review): the truthiness check means falsy values ("", 0,
            # False) can never be assigned through this endpoint — confirm.
            if kval:
                model.__setattr__(k, kval)
        if not model.is_valid:
            raise AncillaError(400, {"errors": model.errors})
        model.save()
        return {"node": model}
    def discover_nodes(self, request, *args):
        """Merge discovery results with zeroconf services into one node list.

        Nodes reported by ``self.service.discovery`` are keyed by IP; entries
        that also appear in the beacon listener's zeroconf registry get a
        ``network_name`` and both records merged. Remaining discovery-only
        entries are appended unchanged.
        """
        res = self.service.discovery.nodes()
        # print(f'Node res = {res}')
        nodes = []
        ips = {}
        for r in res:
            if "ip" in r:
                ips[r["ip"]] = r
        beacon = self.service.discovery.beacon
        if beacon and beacon.listener:
            networkservices = beacon.listener.myservices
        else:
            networkservices = {}
        # zeroconf entries look like:
        # {'addresses': ['192.168.1.129'], 'port': 5000, 'server': 'ancilla.local', 'type': '_ancilla._tcp.local.'}
        try:
            for key, ns in networkservices.items():
                ip = ns["addresses"][0]
                if ip:
                    nd = {"network_name": key}
                    if ip in ips:
                        # Discovery record wins on conflicting keys.
                        nd.update({**ns, **ips[ip]})
                        nodes.append(nd)
                        del ips[ip]
                    else:
                        nd.update(ns)
                        nodes.append(nd)
        except Exception as e:
            print(f"Node Exception = {str(e)}", flush=True)
        # Whatever remains in ips was not advertised over zeroconf; include as-is.
        for n in ips.values():
            nodes.append(n)
        return {"nodes": nodes}
    async def delete_service(self, request, layerkeep, service_id, *args):
        """Delete a service, its model row and (for synced printers) the
        remote LayerKeep printer record, all inside one DB transaction.

        Raises AncillaError(400) via the model layer or re-raises the failed
        LayerKeep response, rolling back the transaction in either case.
        """
        smodel = Service.get_by_id(service_id)
        model = smodel.model
        with Service._meta.database.atomic() as transaction:
            try:
                if model:
                    # if request.params.get("layerkeep_sync") and request.params.get("layerkeep_sync") != "false":
                    # Remove the remote record first so a remote failure aborts the local delete.
                    if layerkeep and smodel.kind == "printer" and model.layerkeep_id:
                        response = await layerkeep.delete_printer({"data": {"layerkeep_id": model.layerkeep_id}})
                        if not response.success:
                            raise response
                    model.delete_instance(recursive=True)
                smodel.delete_instance(recursive=True)
                self.service.delete_service(smodel)
            except Exception as e:
                print(f"DELETE SERvice excption= {str(e)}", flush=True)
                transaction.rollback()
                raise e
                # return {"error": "Could Not Delete Service"}
        return {"success": True}
def stop_service(self, request, service_id, *args):
s = Service.get_by_id(service_id)
self.service.stop_service(s)
return {"success": True}
def services(self, request, *args):
allservices = []
q = Service.select()
if request.params.get("kind"):
q = q.where(Service.kind == request.params.get("kind"))
for service in q:
js = service.to_json(extra_attrs=["identity"])
model = service.model
if model:
js.update(model=model.to_json(recurse=False))
allservices.append(js)
return {'services': allservices}
# return {'services': [service.json for service in Service.select()]}
def actions(self, *args):
return {"actions": self.service.list_actions()}
def service_model(self, request, model_id, *args):
s = Service.get_by_id(model_id)
return {"service_model": s.json}
    def update_service_model(self, request, layerkeep, service_id, *args):
        """Update a service's name, configuration, settings and event
        listeners inside one transaction; a rename is propagated to the
        service's model, which is validated and saved."""
        s = Service.get_by_id(service_id)
        with Service._meta.database.atomic() as transaction:
            try:
                model = s.model
                newname = request.params.get("name")
                if newname:
                    # Keep the service row and the model's display name in sync.
                    s.service_name = newname
                    if model:
                        model.name = newname
                        if not model.is_valid:
                            raise AncillaError(400, {"errors": model.errors})
                        model.save()
                # `!= None` (rather than truthiness) lets clients clear these
                # with empty values. NOTE(review): prefer `is not None`.
                if request.params.get('configuration') != None:
                    s.configuration = request.params.get('configuration')
                if request.params.get('settings') != None:
                    s.settings = request.params.get('settings')
                s.event_listeners = request.params.get('event_listeners') or s.event_listeners
                s.save()
                smodel = s.json
                if model:
                    smodel.update(model=model.to_json(recurse=False))
                return {"service_model": smodel}
            except Exception as e:
                # Because this block of code is wrapped with "atomic", a
                # new transaction will begin automatically after the call
                # to rollback().
                transaction.rollback()
                raise e
# def register_event_listeners(self, *args):
def delete_service_model(self, request, model_id, *args):
s = Service.get_by_id(model_id)
self.service.remove_service()
list_1 = [item for item in list_1 if item[2] >= 5 or item[3] >= 0.3]
def recordings(self, request, *args, **kwargs):
page = int(request.params.get("page") or 1)
per_page = int(request.params.get("per_page") or 5)
q = CameraRecording.select().order_by(CameraRecording.created_at.desc())
if request.params.get("q[print_id]"):
q = q.where(CameraRecording.print_id == request.params.get("q[print_id]"))
if request.params.get("q[camera_id]"):
q = q.where(CameraRecording.camera_id == request.params.get("q[camera_id]"))
if request.params.get("q[status]"):
q = q.where(CameraRecording.status == request.params.get("q[status]"))
cnt = q.count()
num_pages = math.ceil(cnt / per_page)
return {"data": [p.to_json(recurse=True) for p in q.paginate(page, per_page)], "meta": {"current_page": page, "last_page": num_pages, "total": cnt}}
def get_recording(self, request, recording_id, *args):
rcd = CameraRecording.get_by_id(recording_id)
return {"data": rcd.json}
def delete_recording(self, request, recording_id, *args):
rcd = CameraRecording.get_by_id(recording_id)
if self.service.delete_recording(rcd):
return {"success": "Deleted"}
raise AncillaError(400, {"errors": "Coud Not Delete Recording"})
    def get_video(self, request, recording_id, *args):
        """Stream or download the MP4 for a recording.

        With ``?download=`` set, returns the open file for a full download
        (octet-stream); otherwise answers 206 Partial Content so a frontend
        <video> element can seek via Range requests (served by stream_video).
        NOTE(review): the file object is handed to the framework unclosed —
        confirm the server closes it after the response is sent.
        """
        rcd = CameraRecording.get_by_id(recording_id)
        # path = rcd.video_path + "/output.mp4"
        fp = open(rcd.video_path, "rb")
        request.response.set_header('Content-Disposition', 'filename=%s' % "output.mp4")
        if request.params.get("download"):
            request.response.set_header('Content-Type', 'application/octet-stream')
            return fp
        request.response.status = 206
        request.response.set_header('Content-Type', 'video/mp4')
        request.response.set_header('Accept-Ranges', 'bytes')
        return self.stream_video(request, fp)
def list_printers(self, *args, **kwargs):
return {'printers': [printer.json for printer in Printer.select()]}
def list_cameras(self, request, *args, **kwargs):
return {'cameras': [camera.json for camera in Camera.select()]}
    def create_camera(self, request, *args, **kwargs):
        """Create a camera Service plus its Camera model in one transaction.

        New cameras start with default recording (timelapse / fps) and video
        (size / codec) settings. Validation failures raise AncillaError(400)
        and roll the transaction back.
        """
        with Service._meta.database.atomic() as transaction:
            try:
                service = Service(name=request.params.get("name"), kind="camera", class_name="Camera")
                service.service_name = request.params.get("name")
                if not service.is_valid:
                    raise AncillaError(400, {"errors": service.errors})
                service.save()
                camera = Camera(**request.params, service=service)
                default_settings = {
                    "record": {
                        "timelapse": 2,
                        "frames_per_second": 10,
                    },
                    "video": {
                        "size": [640, 480],
                        "format": "avc1"
                    }
                }
                camera.settings = default_settings
                if not camera.is_valid:
                    raise AncillaError(400, {"errors": camera.errors})
                camera.save()
                camera_service = service.json
                camera_service.update(model=camera.json)
                return {"camera": camera_service}
            except Exception as e:
                # Because this block of code is wrapped with "atomic", a
                # new transaction will begin automatically after the call
                # to rollback().
                transaction.rollback()
                raise e
    async def create_printer(self, request, layerkeep, *args, **kwargs):
        """Create a printer Service (+ Printer model) in one transaction.

        When ``layerkeep_sync`` is requested, the printer is first registered
        on LayerKeep and the returned remote id is stored; a failed remote
        call aborts and rolls back the local creation.
        """
        with Service._meta.database.atomic() as transaction:
            try:
                service = Service(name=request.params.get("name"), kind="printer", class_name="Printer")
                service.service_name = request.params.get("name")
                if not service.is_valid:
                    raise AncillaError(400, {"errors": service.errors})
                service.save()
                printer = Printer(**request.params, service=service)
                if not printer.is_valid:
                    raise AncillaError(400, {"errors": printer.errors})
                if request.params.get("layerkeep_sync") == True:
                    if layerkeep:
                        response = await layerkeep.create_printer({"data": request.params})
                        if response.success:
                            printer.layerkeep_id = response.body.get("data").get("id")
                        else:
                            raise response
                printer.save()
                printerservice = service.json
                printerservice.update(model=printer.json)
                return {"printer": printerservice}
            except Exception as e:
                # Because this block of code is wrapped with "atomic", a
                # new transaction will begin automatically after the call
                # to rollback().
                transaction.rollback()
                raise e
async def update_attachment(self, request, attachment_id, *args):
sa = ServiceAttachment.get_by_id(attachment_id)
if request.params.get("settings"):
sa.settings = request.params.get("settings")
sa.save()
return {"data": sa.json}
def stream_video(self, request, fp):
start, end = self.get_range(request)
requestedrange = request.headers.get('Range')
# if requestedrange == None:
# print("NO REQUESTED RANGE",flush=True)
# else:
file_size = os.path.getsize(fp.name)
if end is None:
end = start + BUFF_SIZE - 1
end = min(end, file_size - 1)
end = min(end, start + BUFF_SIZE - 1)
length = end - start + 1
request.response.set_header(
'Content-Range', 'bytes {0}-{1}/{2}'.format(
start, end, file_size,
),
)
fp.seek(start)
bytes = fp.read(length)
return bytes
def get_range(self, request):
range = request.headers.get('Range')
m = None
if range:
m = re.match('bytes=(?P<start>\d+)-(?P<end>\d+)?', range)
if m:
start = m.group('start')
end = m.group('end')
start = int(start)
if end is not None:
end = int(end)
return start, end
else:
return 0, None
def catchUnmountedServices(self, request, service, service_id, *args, **kwargs):
print(f"INSIDECatch service {service} {service_id}", flush=True)
print(f"INSIDECatch {args}, {kwargs}", flush=True)
print(f"Request = {request.params}", flush=True)
try:
s = Service.get_by_id(service_id)
status, module = self.service.mount_service(s)
if status == "created":
return request.app._handle(request.environ)
else:
return {"status": "error", "error": "No Route"}
except Exception as e:
print(f"Could not mount service {str(e)}")
return {"error": str(e)}
return {"retry": True}
def catchIt(self, name, *args, **kwargs):
print("INSIDE CATCH IT")
return {"catch it": True}
| 34.4775 | 152 | 0.629106 | 13,377 | 0.96998 | 0 | 0 | 0 | 0 | 2,486 | 0.180262 | 3,293 | 0.238779 |
f49c627cef82dc9a970e9b68a4c299ff3e228e5d | 727 | py | Python | tensorflow_in_action/nlp/2_ptb_gen_idword.py | wdxtub/deep-learning-note | 47b83a039b80d4757e0436d5cbd2fa3037de3904 | [
"MIT"
] | 37 | 2019-03-27T20:17:05.000Z | 2022-02-02T23:20:31.000Z | tensorflow_in_action/nlp/2_ptb_gen_idword.py | wdxtub/deep-learning-note | 47b83a039b80d4757e0436d5cbd2fa3037de3904 | [
"MIT"
] | null | null | null | tensorflow_in_action/nlp/2_ptb_gen_idword.py | wdxtub/deep-learning-note | 47b83a039b80d4757e0436d5cbd2fa3037de3904 | [
"MIT"
] | 14 | 2019-03-31T10:28:47.000Z | 2022-03-28T07:25:40.000Z | import codecs
import sys
RAW_DATA = "../data/ptb/ptb.train.txt"
VOCAB = "data/ptb.vocab"
OUTPUT_DATA = "data/ptb.train"
# Read the vocabulary file and build a word -> id mapping
# (ids follow line order, matching the vocabulary generation step).
with codecs.open(VOCAB, "r", "utf-8") as f_vocab:
    vocab = [w.strip() for w in f_vocab.readlines()]
word_to_id = {k: v for (k, v) in zip(vocab, range(len(vocab)))}
# Low-frequency words dropped from the vocabulary map to the "<unk>" token.
def get_id(word):
    """Return the vocabulary id for *word*, falling back to the <unk> id."""
    return word_to_id.get(word, word_to_id["<unk>"])
fin = codecs.open(RAW_DATA, "r", "utf-8")
fout = codecs.open(OUTPUT_DATA, 'w', 'utf-8')
for line in fin:
    # Split the sentence into words and append the <eos> end-of-sentence token.
    words = line.strip().split() + ["<eos>"]
    # Replace each word with its vocabulary id and write the id sequence.
    out_line = ' '.join([str(get_id(w)) for w in words]) + '\n'
    fout.write(out_line)
fin.close()
fout.close()
f49e24bc85091e60f6700fa12214c8674aef6354 | 661 | py | Python | racoon/view/error/custom.py | onukura/Racoon | 96c96f2a37b8d33b35ca368c085d90e7a7caf105 | [
"MIT"
] | 3 | 2020-05-21T12:11:43.000Z | 2020-06-08T13:04:40.000Z | racoon/view/error/custom.py | onukura/Racoon | 96c96f2a37b8d33b35ca368c085d90e7a7caf105 | [
"MIT"
] | 6 | 2020-05-14T11:52:16.000Z | 2020-05-23T18:03:23.000Z | racoon/view/error/custom.py | onukura/Racoon | 96c96f2a37b8d33b35ca368c085d90e7a7caf105 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from flask import Blueprint, render_template
bp_error = Blueprint("bp_error", __name__, url_prefix="/error")
# Specific Error Handlers
@bp_error.route("/default")
def default():
    """Render the generic 500 error page."""
    context = {
        "error_code": 500,
        "header_name": "Error",
        "error_message": "We will work on fixing that right away.",
    }
    return render_template("error/error_base.html", **context)
@bp_error.route("/unauthorized")
def unauthorized():
    """Render the 'not allowed' error page."""
    context = {
        "error_code": 500,
        "header_name": "Unauthorized",
        "error_message": "Not allowed to access this contents.",
    }
    return render_template("error/error_base.html", **context)
| 24.481481 | 65 | 0.635401 | 0 | 0 | 0 | 0 | 482 | 0.729198 | 0 | 0 | 239 | 0.361573 |
f49e8ab07fb60d23483830ae470ad5c6dce2e53e | 7,611 | py | Python | modules/d_functions.py | william-stearns/E_ink_dashboard | 1625b213baef336833497c4593157485cfffdad4 | [
"MIT"
] | null | null | null | modules/d_functions.py | william-stearns/E_ink_dashboard | 1625b213baef336833497c4593157485cfffdad4 | [
"MIT"
] | null | null | null | modules/d_functions.py | william-stearns/E_ink_dashboard | 1625b213baef336833497c4593157485cfffdad4 | [
"MIT"
] | null | null | null |
import time
import datetime
from waveshare_epd import epd7in5_V2
from PIL import Image, ImageDraw, ImageFont
import calendar
import random
import os
picdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'pic')
fontdir = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'font')
epd = epd7in5_V2.EPD()
# Set the colors
black = 'rgb(0,0,0)'
white = 'rgb(255,255,255)'
def choose_mod(mod_choice, mod_turn):
    """Map a mode name to a display-mode index.

    Returns 0 for the info modes (weather/transit/tasklist), 1 for the
    second screen group (c-s/news/meetings), 2 for "off". "random" picks
    0 or 1 but never the current *mod_turn*. Unknown names fall back to 0.

    FIX: removed the dead ``if mod_turn != mod_rand`` check — after the
    reroll loop the new value always differs from the old one.
    """
    if mod_choice in ("weather", "transit", "tasklist"):
        return 0
    if mod_choice in ("c-s", "news", "meetings"):
        return 1
    if mod_choice == "off":
        return 2
    if mod_choice == "random":
        new_turn = random.randint(0, 1)
        # Reroll until the new mode differs from the current one.
        while new_turn == mod_turn:
            new_turn = random.randint(0, 1)
        return new_turn
    print("mode unknown, going to default mode")
    return 0
def time_in_range(start_hour, end_hour):
    """Return True when the current local time lies between start_hour:00
    and end_hour:00; ranges with start > end wrap past midnight."""
    start = datetime.time(start_hour, 0, 0)
    end = datetime.time(end_hour, 0, 0)
    # Truncate to whole seconds, matching the strftime-based original.
    now = datetime.datetime.now().time().replace(microsecond=0)
    if start <= end:
        return start <= now <= end
    return start <= now or now <= end
def tir_min(hour, x_min, y_min, y_sec):
    """Return True when the current time is between hour:x_min:00 and
    hour:y_min:y_sec (same wrap rule as time_in_range)."""
    start = datetime.time(hour, x_min, 0)
    end = datetime.time(hour, y_min, y_sec)
    now = datetime.datetime.now().time().replace(microsecond=0)
    if start <= end:
        return start <= now <= end
    return start <= now or now <= end
def sep_strings(it_str, chk_start):
    """Split *it_str* in two at the last space at or before index chk_start.

    Returns ``(first_part + " ", remainder + " ")``; when the string is no
    longer than chk_start the second piece is just a single space.
    """
    text = str(it_str)
    total_len = len(text)
    if total_len > chk_start:
        split_at = chk_start
        # Walk left until we land on a space so words are not cut in half.
        while text[split_at] != " ":
            split_at -= 1
        return text[0:split_at] + " ", text[split_at + 1:total_len] + " "
    return text[0:total_len] + " ", " "
def draw_cal_mod(cal_s_x_0, cal_s_y, draw, color_1, color_2):
    """Draw the current month's calendar grid starting at (cal_s_x_0, cal_s_y).

    draw    -- PIL ImageDraw to render into
    color_1 -- foreground color; color_2 -- text color used inside the
               highlight box drawn behind today's date.
    Layout: 55 px per column, 30 px per row, Sunday-first weeks.
    """
    cal_month = datetime.datetime.now().month
    cal_year = datetime.datetime.now().year
    cal_day = datetime.datetime.now().day
    cal_n_m = calendar.month_name[cal_month]
    cal_text = calendar.TextCalendar(calendar.SUNDAY)
    cal_list = cal_text.monthdayscalendar(cal_year, cal_month)
    cal_s_x = cal_s_x_0
    # Month title and weekday header above the grid.
    draw.text((cal_s_x + 60, cal_s_y-65), str(cal_n_m) + ' ' + str(cal_year),
              font=font_size(35), fill=color_1)
    draw.text((cal_s_x, cal_s_y-25), 'SU MO TU WED THU FRI SAT',
              font=font_size(22), fill=color_1)
    # NOTE(review): only week rows 0-4 are drawn; monthdayscalendar can
    # return 6 rows, so months spanning six weeks lose their last row.
    for cal_x in (0, 1, 2, 3, 4):
        for cal_y in (0, 1, 2, 3, 4, 5, 6):
            if cal_list[cal_x][cal_y] != 0:  # 0 marks days of adjacent months
                if cal_list[cal_x][cal_y] == cal_day:
                    # Highlight today with an inverted box.
                    draw.rectangle((cal_s_x-5, cal_s_y, cal_s_x+22, cal_s_y+28), fill=color_1)
                    draw.text((cal_s_x, cal_s_y), str(
                        cal_list[cal_x][cal_y]), font=font_size(22), fill=color_2, align='right')
                else:
                    draw.text((cal_s_x, cal_s_y), str(
                        cal_list[cal_x][cal_y]), font=font_size(22), fill=color_1, align='right')
            cal_s_x = cal_s_x + 55
        cal_s_x = cal_s_x_0
        cal_s_y = cal_s_y + 30
def font_size(size):
    """Return the dashboard's Bahnschrift TrueType font at *size* px."""
    return ImageFont.truetype(os.path.join(fontdir, 'BAHNSCHRIFT.TTF'), size)
def get_time(local_time):
    """Format an epoch timestamp as e.g. 'Monday, Sep 14' in local time."""
    return time.strftime('%A, %b %d', time.localtime(int(local_time)))
def get_year():
    """Return today's date as an unpadded 'YYYY-M-D' string."""
    now = datetime.datetime.now()
    return "{}-{}-{}".format(now.year, now.month, now.day)
def dayname():
    """Return the current weekday name, e.g. 'Monday'."""
    return datetime.datetime.now().strftime("%A")
def cur_hr():
    """Return the current hour as a zero-padded two-digit string."""
    return datetime.datetime.now().strftime("%H")
def isTimeFormat(input):
    """Return True if *input* matches 'HH:MMAM/PM YYYY-MM-DD'."""
    try:
        time.strptime(input, '%I:%M%p %Y-%m-%d')
    except ValueError:
        return False
    return True
# helper: split an ISO datetime string into separate date and time parts
def sep_datetime(utc_datetime):
    """Split an ISO-ish datetime string into ('D/M/YYYY', 'HH:MMAM/PM').

    A bare date (10 characters or fewer) yields an empty time component.
    """
    raw = str(utc_datetime)
    if len(raw) > 10:
        parsed = datetime.datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S%z')
        time_part = parsed.strftime('%I') + ':' + parsed.strftime('%M') + parsed.strftime('%p')
    else:
        parsed = datetime.datetime.strptime(raw, '%Y-%m-%d')
        time_part = ''
    date_part = '{}/{}/{}'.format(parsed.day, parsed.month, parsed.year)
    return date_part, time_part
def write_to_screen(image, sleep_seconds):
    """Push *image* (a filename inside pic/) to the e-paper display, then
    sleep *sleep_seconds* to throttle panel refreshes."""
    print('Writing to screen.')
    # Write to screen
    # Compose onto a white 1-bit canvas matching the panel dimensions.
    h_image = Image.new('1', (epd.width, epd.height), 255)
    # Open the template
    screen_output_file = Image.open(os.path.join(picdir, image))
    # Initialize the drawing context with template as background
    h_image.paste(screen_output_file, (0, 0))
    epd.display(epd.getbuffer(h_image))
    # Sleep
    print('Sleeping for ' + str(int(sleep_seconds/60)) + ' min.')
    time.sleep(sleep_seconds)
# define function for displaying error
def display_error(error_source, color):
    """Render an error screen naming *error_source*, save it as pic/error.png
    and display it for 8 minutes before the caller retries."""
    # Display an error
    print('Error in the', error_source, 'request.')
    # Draw the error message onto a white 1-bit canvas.
    error_image = Image.new('1', (epd.width, epd.height), 255)
    # Initialize the drawing
    draw = ImageDraw.Draw(error_image)
    draw.text((100, 150), error_source + ' ERROR', font=font_size(30), fill=color)
    draw.text((100, 300), 'Retrying in 8 min', font=font_size(22), fill=color)
    current_time = datetime.datetime.now().strftime('%H:%M')
    draw.text((300, 365), 'Last Refresh: ' + str(current_time), font=font_size(30), fill=color)
    # Save the error image
    error_image_file = 'error.png'
    error_image.save(os.path.join(picdir, error_image_file))
    # Close error image
    error_image.close()
    # Write error to screen and block for 8 minutes before the retry.
    write_to_screen(error_image_file, 8*60)
| 34.912844 | 99 | 0.6036 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,275 | 0.167521 |
f49f790d9571cf03ed43cb10b00ff30ef123ff75 | 961 | py | Python | v2.5.7/toontown/racing/DistributedKartPadAI.py | TTOFFLINE-LEAK/ttoffline | bb0e91704a755d34983e94288d50288e46b68380 | [
"MIT"
] | 4 | 2019-07-01T15:46:43.000Z | 2021-07-23T16:26:48.000Z | v2.5.7/toontown/racing/DistributedKartPadAI.py | TTOFFLINE-LEAK/ttoffline | bb0e91704a755d34983e94288d50288e46b68380 | [
"MIT"
] | 1 | 2019-06-29T03:40:05.000Z | 2021-06-13T01:15:16.000Z | v2.5.7/toontown/racing/DistributedKartPadAI.py | TTOFFLINE-LEAK/ttoffline | bb0e91704a755d34983e94288d50288e46b68380 | [
"MIT"
] | 4 | 2019-07-28T21:18:46.000Z | 2021-02-25T06:37:25.000Z | from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
class DistributedKartPadAI(DistributedObjectAI):
    """AI-side distributed object for a kart pad: tracks its starting blocks
    and replicates the pad's track area to clients."""

    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedKartPadAI')

    def __init__(self, air):
        DistributedObjectAI.__init__(self, air)
        self.air = air
        self.startingBlocks = []
        self.area = None

    def setArea(self, area):
        """Set the local area value (distributed setter)."""
        self.area = area

    def d_setArea(self, area):
        """Broadcast the area to interested clients."""
        self.sendUpdate('setArea', [area])

    def b_setArea(self, area):
        """Set the area locally and broadcast it.

        FIX: previously called ``self.d_setArea(self, area)``, passing self
        twice and raising TypeError on every call.
        """
        self.setArea(area)
        self.d_setArea(area)

    def getArea(self):
        """Return the current area."""
        return self.area

    def addStartingBlock(self, block):
        """Register a starting block with this pad."""
        self.startingBlocks.append(block)

    def updateMovieState(self):
        """Placeholder: no AI-side movie state handling."""
        pass

    def removeStartingBlock(self, block):
        """Unregister a starting block if it is currently attached."""
        if self.startingBlocks.count(block):
            self.startingBlocks.remove(block)
f4a045ca031ed0291bed7dc3d42ae24523969c0a | 22,644 | py | Python | main.py | leo0123456/Smart-camera | 0c73b3b8b5cc0147e036c71f2f078cbd576cd399 | [
"MIT"
] | 9 | 2020-07-10T01:54:31.000Z | 2021-11-22T16:37:17.000Z | main.py | leo0123456/Smart-camera | 0c73b3b8b5cc0147e036c71f2f078cbd576cd399 | [
"MIT"
] | 2 | 2020-11-26T12:27:04.000Z | 2021-11-22T16:38:42.000Z | main.py | leo0123456/Smart-camera | 0c73b3b8b5cc0147e036c71f2f078cbd576cd399 | [
"MIT"
] | 2 | 2021-01-02T10:18:30.000Z | 2021-05-25T10:53:22.000Z | from PyQt5 import Qt
from PyQt5 import QtCore,QtWidgets,QtGui
import sys
import PyQt5
from PyQt5.QtWidgets import QApplication, QWidget, QLabel, QFileDialog, QGraphicsRectItem, QGraphicsScene
from PyQt5.QtGui import QPixmap, QImage
from PyQt5.QtCore import QSize
import cv2
import numpy as np
from matplotlib import pyplot as plt
from face_control import DynamicStreamMaskService
import window
import window2
import swapGUI
import traceback
import head
from jiemian import rongh
class MainWindow():
    def __init__(self):
        """Create the Qt application and main window, wire all signals, show
        the window and run the event loop.

        NOTE(review): ``sys.exit(app.exec_())`` means this constructor blocks
        until the application quits — ``MainWindow()`` effectively is main().
        """
        app = QtWidgets.QApplication(sys.argv)
        MainWindow = QtWidgets.QMainWindow()  # local variable shadows the class name
        self.raw_image = None  # image as loaded from disk; None until open_file()
        self.ui = window.Ui_MainWindow()
        self.ui.setupUi(MainWindow)
        self.action_connect()
        MainWindow.show()
        sys.exit(app.exec_())
    # Signal/slot wiring
    def action_connect(self):
        """Connect menu actions and every adjustment slider to its handler."""
        self.ui.action.triggered.connect(self.open_file)
        self.ui.action_2.triggered.connect(self.save_file)
        self.ui.action_5.triggered.connect(self.recover_img)
        self.ui.action_99.triggered.connect(self.texiao)
        self.ui.action_98.triggered.connect(self.huanlian)
        self.ui.action_96.triggered.connect(self.zhengjian)
        self.ui.action_95.triggered.connect(self.hezhao)
        # Saturation
        self.ui.horizontalSlider.valueChanged.connect(self.slider_change)
        self.ui.horizontalSlider.sliderReleased.connect(self.show_histogram)
        # Brightness
        self.ui.horizontalSlider_4.valueChanged.connect(self.slider_change)
        self.ui.horizontalSlider_4.sliderReleased.connect(self.show_histogram)
        # Whitening (face detection)
        self.ui.horizontalSlider_8.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_8.sliderReleased.connect(self.show_histogram)
        # Whitening (skin detection)
        self.ui.horizontalSlider_13.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_13.sliderReleased.connect(self.show_histogram)
        # Skin-smoothing precision
        self.ui.horizontalSlider_14.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_14.sliderReleased.connect(self.show_histogram)
        # Skin-smoothing strength
        self.ui.horizontalSlider_11.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_11.sliderReleased.connect(self.show_histogram)
        # Gamma transform
        self.ui.horizontalSlider_5.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_5.sliderReleased.connect(self.show_histogram)
        # Face recognition and skin detection (run when switching tabs)
        self.ui.tabWidget.tabBarClicked.connect(self.calculate)
        # Woodcut filter
        self.ui.horizontalSlider_9.sliderReleased.connect(self.woodcut)
        self.ui.horizontalSlider_9.sliderReleased.connect(self.show_histogram)
        # Gray pencil sketch
        self.ui.horizontalSlider_7.sliderReleased.connect(self.pencil_gray)
        self.ui.horizontalSlider_7.sliderReleased.connect(self.show_histogram)
        # Nostalgia filter
        self.ui.horizontalSlider_10.sliderReleased.connect(self.reminiscene)
        self.ui.horizontalSlider_10.sliderReleased.connect(self.show_histogram)
        # Pencil-sketch filter
        self.ui.horizontalSlider_12.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_12.sliderReleased.connect(self.show_histogram)
        # Stylization
        self.ui.horizontalSlider_2.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_2.sliderReleased.connect(self.show_histogram)
        # Detail enhancement
        self.ui.horizontalSlider_6.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_6.sliderReleased.connect(self.show_histogram)
        # Edge preserving
        self.ui.horizontalSlider_3.sliderReleased.connect(self.slider_change)
        self.ui.horizontalSlider_3.sliderReleased.connect(self.show_histogram)
        # Open the camera
        self.ui.action_17.triggered.connect(self.new_camera)
        # Mark face positions
        self.ui.action_18.triggered.connect(self.mark_face)
    # Display the current image
    def show_image(self):
        """Scale self.current_img to the graphics view and display it via a
        QGraphicsScene."""
        img_cv = cv2.cvtColor(self.current_img, cv2.COLOR_RGB2BGR)
        # NOTE(review): ndarray.shape is (rows, cols, channels), so
        # `img_width` actually holds the height and vice versa; the
        # comparison below mixes h/w with w/h — re-verify the fit logic.
        img_width, img_height, a = img_cv.shape
        ratio_img = img_width/img_height
        ratio_scene = self.ui.graphicsView.width()/self.ui.graphicsView.height()
        if ratio_img > ratio_scene:
            width = int(self.ui.graphicsView.width())
            height = int(self.ui.graphicsView.width() / ratio_img)
        else:
            width = int(self.ui.graphicsView.height() * ratio_img)
            height = int(self.ui.graphicsView.height())
        # cv2.resize takes (width, height); the -5 leaves a small margin.
        img_resize = cv2.resize(img_cv, (height-5, width-5), interpolation=cv2.INTER_AREA)
        h, w, c = img_resize.shape
        bytesPerLine = w * 3  # 3 bytes per pixel for RGB888
        qimg = QImage(img_resize.data, w, h, bytesPerLine, QImage.Format_RGB888)
        self.scene = QGraphicsScene()
        pix = QPixmap(qimg)
        self.scene.addPixmap(pix)
        self.ui.graphicsView.setScene(self.scene)
    # Display a grayscale image
    def show_grayimage(self):
        """Scale self.gray_image (single channel) to the graphics view and
        display it via a QGraphicsScene."""
        img_cv = self.gray_image
        # NOTE(review): shape is (rows, cols); the width/height names are
        # swapped, mirroring show_image — re-verify the fit logic.
        img_width, img_height = img_cv.shape
        ratio_img = img_width/img_height
        ratio_scene = self.ui.graphicsView.width()/self.ui.graphicsView.height()
        if ratio_img > ratio_scene:
            width = int(self.ui.graphicsView.width())
            height = int(self.ui.graphicsView.width() / ratio_img)
        else:
            width = int(self.ui.graphicsView.height() * ratio_img)
            height = int(self.ui.graphicsView.height())
        img_resize = cv2.resize(img_cv, (height-5, width-5), interpolation=cv2.INTER_AREA)
        h, w = img_resize.shape
        # One byte per pixel for 8-bit grayscale, so stride == width.
        qimg = QImage(img_resize.data, w, h, w, QImage.Format_Grayscale8)
        self.scene = QGraphicsScene()
        pix = QPixmap(qimg)
        self.scene.addPixmap(pix)
        self.ui.graphicsView.setScene(self.scene)
    # Display the histogram
    def show_histogram(self):
        """Render a 256-bin histogram of self.current_img with matplotlib,
        save it as Hist.png and show it on the three histogram labels.
        No-op while no image is loaded."""
        if self.raw_image is None:
            return 0
        img = self.current_img
        # Size the figure to the histogram tab (matplotlib sizes are inches @ 100 dpi).
        plt.figure(figsize=((self.ui.tab_3.width()-10)/100, (self.ui.tab_3.width()-60)/100), frameon=False)
        plt.hist(img.ravel(), bins=256, range=[0, 256])
        plt.axes().get_yaxis().set_visible(False)
        # plt.axes().get_xaxis().set_visible(False)
        ax = plt.axes()
        # Hide the outer frame of the axes.
        for spine in ax.spines.values():
            spine.set_visible(False)
        # NOTE(review): Hist.png is written to the current working directory.
        plt.savefig('Hist.png', bbox_inches="tight", transparent=True, dpi=100)
        pix = QPixmap("Hist.png")
        self.ui.label.setPixmap(pix)
        self.ui.label_2.setPixmap(pix)
        self.ui.label_3.setPixmap(pix)
# 保存图片
def save_file(self):
fname = QFileDialog.getSaveFileName(None, '打开文件', './', ("Images (*.png *.xpm *.jpg)"))
if fname[0]:
cv2.imwrite(fname[0], self.current_img)
    # Open an image file
    def open_file(self):
        """Ask for an image path and load it as the new working image.

        Uses np.fromfile + cv2.imdecode so paths with non-ASCII characters
        work. OpenCV decodes into BGR channel order (the original comment
        claiming RGB was wrong); flag -1 (IMREAD_UNCHANGED) may also yield
        grayscale or BGRA depending on the file. Resets the skin mask and
        the editing slider state.
        """
        fname = QFileDialog.getOpenFileName(None, '打开文件', './', ("Images (*.png *.xpm *.jpg)"))
        if fname[0]:
            img_cv = cv2.imdecode(np.fromfile(fname[0], dtype=np.uint8), -1)
            self.raw_image = img_cv
            self.last_image = img_cv
            self.current_img = img_cv
            self.show_image()
            self.show_histogram()
            self.imgskin = np.zeros(self.raw_image.shape)
            self.intial_value()  # NOTE(review): presumably resets sliders; defined elsewhere in the class
    # Restore the original image
    def recover_img(self):
        """Throw away all edits: restore the originally loaded image and
        refresh the view, histogram and slider state."""
        self.current_img = self.raw_image
        self.show_image()
        self.show_histogram()
        self.intial_value()
    # Saturation
    def change_saturation(self):
        """Scale the saturation channel by the slider value.

        Works in HLS space despite the ``img_hsv`` name: channel 2 is
        saturation. Positive values boost via a log curve; negative values
        compress. NOTE(review): unlike change_darker, self.last_image is
        not updated here — confirm whether that is intended.
        """
        if self.raw_image is None:
            return 0
        value = self.ui.horizontalSlider.value()
        img_hsv = cv2.cvtColor(self.current_img, cv2.COLOR_BGR2HLS)
        if value > 2:
            img_hsv[:, :, 2] = np.log(img_hsv[:, :, 2] /255* (value - 1)+1) / np.log(value + 1) * 255
        if value < 0:
            img_hsv[:, :, 2] = np.uint8(img_hsv[:, :, 2] / np.log(- value + np.e))
        self.current_img = cv2.cvtColor(img_hsv, cv2.COLOR_HLS2BGR)
    # Brightness (lightness) adjustment
    def change_darker(self):
        """Adjust lightness of the current image from slider 4.

        Works in HLS space (cv2 channel order: 0=H, 1=L, 2=S), so channel 1
        below is the lightness plane. Positive values brighten along a log
        curve; negative values darken.
        NOTE(review): slider values 1-3 match neither branch (cf.
        change_saturation, which uses ``> 2``) — confirm intended.
        """
        if self.raw_image is None:
            return 0
        value = self.ui.horizontalSlider_4.value()
        img_hsv = cv2.cvtColor(self.current_img, cv2.COLOR_BGR2HLS)
        if value > 3:
            img_hsv[:, :, 1] = np.log(img_hsv[:, :, 1] /255* (value - 1)+1) / np.log(value + 1) * 255
        if value < 0:
            img_hsv[:, :, 1] = np.uint8(img_hsv[:, :, 1] / np.log(- value + np.e))
        # Keep the pre-adjustment image for undo-style use.
        self.last_image = self.current_img
        self.current_img = cv2.cvtColor(img_hsv, cv2.COLOR_HLS2BGR)
    # Face detection
    def detect_face(self):
        """Detect faces on the original image with a Haar cascade.

        Returns whatever ``detectMultiScale`` yields: an array of
        (x, y, w, h) boxes, or an empty tuple when nothing is found.
        Requires 'haarcascade_frontalface_default.xml' to be present in the
        working directory.
        """
        img = self.raw_image
        face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # scaleFactor=1.3, minNeighbors=5 — standard tutorial parameters.
        faces = face_cascade.detectMultiScale(gray, 1.3, 5)
        return faces
# 皮肤识别
def detect_skin(self):
img = self.raw_image
rows, cols, channals = img.shape
for r in range(rows):
for c in range(cols):
B = img.item(r, c, 0)
G = img.item(r, c, 1)
R = img.item(r, c, 2)
if (abs(R - G) > 15) and (R > G) and (R > B):
if (R > 95) and (G > 40) and (B > 20) and (max(R, G, B) - min(R, G, B) > 15):
self.imgskin[r, c] = (1, 1, 1)
elif (R > 220) and (G > 210) and (B > 170):
self.imgskin[r, c] = (1, 1, 1)
    # Skin smoothing ("dermabrasion"; value1 = fineness, value2 = strength)
    def dermabrasion(self, value1=3, value2=2):
        """Smooth detected skin regions with a bilateral-filter high-pass blend.

        The defaults of ``value1``/``value2`` are immediately overridden from
        sliders 14 and 11. The arithmetic below runs on uint8 arrays and
        deliberately relies on modular wrap-around (e.g. ``temp1 - img + 128``)
        — this is the classic "buffing" high-pass trick, so do not "fix" it to
        saturating math. Only pixels flagged in ``self.imgskin`` receive the
        smoothed result; all other pixels keep the original image.
        """
        value1 = self.ui.horizontalSlider_14.value()
        # Invert slider 11 so a larger slider value means stronger smoothing.
        value2 = 11 - self.ui.horizontalSlider_11.value()
        if value1 == 0 and value2 == 0:
            return 0
        if value2 == 0:
            value2 = 2
        if value1 == 0:
            value1 = 3
        img = self.current_img
        # Bilateral filter parameters scale with the fineness slider.
        dx = value1 * 5
        fc = value1 * 12.5
        # p: blend percentage between original and smoothed image (fixed 50%).
        p = 50
        temp1 = cv2.bilateralFilter(img, dx, fc, fc)
        temp2 = (temp1 - img + 128)
        # Kernel size must be odd: 2*value2 - 1.
        temp3 = cv2.GaussianBlur(temp2, (2 * value2 - 1, 2 * value2 - 1), 0, 0)
        temp4 = img + 2 * temp3 - 255
        dst = np.uint8(img * ((100 - p) / 100) + temp4 * (p / 100))
        # Complement of the skin mask (1 where NOT skin).
        imgskin_c = np.uint8(-(self.imgskin - 1))
        dst = np.uint8(dst * self.imgskin + img * imgskin_c)
        self.current_img = dst
# 美白算法(皮肤识别)
def whitening_skin(self, value=30):
# value = 30
value = self.ui.horizontalSlider_13.value()
img = self.current_img
imgw = np.zeros(img.shape, dtype='uint8')
imgw = img.copy()
midtones_add = np.zeros(256)#返回来一个给定形状和类型的用0填充的数组;
for i in range(256):
midtones_add[i] = 0.667 * (1 - ((i - 127.0) / 127) * ((i - 127.0) / 127))
lookup = np.zeros(256, dtype="uint8")
for i in range(256):
red = i
red += np.uint8(value * midtones_add[red])#数组类型转换为uint8是无符号八位整型,表示范围是[0, 255]的整数
red = max(0, min(0xff, red)) #oxff为255
lookup[i] = np.uint8(red)
rows, cols, channals = img.shape
for r in range(rows):
for c in range(cols):
if self.imgskin[r, c, 0] == 1:
imgw[r, c, 0] = lookup[imgw[r, c, 0]]
imgw[r, c, 1] = lookup[imgw[r, c, 1]]
imgw[r, c, 2] = lookup[imgw[r, c, 2]]
self.current_img = imgw
    # Whitening (face-detection based)
    def whitening_face(self, value=30):
        """Brighten the detected face region via a mid-tone lookup table.

        The ``value`` default is immediately overridden from slider 8. When
        no face was found, the whole image is whitened; otherwise only the
        first detected face box — enlarged by sqrt(2) — is whitened, and a
        ``processWidth``-pixel border of the box is alpha-blended back toward
        the original image to feather the transition.
        """
        # value = 30
        value = self.ui.horizontalSlider_8.value()
        img = self.current_img
        imgw = np.zeros(img.shape, dtype='uint8')
        imgw = img.copy()
        # Bell-shaped weight: strongest lift at 127, zero at both extremes.
        midtones_add = np.zeros(256)
        for i in range(256):
            midtones_add[i] = 0.667 * (1 - ((i - 127.0) / 127) * ((i - 127.0) / 127))
        lookup = np.zeros(256, dtype="uint8")
        for i in range(256):
            red = i
            red += np.uint8(value * midtones_add[red])
            red = max(0, min(0xff, red))
            lookup[i] = np.uint8(red)
        # faces was computed by calculate()/detect_face().
        # NOTE(review): ``faces == ()`` compares a numpy array against a tuple
        # when faces were found; numpy's tuple comparison semantics are
        # fragile across versions — confirm against the pinned numpy.
        faces = self.faces
        if faces == ():
            # No face: whiten every pixel.
            rows, cols, channals = img.shape
            for r in range(rows):
                for c in range(cols):
                    imgw[r, c, 0] = lookup[imgw[r, c, 0]]
                    imgw[r, c, 1] = lookup[imgw[r, c, 1]]
                    imgw[r, c, 2] = lookup[imgw[r, c, 2]]
        else:
            # Enlarge the first face box by sqrt(2) around its centre,
            # clamped to the image bounds.
            x, y, w, h = faces[0]
            rows, cols, channals = img.shape
            x = max(x - (w * np.sqrt(2) - w) / 2, 0)
            y = max(y - (h * np.sqrt(2) - h) / 2, 0)
            w = w * np.sqrt(2)
            h = h * np.sqrt(2)
            # After this, ``rows``/``cols`` are the box's bottom/right edges.
            rows = min(rows, y + h)
            cols = min(cols, x + w)
            # Whiten everything inside the enlarged box.
            for r in range(int(y), int(rows)):
                for c in range(int(x), int(cols)):
                    imgw[r, c, 0] = lookup[imgw[r, c, 0]]
                    imgw[r, c, 1] = lookup[imgw[r, c, 1]]
                    imgw[r, c, 2] = lookup[imgw[r, c, 2]]
            # Feather the box border: blend each of the processWidth rings
            # between the whitened and the original image (alpha grows inward).
            processWidth = int(max(min(rows - y, cols - 1) / 8, 2))
            for i in range(1, processWidth):
                alpha = (i - 1) / processWidth
                for r in range(int(y), int(rows)):
                    imgw[r, int(x) + i - 1] = np.uint8(
                        imgw[r, int(x) + i - 1] * alpha + img[r, int(x) + i - 1] * (1 - alpha))
                    imgw[r, int(cols) - i] = np.uint8(
                        imgw[r, int(cols) - i] * alpha + img[r, int(cols) - i] * (1 - alpha))
                for c in range(int(x) + processWidth, int(cols) - processWidth):
                    imgw[int(y) + i - 1, c] = np.uint8(
                        imgw[int(y) + i - 1, c] * alpha + img[int(y) + i - 1, c] * (1 - alpha))
                    imgw[int(rows) - i, c] = np.uint8(
                        imgw[int(rows) - i, c] * alpha + img[int(rows) - i, c] * (1 - alpha))
        self.current_img = imgw
# Gamma矫正
def gamma_trans(self):
gamma = (self.ui.horizontalSlider_5.value() + 10) / 10
img = self.current_img
gamma_table = [np.power(x / 255.0, gamma) * 255.0 for x in range(256)]
gamma_table = np.round(np.array(gamma_table)).astype(np.uint8)
self.current_img = cv2.LUT(img, gamma_table)
self.show_image()
self.show_histogram()
    # React to any slider change: re-apply every active filter in a fixed order
    def slider_change(self):
        """Rebuild the current image from the original, applying each filter
        whose slider is non-zero. Called whenever any slider moves."""
        if self.raw_image is None:
            return 0
        # Always start from the untouched original so filters do not stack
        # across repeated slider events.
        self.current_img = self.raw_image
        # Gamma correction
        if self.ui.horizontalSlider_5.value() != 0:
            self.gamma_trans()
        # Saturation
        if self.ui.horizontalSlider.value() != 0:
            self.change_saturation()
        # NOTE(review): dead branch — slider 2 is checked again below for
        # stylize(); this no-op test looks like a leftover.
        if self.ui.horizontalSlider_2.value() != 0:
            pass
        # Edge-preserving smoothing
        if self.ui.horizontalSlider_3.value() != 0:
            self.edge_preserve()
        # Brightness
        if self.ui.horizontalSlider_4.value() != 0:
            self.change_darker()
        # Whitening (face detection)
        if self.ui.horizontalSlider_8.value() != 0:
            self.whitening_face()
        # Whitening (skin detection)
        if self.ui.horizontalSlider_13.value() != 0:
            self.whitening_skin()
        # Skin-smoothing strength
        # NOTE(review): if both slider 11 and slider 14 are non-zero,
        # dermabrasion() runs twice — confirm intended.
        if self.ui.horizontalSlider_11.value() != 0:
            self.dermabrasion()
        # Skin-smoothing fineness
        if self.ui.horizontalSlider_14.value() != 0:
            self.dermabrasion()
        # Stylisation
        if self.ui.horizontalSlider_2.value() != 0:
            self.stylize()
        # Detail enhancement
        if self.ui.horizontalSlider_6.value() != 0:
            self.detail_enhance()
        # Colour pencil sketch
        if self.ui.horizontalSlider_12.value() != 0:
            self.pencil_color()
        self.show_image()
    # Compute the face- and skin-detection data the beautify filters rely on
    def calculate(self):
        """Lazily run face and skin detection once per loaded image.

        Sets ``self.faces`` and (when a face exists) fills ``self.imgskin``;
        ``self.calculated`` guards against recomputation.
        NOTE(review): ``self.faces != ()`` compares a numpy array with a
        tuple when faces were found; numpy's tuple-comparison behaviour is
        fragile across versions — confirm against the pinned numpy.
        """
        if self.raw_image is None:
            return 0
        if self.calculated is False:
            self.faces = self.detect_face()
            if self.faces != ():
                self.detect_skin()
            self.calculated = True
# 怀旧滤镜
def reminiscene(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_10.value() == 0:
self.current_img = self.raw_image
self.show_image()
return 0
img = self.raw_image.copy()
rows, cols, channals = img.shape
for r in range(rows):
for c in range(cols):
B = img.item(r, c, 0)
G = img.item(r, c, 1)
R = img.item(r, c, 2)
img[r, c, 0] = np.uint8(min(max(0.272 * R + 0.534 * G + 0.131 * B, 0), 255))
img[r, c, 1] = np.uint8(min(max(0.349 * R + 0.686 * G + 0.168 * B, 0), 255))
img[r, c, 2] = np.uint8(min(max(0.393 * R + 0.769 * G + 0.189 * B, 0), 255))
self.current_img = img
self.show_image()
# 木刻滤镜
def woodcut(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_9.value() == 0:
# self.current_img = self.raw_image
self.show_image()
return 0
self.gray_image = cv2.cvtColor(self.raw_image, cv2.COLOR_BGR2GRAY)
gray = self.gray_image
value = 70 + self.ui.horizontalSlider_9.value()
rows, cols = gray.shape
for r in range(rows):
for c in range(cols):
if gray[r, c] > value:
gray[r, c] = 255
else:
gray[r, c] = 0
self.gray_image = gray
self.show_grayimage()
# 铅笔画(灰度)
def pencil_gray(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_7.value() == 0:
# self.current_img = self.raw_image
self.show_image()
return 0
value = self.ui.horizontalSlider_7.value() * 0.05
dst1_gray, dst1_color = cv2.pencilSketch(self.current_img, sigma_s=50, sigma_r=value, shade_factor=0.04)
self.gray_image = dst1_gray
self.show_grayimage()
# 铅笔画(彩色)
def pencil_color(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_12.value() == 0:
self.current_img = self.raw_image
self.show_image()
return 0
value = self.ui.horizontalSlider_12.value() * 0.05
dst1_gray, dst1_color = cv2.pencilSketch(self.current_img, sigma_s=50, sigma_r=value, shade_factor=0.04)
self.current_img = dst1_color
# 风格化
def stylize(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_2.value() == 0:
self.current_img = self.raw_image
self.show_image()
return 0
value = self.ui.horizontalSlider_2.value() * 0.05
self.current_img = cv2.stylization(self.current_img, sigma_s=50, sigma_r=value)
# 细节增强
def detail_enhance(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_6.value() == 0:
self.current_img = self.raw_image
self.show_image()
return 0
value = self.ui.horizontalSlider_6.value() * 0.05
self.current_img = cv2.detailEnhance(self.current_img, sigma_s=50, sigma_r=value)
# 边缘保持
def edge_preserve(self):
if self.raw_image is None:
return 0
if self.ui.horizontalSlider_3.value() == 0:
self.current_img = self.raw_image
self.show_image()
return 0
value = self.ui.horizontalSlider_3.value() * 0.05
self.current_img = cv2.edgePreservingFilter(self.current_img, flags=1, sigma_s=50, sigma_r=value)
# 显示摄像照片
def show_camera(self):
flag, self.camera_image = self.cap.read()
show = cv2.resize(self.image, (640, 480))
show = cv2.cvtColor(show, cv2.COLOR_BGR2RGB)
showImage = QtGui.QImage(show.data, show.shape[1], show.shape[0], QtGui.QImage.Format_RGB888)
self.label_show_camera.setPixmap(QtGui.QPixmap.fromImage(showImage))
# 初始化
def intial_value(self):
self.calculated = False
self.ui.horizontalSlider.setValue(0)
self.ui.horizontalSlider_2.setValue(0)
self.ui.horizontalSlider_3.setValue(0)
self.ui.horizontalSlider_4.setValue(0)
self.ui.horizontalSlider_5.setValue(0)
self.ui.horizontalSlider_6.setValue(0)
self.ui.horizontalSlider_7.setValue(0)
self.ui.horizontalSlider_8.setValue(0)
self.ui.horizontalSlider_9.setValue(0)
self.ui.horizontalSlider_10.setValue(0)
self.ui.horizontalSlider_11.setValue(0)
self.ui.horizontalSlider_12.setValue(0)
self.ui.horizontalSlider_13.setValue(0)
self.ui.horizontalSlider_14.setValue(0)
    def texiao(self):
        """Start the dynamic-stream mask special-effect service.

        NOTE(review): relies on ``DynamicStreamMaskService`` imported
        elsewhere in this module; whether ``start()`` blocks is not visible
        from here — confirm.
        """
        ms = DynamicStreamMaskService()
        ms.start()
def huanlian(self):
# try:
# self.app = QApplication(sys.argv)
# self.fileload = swapGUI.Ui_Swap()
# self.fileload.show()
# sys.exit(self.app.exec_())
# except:
# traceback.print_exc()
self.app = QApplication(sys.argv)
self.fileload = swapGUI.Ui_Swap()
self.fileload.show()
self.app.exec_()
def zhengjian(self):
# try:
# self.appp = QtWidgets.QApplication(sys.argv)
# self.window = head.mywindow()
# self.window.show()
# sys.exit(self.appp.exec_())
# except:
# traceback.print_exc()
self.appp = QtWidgets.QApplication(sys.argv)
self.window = head.mywindow()
self.window.show()
self.appp.exec_()
    def hezhao(self):
        """Start the group-photo image-fusion workflow provided by ``rongh``.

        NOTE(review): ``rongh`` is defined elsewhere in this project; its
        behaviour is not visible from this file.
        """
        self.mdo=rongh()
        self.mdo.startrong()
    # Open the camera-capture dialog
    def new_camera(self):
        """Show the secondary camera window and wire up its capture button.

        Blocks in ``Dialog.exec_()`` until the dialog closes, then releases
        the camera device and stops its refresh timer if still active.
        """
        Dialog = QtWidgets.QDialog()
        self.ui_2 = window2.Ui_Form()
        self.ui_2.setupUi(Dialog)
        Dialog.show()
        # "Capture" button pulls the frame back into the editor.
        self.ui_2.pushButton_2.clicked.connect(self.get_image)
        Dialog.exec_()
        # Clean up camera resources left over from the dialog.
        if self.ui_2.cap.isOpened():
            self.ui_2.cap.release()
        if self.ui_2.timer_camera.isActive():
            self.ui_2.timer_camera.stop()
    # Pull the captured camera frame into the editor
    def get_image(self):
        """Adopt the camera dialog's captured frame as the new working image
        and reset all editing state (mirrors ``open_file``)."""
        if self.ui_2.captured_image is not None:
            self.raw_image = self.ui_2.captured_image
            self.current_img = self.ui_2.captured_image
            self.show_image()
            self.show_histogram()
            # Skin mask starts all-zero; filled lazily by detect_skin().
            self.imgskin = np.zeros(self.raw_image.shape)
            self.intial_value()
# 显示人脸识别
def mark_face(self):
if self.raw_image is None:
return 0
if self.calculated == False:
self.calculate()
for (x, y, w, h) in self.faces:
self.current_img = cv2.rectangle(self.current_img.copy(), (x, y), (x+w, y+h), (255, 0, 0), 1)
self.show_image()
if __name__ == "__main__":
    # Script entry point: instantiate the main window. Presumably the Qt
    # event loop is started inside MainWindow — TODO confirm.
    MainWindow()
| 35.326053 | 112 | 0.578829 | 22,847 | 0.977454 | 0 | 0 | 0 | 0 | 0 | 0 | 1,944 | 0.083169 |
f4a2d6d380e2e0e8ef71012f0474f13521d69e9d | 1,551 | py | Python | classes/Friend.py | brandonwarech/book-tracker-capstone | d2b66db94a9be6b1577e2d8eb4b5b9d8eb87b790 | [
"Apache-2.0"
] | null | null | null | classes/Friend.py | brandonwarech/book-tracker-capstone | d2b66db94a9be6b1577e2d8eb4b5b9d8eb87b790 | [
"Apache-2.0"
] | null | null | null | classes/Friend.py | brandonwarech/book-tracker-capstone | d2b66db94a9be6b1577e2d8eb4b5b9d8eb87b790 | [
"Apache-2.0"
] | 1 | 2019-06-25T07:01:58.000Z | 2019-06-25T07:01:58.000Z | import logging
import sys
import classes.iDb as db
# Set Logging Level
logging.basicConfig(level=logging.INFO)
class Friend:
def __init__(self, User, Friend):
self.user_id = User.user_id
self.friend_id = Friend.user_id
pass
def addFriend(self):
pass
def removeFriend(self):
pass
@staticmethod
def getFriends(user_id):
print(user_id)
try:
sql = "SELECT * FROM FRIEND WHERE USER1 = \'" + str(user_id) +'\' OR USER2 = \'' + str(user_id) + "'"
# Calls database with constructed SQL from imported db class
#favs = db.db.callDbFetch(sql)
friends_query_obj = db.dbQuery(sql)
friends = db.dbQuery.callDbFetch(friends_query_obj)
# Log Results of DB call and return results
logging.debug("successful connect to db2")
logging.info("favorites response: " + str(friends))
if friends != [False]:
return friends
else:
return {
"statusCode": 400,
"headers": {"Content-Type": "application/json"},
"body": {"error": str(sql) + str(sys.exc_info())}
}
except:
logging.error("Oops!" + str(sys.exc_info()) + "occured. ")
return {
"statusCode": 400,
"headers": {"Content-Type": "application/json"},
"body": {"error": str(sql) + str(sys.exc_info())}
}
| 29.264151 | 113 | 0.523533 | 1,429 | 0.921341 | 0 | 0 | 1,201 | 0.774339 | 0 | 0 | 411 | 0.26499 |
f4a7996845ec8bc3004f9b2f07ce077ddf8b18ec | 214 | py | Python | src/video_retrieval/FrameSaver.py | DoodleBobBuffPants/EyesInTheSky | 1bd18fc40631b0046cbd029a48413f31a63afcca | [
"MIT"
] | null | null | null | src/video_retrieval/FrameSaver.py | DoodleBobBuffPants/EyesInTheSky | 1bd18fc40631b0046cbd029a48413f31a63afcca | [
"MIT"
] | null | null | null | src/video_retrieval/FrameSaver.py | DoodleBobBuffPants/EyesInTheSky | 1bd18fc40631b0046cbd029a48413f31a63afcca | [
"MIT"
] | null | null | null | # save frames asynchronously
import cv2 as cv
def frame_saver(queue, lock):
while True:
lock.take_lock()
cv.imwrite("frame.jpg", queue.peek()) # peek non existent
lock.release_lock()
| 21.4 | 66 | 0.649533 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.271028 |
f4aadeae1ada83a420b6b77187f7404ea44db94a | 1,529 | py | Python | zhiwehu/post/urls.py | zhiwehu/zhiwehu | 4d07fa14fc00d5544226326161a0efc2d1202329 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2021-05-15T17:40:21.000Z | 2021-05-15T17:40:21.000Z | zhiwehu/post/urls.py | zhiwehu/zhiwehu | 4d07fa14fc00d5544226326161a0efc2d1202329 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | zhiwehu/post/urls.py | zhiwehu/zhiwehu | 4d07fa14fc00d5544226326161a0efc2d1202329 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from .views import PostListView, PostDetailView
urlpatterns = patterns('',
# URL pattern for the PostListView # noqa
url(
regex=r'^$',
view=PostListView.as_view(),
name='post_list'
),
url(
regex=r'^category/(?P<category>[\w-]+)/$',
view=PostListView.as_view(),
name='category_post_list'
),
url(
regex=r'^tag/(?P<tag>[\w-]+)/$',
view=PostListView.as_view(),
name='tag_post_list'
),
url(
regex=r'^(?P<year>\d{4})/(?P<month>\d{1,2})/$',
view=PostListView.as_view(),
name='archive_post_list'
),
url(
regex=r'^blog/(?P<slug>[\w-]+)/$',
view=PostDetailView.as_view(),
name='post_detail'
),
url(
regex=r'^add/comment/$',
view='post.views.add_comment',
name='add_comment',
),
) | 35.55814 | 74 | 0.325049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 331 | 0.216481 |
f4ab03085872568dfa994166ab6e02b47cfe7be5 | 3,270 | py | Python | iseq/_cli/hscan.py | EBI-Metagenomics/iseq | 3c28fc92e5af05c91c6669d7f1a28d1ce857f3f1 | [
"MIT"
] | null | null | null | iseq/_cli/hscan.py | EBI-Metagenomics/iseq | 3c28fc92e5af05c91c6669d7f1a28d1ce857f3f1 | [
"MIT"
] | null | null | null | iseq/_cli/hscan.py | EBI-Metagenomics/iseq | 3c28fc92e5af05c91c6669d7f1a28d1ce857f3f1 | [
"MIT"
] | null | null | null | import os
import click
from fasta_reader import read_fasta
from hmmer_reader import open_hmmer
from iseq.alphabet import alphabet_name
from iseq.hmmer3 import create_profile
from iseq.hmmer_model import HMMERModel
from iseq.model import EntryDistr
from .debug_writer import DebugWriter
from .output_writer import OutputWriter
@click.command()
@click.argument("profile", type=click.File("r"))
@click.argument("target", type=click.File("r"))
@click.option(
    "--output",
    type=click.Path(exists=False, dir_okay=False, writable=True, resolve_path=True),
    help="Save results to OUTPUT (GFF format).",
    default="output.gff",
)
@click.option(
    "--quiet/--no-quiet",
    "-q/-nq",
    help="Disable standard output.",
    default=False,
)
@click.option(
    "--window",
    type=int,
    help="Window length. Defaults to zero, which means no window.",
    default=0,
)
@click.option(
    "--hmmer3-compat/--no-hmmer3-compat",
    help="Enable full HMMER3 compatibility. Defaults to False.",
    default=False,
)
@click.option(
    "--entry-distr",
    type=click.Choice(["uniform", "occupancy"], case_sensitive=False),
    help="Set the entry distribution. Defaults to occupancy.",
    default="occupancy",
)
@click.option(
    "--odebug",
    type=click.File("w"),
    help="Save debug info into a tab-separated values file.",
    default=os.devnull,
)
def hscan(
    profile,
    target,
    output,
    quiet,
    window: int,
    hmmer3_compat: bool,
    entry_distr: str,
    odebug,
):
    """
    Search nucleotide sequence(s) against a profiles database.

    An OUTPUT line determines an association between a TARGET subsequence and
    a PROFILE protein profile. An association maps a target subsequence to a
    profile and represents a potential homology. Expect many false positive
    associations as we are not filtering out by statistical significance.
    """
    owriter = OutputWriter(output)
    dwriter = DebugWriter(odebug)

    # Map the CLI choice onto the EntryDistr enum.
    if entry_distr == "occupancy":
        edistr = EntryDistr.OCCUPANCY
    else:
        edistr = EntryDistr.UNIFORM

    # NOTE(review): both branches discard their result, so this neither
    # silences nor redirects anything — looks like leftover scaffolding.
    if quiet:
        click.open_file(os.devnull, "a")
    else:
        click.get_text_stream("stdout")

    # Materialise all target sequences up front so each profile can scan
    # them without re-reading the FASTA file.
    with read_fasta(target) as fasta:
        targets = list(fasta)

    for plain_model in open_hmmer(profile):
        model = HMMERModel(plain_model)
        prof = create_profile(model, hmmer3_compat, edistr, window)

        for tgt in targets:
            seq = prof.create_sequence(tgt.sequence.encode())
            search_results = prof.search(seq)
            ifragments = search_results.ifragments()
            seqid = f"{tgt.id}"

            # One GFF line per reported fragment interval.
            for interval in [i.interval for i in ifragments]:
                start = interval.start
                stop = interval.stop
                owriter.write_item(
                    seqid,
                    alphabet_name(seq.alphabet),
                    prof.profid,
                    alphabet_name(prof.alphabet),
                    start,
                    stop,
                    prof.window_length,
                )

            # Debug rows only when a real --odebug path was supplied.
            if odebug is not os.devnull:
                for i in search_results.debug_table():
                    dwriter.write_row(seqid, i)

    owriter.close()
    odebug.close_intelligently()
| 28.434783 | 84 | 0.635474 | 0 | 0 | 0 | 0 | 2,938 | 0.898471 | 0 | 0 | 869 | 0.265749 |
f4abb17f526e4601a14901c49f09e55bab004ea9 | 847 | py | Python | commands/playbackjoystick.py | randbrown/robotpy_recordplayback | b36f794b141809578acd25975fb961bfe17c8285 | [
"MIT"
] | null | null | null | commands/playbackjoystick.py | randbrown/robotpy_recordplayback | b36f794b141809578acd25975fb961bfe17c8285 | [
"MIT"
] | null | null | null | commands/playbackjoystick.py | randbrown/robotpy_recordplayback | b36f794b141809578acd25975fb961bfe17c8285 | [
"MIT"
] | null | null | null |
'''
Pseudo-joystick object for playback of recorded macros
'''
class PlaybackJoystick():
    """Joystick stand-in that replays axis values recorded by the macro recorder."""

    def __init__(self, playback_data):
        # Each recorded row stores axis values starting at column 2,
        # so axis N lives at row[N + 2].
        self.playback_data = playback_data
        self.t = 0

    def setTime(self, t=0):
        """Seek playback to recorded sample index *t* (0 by default)."""
        self.t = t

    def getRawAxis(self, axis):
        """Return the recorded value for *axis* at the current time, or 0 past the end."""
        # TODO fix me: map real elapsed time to the correct recorded index.
        # The simulator samples far more often than the recorder stored
        # (once per millisecond), and a real robot would poll every ~50 ms,
        # so using t directly as the row index is only approximately right.
        frame = self.t
        if frame >= len(self.playback_data):
            return 0
        return float(self.playback_data[frame][axis + 2])

    def getX(self, hand=None):
        return self.getRawAxis(0)

    def getY(self, hand=None):
        return self.getRawAxis(1)
| 31.37037 | 120 | 0.638725 | 782 | 0.923259 | 0 | 0 | 0 | 0 | 0 | 0 | 327 | 0.386068 |
f4ac16666c88abed3219913a05d2e76e0e7e0cac | 2,323 | py | Python | link_grib.py | martinremy/wps | 8bddbdbb612a0e019ae110df481461d5d904053a | [
"MIT"
] | 5 | 2019-09-21T03:08:01.000Z | 2021-10-03T01:41:30.000Z | link_grib.py | martinremy/wps | 8bddbdbb612a0e019ae110df481461d5d904053a | [
"MIT"
] | 6 | 2019-07-07T20:49:18.000Z | 2020-04-23T15:13:01.000Z | link_grib.py | WRF-CMake/WPS | 69f5da12997d6685cb0640132c5ef398ba12c341 | [
"MIT"
] | 2 | 2020-04-21T12:54:24.000Z | 2021-07-08T00:49:37.000Z | #!/usr/bin/env python
# WRF-CMake (https://github.com/WRF-CMake/wps).
# Copyright 2018 M. Riechert and D. Meyer. Licensed under the MIT License.
import os
import sys
import shutil
import glob
import string
import itertools
import argparse
def link(src_path, link_path):
    """Create *link_path* pointing at existing file *src_path*.

    Tries a symlink first (on Windows this may require admin rights but is
    not restricted to one drive) and falls back to a hard link (no admin
    rights needed, but same-drive only on Windows). Any existing file or
    link at *link_path* is replaced.

    Raises OSError if *src_path* is not a file or neither link type works.
    """
    if not os.path.isfile(src_path):
        # Explicit error instead of `assert`, which vanishes under `python -O`.
        raise OSError('not a file: %s' % src_path)
    if os.path.exists(link_path) or os.path.islink(link_path):
        os.remove(link_path)
    try:
        # Windows: requires admin rights, but not restricted to same drive
        os.symlink(src_path, link_path)
    except OSError:
        # Windows: does not require admin rights, but restricted to same drive
        os.link(src_path, link_path)
def link_or_copy(src, dst):
    """Link *dst* to *src*, copying the file instead if linking fails.

    Fallback for Windows, where hard/sym links can fail without admin
    rights or across drives; a plain copy still works there. The bare
    ``except:`` is narrowed to ``except Exception`` so that
    KeyboardInterrupt/SystemExit propagate instead of triggering a copy.
    """
    try:
        link(src, dst)
    except Exception:
        # fall-back for Windows if hard/sym links couldn't be created
        shutil.copy(src, dst)
def generate_gribfile_extensions():
    """Yield the WPS GRIBFILE suffixes in order: AAA, AAB, ..., ZZZ."""
    alphabet = string.ascii_uppercase
    for combo in itertools.product(alphabet, repeat=3):
        yield ''.join(combo)
def collect_input_files(paths):
    """Expand *paths*: each directory is replaced by its immediate entries,
    plain file paths are kept as-is. Returns a flat list."""
    collected = []
    for entry in paths:
        if os.path.isdir(entry):
            collected.extend(os.path.join(entry, name) for name in os.listdir(entry))
        else:
            collected.append(entry)
    return collected
def link_grib_files(input_paths, output_dir):
    """Link every input GRIB file into *output_dir* as GRIBFILE.AAA, .AAB, ...

    Pre-existing GRIBFILE.* entries in *output_dir* are removed first.
    Exits with status 1 if there are more inputs than three-letter suffixes.
    """
    os.makedirs(output_dir, exist_ok=True)
    # Clear stale links left over from a previous run.
    for stale in glob.glob(os.path.join(output_dir, 'GRIBFILE.*')):
        os.remove(stale)
    suffixes = generate_gribfile_extensions()
    for grib_path in collect_input_files(input_paths):
        try:
            suffix = next(suffixes)
        except StopIteration:
            print('RAN OUT OF GRIB FILE SUFFIXES!', file=sys.stderr)
            sys.exit(1)
        link_or_copy(grib_path, os.path.join(output_dir, 'GRIBFILE.' + suffix))
if __name__ == '__main__':
    # CLI: one or more GRIB files/folders in, GRIBFILE.??? links out.
    parser = argparse.ArgumentParser(description='A tool that symlinks GRIB files to GRIBFILE.??? format.')
    parser.add_argument('paths', metavar='path', nargs='+', help='GRIB file or folder with GRIB files')
    parser.add_argument('-o', '--out', metavar='output_dir', help='Output folder (default is current folder)',
                        default=os.getcwd())
    args = parser.parse_args()

    link_grib_files(args.paths, args.out)
f4af472ea7697545e5951704f8ee5043fcc2bdef | 95 | py | Python | freight/__init__.py | buahaha/aa-freight | 69eb85188988d7cfaffc7c485d22ddb442a4a2b3 | [
"MIT"
] | null | null | null | freight/__init__.py | buahaha/aa-freight | 69eb85188988d7cfaffc7c485d22ddb442a4a2b3 | [
"MIT"
] | null | null | null | freight/__init__.py | buahaha/aa-freight | 69eb85188988d7cfaffc7c485d22ddb442a4a2b3 | [
"MIT"
] | null | null | null | default_app_config = "freight.apps.FreightConfig"
__version__ = "1.5.1"
__title__ = "Freight"
| 19 | 49 | 0.757895 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.463158 |
f4af8027565466ae51ad5b3fe97e8b0e0503ff7d | 307 | py | Python | platform/hwconf_data/efr32bg1p/modules/WDOG/__init__.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | [
"Zlib"
] | null | null | null | platform/hwconf_data/efr32bg1p/modules/WDOG/__init__.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | [
"Zlib"
] | 1 | 2020-08-25T02:36:22.000Z | 2020-08-25T02:36:22.000Z | platform/hwconf_data/efr32bg1p/modules/WDOG/__init__.py | lenloe1/v2.7 | 9ac9c4a7bb37987af382c80647f42d84db5f2e1d | [
"Zlib"
] | 1 | 2020-08-25T01:56:04.000Z | 2020-08-25T01:56:04.000Z | import efr32bg1p.halconfig.halconfig_types as halconfig_types
import efr32bg1p.halconfig.halconfig_dependency as halconfig_dependency
import efr32bg1p.PythonSnippet.ExporterModel as ExporterModel
import efr32bg1p.PythonSnippet.RuntimeModel as RuntimeModel
import efr32bg1p.PythonSnippet.Metadata as Metadata | 61.4 | 71 | 0.905537 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
f4b2408f2df2d90a1f9ebc4e0578961026c80979 | 738 | py | Python | dataart.py | heerdyes/tortoises | 745479e75e2bce92c41b21c52205501ab2fc44ca | [
"Artistic-2.0"
] | null | null | null | dataart.py | heerdyes/tortoises | 745479e75e2bce92c41b21c52205501ab2fc44ca | [
"Artistic-2.0"
] | null | null | null | dataart.py | heerdyes/tortoises | 745479e75e2bce92c41b21c52205501ab2fc44ca | [
"Artistic-2.0"
] | null | null | null | import turtle
# initialization
t=turtle.Turtle()
t.speed(0)
t.up()
t.bk(200)
t.down()
# ask for data file name
fname=input('enter data file name: ')
print('reading from file: '+fname)
# create an empty list datalines
datalines=[]
# read lines from the data file into the list datalines
with open(fname) as df:
datalines=df.readlines()
# check what's there in datalines
print('datalines:')
print(datalines)
# loop over each line in the list and do something with line
for dl in datalines:
amt=int(dl)
# move upward by distance 'amt'
t.lt(90)
t.fd(amt)
t.bk(amt)
t.rt(90)
# shift turtle little to right to prepare it for the next iteration
t.up()
t.fd(10)
t.down()
print('end of program')
| 18.923077 | 71 | 0.677507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 391 | 0.52981 |