blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1ca7e02291a8193c9d0a77dd908e13c243f0c60e | d5af5459d0a68d8934219cdd516a23d73c7c52fb | /examples/10 packages/demoscript2.py | c76974ea2c3c59c767f5173f41098301fc861175 | [] | no_license | flathunt/pylearn | 1e5b147924dca792eb1cddbcbee1b8da0fc3d055 | 006f37d67343a0288e7efda359ed9454939ec25e | refs/heads/master | 2022-11-23T23:31:32.016146 | 2022-11-17T08:20:57 | 2022-11-17T08:20:57 | 146,803,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | #!/usr/local/bin/python
from demopkg.demomod1 import *
from demopkg.demomod2 import *
fn1()
fn1()
fn21()
| [
"porkpie@gmail.com"
] | porkpie@gmail.com |
6fd0477a22c33b117b42125a6e057df913881c55 | 48f73b5b78da81c388d76d685ec47bb6387eefdd | /scrapeHackerrankCode/codes/cats-and-a-mouse.py | ca00be6a7e4c1e023ad75c17f954266b7f54f1c9 | [] | no_license | abidkhan484/hacerrankScraping | ad0ceda6c86d321d98768b169d63ea1ee7ccd861 | 487bbf115117bd5c293298e77f15ae810a50b82d | refs/heads/master | 2021-09-18T19:27:52.173164 | 2018-07-18T12:12:51 | 2018-07-18T12:12:51 | 111,005,462 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | # Accepted
# Python 3
#!/bin/python3
# HackerRank "Cats and a Mouse": for each query, cat A at x and cat B at y
# race toward a mouse at z; the closer cat wins, a tie lets the mouse escape.
import sys
q = int(input().strip())
for a0 in range(q):
    x,y,z = input().strip().split(' ')
    x,y,z = [int(x),int(y),int(z)]
    #position of cat A to mouse is c1 (manual absolute value of x - z)
    c1 = x-z
    if (c1<0):
        c1 = -c1
    #position of cat B to mouse is c2 (manual absolute value of y - z)
    c2 = y-z
    if (c2<0):
        c2 = -c2
    # smaller distance wins; equal distances mean the mouse gets away
    if (c1>c2):
        print("Cat B")
    elif (c2>c1):
        print("Cat A")
    else:
        print("Mouse C")
| [
"abidkhan484@gmail.com"
] | abidkhan484@gmail.com |
93e48303735a8b4f1377af9f4955cdd1ce80e27e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03352/s380195250.py | c8f616bc533e0145002aa7156f98f0a57f0f9297 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | import math
def largest_perfect_power(x):
    """Return the largest perfect power b**p (b >= 1, p >= 2) not exceeding x.

    Bug fix: the original tested log10(i)/log10(j).is_integer(), which is
    float-precision fragile and, because the base range stopped at
    int((X + 1) / 2) exclusive, missed small inputs entirely (it printed 1
    for X = 4 even though 4 = 2**2).  This version enumerates powers with
    exact integer arithmetic.
    """
    best = 1
    base = 2
    # only bases with base**2 <= x can contribute a perfect power
    while base * base <= x:
        power = base * base
        while power <= x:
            if power > best:
                best = power
            power *= base
        base += 1
    return best


if __name__ == '__main__':
    X = int(input())
    print(largest_perfect_power(X))
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
9bda41dd7233be94e1d43c731d04583cd8d9da2a | 717fbff92a7998660dc572666b045500cf028dff | /CRAB_SUBMISSION/Pt_100_withoutPU/crab_reco_50_2ndfull_2nd.py | bf11bc0b4fd441667be79fcaae38767ecf6f3ac8 | [] | no_license | neharawal14/muon_gun_project | 20c84874c14b75cb5fed4e9b7958927f6ddaf9f9 | efccbc36c4d511f1b8dafed8292604331b7b38ae | refs/heads/main | 2023-08-22T15:24:21.255977 | 2021-09-22T17:46:25 | 2021-09-22T17:46:25 | 409,301,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,244 | py | from WMCore.Configuration import Configuration
from CRABClient.UserUtilities import config
# CRAB3 job configuration for the RECO (step3) stage of a single-muon
# Pt=50 GeV sample; submits one file per job and publishes the output.
config = config()
config.General.requestName = 'SingleMuPt50_2ndfull_2nd_large_step3'
config.General.workArea = 'crab_projects_pt50_2ndfull_2nd'
config.General.transferOutputs = True
config.General.failureLimit=1
config.General.transferLogs=True
# CMSSW parameter-set driving GEN/DIGI/L1/RAW2DIGI/RECO
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'SingleMuPt50_2ndfull_2nd_GEN_DIGI_L1_RAW2DIGI_RECO.py'
#config.Data.outputPrimaryDataset = 'MinBias'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 1
#NJOBS = 2000  # This is not a configuration parameter, but an auxiliary variable that we use in the next line.
#config.Data.totalUnits = config.Data.unitsPerJob * NJOBS
config.Data.publication = True
#config.Data.outputDatasetTag = 'ZMuMu_step2'
#config.Data.outputPrimaryDataset = 'ZMuMu_step2_ferrico'
# input is the user-published step2 dataset (phys03 DBS instance)
config.Data.inputDataset = '/SingleMuPt50_2ndfull_2nd_large_GEN-SIM_step1_neha/nrawal-crab_SingleMuPt50_2ndfull_2nd_large_step2-b92895268740895c325a8071b664b5b5/USER'
#config.Data.inputDataset = '/ZMuMu_GEN-SIM_step1_ferrico/ferrico-ZMuMu_GEN-SIM_step1-9eadee95878022f078e16d6b70fe376c/USER'
config.Data.inputDBS = 'phys03'
config.Site.storageSite = 'T2_US_Florida'
| [
"nrawal@lxplus.cern.ch"
] | nrawal@lxplus.cern.ch |
5d7cc648684a2ad43296c9188ed9c4fca05f8fcb | 9252a2c661404e596bb81474ae56ff836ad21eb4 | /Python/10_Error_Exception/try.py | 7343bc785aca8afd741cf2211edbec66c523ebe8 | [] | no_license | yang75n/programming-language-guide | 242c6d094e114240a7d0650ed12b7dc8b074a5e4 | 0c6ff76c859f1283ee39a547bb81bb9236d4a303 | refs/heads/master | 2021-06-29T16:38:01.535856 | 2021-03-05T07:32:08 | 2021-03-05T07:32:08 | 223,430,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | while True:
    try:
        # int() raises ValueError for non-numeric text; break only on success
        x = int(input("请输入一个数字: "))
        break
    except ValueError:
        # invalid input: show the retry message and loop again
        print("您输入的不是数字,请再次尝试输入!")
| [
"you@example.com"
] | you@example.com |
36312377ff350c3df7aded430feadb6bc3f2a9fd | c163ffbec8ed657adb4e6633373492a6166c39cd | /substanced/sdi/views/indexing.py | dc67ae430b609340ab45c102b18a28ef127fd154 | [
"BSD-3-Clause-Modification"
] | permissive | reebalazs/substanced | b085d92a725efff45f430d6a2513573a86005a5b | 921ae5abfc6b04d009a55ed3405563589cddc88c | refs/heads/master | 2021-01-15T22:41:50.581066 | 2012-11-20T14:02:21 | 2012-11-20T14:02:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,914 | py | from pyramid.view import view_defaults
from pyramid.httpexceptions import HTTPFound
from pyramid.session import check_csrf_token
from ...catalog import (
catalog_view_factory_for,
CatalogViewWrapper,
)
from ...content import find_services
from ...util import oid_of
from .. import (
mgmt_view,
MIDDLE
)
@view_defaults(
    catalogable=True,
    name='indexing',
    permission='sdi.manage-catalog',
    )
class IndexingView(object):
    """SDI management views showing and refreshing the catalog index state
    of a single content object."""
    # staticmethod attribute so tests can swap in a stub factory
    catalog_view_factory_for = staticmethod(catalog_view_factory_for) # testing
    def __init__(self, context, request):
        self.context = context
        self.request = request
    @mgmt_view(
        renderer='templates/indexing.pt',
        tab_title='Indexing',
        tab_after=MIDDLE, # try not to be the default tab, we're too obscure
        )
    def show(self):
        """Render, per catalog service, each index with the document's
        representation in it ('(not indexed)' when absent)."""
        oid = oid_of(self.context)
        catalogs = []
        for catalog in find_services(self.context, 'catalog'):
            indexes = []
            catalogs.append((catalog, indexes))
            for index in catalog.values():
                docrepr = index.document_repr(oid, '(not indexed)')
                indexes.append({'index':index, 'value':docrepr})
        return {'catalogs':catalogs}
    @mgmt_view(request_method='POST', tab_title=None)
    def reindex(self):
        """POST handler: reindex this object in every catalog service,
        then redirect back to the indexing tab (CSRF-protected)."""
        context = self.context
        request = self.request
        check_csrf_token(request)
        oid = oid_of(self.context)
        catalog_view_factory = self.catalog_view_factory_for(
            context, request.registry)
        # only reindex when a catalog view factory applies to this content type
        if catalog_view_factory:
            wrapper = CatalogViewWrapper(context, catalog_view_factory)
            for catalog in find_services(context, 'catalog'):
                catalog.reindex_doc(oid, wrapper)
            request.flash_with_undo('Object reindexed', 'success')
        return HTTPFound(request.mgmt_url(self.context, '@@indexing'))
| [
"chrism@plope.com"
] | chrism@plope.com |
5803bce5c8dd22cf1776e2fcc7389efe58b99768 | 6ffc81125d6bb5f8476f95b2334a27807b8290de | /tests/test_parse.py | 1e7a4e1614041fc569252d27d69c2636434b2ced | [
"BSD-2-Clause",
"Apache-2.0"
] | permissive | oaqa/FlexNeuART | 4cb341ca3c3f94fa28a7cfd4aef5451de3a4a2cb | 0bd3e06735ff705731fb6cee62d3486276beccdf | refs/heads/master | 2023-09-01T00:19:33.980081 | 2023-05-26T19:19:30 | 2023-05-26T19:19:30 | 64,071,121 | 156 | 21 | Apache-2.0 | 2023-09-10T01:27:05 | 2016-07-24T15:08:03 | Java | UTF-8 | Python | false | false | 665 | py | #!/usr/bin/env python
import unittest
from flexneuart.text_proc.parse import KrovetzStemParser
class TestKrovetzStemParser(unittest.TestCase):
    """Basic functional tests for KrovetzStemParser."""

    def test_basic(self):
        # Bug fix: this method was named 'basic_tests', which unittest's
        # default discovery (methods must start with 'test') silently skipped,
        # so the assertions never ran.
        parser = KrovetzStemParser(['is', 'a'])
        self.assertEqual(parser('This IS a simplest tests'), 'this simplest test')
        self.assertEqual(parser('This IS a simplest teStEd'), 'this simplest test')
        self.assertEqual(parser('This IS a simplest-teStEd'), 'this simplest test')
        self.assertEqual(parser('This IS a simplest#teStEd'), 'this simplest test')


if __name__ == "__main__":
    unittest.main()
| [
"leo@boytsov.info"
] | leo@boytsov.info |
9823b66021657ff14842e2da00f41c45bb928496 | bd299b3b54c8d566f888159f448200932c415df6 | /memories/urls.py | e1920f17ff555449f551efe12dd5a51a437ff0f0 | [] | no_license | primeunforcer/KinkadeWebpage | edd2db799817e6353cc0a4b23a5bdd6357881fd5 | e037d1c6fdfed58d8abb84f797c99b9e14b3e118 | refs/heads/master | 2021-04-28T19:17:33.117383 | 2018-02-18T01:56:14 | 2018-02-18T01:56:14 | 121,891,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | from django.conf.urls import url
from . import views
# route the app root to the memory list view
urlpatterns = [
    url(r'^$', views.memory_list, name='memory_list'),
]
"admin@admin.com"
] | admin@admin.com |
937b9f9d39c95c4381e0d93c2397840ab7af652a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02987/s818435683.py | 7cfe08a62edfc1ca0bfee4cf0d5452b18db63878 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | s = input().split()
# S (read on the previous line) has length 4; answer 'Yes' only when it
# contains exactly two distinct characters, each appearing exactly twice.
b = list(s[0][0] + s[0][1] + s[0][2] + s[0][3])
sset = set(b)
# bug fix: len(sset) == 2 alone also accepted 3+1 splits such as 'AAAB';
# additionally require every distinct character to occur twice
if len(sset) == 2 and all(b.count(c) == 2 for c in sset):
    print('Yes')
else:
    print('No')
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
5420de25e6cd3e59e10715c9c78fb4be9208eff7 | 1170f797b7788b9e8445b7dd65377763068a9aa3 | /vpython/01_curve.py | 8f17637613af15aff0ea1e0f99a146d2960097a3 | [] | no_license | f-fathurrahman/ffr-python-stuffs | 2585c554e15ec6828900739376250ed391cf6966 | d249c4f2a01f58a96083bac2377309c05f652907 | refs/heads/master | 2023-08-23T22:54:34.147166 | 2023-07-20T23:24:50 | 2023-07-20T23:24:50 | 83,401,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 79 | py | from vpython import *
# draw a polyline through three 3-D points (VPython curve object)
c = curve(vector(1,1,0), vector(-1,1,0), vector(2,2,1))
| [
"fadjar.fathurrahman@gmail.com"
] | fadjar.fathurrahman@gmail.com |
841cdbbcc333c4a305e51582f43857977b301cf7 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_6665.py | 5372da5606559fecb37e86171cac99324584bf4c | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | # TypeError in Django
# fragment of a Django urlpatterns list (pre-1.10 string view paths)
url(r'^koszyk/$', 'app_name.views.koszyk', name="sklep_koszyk"),
url(r'^koszyk/dodaj/(\d+)/$', 'app_name.views.koszyk_dodaj', name="sklep_koszyk_dodaj"),
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
b99317af4560c6df672ceda2c1599ab07f34961c | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2/cachitas/revenge_of_the_pancakes.py | c86f379820b17e6d7b83f5e4d4a37df87be2c178 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,883 | py | import logging
log = logging.getLogger(__name__)
def prepare_pancakes(pancakes_pile):
    """Return the number of flip maneuvers needed to make every pancake
    show '+'.

    Greedy strategy: repeatedly locate the bottom-most '-' and flip the
    sub-stack from the top down to (and including) it.  Note the helper
    only inverts faces without reversing the sub-stack order; for this
    counting problem that does not change the number of maneuvers needed
    (assumption based on the greedy structure — confirm against the
    problem statement).
    """
    log.info('Pancakes pile: %s' % pancakes_pile)
    maneuvers = 0
    n = len(pancakes_pile)
    ideal_pile = '+' * n
    while pancakes_pile != ideal_pile:
        log.debug('Pancake pile is not ready to serve: %s' % pancakes_pile)
        # Lookup pancake pile from the bottom
        for i, pancake in enumerate(pancakes_pile[::-1]):
            if pancake == '-':
                # convert index in the reversed string back to pile index
                lowest_incorrect_pancake = n - i - 1
                log.debug('Lowest incorrect pancake index: %d'
                          % lowest_incorrect_pancake)
                break
        else:
            # no '-' found at all: pile already servable
            log.info('Serving pancakes. It required %d maneuvers' % maneuvers)
            return maneuvers
        pancakes_pile = _flip_top_pancakes(
            pancakes_pile, lowest_incorrect_pancake)
        maneuvers += 1
    log.info('Serving pancakes. It required %d maneuvers' % maneuvers)
    return maneuvers
def _flip_top_pancakes(pancakes_pile, n):
    """Invert the faces of pancakes 0..n and return the new pile string.

    Only the face symbols are inverted; the order of the sub-stack is kept.
    """
    top_stack, rest = pancakes_pile[:n+1], pancakes_pile[n+1:]
    log.debug('Flipping first pancakes [%s](%s)' % (top_stack, rest))
    inverted_top = ''.join(_flip(cake) for cake in top_stack)
    new_pile = inverted_top + pancakes_pile[n+1:]
    log.debug('Flipping result: %s' % new_pile)
    return str(new_pile)
def _flip(pancake):
if pancake == '+':
return '-'
elif pancake == '-':
return '+'
else:
raise ValueError(pancake)
if __name__ == '__main__':
    logging.basicConfig(level='INFO')
    T = int(input())  # read a line with a single integer (input size)
    for i in range(1, T + 1):
        log.info(50 * '-' + ' CASE {:>d}'.format(i))
        S = input()
        # Code Jam style output: one line per test case
        print("Case #{}: {}".format(i, prepare_pancakes(S)))
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
760704eb82954bce2f7199f574320a6724fcdf28 | 53dd5d2cfb79edc87f6c606bbfb7d0bedcf6da61 | /.history/EMR/age_sex_20190618093311.py | cca1d6257ba56a454e2c1c044dda556e97e0897c | [] | no_license | cyc19950621/python | 4add54894dc81187211aa8d45e5115903b69a182 | d184b83e73334a37d413306d3694e14a19580cb0 | refs/heads/master | 2020-04-11T20:39:34.641303 | 2019-07-02T12:54:49 | 2019-07-02T12:54:49 | 162,078,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 867 | py | # -*- coding:utf-8 -*-
import time
import math
import os
import sys
import os, os.path,shutil
import codecs
import EMRdef
import re
# Scan every EMR text file, extract the patient's sex (M/W) and age,
# and write one "<sex> <age>" summary file per record ID.
emrtxts = EMRdef.txttq(u'D:\DeepLearning ER\EHR-all')# collect the .txt files in the directory
for emrtxt in emrtxts:
    f = open(emrtxt,'r',errors="ignore")# errors="ignore" for Chinese text
    emrtxt = os.path.basename(emrtxt)
    emrtxt_str = re.findall(r'(^.+?)\_',emrtxt)# extract the record ID (part before '_')
    emrtxt = "".join(emrtxt_str)# convert to str
    out = []
    for line in f.readlines():
        # '男' = male, '女' = female
        if line.find(r'男')>-1:
            out.append('M')
        if line.find(r'女')>-1:
            out.append('W')
        # '岁' = "years old"; strip it and keep the remaining age text
        if line.find('岁')>-1:
            line = re.sub('岁','',line)
            # NOTE(review): 'lien' looks like a typo for 'line' and is unused
            lien = ''.join(line)
            out.append(line)
            break
    output = ' '.join(out)
    EMRdef.text_create(r'D:\DeepLearning ER\EHRbase','.txt' ,emrtxt,output)
| [
"1044801968@qq.com"
] | 1044801968@qq.com |
214f2c895076e8f4b959413b437dfafe392835a9 | 2f46c6463d4f871a72d4296c3dae00f029e892f1 | /src/cogent3/util/transform.py | 92b61230bf9fa8ef70c530cf4d580f2316f65502 | [
"BSD-3-Clause"
] | permissive | BrendanBeaton/cogent3 | a09376c55f24da837690219157770ad94e917579 | e10f4f933921d52b000096b7c016190a1602add6 | refs/heads/master | 2022-12-02T07:59:11.112306 | 2020-06-30T05:40:33 | 2020-06-30T05:40:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,676 | py | #!/usr/bin/env python
"""Provides transformations of functions and other objects.
Includes:
Standard combinatorial higher-order functions adapted from David Mertz (2003),
"Text Processing in Python", Chapter 1.
Functions for performing complex tests on strings, e.g. includes_any or
includes_all.
Functions for generating combinations, permutations, or cartesian products
of lists.
"""
__author__ = "Sandra Smit"
__copyright__ = "Copyright 2007-2020, The Cogent Project"
__credits__ = ["Sandra Smit", "Rob Knight", "Zongzhi Liu"]
__license__ = "BSD-3"
__version__ = "2020.6.30a"
__maintainer__ = "Sandra Smit"
__email__ = "sandra.smit@colorado.edu"
__status__ = "Production"
maketrans = str.maketrans
# standard combinatorial HOF's from Mertz
def per_shortest(total, x, y):
    """Normalize *total* by the length of the shorter of *x* and *y*.

    Useful for per-item normalization of results computed over zipped
    sequences.  Returns 0 whenever either sequence is empty, which avoids
    a divide-by-zero error.
    """
    denom = min(len(x), len(y))
    return total / denom if denom else 0
def per_longest(total, x, y):
    """Normalize *total* by the length of the longer of *x* and *y*.

    Useful for per-item normalization of results computed over zipped
    sequences.  Returns 0 whenever both sequences are empty, which avoids
    a divide-by-zero error.
    """
    denom = max(len(x), len(y))
    return total / denom if denom else 0
class for_seq(object):
    """Callable that scores two sequences element-wise.

    Calling an instance with (first, second) applies ``f(i, j)`` to every
    pair produced by ``zip(first, second)`` (so the comparison truncates to
    the shorter sequence), reduces the per-pair results with *aggregator*
    (default: sum), and finally normalizes the aggregate with
    ``normalizer(total, first, second)``.  The default normalizer is
    ``per_shortest``; pass ``normalizer=None`` to skip normalization.
    """
    def __init__(self, f, aggregator=sum, normalizer=per_shortest):
        self.f = f
        self.aggregator = aggregator
        self.normalizer = normalizer
    def __call__(self, first, second):
        pair_scores = [self.f(a, b) for a, b in zip(first, second)]
        total = self.aggregator(pair_scores)
        if self.normalizer is None:
            return total
        return self.normalizer(total, first, second)
# convenience functions for modifying objects
class KeepChars(object):
    """Callable filter: calling an instance with a string strips out every
    character that is not in *keep*.

    Case sensitive by default; with ``case_sens=False`` both cases of every
    character in *keep* are retained.
    """
    allchars = bytes(range(256))
    def __init__(self, keep, case_sens=True):
        """Build the deletion table from the characters in *keep*."""
        if not case_sens:
            keep = keep.lower() + keep.upper()
        wanted = keep.encode("utf-8")
        # map every byte value NOT in *wanted* to None, i.e. delete it
        self._strip_table = {c: None for c in self.allchars if c not in wanted}
    def __call__(self, s):
        """Return *s* (coerced to str) with all unwanted characters removed."""
        if s is None:
            raise TypeError
        if isinstance(s, bytes):
            s = s.decode("utf8")
        return str(s).translate(self._strip_table)
def first_index_in_set(seq, items):
    """Returns index of first occurrence of any of items in seq, or None."""
    index = 0
    for element in seq:
        if element in items:
            return index
        index += 1
    return None
| [
"Gavin.Huttley@anu.edu.au"
] | Gavin.Huttley@anu.edu.au |
8835f0a2b179ed88083d3cd9023db727730c613c | eab0ec43806fb7f010ec0673370038f07c5ceefe | /NflmCA/venv/Scripts/easy_install-3.5-script.py | 47054a2c28778dfbb7575b04f64485242af3a66a | [] | no_license | tanaypatil/custom-django-admin | 023d915ce061a312d51d79f6b2135d744b07af15 | 5d6743468c03b642c7a059db681df3f83d71eff4 | refs/heads/master | 2020-06-13T03:53:18.067184 | 2019-06-30T14:32:19 | 2019-06-30T14:32:19 | 194,525,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | #!"C:\Users\Tanay Patil\PycharmProjects\NflmCA\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.0.1','console_scripts','easy_install-3.5'
# Auto-generated setuptools console-script wrapper; it resolves and runs the
# 'easy_install-3.5' entry point from the pinned setuptools distribution.
__requires__ = 'setuptools==39.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # strip the '-script.py(w)' / '.exe' suffix so argv[0] shows the tool name
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.0.1', 'console_scripts', 'easy_install-3.5')()
    )
| [
"tanaypatil197@gmail.com"
] | tanaypatil197@gmail.com |
88d35b2362956cce0f445f8acddc49bbf7f1db61 | 9a66a9acebfb324a5303f9bfe0570145e4a31554 | /plugin.video.salts/scrapers/rlshd_scraper.py | c80b28383ad7805d3086165a9b08dc1121810d5f | [] | no_license | n0490b/tdbaddon | f229e9f7b8587925cfe38acb04c5f5c70bbf1837 | 21628f1b40e2f854cc3b63acf0d632bb5daecdac | refs/heads/master | 2021-01-23T04:58:23.101284 | 2017-03-26T09:35:56 | 2017-03-26T09:35:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,007 | py | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import kodi
import log_utils # @UnusedImport
import dom_parser2
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.utils2 import i18n
import scraper
BASE_URL = 'http://www.rlshd.net'
class Scraper(scraper.Scraper):
    """SALTS scraper for the RLSHD blog-style release site (episodes only,
    debrid-hosted links)."""
    base_url = BASE_URL
    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # the base URL is user-configurable per scraper in the addon settings
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
    @classmethod
    def provides(cls):
        # this source only carries TV episodes
        return frozenset([VIDEO_TYPES.EPISODE])
    @classmethod
    def get_name(cls):
        return 'RLSHD'
    def get_sources(self, video):
        """Return a list of hoster dicts for *video*, built from the links
        found on the matching blog post (empty when no post matches)."""
        source_url = self.get_url(video)
        hosters = []
        if source_url and source_url != FORCE_NO_MATCH:
            url = urlparse.urljoin(self.base_url, source_url)
            html = self._http_get(url, require_debrid=True, cache_limit=.5)
            sources = self.__get_post_links(html, video)
            for source in sources:
                if scraper_utils.excluded_link(source): continue
                host = urlparse.urlparse(source).hostname
                hoster = {'multi-part': False, 'host': host, 'class': self, 'views': None, 'url': source, 'rating': None, 'quality': sources[source], 'direct': False}
                hosters.append(hoster)
        return hosters
    def __get_post_links(self, html, video):
        """Map stream URL -> quality for every <a href> inside the post's
        <h2> headings; quality is derived from the release name's height."""
        sources = {}
        post = dom_parser2.parse_dom(html, 'article', {'id': re.compile('post-\d+')})
        if post:
            for _attrs, fragment in dom_parser2.parse_dom(post[0].content, 'h2'):
                for attrs, _content in dom_parser2.parse_dom(fragment, 'a', req='href'):
                    stream_url = attrs['href']
                    meta = scraper_utils.parse_episode_link(stream_url)
                    release_quality = scraper_utils.height_get_quality(meta['height'])
                    host = urlparse.urlparse(stream_url).hostname
                    quality = scraper_utils.get_quality(video, host, release_quality)
                    sources[stream_url] = quality
        return sources
    def get_url(self, video):
        # delegate to the shared blog-scraper URL resolution
        return self._blog_get_url(video)
    @classmethod
    def get_settings(cls):
        """Extend the base settings XML with blog-specific filter/select
        options (subtitle check is not applicable to this source)."""
        settings = super(cls, cls).get_settings()
        settings = scraper_utils.disable_sub_check(settings)
        name = cls.get_name()
        settings.append('         <setting id="%s-filter" type="slider" range="0,180" option="int" label="     %s" default="30" visible="eq(-3,true)"/>' % (name, i18n('filter_results_days')))
        settings.append('         <setting id="%s-select" type="enum" label="     %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>' % (name, i18n('auto_select')))
        return settings
    def search(self, video_type, title, year, season=''):  # @UnusedVariable
        """Search the site and parse dated blog-post result entries."""
        html = self._http_get(self.base_url, params={'s': title}, require_debrid=True, cache_limit=1)
        post_pattern = 'class="entry-title">\s*<a[^>]+href="(?P<url>[^"]*/(?P<date>\d{4}/\d{1,2}/\d{1,2})/[^"]*)[^>]+>(?P<post_title>[^<]+)'
        date_format = '%Y/%m/%d'
        return self._blog_proc_results(html, post_pattern, date_format, video_type, title, year)
| [
"tdbaddon@hotmail.com"
] | tdbaddon@hotmail.com |
f7f373531b94f6b6b6320de9d729e76c8458ad5f | be3ad8712d398d2adb39c32104d9a90a4a62b7e8 | /Ocat/utils/prepdatdict.py | 4edbfa1b35239072f9472d17a51632f14dc61e05 | [] | no_license | tisobe/Usint | 3c74059b3d7b813d162af1d1c1232d77ec0dd632 | 07bfd42767e9bfb56e7657e4ed6780bde296cfd4 | refs/heads/master | 2020-03-17T15:00:11.835862 | 2018-05-16T16:29:44 | 2018-05-16T16:29:44 | 133,694,486 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,487 | py | #!/usr/bin/env /proj/sot/ska/bin/python
#####################################################################################################
# #
# prepdatdict.py: prepare data dictionay for Ocat Data Page #
# #
# use the fuction: prep_data_dict(obsid) #
# #
# author: t. isobe (tisobe@cfa.harvard.edu) #
# #
# last update: Sep 06, 2016 #
# #
#####################################################################################################
import sys
import re
import unittest
#
#--- reading directory list
#--- each line of the list is "<value> :: <variable name>"; the loop below
#--- binds every listed name at module level via exec.
#--- NOTE(review): exec on file contents is a code-injection risk; the list
#--- file must remain under trusted control.
#
#path = '/data/mta4/CUS/www/Usint/Ocat/ocatsite/static/dir_list_py'
path = './ocatsite/static/dir_list_py'
f = open(path, 'r')
data = [line.strip() for line in f.readlines()]
f.close()
for ent in data:
    atemp = re.split('::', ent)
    var  = atemp[1].strip()
    line  = atemp[0].strip()
    exec "%s = %s" %(var, line)
#
#--- append path to a private folders
#--- (base_dir / mta_dir are among the names bound by the exec loop above)
#
sys.path.append(base_dir)
sys.path.append(mta_dir)
import ocatCommonFunctions as ocf
import ocatsql as osq
def prep_data_dict(obsid):
    """
    extract parameters and values from database, and set up a data dictionary
    input:  obsid       --- obsid
    output: dat_dict    --- data dictionary with all Ocat page parameters,
                            constraint ranks expanded, and derived values
                            (chip coords, notifications) filled in
    note:   append_dict / set_ra_dec / find_monitor_obs /
            define_eventfilter_highest are presumably defined later in this
            module (not visible here) --- confirm before refactoring
    """
#
#---- read sql database and create a data dictionary
#
    [dat_dict, db] = get_data_from_db(obsid)
#
#---- set a special notification in the head part
#---- NOTE(review): the key below is misspelled ('notificaiton'); templates
#---- may rely on this exact spelling --- confirm before fixing
#
    head_notification = set_header_notification(dat_dict)
    dat_dict['head_notificaiton'] = head_notification
#
#--- dither
#
    temp_dict = set_dither(dat_dict)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#---- time constraints
#
    temp_dict = get_time_ordr(dat_dict, db)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#---- roll constraints
#
    temp_dict = get_roll_ordr(dat_dict, db)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#--- acis window constraints
#
    temp_dict = get_spwindow_ordr(dat_dict, db)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#--- ra/dec adjustment
#
    temp_dict = set_ra_dec(dat_dict)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#--- monitor list
#
    temp_dict = find_monitor_obs(dat_dict)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#--- a few other adjustments
#
    dat_dict['rem_exp_time'] = round(dat_dict['rem_exp_time'], 1)
    temp_dict = define_eventfilter_highest(dat_dict)
    dat_dict = append_dict(dat_dict, temp_dict)
#
#--- default the 'asis' selection to 'norm' when missing or empty
#
    try:
        test = dat_dict['asis']
        if test == '':
            dat_dict['asis'] = 'norm'
    except:
        dat_dict['asis'] = 'norm'
#
#--- setting aiming point chip coordinates (fall back to 0.0/0.0 on failure)
#
    try:
        [chipx, chipy] = ocf.find_aiming_point(dat_dict['instrument'], dat_dict['y_det_offset'], dat_dict['z_det_offset'])
    except:
        chipx = 0.0
        chipy = 0.0
    dat_dict['chipx'] = chipx
    dat_dict['chipy'] = chipy
#
#-- now return dat_dict
#
    return dat_dict
#----------------------------------------------------------------------------------
#-- get_data_from_db: create data dictionary from the databse for a given obsid ---
#----------------------------------------------------------------------------------
def get_data_from_db(obsid):
    """
    Build the parameter dictionary for a given obsid from the ocat database.
    input:  obsid       --- obsid
    output: [dat_dict, db] --- the parameter dictionary and the open db object
    """
#
#--- open the database for this observation
#
    db = osq.OcatDB(obsid)
#
#--- the name list defines which parameters we try to read
#
    param_names = ocf.read_name_list().keys()
#
#--- fill the dictionary; unreadable parameters become empty strings
#
    dat_dict = {'obsid': obsid}
    for pname in param_names:
        try:
            value = db.origValue(pname)
        except:
            dat_dict[pname] = ''
            continue
#
#--- rank-ordered parameters come back as lists; they are handled separately
#
        if not isinstance(value, list):
            dat_dict[pname] = value
    return [dat_dict, db]
#----------------------------------------------------------------------------------
#-- et_header_notification: check whether we need to put a special notification --
#----------------------------------------------------------------------------------
def set_header_notification(dat_dict):
    """
    check whether we need to put a special notification at the top of the page
    input:  dat_dict            ---- data dictionary
    output: head_notification   --- '' (no notification) or a category number:
                                    1: observed/canceled/archived/untriggered/discarded
                                    2: already in the OR list
                                    3: already approved
                                    4: lts date within the warning period
            side effect: dat_dict['lts_span'] is set whenever category 4
            is evaluated (the else branch)
    """
    obsid             = dat_dict['obsid']
    head_notification = ''
#
#--- for the case the observation is observed etc
#
    if dat_dict['status'] in ('observed', 'canceled', 'archived', 'untriggered', 'discarded'):
        head_notification = 1
#
#--- for the case the observation is already in OR list
#
    elif ocf.is_in_orlist(obsid):
        head_notification = 2
#
#--- for the case the observation is already approved
#
    elif ocf.is_approved(obsid):
        head_notification = 3
#
#--- for the case the lts date is less than the warning period
#
    else:
#
#--- bug fix: always provide a default so templates never see a missing key
#--- (previously 'lts_span' was left unset when lts_lt_plan was empty)
#
        dat_dict['lts_span'] = ''
        lts_plan = dat_dict.get('lts_lt_plan', '')
        if lts_plan:
            try:
                [chk, lspan] = ocf.check_lts_date_coming(lts_plan)
            except:
                chk = ''
            if chk:
                head_notification = 4
                dat_dict['lts_span'] = lspan
    return head_notification
#----------------------------------------------------------------------------------
#-- set_dither: set dither related quantities and save in a dictionary --
#----------------------------------------------------------------------------------
def set_dither(dat_dict):
    """
    set dither related quantities and save in a dictionary
    input:  dat_dict    ---- data dictionary
    output: temp_dict   ---- a dictionary of dither related quantities
    """
    temp_dict = {}
#
#--- bug fix: a missing 'dither_flag' key used to fall through the
#--- try/except guard and then raise KeyError on the next access;
#--- treat a missing flag the same as 'N'
#
    dither_flag = dat_dict.get('dither_flag', 'N')
    if dither_flag != 'Y':
        temp_dict['dither_flag'] = 'N'
        return temp_dict
#
#--- dither is on: convert amplitude/frequency from degrees to arcsec
#
    y_amp  = dat_dict['y_amp']
    z_amp  = dat_dict['z_amp']
    y_freq = dat_dict['y_freq']
    z_freq = dat_dict['z_freq']
    temp_dict['y_amp_asec']  = ocf.convert_deg_to_sec(y_amp)
    temp_dict['z_amp_asec']  = ocf.convert_deg_to_sec(z_amp)
    temp_dict['y_freq_asec'] = ocf.convert_deg_to_sec(y_freq)
    temp_dict['z_freq_asec'] = ocf.convert_deg_to_sec(z_freq)
#
#--- round out to 6th digit (around 1.0 arcsec accuracy)
#
    try:
        temp_dict['y_amp'] = round(y_amp, 6)
        temp_dict['z_amp'] = round(z_amp, 6)
    except:
        pass
    return temp_dict
#----------------------------------------------------------------------------------
#-- get_time_ordr: check time order entries and create a dictionary for the entries
#----------------------------------------------------------------------------------
def get_time_ordr(dat_dict, db):
    """
    check time order entries and create a dictionary for the entries
    input:  dat_dict    ---- data dictionary
            db          ---- database
    output: temp_dict   ---- data dictionary for time_order related parameters
                             (keys suffixed with the 1-based rank number)
    """
    temp_dict = {}
#
#--- check whether this is time ordered case
#
    if 'window_flag' not in dat_dict:
        temp_dict['window_flag'] = 'N'
    try:
        ordr = int(float(dat_dict['time_ordr']))
    except:
#
#--- bug fix: this key used to be misspelled as 'time_rodr', so the
#--- 'time_ordr' default was never actually set
#
        temp_dict['time_ordr'] = 0
        ordr = 0
    for k in range(0, ordr):
        j = k + 1
#
#--- window constraint for rank j
#
        temp_dict['window_constraint' + str(j)] = db.origValue('window_constraint')[k]
#
#--- tstart and tstop need to be divided into month, date, year, and time
#
        tname = ('month', 'date', 'year', 'time')
        for name in ('tstart', 'tstop'):
            val   = db.origValue(name)[k]
            tlist = re.split(r'\s+', str(val))
#
#--- convert time into the 24 hr system
#
            tlist[3] = ocf.time_format_convert(tlist[3])
            for m in range(0, 4):
                temp_dict[name + '_' + tname[m] + str(j)] = tlist[m]
#
#--- pad date and time so the combined display string lines up
#
            try:
                if float(tlist[1]) < 10:
                    tlist[1] = ' ' + tlist[1]
            except:
                pass
            if len(tlist[3]) < 8:
                tlist[3] = ' ' + tlist[3]
#
#--- keep the original tstart/tstop as a single string in the 24 hr system
#
            temp_dict[name + str(j)] = tlist[0] + ' ' + tlist[1] + ' ' + tlist[2] + ' ' + tlist[3]
    return temp_dict
#----------------------------------------------------------------------------------
#-- get_roll_ordr: get roll order related data ---
#----------------------------------------------------------------------------------
def get_roll_ordr(dat_dict, db):
    """
    get roll order related data
    input:  dat_dict    --- data dictionary
            db          --- database object; newValue(<name>) returns a list of entries
    output: temp_dict   --- data dictionary for roll_order related parameters
    """
    temp_dict = {}
#
#--- when the roll flag is not given, record it as 'N'
#
    try:
        test = dat_dict['roll_flag']
    except:
        temp_dict['roll_flag'] = 'N'
#
#--- number of roll entries; fall back to zero when missing or malformed
#
    try:
        entries = int(float(dat_dict['roll_ordr']))
    except:
        temp_dict['roll_ordr'] = 0
        entries = 0
#
#--- copy each roll related value into a numbered key, e.g. roll_constraint1
#
    params = ('roll_constraint', 'roll_180', 'roll', 'roll_tolerance')
    for idx in range(1, entries + 1):
        for pname in params:
            temp_dict[pname + str(idx)] = db.newValue(pname)[idx - 1]

    return temp_dict
#----------------------------------------------------------------------------------
#-- get_spwindow_ordr: get acis window constraint related data ---
#----------------------------------------------------------------------------------
def get_spwindow_ordr(dat_dict, db):
    """
    get acis window constraint related data
    input:  dat_dict    --- data dictionary
            db          --- database object; newValue(<name>) returns a list of entries
    output: temp_dict   --- data dictionary for acis window constraint related parameters
    """
    temp_dict = {}
#
#--- when the special window flag is not given, record it as 'N'
#
    try:
        test = dat_dict['spwindow_flag']
    except:
        temp_dict['spwindow_flag'] = 'N'
#
#--- number of window entries; fall back to zero when missing or malformed
#
    try:
        entries = int(float(dat_dict['ordr']))
    except:
        temp_dict['ordr'] = 0
        entries = 0
#
#--- copy each window related value into a numbered key, e.g. chip1, height2
#
    params = ('chip', 'start_row', 'start_column', 'height', 'width',
              'lower_threshold', 'pha_range', 'sample')
    for idx in range(1, entries + 1):
        for pname in params:
            temp_dict[pname + str(idx)] = db.newValue(pname)[idx - 1]

    return temp_dict
#----------------------------------------------------------------------------------
#-- set_ra_dec: convert ra/dec in hh:mm:ss format --
#----------------------------------------------------------------------------------
def set_ra_dec(dat_dict):
    """
    convert ra/dec in hh:mm:ss format
    input:  dat_dict    ---- data dictionary
    output: temp_dict   ---- ra/dec related data dictionary
    """
#
#--- keep the degree values, rounded to the 6th digit, under 'dra' and 'ddec'
#
    ra_deg  = round(float(dat_dict['ra']), 6)
    dec_deg = round(float(dat_dict['dec']), 6)
#
#--- convert ra/dec into sexagesimal notation and save them in 'ra' and 'dec'
#
    [ra_hms, dec_dms] = ocf.convert_ra_dec_hms(dat_dict['ra'], dat_dict['dec'])

    temp_dict = {'dra': ra_deg, 'ddec': dec_deg, 'ra': ra_hms, 'dec': dec_dms}

    return temp_dict
#----------------------------------------------------------------------------------
#-- find_monitor_obs: ind observations in the list of monitor/group --
#----------------------------------------------------------------------------------
def find_monitor_obs(dat_dict):
    """
    find observations in the list of monitor/group
    input:  dat_dict    ---- data dictionary
    output: temp_dict   ---- monitor/group list related data dictionary
    """
#
#--- collect all monitoring/group ids for this obsid (empty on any failure)
#
    try:
        monitor_list = osq.find_monitor_obs(dat_dict['obsid'])
    except:
        monitor_list = []
#
#--- when a group_id exists, monitor_flag must be 'N'; otherwise it is 'Y'
#--- only when the monitoring list is not empty
#
    if dat_dict['group_id'] not in [None, 'NULL', 'N']:
        monitor_flag = 'N'
    elif monitor_list:
        monitor_flag = 'Y'
    else:
        monitor_flag = ''
#
#--- for a monitoring case, drop already observed and cancelled observations:
#--- keep only entries whose status is 'unobserved' or 'scheduled'
#
    if monitor_flag == 'Y':
        kept = []
        for obs in monitor_list:
            db = osq.OcatDB(obs)
            if db.newValue('status').lower() in ['unobserved', 'scheduled']:
                kept.append(obs)
        monitor_list = kept

    temp_dict = {'monitor_flag': monitor_flag, 'monitor_list': monitor_list}

    return temp_dict
#----------------------------------------------------------------------------------
#-- append_dict: combine two dictionary. if there is the same key in both dict ---
#----------------------------------------------------------------------------------
def append_dict(dict1, dict2):
    """
    combine two dictionaries. if there is the same key in both dict,
    the value of the second dict will replace the first one.
    input:  dict1   --- dictionary 1 (modified in place)
            dict2   --- dictionary 2
    output: dict1   --- combined dictionary
    """
#
#--- dict.update implements exactly this merge semantics in one C-level call
#
    dict1.update(dict2)

    return dict1
#----------------------------------------------------------------------------------
#-- define_eventfilter_highest: define evenfilter highest energy value ---
#----------------------------------------------------------------------------------
def define_eventfilter_highest(dat_dict):
    """
    define the eventfilter highest energy value
    input:  dat_dict    --- data dictionary
    output: temp_dict   --- a dictionary holding 'eventfilter_highest'
    """
    temp_dict  = {}
    both_given = True
#
#--- the highest energy is the lower bound plus the filter range; treat a
#--- missing/invalid entry as 0.0 and remember that something was missing
#
    try:
        lower_bound = float(dat_dict['eventfilter_lower'])
    except:
        lower_bound = 0.0
        both_given  = False

    try:
        filter_range = float(dat_dict['eventfilter_higher'])
    except:
        filter_range = 0.0
        both_given   = False

    highest = lower_bound + filter_range
#
#--- when an input was missing, report the partial sum only if it is positive;
#--- otherwise leave the value blank
#
    if both_given or highest > 0:
        temp_dict['eventfilter_highest'] = highest
    else:
        temp_dict['eventfilter_highest'] = ""

    return temp_dict
#----------------------------------------------------------------------------------
#-- create_blank_dat_dict: create a blank dat_dict. all keys have a value of "" --
#----------------------------------------------------------------------------------
def create_blank_dat_dict():
    """
    create a blank dat_dict. all keys have a value of ""
    input:  none, but the parameter names come from ocf.read_name_list()
    output: dat_dict    ---- a data dictionary with every value set to ""
    """
#
#--- get a list of parameters
#
    name_dict = ocf.read_name_list()
#
#--- dict.fromkeys builds the blank dictionary in one pass; "" is immutable,
#--- so sharing the same default value between keys is safe
#
    return dict.fromkeys(name_dict, "")
#----------------------------------------------------------------------------------
#-- read_non_changable_param_list: get a list of none changable parameter list ---
#----------------------------------------------------------------------------------
def read_non_changable_param_list():
    """
    get a list of non changable parameters
    input:  None, but read from <house_keeping>/non_changable_param_list
    output: nlist   --- a list of non changable parameter names
    """
    ifile = house_keeping + 'non_changable_param_list'
#
#--- 'with' closes the file even when reading fails (the previous version
#--- leaked the handle on exception and also shadowed the builtin 'file')
#
    with open(ifile, 'r') as f:
        nlist = [line.strip() for line in f.readlines()]

    return nlist
#----------------------------------------------------------------------------------
#-- return_non_changerble_param_list: create a list of parameter/value pairs ------
#----------------------------------------------------------------------------------
def return_non_changerble_param_list(data_dict):
    """
    create a list of parameter/value pairs for all non changable parameters
    input:  data_dict   ---- a dictionary of data
    output: rlist       ---- a list of [param, value] pairs
    """
#
#--- look up each non changable parameter in the data dictionary
#
    nlist = read_non_changable_param_list()

    return [[ent, data_dict[ent]] for ent in nlist]
#----------------------------------------------------------------------------------
#-- return_changerble_param_list: create a list of parameter/value pairs -----
#----------------------------------------------------------------------------------
def return_changerble_param_list():
    """
    create a list of name pairs for all changable parameters
    input:  none, but read from <house_keeping>/changable_param_list
    output: rlist   --- a list of (name, name) tuples taken from the '::'
                        separated fields of each non-comment line, keeping
                        only entries that are not in the non changable list
    """
#
#--- set membership makes the exclusion test O(1) per entry (the old code
#--- scanned the whole non changable list for every line)
#
    non_changable = set(read_non_changable_param_list())

    ifile = house_keeping + 'changable_param_list'
#
#--- 'with' guarantees the file handle is closed even on a read error
#
    with open(ifile, 'r') as f:
        nlist = [line.strip() for line in f.readlines()]

    rlist = []
    for ent in nlist:
#
#--- skip comment lines
#
        if ent[0] == '#':
            continue

        atemp = re.split('::', ent)
#
#--- keep the entry only when the parameter is changable
#
        if atemp[1] not in non_changable:
            rlist.append((atemp[0], atemp[1]))

    return rlist
#----------------------------------------------------------------------------------
#-- return_final_display_param_list: create a list of categories and parameters --
#----------------------------------------------------------------------------------
def return_final_display_param_list():
    """
    create a list of categories and a list of lists of parameters in each category
    input:  none, but read from <house_keeping>/changable_param_list
    output: dlist   --- a list of descriptive names of the categories
            rlist   --- a list of lists of parameter names, parallel to dlist
    """
    ifile = house_keeping + 'changable_param_list'
    with open(ifile, 'r') as f:
        nlist = [line.strip() for line in f.readlines()]

    rlist = []
    dlist = []
#
#--- bug fix: 'plist' is now initialized before the loop (the old code hit a
#--- NameError on the first category line) and the last category's parameter
#--- list is appended after the loop (it used to be silently dropped)
#
    plist = []
    for ent in nlist:
#
#--- category names are marked by ## at the top of the line; a single '#'
#--- marks a plain comment line, which is skipped
#
        if ent[0] == '#':
            if ent[1] == '#':
                atemp = re.split('##', ent)
#
#--- starting a new category: flush the parameters collected so far
#
                if len(plist) > 0:
                    rlist.append(plist)

                btemp  = re.split(':', atemp[1])
                discrp = btemp[1]
                plist  = []
                dlist.append(discrp)
            continue
        else:
#
#--- finding the name of the parameters
#
            atemp = re.split(':', ent)
            plist.append(atemp[0])
#
#--- flush the parameters of the final category
#
    if len(plist) > 0:
        rlist.append(plist)

    return [dlist, rlist]
#----------------------------------------------------------------------------------
#-- TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST --
#----------------------------------------------------------------------------------
class TestFunctions(unittest.TestCase):
    """
    testing functions

    note: the deprecated alias assertEquals (removed in Python 3.12) has
    been replaced with assertEqual throughout
    """
    #------------------------------------------------------------
    def test_get_data_from_db(self):
        obsid = 16245
        [dat_dict, db] = get_data_from_db(obsid)
        self.assertEqual(dat_dict['status'], 'archived')
        self.assertEqual(dat_dict['type'], 'TOO')
        self.assertEqual(dat_dict['dither_flag'], 'Y')
        self.assertEqual(dat_dict['pre_max_lead'], 1.5)
    #------------------------------------------------------------
    def test_set_header_notification(self):
        obsid = 16245
        [dat_dict, db] = get_data_from_db(obsid)
        notification = set_header_notification(dat_dict)
        self.assertEqual(notification, 1)
    #------------------------------------------------------------
    def test_set_dither(self):
        obsid = 16245
        [dat_dict, db] = get_data_from_db(obsid)
        test_dict = set_dither(dat_dict)
        self.assertEqual(test_dict['y_freq_asec'], 1296.0)
        self.assertEqual(test_dict['z_freq_asec'], 1832.76)
    #------------------------------------------------------------
    def test_spwindow_ordr(self):
        obsid = 16245
        [dat_dict, db] = get_data_from_db(obsid)
        test_dict = get_spwindow_ordr(dat_dict, db)
        self.assertEqual(test_dict['chip1'], 'S3')
        self.assertEqual(test_dict['height1'], 300)
        self.assertEqual(test_dict['lower_threshold1'], 0.3)
        self.assertEqual(test_dict['start_row2'], 1)
        self.assertEqual(test_dict['height2'], 1024)
        self.assertEqual(test_dict['pha_range2'], 11.0)
    #------------------------------------------------------------
    def test_set_ra_dec(self):
        obsid = 16245
        [dat_dict, db] = get_data_from_db(obsid)
        test_dict = set_ra_dec(dat_dict)
        self.assertEqual(test_dict['ra'], '05:34:31.6000')
        self.assertEqual(test_dict['dec'], '+22:00:56.4000')
    #------------------------------------------------------------
    def test_define_eventfilter_highest(self):
        obsid = 16245
        [dat_dict, db] = get_data_from_db(obsid)
        test_dict = define_eventfilter_highest(dat_dict)
        self.assertEqual(test_dict['eventfilter_highest'], 11.3)
    #------------------------------------------------------------
    def test_get_time_ordr(self):
        obsid = 12109
        [in_dict, tdb] = get_data_from_db(obsid)
        test_dict = get_time_ordr(in_dict, tdb)
        self.assertEqual(test_dict['window_constraint1'], 'Y')
        self.assertEqual(test_dict['tstart_month1'], 'Feb')
        self.assertEqual(test_dict['tstop_year1'], '2010')
        self.assertEqual(test_dict['tstart_date2'], '1')
        self.assertEqual(test_dict['tstop_time2'], '23:59:00')
    #------------------------------------------------------------
    def test_get_roll_ordr(self):
        obsid = 1601
        [in_dict, tdb] = get_data_from_db(obsid)
        test_dict = get_roll_ordr(in_dict, tdb)
        self.assertEqual(test_dict['roll_constraint1'], 'Y')
        self.assertEqual(test_dict['roll_1801'], 'N')
        self.assertEqual(test_dict['roll1'], 307.0)
        self.assertEqual(test_dict['roll_tolerance1'], 1.0)
    #------------------------------------------------------------
    def test_find_monitor_obs(self):
        obsid = 15442
        clist = [15430 , 15431 , 15432 , 15433 , 15434 , 15435 , 15436 , 15437 , 15438 , 15439 , 15440 , 15441 , 15442 , 15443 , 15444 , 15445 , 15446 , 15447 , 15448 , 15449 , 15450]
        [in_dict, tdb] = get_data_from_db(obsid)
        test_dict = find_monitor_obs(in_dict)
        self.assertEqual(test_dict['monitor_list'], clist )
    #------------------------------------------------------------
#---------------------------------------------------------------------------
#
#--- run the unit tests when this script is executed directly
#
if __name__ == "__main__":
    unittest.main()
| [
"isobe@head.cfa.harvard.edu"
] | isobe@head.cfa.harvard.edu |
167a1a0bf78a1e1379f4295b09cbfeedf34b7840 | dbd87fe6e9466c4cada18b037667cfdddc62c193 | /Technical_Indicators/ZigZag.py | 057435288496ca0943ce0fe371e39f8bf6a7d9ca | [] | no_license | alexanu/Python_Trading_Snippets | 74515a40dc63ba50d95bd50330ed05d59b5dc837 | 85969e681b9c74e24e60cc524a952f9585ea9ce9 | refs/heads/main | 2023-06-25T03:27:45.813987 | 2023-06-09T16:09:43 | 2023-06-09T16:09:43 | 197,401,560 | 18 | 17 | null | 2023-02-08T22:25:25 | 2019-07-17T14:05:32 | Jupyter Notebook | UTF-8 | Python | false | false | 1,826 | py | import numpy as np
# ZigZag indicator demo script: downloads one year of daily OHLCV data for
# one ticker with yfinance, marks swing pivots with the `zigzag` package
# (20% reversal thresholds), and plots them on a line chart and then on a
# candlestick chart with a volume overlay.
import pandas as pd
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
import yfinance as yf
yf.pdr_override()
import datetime as dt
# input
symbol = 'AAPL'
start = dt.date.today() - dt.timedelta(days = 365)
end = dt.date.today()
# Read data
df = yf.download(symbol,start,end)
from zigzag import *
plt.figure(figsize=(14,7))
# nonzero entries of `pivots` mark the detected pivot points
# (0.2 / -0.2 are the up/down reversal thresholds)
pivots = peak_valley_pivots(df['Adj Close'].values, 0.2, -0.2)
ts_pivots = pd.Series(df['Adj Close'], index=df.index)
# keep only the close prices at pivot points, so plotting connects them
ts_pivots = ts_pivots[pivots != 0]
df['Adj Close'].plot()
ts_pivots.plot(style='g-o', label='ZigZag')
plt.title('Stock of ZigZag', fontsize=18)
plt.legend(loc='best')
plt.xlabel('Date')
plt.ylabel('Price')
plt.show()
# ## Candlestick with ZigZag
from matplotlib import dates as mdates
dfc = df.copy()
# green candle when the bar closed above its open
dfc['VolumePositive'] = dfc['Open'] < dfc['Adj Close']
#dfc = dfc.dropna()
dfc = dfc.reset_index()
# candlestick_ohlc needs matplotlib date numbers, not Timestamps
dfc['Date'] = mdates.date2num(dfc['Date'].tolist())
from mplfinance.original_flavor import candlestick_ohlc
fig = plt.figure(figsize=(14,7))
ax1 = plt.subplot(111)
candlestick_ohlc(ax1,dfc.values, width=0.5, colorup='g', colordown='r', alpha=1.0)
# same pivot computation as above, overlaid on the candlesticks
pivots = peak_valley_pivots(df['Adj Close'].values, 0.2, -0.2)
ts_pivots = pd.Series(df['Adj Close'], index=df.index)
ts_pivots = ts_pivots[pivots != 0]
ax1.plot(df['Adj Close'])
ts_pivots.plot(style='g-o', label='ZigZag')
ax1.xaxis_date()
ax1.xaxis.set_major_formatter(mdates.DateFormatter('%d-%m-%Y'))
ax1.grid(True, which='both')
ax1.minorticks_on()
# volume bars on a twin y-axis, scaled down so they sit below the prices
ax1v = ax1.twinx()
colors = dfc.VolumePositive.map({True: 'g', False: 'r'})
ax1v.bar(dfc.Date, dfc['Volume'], color=colors, alpha=0.4)
ax1v.axes.yaxis.set_ticklabels([])
ax1v.set_ylim(0, 3*df.Volume.max())
ax1.set_title('Stock '+ symbol +' Closing Price')
ax1.set_ylabel('Price')
ax1.set_xlabel('Date')
ax1.legend()
plt.show() | [
"oanufriyev@gmail.com"
] | oanufriyev@gmail.com |
8d827c60eec9c54f9403584d65216617936c6c77 | a190ccac9d0ed3e0de44648957bf616c2dd88466 | /cowrie/commands/ifconfig.py | acf38c5576a8b3e6e453f1d5ebd2d6684d7332e8 | [
"BSD-2-Clause"
] | permissive | CrazyLlama/cowrie | bd0d400c22017348a7aa450841f5fb094d293a47 | 5bdf4093a34be4ede7778a171d0a84a889060c6b | refs/heads/master | 2021-01-12T11:11:29.510918 | 2018-05-01T14:55:04 | 2018-05-01T14:55:04 | 72,859,411 | 1 | 0 | null | 2018-05-01T14:55:05 | 2016-11-04T15:19:29 | Python | UTF-8 | Python | false | false | 1,395 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Peter Reuterås <peter@reuteras.com>
# See the COPYRIGHT file for more information
from __future__ import division, absolute_import
from cowrie.shell.honeypot import HoneyPotCommand
commands = {}
class command_ifconfig(HoneyPotCommand):
def call(self):
l = """eth0 Link encap:Ethernet HWaddr 04:01:16:df:2d:01
inet addr:%s Bcast:%s.255 Mask:255.255.255.0
inet6 addr: fe80::601:16ff:fedf:2d01/64 Scope:Link
UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1
RX packets:139435762 errors:0 dropped:0 overruns:0 frame:0
TX packets:116082382 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:1000
RX bytes:102191499830 (102.1 GB) TX bytes:68687923025 (68.6 GB)
lo Link encap:Local Loopback
inet addr:127.0.0.1 Mask:255.0.0.0
inet6 addr: ::1/128 Scope:Host
UP LOOPBACK RUNNING MTU:65536 Metric:1
RX packets:110 errors:0 dropped:0 overruns:0 frame:0
TX packets:110 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:0
RX bytes:19932 (19.9 KB) TX bytes:19932 (19.9 KB)""" % \
(self.protocol.kippoIP,
self.protocol.kippoIP.rsplit('.', 1)[0])
self.write(l+'\n')
commands['/sbin/ifconfig'] = command_ifconfig
# vim: set sw=4 et:
| [
"michel@oosterhof.net"
] | michel@oosterhof.net |
e1473dea34590fc8fef9d5ca63926ef43ae3e558 | b39d72ba5de9d4683041e6b4413f8483c817f821 | /GeneVisualization/ass1/Lib/site-packages/itk/itkVTKImageIOPython.py | 73b5ce730e8461b1f3de699d92885a52b91c4100 | [] | no_license | ssalmaan/DataVisualization | d93a0afe1290e4ea46c3be5718d503c71a6f99a7 | eff072f11337f124681ce08742e1a092033680cc | refs/heads/master | 2021-03-13T05:40:23.679095 | 2020-03-11T21:37:45 | 2020-03-11T21:37:45 | 246,642,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,904 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.8
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG-generated import bootstrap: choose the Python-2 or Python-3 way to
# build instance methods, then load the compiled _itkVTKImageIOPython
# extension module.  Do not edit by hand; regenerate from the SWIG .i file.
if version_info >= (3, 0, 0):
    new_instancemethod = lambda func, inst, cls: _itkVTKImageIOPython.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
if version_info >= (2, 6, 0):
    def swig_import_helper():
        # look for the compiled extension next to this wrapper module so a
        # same-named module elsewhere on sys.path is not picked up
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_itkVTKImageIOPython', [dirname(__file__)])
        except ImportError:
            import _itkVTKImageIOPython
            return _itkVTKImageIOPython
        if fp is not None:
            try:
                _mod = imp.load_module('_itkVTKImageIOPython', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _itkVTKImageIOPython = swig_import_helper()
    del swig_import_helper
else:
    import _itkVTKImageIOPython
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    # SWIG-generated attribute setter: routes writes through the class's
    # __swig_setmethods__ table; with static=1 it refuses to create new
    # Python-side attributes on wrapped C++ objects.
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        object.__setattr__(self, name, value)
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # dynamic variant of the setter above (new attributes are allowed)
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    # SWIG-generated attribute getter: consults __swig_getmethods__ first
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    # dynamic variant of the getter above
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
    # repr that shows the wrapped C++ pointer, e.g. "<mod.Cls; proxy of ...>"
    try:
        strthis = "proxy of " + self.this.__repr__()
    except Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# old/new-style class compatibility shim (SWIG boilerplate)
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
    # wrap a setter so instances reject the creation of new attributes
    def set_attr(self, name, value):
        if (name == "thisown"):
            return self.this.own(value)
        if hasattr(self, name) or (name == "this"):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
import ITKIOImageBaseBasePython
import ITKCommonBasePython
import pyBasePython
import vnl_vectorPython
import vnl_matrixPython
import stdcomplexPython
# module-level convenience constructors (SWIG boilerplate)
def itkVTKImageIOFactory_New():
  return itkVTKImageIOFactory.New()
def itkVTKImageIO_New():
  return itkVTKImageIO.New()
# SWIG-generated proxy class; instances are created via New(), never by
# calling the constructor directly.
class itkVTKImageIO(ITKIOImageBaseBasePython.itkStreamingImageIOBase):
    """Proxy of C++ itkVTKImageIO class."""

    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkVTKImageIO_Pointer":
        """__New_orig__() -> itkVTKImageIO_Pointer"""
        return _itkVTKImageIOPython.itkVTKImageIO___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def Clone(self) -> "itkVTKImageIO_Pointer":
        """Clone(itkVTKImageIO self) -> itkVTKImageIO_Pointer"""
        return _itkVTKImageIOPython.itkVTKImageIO_Clone(self)


    def GetHeaderSize(self) -> "long long":
        """GetHeaderSize(itkVTKImageIO self) -> long long"""
        return _itkVTKImageIOPython.itkVTKImageIO_GetHeaderSize(self)

    __swig_destroy__ = _itkVTKImageIOPython.delete_itkVTKImageIO

    def cast(obj: 'itkLightObject') -> "itkVTKImageIO *":
        """cast(itkLightObject obj) -> itkVTKImageIO"""
        return _itkVTKImageIOPython.itkVTKImageIO_cast(obj)

    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkVTKImageIO

        Create a new object of the class itkVTKImageIO and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkVTKImageIO.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkVTKImageIO.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkVTKImageIO.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# bind the C-level implementations as instance methods and register the
# proxy class with the extension module (SWIG boilerplate)
itkVTKImageIO.Clone = new_instancemethod(_itkVTKImageIOPython.itkVTKImageIO_Clone, None, itkVTKImageIO)
itkVTKImageIO.GetHeaderSize = new_instancemethod(_itkVTKImageIOPython.itkVTKImageIO_GetHeaderSize, None, itkVTKImageIO)
itkVTKImageIO_swigregister = _itkVTKImageIOPython.itkVTKImageIO_swigregister
itkVTKImageIO_swigregister(itkVTKImageIO)

def itkVTKImageIO___New_orig__() -> "itkVTKImageIO_Pointer":
    """itkVTKImageIO___New_orig__() -> itkVTKImageIO_Pointer"""
    return _itkVTKImageIOPython.itkVTKImageIO___New_orig__()

def itkVTKImageIO_cast(obj: 'itkLightObject') -> "itkVTKImageIO *":
    """itkVTKImageIO_cast(itkLightObject obj) -> itkVTKImageIO"""
    return _itkVTKImageIOPython.itkVTKImageIO_cast(obj)
# SWIG-generated proxy class for the object factory; instances are created
# via New(), never by calling the constructor directly.
class itkVTKImageIOFactory(ITKCommonBasePython.itkObjectFactoryBase):
    """Proxy of C++ itkVTKImageIOFactory class."""

    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def __New_orig__() -> "itkVTKImageIOFactory_Pointer":
        """__New_orig__() -> itkVTKImageIOFactory_Pointer"""
        return _itkVTKImageIOPython.itkVTKImageIOFactory___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)

    def RegisterOneFactory() -> "void":
        """RegisterOneFactory()"""
        return _itkVTKImageIOPython.itkVTKImageIOFactory_RegisterOneFactory()

    RegisterOneFactory = staticmethod(RegisterOneFactory)
    __swig_destroy__ = _itkVTKImageIOPython.delete_itkVTKImageIOFactory

    def cast(obj: 'itkLightObject') -> "itkVTKImageIOFactory *":
        """cast(itkLightObject obj) -> itkVTKImageIOFactory"""
        return _itkVTKImageIOPython.itkVTKImageIOFactory_cast(obj)

    cast = staticmethod(cast)

    def New(*args, **kargs):
        """New() -> itkVTKImageIOFactory

        Create a new object of the class itkVTKImageIOFactory and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkVTKImageIOFactory.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkVTKImageIOFactory.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkVTKImageIOFactory.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# register the factory proxy class with the extension module (SWIG boilerplate)
itkVTKImageIOFactory_swigregister = _itkVTKImageIOPython.itkVTKImageIOFactory_swigregister
itkVTKImageIOFactory_swigregister(itkVTKImageIOFactory)

def itkVTKImageIOFactory___New_orig__() -> "itkVTKImageIOFactory_Pointer":
    """itkVTKImageIOFactory___New_orig__() -> itkVTKImageIOFactory_Pointer"""
    return _itkVTKImageIOPython.itkVTKImageIOFactory___New_orig__()

def itkVTKImageIOFactory_RegisterOneFactory() -> "void":
    """itkVTKImageIOFactory_RegisterOneFactory()"""
    return _itkVTKImageIOPython.itkVTKImageIOFactory_RegisterOneFactory()

def itkVTKImageIOFactory_cast(obj: 'itkLightObject') -> "itkVTKImageIOFactory *":
    """itkVTKImageIOFactory_cast(itkLightObject obj) -> itkVTKImageIOFactory"""
    return _itkVTKImageIOPython.itkVTKImageIOFactory_cast(obj)
| [
"44883043+ssalmaan@users.noreply.github.com"
] | 44883043+ssalmaan@users.noreply.github.com |
88c9be39ef89d55c1845d93bf1e0875f8696b53b | 11aa417a375023da34224b9cb8c46f75c8753f6f | /aiobitcoin/tools/tx/script/segwit.py | 282f53ae10a5eec4de3a0a7dc8a5d87769d76174 | [
"MIT"
] | permissive | mkbeh/aiobitcoin | 426a1be16c9b7d7150a164fff553296988961625 | 1cc9121a292208600b1631a709917c82a1238964 | refs/heads/master | 2023-08-07T15:11:41.892153 | 2021-07-11T11:05:29 | 2021-07-11T11:05:29 | 185,234,495 | 1 | 2 | MIT | 2023-07-20T15:09:24 | 2019-05-06T16:36:30 | Python | UTF-8 | Python | false | false | 5,230 | py | # -*- coding: utf-8 -*-
"""
Parse, stream, create, sign and verify Bitcoin transactions as Tx structures.
The MIT License (MIT)
Copyright (c) 2017 by Richard Kiss
Copyright (c) 2019 July by mkbeh
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from hashlib import sha256
from ...intbytes import byte2int, indexbytes
from .flags import (
VERIFY_P2SH, VERIFY_DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM
)
from . import errno
from . import opcodes
from . import ScriptError
from .Stack import Stack
from .eval_script import eval_script
from .tools import bin_script, bool_from_script_bytes
def witness_program_version(script):
    """
    Return the segwit witness version (0-16) encoded by *script*,
    or None when the script is not a witness program.

    A witness program is a version opcode followed by a single push of
    2..40 bytes, so the total script length is 4..42 bytes and the second
    byte (the push length) must account for the remainder of the script.
    """
    script_len = len(script)
    if not 4 <= script_len <= 42:
        return None
    if indexbytes(script, 1) + 2 != script_len:
        return None
    version_opcode = byte2int(script)
    if version_opcode == opcodes.OP_0:
        return 0
    if opcodes.OP_1 <= version_opcode <= opcodes.OP_16:
        return version_opcode - opcodes.OP_1 + 1
    return None
def check_witness_program_v0(
        witness, script_signature, flags, signature_for_hash_type_f,
        lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version):
    """
    Resolve a version-0 witness program into the script to execute and the
    initial stack.  A 32-byte program must equal the SHA-256 of the last
    witness item (the witness script); a 20-byte program is expanded into
    the standard pay-to-pubkey-hash script with the two witness items
    (signature, pubkey) forming the stack.  Any other length fails.
    """
    l = len(script_signature)
    if l == 32:
        # 32-byte program: the last witness item is the script to run and
        # must hash (single SHA-256) to the program itself
        if len(witness) == 0:
            raise ScriptError("witness program empty", errno.WITNESS_PROGRAM_WITNESS_EMPTY)
        script_public_key = witness[-1]
        if sha256(script_public_key).digest() != script_signature:
            raise ScriptError("witness program mismatch", errno.WITNESS_PROGRAM_MISMATCH)
        stack = Stack(witness[:-1])
    elif l == 20:
        # special case for pay-to-pubkeyhash; signature + pubkey in witness
        if len(witness) != 2:
            raise ScriptError("witness program mismatch", errno.WITNESS_PROGRAM_MISMATCH)
        # "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG" % b2h(script_signature))
        script_public_key = b'v\xa9' + bin_script([script_signature]) + b'\x88\xac'
        stack = Stack(witness)
    else:
        raise ScriptError("witness program wrong length", errno.WITNESS_PROGRAM_WRONG_LENGTH)
    return stack, script_public_key
def check_witness_program(
        witness, version, script_signature, flags, signature_for_hash_type_f,
        lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version):
    """
    Evaluate a witness program.  Version 0 is resolved via
    check_witness_program_v0 and then executed; unknown versions either
    fail (when DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM is set) or are
    accepted as anyone-can-spend.  Raises ScriptError on failure.
    """
    if version == 0:
        stack, script_public_key = check_witness_program_v0(
            witness, script_signature, flags, signature_for_hash_type_f,
            lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version)
    elif flags & VERIFY_DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM:
        raise ScriptError(
            "this version witness program not yet supported", errno.DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM)
    else:
        # unknown witness versions pass without evaluation (upgradability)
        return

    # each initial stack item is limited to 520 bytes
    for s in stack:
        if len(s) > 520:
            raise ScriptError("pushing too much data onto stack", errno.PUSH_SIZE)

    # run the resolved script with witness-style signature hashing
    eval_script(script_public_key, signature_for_hash_type_f.witness, lock_time, expected_hash_type,
                stack, traceback_f=traceback_f, flags=flags, is_signature=True,
                tx_sequence=tx_sequence, tx_version=tx_version)

    # the run must leave exactly one true value on the stack
    if len(stack) == 0 or not bool_from_script_bytes(stack[-1]):
        raise ScriptError("eval false", errno.EVAL_FALSE)

    if len(stack) != 1:
        raise ScriptError("stack not clean after evaluation", errno.CLEANSTACK)
def check_witness(stack, script_public_key, script_signature, witness, witness_flags, signature_for_hash_type_f,
                  lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version):
    """
    If *script_public_key* is a witness program, evaluate it and collapse
    *stack* to its top element.  Returns True when a witness program was
    found and checked, False otherwise.
    """
    witness_version = witness_program_version(script_public_key)
    had_witness = False
    if witness_version is not None:
        had_witness = True
        # the program bytes follow the version opcode and push-length byte
        witness_program = script_public_key[2:]
        if len(script_signature) > 0:
            # NOTE(review): the errno selection looks inverted relative to
            # the constant names (P2SH flag -> WITNESS_MALLEATED, otherwise
            # WITNESS_MALLEATED_P2SH) -- confirm against the consensus rules
            err = errno.WITNESS_MALLEATED if witness_flags & VERIFY_P2SH else errno.WITNESS_MALLEATED_P2SH
            raise ScriptError("script sig is not blank on segwit input", err)
        check_witness_program(
            witness, witness_version, witness_program, witness_flags,
            signature_for_hash_type_f, lock_time, expected_hash_type,
            traceback_f, tx_sequence, tx_version)
        # keep only the final result value on the caller's stack
        stack[:] = stack[-1:]
    return had_witness
| [
"mkbehforever@gmail.com"
] | mkbehforever@gmail.com |
8417118ad2daf826e674008968ff8e3a4da089ce | 5d65f00e3fb6c84779f14ec0be852f1da285aec2 | /homeassistant/components/netatmo/light.py | 6fe5e84e65af8351faa316f63b4a175b99cb531c | [
"Apache-2.0"
] | permissive | Shahzad6077/core | b448ff00ef4fed131c70c0671635b7b4c62dd23d | e0873493e2f77e9702601969712f6d2c1536aaef | refs/heads/master | 2023-07-09T18:11:20.778850 | 2021-08-16T03:49:10 | 2021-08-16T03:49:10 | 397,514,065 | 2 | 0 | Apache-2.0 | 2021-08-18T07:36:53 | 2021-08-18T07:36:53 | null | UTF-8 | Python | false | false | 5,010 | py | """Support for the Netatmo camera lights."""
from __future__ import annotations
import logging
from typing import cast
import pyatmo
from homeassistant.components.light import LightEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
DATA_HANDLER,
DOMAIN,
EVENT_TYPE_LIGHT_MODE,
MANUFACTURER,
SIGNAL_NAME,
WEBHOOK_LIGHT_MODE,
WEBHOOK_PUSH_TYPE,
)
from .data_handler import CAMERA_DATA_CLASS_NAME, NetatmoDataHandler
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up the Netatmo camera light platform."""
    # The camera scope is required before any camera-backed entity can work.
    if "access_camera" not in entry.data["token"]["scope"]:
        _LOGGER.info(
            "Cameras are currently not supported with this authentication method"
        )
        return

    handler = hass.data[DOMAIN][entry.entry_id][DATA_HANDLER]
    await handler.register_data_class(
        CAMERA_DATA_CLASS_NAME, CAMERA_DATA_CLASS_NAME, None
    )

    camera_data = handler.data.get(CAMERA_DATA_CLASS_NAME)
    if not camera_data or camera_data.raw_data == {}:
        raise PlatformNotReady

    # Flatten the per-home camera mapping into a single list of camera dicts.
    cameras = [
        camera
        for home in handler.data[CAMERA_DATA_CLASS_NAME].cameras.values()
        for camera in home.values()
    ]

    # Only "NOC" model cameras get a light entity here.
    entities = [
        NetatmoLight(
            handler,
            camera["id"],
            camera["type"],
            camera["home_id"],
        )
        for camera in cameras
        if camera["type"] == "NOC"
    ]

    _LOGGER.debug("Adding camera lights %s", entities)
    async_add_entities(entities, True)
class NetatmoLight(NetatmoBase, LightEntity):
    """Representation of a Netatmo Presence camera light.

    State is pushed via webhooks (no polling of the light itself): the
    entity listens for light-mode webhook events and mirrors them into
    ``_is_on``.
    """

    def __init__(
        self,
        data_handler: NetatmoDataHandler,
        camera_id: str,
        camera_type: str,
        home_id: str,
    ) -> None:
        """Initialize a Netatmo Presence camera light.

        Args:
            data_handler: shared Netatmo data handler for this config entry.
            camera_id: Netatmo id of the camera whose floodlight this is.
            camera_type: Netatmo model string (expected "NOC" here).
            home_id: id of the Netatmo home containing the camera.
        """
        # NOTE(review): LightEntity.__init__ is invoked explicitly before the
        # NetatmoBase initializer runs via super() — confirm this double-init
        # ordering is intentional for the MRO.
        LightEntity.__init__(self)
        super().__init__(data_handler)
        self._data_classes.append(
            {"name": CAMERA_DATA_CLASS_NAME, SIGNAL_NAME: CAMERA_DATA_CLASS_NAME}
        )
        self._id = camera_id
        self._home_id = home_id
        self._model = camera_type
        # Device name as reported by the Netatmo camera data.
        self._device_name: str = self._data.get_camera(camera_id)["name"]
        self._attr_name = f"{MANUFACTURER} {self._device_name}"
        # Last light state received via webhook; starts as off.
        self._is_on = False
        self._attr_unique_id = f"{self._id}-light"

    async def async_added_to_hass(self) -> None:
        """Entity created."""
        await super().async_added_to_hass()
        # Subscribe to light-mode webhook pushes; the listener is stored so
        # the base class can disconnect it on removal.
        self._listeners.append(
            async_dispatcher_connect(
                self.hass,
                f"signal-{DOMAIN}-webhook-{EVENT_TYPE_LIGHT_MODE}",
                self.handle_event,
            )
        )

    @callback
    def handle_event(self, event: dict) -> None:
        """Handle webhook events.

        Only events addressed to this camera (matching home and camera id)
        with the light-mode push type update the entity state.
        """
        data = event["data"]
        if not data.get("camera_id"):
            return
        if (
            data["home_id"] == self._home_id
            and data["camera_id"] == self._id
            and data[WEBHOOK_PUSH_TYPE] == WEBHOOK_LIGHT_MODE
        ):
            self._is_on = bool(data["sub_type"] == "on")
            self.async_write_ha_state()
            return

    @property
    def _data(self) -> pyatmo.AsyncCameraData:
        """Return data for this entity."""
        return cast(
            pyatmo.AsyncCameraData,
            self.data_handler.data[self._data_classes[0]["name"]],
        )

    @property
    def available(self) -> bool:
        """If the webhook is not established, mark as unavailable."""
        return bool(self.data_handler.webhook)

    @property
    def is_on(self) -> bool:
        """Return true if light is on."""
        return self._is_on

    async def async_turn_on(self, **kwargs: dict) -> None:
        """Turn camera floodlight on."""
        _LOGGER.debug("Turn camera '%s' on", self.name)
        await self._data.async_set_state(
            home_id=self._home_id,
            camera_id=self._id,
            floodlight="on",
        )

    async def async_turn_off(self, **kwargs: dict) -> None:
        """Turn camera floodlight into auto mode.

        Note: "off" for this entity means the floodlight returns to its
        automatic (motion-triggered) mode, not fully disabled.
        """
        _LOGGER.debug("Turn camera '%s' to auto mode", self.name)
        await self._data.async_set_state(
            home_id=self._home_id,
            camera_id=self._id,
            floodlight="auto",
        )

    @callback
    def async_update_callback(self) -> None:
        """Update the entity's state."""
        self._is_on = bool(self._data.get_light_state(self._id) == "on")
| [
"noreply@github.com"
] | Shahzad6077.noreply@github.com |
13663badcb166800aaeff083e507489aa701c0d2 | e47d5da2a947c3b3a834817d0b084ee65d302067 | /atcoder.jp/abc092/abc092_a/Main.py | 47552455aafafc56e01520693d3d03d62ca48228 | [] | no_license | aki-nlp/AtCoder | 3293b9b183c0a8cefbf20d7f4f491c6f1e7604b8 | 9385805cbb1fa158f6d3c4a2415cdf7ba94547e5 | refs/heads/master | 2023-02-25T06:04:10.913237 | 2020-10-03T12:02:00 | 2020-10-03T12:02:00 | 296,792,313 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 76 | py | a =[int(input()) for _ in range(4)]
print(min(a[0], a[1]) + min(a[2], a[3])) | [
"akiuo.ou@gmail.com"
] | akiuo.ou@gmail.com |
9d8946b7ca19fe0c6393b5f6a2dde7d406919cd9 | 78f3fe4a148c86ce9b80411a3433a49ccfdc02dd | /2015/09/college-scorecard-laitinen-20150916/graphic_config.py | beb761cd07fa41c1aada464419435120c5b3c263 | [] | no_license | nprapps/graphics-archive | 54cfc4d4d670aca4d71839d70f23a8bf645c692f | fe92cd061730496cb95c9df8fa624505c3b291f8 | refs/heads/master | 2023-03-04T11:35:36.413216 | 2023-02-26T23:26:48 | 2023-02-26T23:26:48 | 22,472,848 | 16 | 7 | null | null | null | null | UTF-8 | Python | false | false | 1,049 | py | #!/usr/bin/env python
import base_filters
from math import trunc
COPY_GOOGLE_DOC_KEY = '1AOxvgIztKDd-GOVZjx11dRjmRadr47wXUsDUMS2awJw'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
def breaks(n):
    """Bucket a numeric value into the 'plusNN' bins used by the graphic."""
    value = float(n)
    if value >= 200:
        return 'plus200'
    if value >= 150:
        return 'plus150'
    if value >= 100:
        return 'plus100'
    if value >= 50:
        return 'plus50'
    return 'plus0'
def format_currency(value):
    """Format a dollar amount in thousands, e.g. 1234567 -> '1,234.6k'."""
    thousands = float(value) / 1000
    return "{:,.1f}".format(thousands) + "k"
def format_score(value):
    """Render a score with exactly two decimal places."""
    score = float(value)
    return "{:.2f}".format(score)
def format_rank(value):
    """Truncate a numeric rank toward zero and return it as an int."""
    return trunc(float(value))
def format_percent(value):
    """Convert a 0-1 fraction to a whole-number percent string."""
    pct = float(value) * 100
    return "{0:.00f}".format(pct)
def format_percent2(value):
    """Convert a 0-1 fraction to a percent string with one decimal place."""
    pct = float(value) * 100
    return "{0:.1f}".format(pct)
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS + [format_currency] + [format_rank] + [format_percent] + [format_score] + [format_percent2]
| [
"ahurt@npr.org"
] | ahurt@npr.org |
2630419c2c16f7fafecd89941409cee861476904 | 1efd2de8bf77ec00eb2fcaf5749278495946d920 | /src/tests/ftest/util/dbench_utils.py | 897f77efe1395dadb573a482c8594d04f951011e | [
"BSD-2-Clause",
"BSD-2-Clause-Patent"
] | permissive | daos-stack/daos | 6f55bf3061fd830d5b8d28506e1295e2d3a27c38 | ed5eed5df43a68571afe123132a743824c02637a | refs/heads/master | 2023-08-31T21:43:37.606145 | 2023-08-31T16:38:00 | 2023-08-31T16:38:00 | 69,390,670 | 631 | 300 | NOASSERTION | 2023-09-14T18:55:15 | 2016-09-27T19:21:29 | C | UTF-8 | Python | false | false | 2,879 | py | """
(C) Copyright 2019-2023 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from command_utils_base import FormattedParameter
from command_utils_base import BasicParameter
from command_utils import ExecutableCommand
from job_manager_utils import Mpirun
# pylint: disable=too-few-public-methods,too-many-instance-attributes
class DbenchCommand(ExecutableCommand):
    """Defines an object representing a dbench command.

    Each attribute maps one dbench command-line option onto a
    FormattedParameter so the surrounding framework can render the full
    command line from yaml-supplied values.
    """

    def __init__(self, namespace, command):
        """Create a dbench Command object.

        Args:
            namespace: path used to look up parameter values
                (e.g. "/run/dbench/*").
            command: the dbench executable name or path.
        """
        super().__init__(namespace, command)

        # dbench options
        self.timelimit = FormattedParameter("--timelimit {}")
        self.loadfile = FormattedParameter("--loadfile {}")
        self.directory = FormattedParameter("--directory {}")
        self.tcp_options = FormattedParameter("--tcp-options {}")
        self.target_rate = FormattedParameter("--target-rate {}")
        self.sync = FormattedParameter("--sync", False)
        self.fsync = FormattedParameter("--fsync", False)
        self.xattr = FormattedParameter("--xattr", False)
        self.no_resolve = FormattedParameter("--no-resolve", False)
        self.clients_per_process = FormattedParameter(
            "--clients-per-process {}")
        self.one_byte_write_fix = FormattedParameter(
            "--one-byte-write-fix", False)
        self.stat_check = FormattedParameter("--stat-check", False)
        self.fake_io = FormattedParameter("--fake-io", False)
        self.skip_cleanup = FormattedParameter("--skip-cleanup", False)
        self.per_client_results = FormattedParameter(
            "--per-client-results", False)
        # Plain parameter (no "--" flag); rendered last — presumably
        # dbench's positional client-count argument. TODO confirm.
        self.num_of_procs = BasicParameter(None)

    def get_param_names(self):
        """Overriding the original get_param_names.

        Returns the parameter names with 'num_of_procs' moved to the end,
        so the positional value follows all of the option flags.
        """
        param_names = super().get_param_names()

        # move key=num_of_procs to the end: the key is False for every other
        # name and True for 'num_of_procs', and list.sort is stable, so all
        # other names keep their relative order.
        param_names.sort(key='num_of_procs'.__eq__)

        return param_names
class Dbench(DbenchCommand):
    """Class defining an object of type DbenchCommand.

    Adds the host list and hostfile directory needed to launch dbench via
    mpirun.
    """

    def __init__(self, hosts, tmp):
        """Create a dbench object.

        Args:
            hosts: hosts on which mpirun launches dbench.
            tmp: directory handed to the job manager (used for its hostfile).
        """
        super().__init__("/run/dbench/*", "dbench")

        # set params
        self.hosts = hosts
        self.tmp = tmp

    def run(self, processes=1):
        # pylint: disable=arguments-differ
        # pylint: disable=arguments-renamed
        """Run the dbench command.

        Args:
            processes: mpi processes

        Returns:
            The result object returned by Mpirun.run().

        Raises:
            CommandFailure: In case dbench run command fails
        """
        self.log.info('Starting dbench')

        # Get job manager cmd
        mpirun = Mpirun(self, mpi_type="mpich")
        mpirun.assign_hosts(self.hosts, self.tmp)
        mpirun.assign_processes(processes)
        # Raise on a non-zero exit instead of merely logging it.
        mpirun.exit_status_exception = True

        # run dbench
        out = mpirun.run()

        return out
| [
"noreply@github.com"
] | daos-stack.noreply@github.com |
16f69a9fbaa037a8ffd65d455a9fc64049bc0d44 | 1dae87abcaf49f1d995d03c0ce49fbb3b983d74a | /programs/subroutines/Ext Ver Half - 150ms.sub.py | 6bcee355d0892a2f2de8054a57b6913d87df051c | [] | no_license | BEC-Trento/BEC1-data | 651cd8e5f15a7d9848f9921b352e0830c08f27dd | f849086891bc68ecf7447f62962f791496d01858 | refs/heads/master | 2023-03-10T19:19:54.833567 | 2023-03-03T22:59:01 | 2023-03-03T22:59:01 | 132,161,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,481 | py | prg_comment = ""
prg_version = "0.5.1"
def program(prg, cmd):
prg.add(-4002500, "Shutter Probe Na Open")
prg.add(-3512500, "Na Probe/Push (-) freq", 150.000000)
prg.add(-3502500, "Na Probe/Push (+) freq", 150.000000)
prg.add(-724500, "Optical Levit ON")
prg.add(0, "Trig ON Stingray 1")
prg.add(20, "Na Probe/Push (-) freq", 110.000000)
prg.add(500, "Na Probe/Push (+) freq", 110.000000)
prg.add(1500, "Na Probe/Push (+) freq", 150.000000)
prg.add(1900, "Na Probe/Push (-) freq", 150.000000)
prg.add(2000, "Trig OFF Stingray 1")
prg.add(299964, "Pulse uw ON")
prg.add(300000, "Pulse uw OFF")
prg.add(430000, "Trig ON Stingray 1")
prg.add(430020, "Na Probe/Push (-) freq", 110.000000)
prg.add(430500, "Na Probe/Push (+) freq", 110.000000)
prg.add(431500, "Na Probe/Push (+) freq", 150.000000)
prg.add(431900, "Na Probe/Push (-) freq", 150.000000)
prg.add(432000, "Trig OFF Stingray 1")
prg.add(649964, "Pulse uw ON")
prg.add(650000, "Pulse uw OFF")
prg.add(780000, "Trig ON Stingray 1")
prg.add(780020, "Na Probe/Push (-) freq", 110.000000)
prg.add(780500, "Na Probe/Push (+) freq", 110.000000)
prg.add(781500, "Na Probe/Push (+) freq", 150.000000)
prg.add(781900, "Na Probe/Push (-) freq", 150.000000)
prg.add(782000, "Trig OFF Stingray 1")
prg.add(929963, "Pulse uw ON")
prg.add(930000, "Pulse uw OFF")
prg.add(1060000, "Trig ON Stingray 1")
prg.add(1060020, "Na Probe/Push (-) freq", 110.000000)
prg.add(1060500, "Na Probe/Push (+) freq", 110.000000)
prg.add(1061500, "Na Probe/Push (+) freq", 150.000000)
prg.add(1061900, "Na Probe/Push (-) freq", 150.000000)
prg.add(1062000, "Trig OFF Stingray 1")
prg.add(1209962, "Pulse uw ON")
prg.add(1210000, "Pulse uw OFF")
prg.add(1340000, "Trig ON Stingray 1")
prg.add(1340020, "Na Probe/Push (-) freq", 110.000000)
prg.add(1340500, "Na Probe/Push (+) freq", 110.000000)
prg.add(1341500, "Na Probe/Push (+) freq", 150.000000)
prg.add(1341900, "Na Probe/Push (-) freq", 150.000000)
prg.add(1342000, "Trig OFF Stingray 1")
prg.add(1489960, "Pulse uw ON")
prg.add(1490000, "Pulse uw OFF")
prg.add(1620000, "Trig ON Stingray 1")
prg.add(1620020, "Na Probe/Push (-) freq", 110.000000)
prg.add(1620500, "Na Probe/Push (+) freq", 110.000000)
prg.add(1621500, "Na Probe/Push (+) freq", 150.000000)
prg.add(1621900, "Na Probe/Push (-) freq", 150.000000)
prg.add(1622000, "Trig OFF Stingray 1")
prg.add(1769957, "Pulse uw ON")
prg.add(1770000, "Pulse uw OFF")
prg.add(1900000, "Trig ON Stingray 1")
prg.add(1900020, "Na Probe/Push (-) freq", 110.000000)
prg.add(1900500, "Na Probe/Push (+) freq", 110.000000)
prg.add(1901500, "Na Probe/Push (+) freq", 150.000000)
prg.add(1901900, "Na Probe/Push (-) freq", 150.000000)
prg.add(1902000, "Trig OFF Stingray 1")
prg.add(2049955, "Pulse uw ON")
prg.add(2050000, "Pulse uw OFF")
prg.add(2180000, "Trig ON Stingray 1")
prg.add(2180020, "Na Probe/Push (-) freq", 110.000000)
prg.add(2180500, "Na Probe/Push (+) freq", 110.000000)
prg.add(2181500, "Na Probe/Push (+) freq", 150.000000)
prg.add(2181900, "Na Probe/Push (-) freq", 150.000000)
prg.add(2182000, "Trig OFF Stingray 1")
prg.add(2329954, "Pulse uw ON")
prg.add(2330000, "Pulse uw OFF")
prg.add(2460000, "Trig ON Stingray 1")
prg.add(2460020, "Na Probe/Push (-) freq", 110.000000)
prg.add(2460500, "Na Probe/Push (+) freq", 110.000000)
prg.add(2461500, "Na Probe/Push (+) freq", 150.000000)
prg.add(2461900, "Na Probe/Push (-) freq", 150.000000)
prg.add(2462000, "Trig OFF Stingray 1")
prg.add(2609953, "Pulse uw ON")
prg.add(2610000, "Pulse uw OFF")
prg.add(2740000, "Trig ON Stingray 1")
prg.add(2740020, "Na Probe/Push (-) freq", 110.000000)
prg.add(2740500, "Na Probe/Push (+) freq", 110.000000)
prg.add(2741500, "Na Probe/Push (+) freq", 150.000000)
prg.add(2741900, "Na Probe/Push (-) freq", 150.000000)
prg.add(2742000, "Trig OFF Stingray 1")
prg.add(2889952, "Pulse uw ON")
prg.add(2890000, "Pulse uw OFF")
prg.add(3020000, "Trig ON Stingray 1")
prg.add(3020020, "Na Probe/Push (-) freq", 110.000000)
prg.add(3020500, "Na Probe/Push (+) freq", 110.000000)
prg.add(3021500, "Na Probe/Push (+) freq", 150.000000)
prg.add(3021900, "Na Probe/Push (-) freq", 150.000000)
prg.add(3022000, "Trig OFF Stingray 1")
prg.add(3170000, "Pulse uw OFF")
prg.add(3300000, "Trig ON Stingray 1")
prg.add(3300020, "Na Probe/Push (-) freq", 110.000000)
prg.add(3300500, "Na Probe/Push (+) freq", 110.000000)
prg.add(3301500, "Na Probe/Push (+) freq", 150.000000)
prg.add(3301900, "Na Probe/Push (-) freq", 150.000000)
prg.add(3302000, "Trig OFF Stingray 1")
prg.add(3450000, "Pulse uw OFF")
prg.add(3580000, "Trig ON Stingray 1")
prg.add(3580020, "Na Probe/Push (-) freq", 110.000000)
prg.add(3580500, "Na Probe/Push (+) freq", 110.000000)
prg.add(3581500, "Na Probe/Push (+) freq", 150.000000)
prg.add(3581900, "Na Probe/Push (-) freq", 150.000000)
prg.add(3582000, "Trig OFF Stingray 1")
prg.add(3730000, "Pulse uw OFF")
prg.add(3754500, "Na Repumper1 (+) Amp", 1.000000)
prg.add(3764500, "K probe Repumper (+) Amp", 1.000000)
prg.add(3774500, "K Repumper 1p (+) Amp", 1.000000)
prg.add(3794500, "Na Dark Spot Amp", 1.000000)
prg.add(3804500, "Na Repumper MOT Amp", 1.000000)
prg.add(3860000, "Trig ON Stingray 1")
prg.add(3860020, "Na Probe/Push (-) freq", 110.000000)
prg.add(3860500, "Na Probe/Push (+) freq", 110.000000)
prg.add(3861500, "Na Probe/Push (+) freq", 150.000000)
prg.add(3861900, "Na Probe/Push (-) freq", 150.000000)
prg.add(3862000, "Trig OFF Stingray 1")
prg.add(4010000, "Pulse uw OFF")
prg.add(4140000, "Trig ON Stingray 1")
prg.add(4140020, "Na Probe/Push (-) freq", 110.000000)
prg.add(4140500, "Na Probe/Push (+) freq", 110.000000)
prg.add(4141500, "Na Probe/Push (+) freq", 150.000000)
prg.add(4141900, "Na Probe/Push (-) freq", 150.000000)
prg.add(4142000, "Trig OFF Stingray 1")
prg.add(4154500, "Shutter Probe Na Open")
prg.add(4290000, "Pulse uw OFF")
prg.add(4420000, "Trig ON Stingray 1")
prg.add(4420020, "Na Probe/Push (-) freq", 110.000000)
prg.add(4420500, "Na Probe/Push (+) freq", 110.000000)
prg.add(4421500, "Na Probe/Push (+) freq", 150.000000)
prg.add(4421900, "Na Probe/Push (-) freq", 150.000000)
prg.add(4422000, "Trig OFF Stingray 1")
prg.add(4570000, "Pulse uw OFF")
prg.add(4700000, "Trig ON Stingray 1")
prg.add(4700020, "Na Probe/Push (-) freq", 110.000000)
prg.add(4700500, "Na Probe/Push (+) freq", 110.000000)
prg.add(4701500, "Na Probe/Push (+) freq", 150.000000)
prg.add(4701900, "Na Probe/Push (-) freq", 150.000000)
prg.add(4702000, "Trig OFF Stingray 1")
prg.add(4850000, "Pulse uw OFF")
prg.add(4980000, "Trig ON Stingray 1")
prg.add(4980020, "Na Probe/Push (-) freq", 110.000000)
prg.add(4980500, "Na Probe/Push (+) freq", 110.000000)
prg.add(4981500, "Na Probe/Push (+) freq", 150.000000)
prg.add(4981900, "Na Probe/Push (-) freq", 150.000000)
prg.add(4982000, "Trig OFF Stingray 1")
prg.add(5124500, "Shutter Probe K Open")
prg.add(5130000, "Pulse uw OFF")
prg.add(5134500, "Shutter RepumperMOT K Open")
prg.add(5144500, "Shutter repump Na Open")
prg.add(5260000, "Trig ON Stingray 1")
prg.add(5260019, "Na Probe/Push (-) freq", 110.000000)
prg.add(5260500, "Na Probe/Push (+) freq", 110.000000)
prg.add(5261500, "Na Probe/Push (+) freq", 150.000000)
prg.add(5261900, "Na Probe/Push (-) freq", 150.000000)
prg.add(5262000, "Trig OFF Stingray 1")
prg.add(5410000, "Pulse uw OFF")
prg.add(5540000, "Trig ON Stingray 1")
prg.add(5540019, "Na Probe/Push (-) freq", 110.000000)
prg.add(5540500, "Na Probe/Push (+) freq", 110.000000)
prg.add(5541500, "Na Probe/Push (+) freq", 150.000000)
prg.add(5541900, "Na Probe/Push (-) freq", 150.000000)
prg.add(5542000, "Trig OFF Stingray 1")
prg.add(5664500, "K probe Cooler (-) Amp", 1.000000)
prg.add(5690000, "Pulse uw OFF")
prg.add(5820000, "Trig ON Stingray 1")
prg.add(5820019, "Na Probe/Push (-) freq", 110.000000)
prg.add(5820500, "Na Probe/Push (+) freq", 110.000000)
prg.add(5821500, "Na Probe/Push (+) freq", 150.000000)
prg.add(5821900, "Na Probe/Push (-) freq", 150.000000)
prg.add(5822000, "Trig OFF Stingray 1")
prg.add(6127500, "Na 3D MOT cool (-) Amp", 1.000000)
prg.add(6137500, "Na 3D MOT cool (+) Amp", 1.000000)
prg.add(6156500, "Optical Levit OFF")
prg.add(6157500, "Shutter 3DMOT cool Na Open")
prg.add(6168500, "Shutter Optical Levit Close")
prg.add(6647000, "B comp y", 0.000000)
prg.add(6647530, "B comp y", 0.260000)
prg.add(6648049, "B comp y", 0.530000)
prg.add(6648579, "B comp y", 0.790000)
prg.add(6649109, "B comp y", 1.050000)
prg.add(6649630, "B comp y", 1.320000)
prg.add(6650160, "B comp y", 1.580000)
prg.add(6650680, "B comp y", 1.840000)
prg.add(6651210, "B comp y", 2.110000)
prg.add(6651740, "B comp y", 2.370000)
prg.add(6652260, "B comp y", 2.630000)
prg.add(6652790, "B comp y", 2.890000)
prg.add(6653320, "B comp y", 3.160000)
prg.add(6653840, "B comp y", 3.420000)
prg.add(6654370, "B comp y", 3.680000)
prg.add(6654890, "B comp y", 3.950000)
prg.add(6655420, "B comp y", 4.210000)
prg.add(6655950, "B comp y", 4.470000)
prg.add(6656470, "B comp y", 4.740000)
prg.add(6656500, "IGBT 1 pinch", -10.000000)
prg.add(6656520, "IGBT 3 Open")
prg.add(6656559, "IGBT 2 pinch+comp", 10.000000)
prg.add(6656660, "IGBT 2 pinch+comp", 9.800000)
prg.add(6656760, "IGBT 2 pinch+comp", 9.600000)
prg.add(6656860, "IGBT 2 pinch+comp", 9.400000)
prg.add(6656960, "IGBT 2 pinch+comp", 9.200000)
prg.add(6657000, "B comp y", 5.000000)
prg.add(6657060, "IGBT 2 pinch+comp", 9.000000)
prg.add(6657160, "IGBT 2 pinch+comp", 8.800000)
prg.add(6657260, "IGBT 2 pinch+comp", 8.600000)
prg.add(6657360, "IGBT 2 pinch+comp", 8.400000)
prg.add(6657460, "IGBT 2 pinch+comp", 8.200000)
prg.add(6657560, "IGBT 2 pinch+comp", 8.000000)
prg.add(6657660, "IGBT 2 pinch+comp", 7.800000)
prg.add(6657760, "IGBT 2 pinch+comp", 7.600000)
prg.add(6657859, "IGBT 2 pinch+comp", 7.400000)
prg.add(6657960, "IGBT 2 pinch+comp", 7.200000)
prg.add(6658060, "IGBT 2 pinch+comp", 7.000000)
prg.add(6658160, "IGBT 2 pinch+comp", 6.800000)
prg.add(6658260, "IGBT 2 pinch+comp", 6.600000)
prg.add(6658360, "IGBT 2 pinch+comp", 6.400000)
prg.add(6658460, "IGBT 2 pinch+comp", 6.200000)
prg.add(6658560, "IGBT 2 pinch+comp", 6.000000)
prg.add(6658660, "IGBT 2 pinch+comp", 5.800000)
prg.add(6658760, "IGBT 2 pinch+comp", 5.600000)
prg.add(6658860, "IGBT 2 pinch+comp", 5.400000)
prg.add(6658960, "IGBT 2 pinch+comp", 5.200000)
prg.add(6659059, "IGBT 2 pinch+comp", 5.000000)
prg.add(6659160, "IGBT 2 pinch+comp", 4.800000)
prg.add(6659260, "IGBT 2 pinch+comp", 4.600000)
prg.add(6659360, "IGBT 2 pinch+comp", 4.400000)
prg.add(6659460, "IGBT 2 pinch+comp", 4.200000)
prg.add(6659560, "IGBT 2 pinch+comp", 4.000000)
prg.add(6659660, "IGBT 2 pinch+comp", 3.800000)
prg.add(6659760, "IGBT 2 pinch+comp", 3.600000)
prg.add(6659860, "IGBT 2 pinch+comp", 3.400000)
prg.add(6659960, "IGBT 2 pinch+comp", 3.200000)
prg.add(6660060, "IGBT 2 pinch+comp", 3.000000)
prg.add(6660160, "IGBT 2 pinch+comp", 2.800000)
prg.add(6660260, "IGBT 2 pinch+comp", 2.600000)
prg.add(6660359, "IGBT 2 pinch+comp", 2.400000)
prg.add(6660460, "IGBT 2 pinch+comp", 2.200000)
prg.add(6660560, "IGBT 2 pinch+comp", 2.000000)
prg.add(6660660, "IGBT 2 pinch+comp", 1.800000)
prg.add(6660760, "IGBT 2 pinch+comp", 1.600000)
prg.add(6660860, "IGBT 2 pinch+comp", 1.400000)
prg.add(6660960, "IGBT 2 pinch+comp", 1.200000)
prg.add(6661060, "IGBT 2 pinch+comp", 1.000000)
prg.add(6661160, "IGBT 2 pinch+comp", 0.800000)
prg.add(6661260, "IGBT 2 pinch+comp", 0.600000)
prg.add(6661360, "IGBT 2 pinch+comp", 0.400000)
prg.add(6661460, "IGBT 2 pinch+comp", 0.200000)
prg.add(6661559, "IGBT 2 pinch+comp", 0.000000)
prg.add(6661660, "IGBT 2 pinch+comp", -0.200000)
prg.add(6661760, "IGBT 2 pinch+comp", -0.400000)
prg.add(6661860, "IGBT 2 pinch+comp", -0.600000)
prg.add(6661960, "IGBT 2 pinch+comp", -0.800000)
prg.add(6662060, "IGBT 2 pinch+comp", -1.000000)
prg.add(6662160, "IGBT 2 pinch+comp", -1.200000)
prg.add(6662260, "IGBT 2 pinch+comp", -1.400000)
prg.add(6662360, "IGBT 2 pinch+comp", -1.600000)
prg.add(6662460, "IGBT 2 pinch+comp", -1.800000)
prg.add(6662560, "IGBT 2 pinch+comp", -2.000000)
prg.add(6662660, "IGBT 2 pinch+comp", -2.200000)
prg.add(6662760, "IGBT 2 pinch+comp", -2.400000)
prg.add(6662859, "IGBT 2 pinch+comp", -2.600000)
prg.add(6662960, "IGBT 2 pinch+comp", -2.800000)
prg.add(6663060, "IGBT 2 pinch+comp", -3.000000)
prg.add(6663160, "IGBT 2 pinch+comp", -3.200000)
prg.add(6663260, "IGBT 2 pinch+comp", -3.400000)
prg.add(6663360, "IGBT 2 pinch+comp", -3.600000)
prg.add(6663460, "IGBT 2 pinch+comp", -3.800000)
prg.add(6663560, "IGBT 2 pinch+comp", -4.000000)
prg.add(6663660, "IGBT 2 pinch+comp", -4.200000)
prg.add(6663760, "IGBT 2 pinch+comp", -4.400000)
prg.add(6663860, "IGBT 2 pinch+comp", -4.600000)
prg.add(6663960, "IGBT 2 pinch+comp", -4.800000)
prg.add(6664059, "IGBT 2 pinch+comp", -5.000000)
prg.add(6664160, "IGBT 2 pinch+comp", -5.200000)
prg.add(6664260, "IGBT 2 pinch+comp", -5.400000)
prg.add(6664360, "IGBT 2 pinch+comp", -5.600000)
prg.add(6664460, "IGBT 2 pinch+comp", -5.800000)
prg.add(6664560, "IGBT 2 pinch+comp", -6.000000)
prg.add(6664660, "IGBT 2 pinch+comp", -6.200000)
prg.add(6664760, "IGBT 2 pinch+comp", -6.400000)
prg.add(6664860, "IGBT 2 pinch+comp", -6.600000)
prg.add(6664960, "IGBT 2 pinch+comp", -6.800000)
prg.add(6665060, "IGBT 2 pinch+comp", -7.000000)
prg.add(6665160, "IGBT 2 pinch+comp", -7.200000)
prg.add(6665260, "IGBT 2 pinch+comp", -7.400000)
prg.add(6665359, "IGBT 2 pinch+comp", -7.600000)
prg.add(6665460, "IGBT 2 pinch+comp", -7.800000)
prg.add(6665560, "IGBT 2 pinch+comp", -8.000000)
prg.add(6665660, "IGBT 2 pinch+comp", -8.200000)
prg.add(6665760, "IGBT 2 pinch+comp", -8.400000)
prg.add(6665860, "IGBT 2 pinch+comp", -8.600000)
prg.add(6665960, "IGBT 2 pinch+comp", -8.800000)
prg.add(6666060, "IGBT 2 pinch+comp", -9.000000)
prg.add(6666160, "IGBT 2 pinch+comp", -9.200000)
prg.add(6666260, "IGBT 2 pinch+comp", -9.400000)
prg.add(6666360, "IGBT 2 pinch+comp", -9.600000)
prg.add(6666460, "IGBT 2 pinch+comp", -9.800000)
prg.add(6666559, "IGBT 2 pinch+comp", -10.000000)
prg.add(6666600, "IGBT 4 Open")
prg.add(6666620, "IGBT 5 Open")
prg.add(6666849, "IGBT 1 pinch", -10.000000)
prg.add(6666860, "IGBT 2 pinch+comp", -10.000000)
prg.add(6666870, "IGBT 3 Close")
prg.add(6666880, "IGBT 4 Close")
prg.add(6666890, "IGBT 5 Open")
prg.add(6666900, "Delta 2 Voltage", 0.000000)
prg.add(6666910, "Delta 1 Current", 15.300000)
prg.add(6666950, "B comp x", 0.000000)
prg.add(6667000, "B comp y", 5.000000)
prg.add(6667530, "B comp y", 4.740000)
prg.add(6668049, "B comp y", 4.470000)
prg.add(6668579, "B comp y", 4.210000)
prg.add(6669109, "B comp y", 3.950000)
prg.add(6669630, "B comp y", 3.680000)
prg.add(6670160, "B comp y", 3.420000)
prg.add(6670680, "B comp y", 3.160000)
prg.add(6671210, "B comp y", 2.890000)
prg.add(6671740, "B comp y", 2.630000)
prg.add(6672260, "B comp y", 2.370000)
prg.add(6672790, "B comp y", 2.110000)
prg.add(6673320, "B comp y", 1.840000)
prg.add(6673840, "B comp y", 1.580000)
prg.add(6674370, "B comp y", 1.320000)
prg.add(6674890, "B comp y", 1.050000)
prg.add(6675420, "B comp y", 0.790000)
prg.add(6675950, "B comp y", 0.530000)
prg.add(6676470, "B comp y", 0.260000)
prg.add(6677000, "B comp y", 0.000000)
prg.add(8138500, "B comp y", 1.000000)
prg.add(8151500, "IGBT 1 pinch", -10.000000)
prg.add(8151510, "IGBT 2 pinch+comp", -10.000000)
prg.add(8151520, "IGBT 3 Open")
prg.add(8151530, "IGBT 4 Open")
prg.add(8151540, "IGBT 5 Open")
prg.add(8151550, "IGBT 6 Open")
prg.add(8151570, "Na Probe/Push (-) Amp", 1.000000)
prg.add(8151900, "Na Probe/Push (+) Amp", 1.000000)
prg.add(8155000, "Na Repumper MOT Amp", 1000.000000)
prg.add(8155500, "Na Repumper1 (+) Amp", 1000.000000)
prg.add(8155900, "Na Repumper Tune (+) freq", 1713.000000)
prg.add(8156300, "Na Probe/Push (+) freq", 110.000000)
prg.add(8156700, "Na Probe/Push (-) freq", 110.000000)
prg.add(8157000, "Trig Slow Stingray ON")
prg.add(8157100, "Na Probe/Push (+) Amp", 1000.000000)
prg.add(8157500, "Na Probe/Push (-) Amp", 1000.000000)
prg.add(8157900, "K probe Cooler (-) Amp", 1000.000000)
prg.add(8158550, "Na Probe/Push (-) Amp", 1.000000)
prg.add(8158900, "K probe Cooler (-) Amp", 1.000000)
prg.add(8159500, "Trig Slow Stingray OFF")
prg.add(8407500, "Shutter Probe Na Close")
prg.add(8417500, "Shutter Probe K Close")
prg.add(9157000, "Trig Slow Stingray ON")
prg.add(9157500, "Na Probe/Push (-) Amp", 1000.000000)
prg.add(9157900, "K probe Cooler (-) Amp", 1000.000000)
prg.add(9158500, "Na Probe/Push (-) Amp", 1.000000)
prg.add(9158900, "K probe Cooler (-) Amp", 1.000000)
prg.add(9159500, "Trig Slow Stingray OFF")
prg.add(9167500, "Na Repumper MOT Amp", 1.000000)
prg.add(9177500, "Na Repumper1 (+) Amp", 1.000000)
prg.add(9187500, "K Repumper 1p (+) Amp", 1.000000)
prg.add(10157000, "Trig Slow Stingray ON")
prg.add(10159000, "Trig Slow Stingray OFF")
prg.add(11157000, "Trig Slow Stingray ON")
prg.add(11159000, "Trig Slow Stingray OFF")
prg.add(11667500, "Optical Levit OFF")
prg.add(11906500, "Shutter Optical Levit Close")
prg.add(12157500, "B comp y", 0.000000)
prg.add(16656500, "Optical Levit ON")
return prg
| [
"carmelo.mordini@unitn.it"
] | carmelo.mordini@unitn.it |
9893f64592146bbf9aa0045526733414bc2fe78e | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_217/ch19_2019_03_26_19_01_39_954573.py | eb141b3e1ed996df46b165c1f995431f0b7a5452 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | import math
def calcula_distancia_do_projetil(v, an, yo):
    """Return the horizontal range of a projectile.

    The projectile is launched with speed ``v`` (m/s) at angle ``an``
    (radians) from initial height ``yo`` (m), under gravity g = 9.8 m/s^2:

        d = (v^2 / (2*g)) * (1 + sqrt(1 + 2*g*yo / (v^2 * sin(an)^2))) * sin(2*an)

    Fixes the original draft, which did not parse: it used a decimal comma
    (``9,8``), left a dangling ``*)``, omitted the square root, called a
    bare ``sin`` despite ``import math``, and misspelled ``return``.
    """
    g = 9.8
    raiz = math.sqrt(1 + (2 * g * yo) / (v ** 2 * math.sin(an) ** 2))
    d = (v ** 2 / (2 * g)) * (1 + raiz) * math.sin(2 * an)
    return d
"you@example.com"
] | you@example.com |
9ca6261bf93b240faa6581bb150d113d8fdacb75 | dbe5973d69df9c5a5f3b06b7451a0de7086ebda4 | /myapps/catalogue/migrations/0016_remove_product_price.py | 42b33265e0b7f36f5617d40c3801e771e1b10d5d | [] | no_license | phares/mall | 29e7c0fdf3222a05161de36c8252167ab59df7be | d3f0093828c892ce46d55afaa245e5780555cc68 | refs/heads/master | 2021-01-22T23:53:27.535609 | 2017-04-30T09:17:53 | 2017-04-30T09:17:53 | 85,676,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-04-13 08:45
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: removes the catalogue Product.price field
    # (introduced by the preceding 0015_product_price migration, per its name).

    # Must run after 0015, which added the field being dropped here.
    dependencies = [
        ('catalogue', '0015_product_price'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='product',
            name='price',
        ),
    ]
| [
"cndeti@gmail.com"
] | cndeti@gmail.com |
7dcae7d80631e98f00eb1837adc3c9fa7474450c | e01c5d1ee81cc4104b248be375e93ae29c4b3572 | /Sequence4/Algorithm-toolbox/Week4/submission/closestpoint-6-2.py | 27df5061138128feac6d7ae6f4c0b8f7ec3a3d67 | [] | no_license | lalitzz/DS | 7de54281a34814601f26ee826c722d123ee8bd99 | 66272a7a8c20c0c3e85aa5f9d19f29e0a3e11db1 | refs/heads/master | 2021-10-14T09:47:08.754570 | 2018-12-29T11:00:25 | 2018-12-29T11:00:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,406 | py | #Uses python3
import sys
from math import sqrt
def closest_util(Px, Py, ln):
    """Divide-and-conquer closest-pair helper.

    Args:
        Px: the first ``ln`` points, sorted by x-coordinate.
        Py: the same points, sorted by y-coordinate.
        ln: number of points under consideration.

    Returns:
        The smallest pairwise Euclidean distance among the points.
    """
    # Base case: a quadratic scan is cheapest for up to 3 points.
    if ln <= 3:
        return brute_force(Px, ln)
    mid = ln // 2
    Qx = Px[:mid]  # left half, by x
    Rx = Px[mid:]  # right half, by x
    mid_point = Px[mid][0]  # x-coordinate of the dividing vertical line
    # Partition the y-sorted list to match the halves, preserving y-order.
    # NOTE(review): when several points share x == mid_point, Qy/Ry may not
    # contain exactly the same points as Qx/Rx — confirm behavior for inputs
    # with duplicate x-coordinates.
    Qy = []
    Ry = []
    for x in Py:
        if x[0] <= mid_point:
            Qy.append(x)
        else:
            Ry.append(x)
    # Best distance found entirely within each half.
    mid1 = closest_util(Qx, Qy, mid)
    mid2 = closest_util(Rx, Ry, ln - mid)
    d = min(mid1, mid2)
    # Points within d of the dividing line (still in y-order); only these
    # can form a closer pair straddling the line.
    strip = []
    for i in range(len(Py)):
        if abs(Py[i][0] - mid_point) < d:
            strip.append(Py[i])
    return min(d, strip_closest(strip, len(strip), d))
def brute_force(Px, ln):
    """All-pairs scan: smallest pairwise distance among the first ln points."""
    best = float('inf')
    for i in range(ln):
        for j in range(i + 1, ln):
            # Euclidean distance, inlined.
            candidate = sqrt((Px[i][0] - Px[j][0]) ** 2 + (Px[i][1] - Px[j][1]) ** 2)
            if candidate < best:
                best = candidate
    return best
def dist(p1, p2):
    """Euclidean distance between points p1 and p2."""
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return sqrt(dx ** 2 + dy ** 2)
def strip_closest(strip, size, delta):
    """Closest pair restricted to the vertical strip around the dividing line.

    Args:
        strip: points within ``delta`` of the dividing line, sorted by y.
        size: number of points in ``strip``.
        delta: best distance found so far outside the strip.

    Returns:
        min(delta, smallest pairwise distance within the strip).

    Bug fix: each point must be compared with up to the next 7 points in
    y-order (the standard packing argument guarantees 7 suffices); the
    previous bound of ``min(i + 5, size)`` compared only 4 neighbors and
    could miss the true closest pair on some inputs.
    """
    _min = delta
    for i in range(size):
        for j in range(i + 1, min(i + 8, size)):
            dst = dist(strip[i], strip[j])
            if dst < _min:
                _min = dst
    return _min
def closest(P, n):
    """Minimum pairwise distance among the n points of P (divide & conquer)."""
    by_x = sorted(P, key=lambda p: p[0])
    by_y = sorted(P, key=lambda p: p[1])
    return closest_util(by_x, by_y, n)
if __name__ == '__main__':
    # Read "n" followed by n x/y integer pairs from stdin and print the
    # closest-pair distance with 9 decimal places.
    # Fixes: the raw text no longer shadows the built-in ``input``, and the
    # dead initializers ``a = []`` / ``i = 1`` have been removed.
    raw = sys.stdin.read()
    data = list(map(int, raw.split()))
    n = data[0]
    points = list(zip(data[1::2], data[2::2]))
    print("{0:.9f}".format(closest(points, n)))
"lalit.slg007@gmail.com"
] | lalit.slg007@gmail.com |
7239f386a51988cac9fdf59c7cbcaee52c5ed0b5 | 5e8d200078e64b97e3bbd1e61f83cb5bae99ab6e | /main/source/code_templates/generate_templates.py | 49dec8e6c2ed9f1142e7ef562e81803265fc9c13 | [] | no_license | MedicaicloudLink/Rosetta | 3ee2d79d48b31bd8ca898036ad32fe910c9a7a28 | 01affdf77abb773ed375b83cdbbf58439edd8719 | refs/heads/master | 2020-12-07T17:52:01.350906 | 2020-01-10T08:24:09 | 2020-01-10T08:24:09 | 232,757,729 | 2 | 6 | null | null | null | null | UTF-8 | Python | false | false | 15,963 | py | #!/usr/bin/env python
#
# (c) Copyright Rosetta Commons Member Institutions.
# (c) This file is part of the Rosetta software suite and is made available under license.
# (c) The Rosetta software is developed by the contributing members of the Rosetta Commons.
# (c) For more information, see http://www.rosettacommons.org. Questions about this can be
# (c) addressed to University of Washington CoMotion, email: license@uw.edu.
## @file generate_templates.py
## @brief Script for generating Rosetta files for classes and files
## @author Jared Adolf-Bryfogle (jadolfbr@gmail.com)
#See Readme for use.
from __future__ import print_function
from __future__ import absolute_import
import os
work_dir = os.getcwd()
if os.path.dirname(__file__): os.chdir(os.path.dirname(__file__))
from argparse import ArgumentParser
import glob
import sys
import re
import subprocess
template_types = ["src", "application", "unit_test"]
residue_selector_namespace = ["core", "select", "residue_selector"]
class GenerateRosettaTemplates(object):
    def __init__(self, template_type, parser):
        """Attach the shared --test option to ``parser``, parse the command
        line, and record the directory layout and placeholder-replacement
        table used when instantiating templates.

        Args:
            template_type: which template family to generate (a directory
                name such as "src", "application", or "unit_test").
            parser: an argparse.ArgumentParser pre-populated by the caller.
        """
        testing_args = parser.add_argument_group("Testing")
        testing_args.add_argument("--test",
                                  help = "Indicate that we are running in test mode. "
                                         "All output files will go to the current work dir.",
                                  default = False,
                                  action = "store_true",
                                  required = False)

        # With no arguments there is nothing to generate: show help and exit.
        if len(sys.argv) < 2:
            #parser.print_usage()
            parser.print_help()
            sys.exit()

        self.options = parser.parse_args()
        self.template_type = template_type
        self.start_pwd = os.getcwd()
        #os.chdir(template_type)

        # Rosetta checkout layout, resolved relative to this script's dir
        # (the module chdir'd here at import time).
        self.source_dir = os.path.abspath("..")
        self.src_dir = os.path.abspath("../src")
        self.test_dir = os.path.abspath("../test")

        #Functions to run for replacement. Must return a string.
        # Maps each "--token--" placeholder found in template files to a
        # zero-argument callback producing its substitution text.
        self.replacement = {
            "--name--" : lambda: self.get_name(),
            "--email--": lambda: self.get_email(),
            "--class--": lambda: self.get_option("class_name", fail_on_none=True),
            "--brief--": lambda: self.get_option("brief", fail_on_none=True),
            "--path--": lambda: self.get_outfile_rel_path(),
            "--path_underscore--": lambda: self.get_path_underscore(),
            "--namespace--": lambda: self.get_namespace_lines(),
            "--namespace_dot--": lambda: self.get_namespace_char_concat("."),
            "--namespace_colon--": lambda: self.get_namespace_char_concat(":"),
            "--namespace_2colon--": lambda: self.get_namespace_char_concat("::"),
            "--namespace_underscore--": lambda: self.get_namespace_char_concat("_"),
            "--end_namespace--": lambda: self.get_end_namespace(),
        }

        #### General Template Paths ###
        self.fwd_class_template = os.path.join(template_types[0], "RosettaClass.fwd.hh")
def apply(self):
if hasattr(self.options, "type") and self.options.type:
template_dir = os.path.join(self.template_type, self.options.type)
files = glob.glob(os.path.join(template_dir, '*'))
else:
template_dir = self.template_type
files = glob.glob(os.path.join(template_dir, "*"))
print("Type: ", self.template_type)
#Add forward class template for most types
if self.fwd_class_template not in files \
and self.template_type == template_types[0]\
and self.options.type != "util":
files.append(self.fwd_class_template)
#Special case residue selector creator
#if hasattr(self.options, "type") and self.options.type == "residue_selector" and \
# self.options.namespace == residue_selector_namespace:
#
# files = [f for f in files if os.path.basename(f) != "ResidueSelectorCreator.hh"]
outdir = os.path.join(self.get_base_outdir(), self.get_outfile_rel_path())
if not os.path.exists(outdir):
os.makedirs(outdir)
matches = list(self.replacement.keys())
for template in sorted(files):
extension = "."+".".join(os.path.basename(template).split(".")[1:])
if hasattr(self.options, "type") and self.options.type == "util":
out_basename = "util" + extension
elif re.search("Creator", os.path.basename(template)):
out_basename = self.get_outname()+"Creator"+extension
else:
out_basename = self.get_outname()+extension
new_file_name = os.path.join(outdir, out_basename)
print("Creating ",new_file_name)
INFILE = open(os.path.abspath( template ), 'r')
out_lines = []
for line in INFILE:
newline = line
for key in matches:
if re.search(key, line):
newline = newline.replace(key, str(self.replacement[key]()))
out_lines.append(newline)
INFILE.close()
#Write the output file ONLY if all things are OK.
OUTFILE = open(new_file_name, 'w')
for line in out_lines:
OUTFILE.write(line)
OUTFILE.close()
self.print_dev_help()
def get_base_outdir(self):
if self.options.test:
return os.path.join(work_dir,"test_src")
else:
return self.src_dir
def print_dev_help(self):
"""
Printed at the end of template generation to help devs know where things are supposed to go, etc.
"""
if hasattr(self.options, "type"):
if self.options.type == "util":
print("\ngit add "+os.path.join(self.get_base_outdir(), self.get_outfile_rel_path())+"/util"+"*")
else:
print("\nRemember to add your newly created files to the git repository:")
print("\ngit add "+os.path.join(self.get_base_outdir(), self.get_outfile_rel_path())+"/"+\
self.get_option("class_name", fail_on_none=False)+"*")
if self.options.type == "residue_selector":
if self.options.namespace[0] == "core":
print("\nRegister in (core.3): \n"+\
" "+self.get_base_outdir()+"/"+"core/init/init.cc")
else:
print("\nRegister in (protocols.7):\n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.ResidueSelectorCreators.ihh\n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.ResidueSelectorRegistrators.ihh\n")
print("\n See Wiki for How to serialize your Selector: \n" \
" "+"https://wiki.rosettacommons.org/index.php/SerializationFAQ")
elif self.options.type == "singleton":
print("\n Singletons do not need to be registered. " \
"To get the current instance from anywhere, use:\n" \
" "+self.replacement["--namespace_2colon--"]()+"::"+self.replacement["--class--"]()+"::get_instance()->")
elif self.options.type == "mover":
print("\nMover Creator should be registered in (protocols.7) \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.MoverRegistrators.ihh and \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.MoverCreators.ihh\n")
elif self.options.type == "features_reporter":
print("\nFeature Reporter Creator should be registered in (protocols.7) \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.FeaturesReporterRegistrators.ihh and \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.FeaturesReporterCreators.ihh\n")
elif self.options.type == "constraint_generator":
print("\nConstraint Generator Creator should be registered in (protocols.7) \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.ConstraintGeneratorRegistrators.ihh and \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.ConstraintGeneratorCreators.ihh\n")
elif self.options.type == "jd3_standard":
print("\n "+"This template is for a standard JD3 app, however, much more complex apps can be created. See the docs for more.\n")
elif re.search("metric_", self.options.type):
print("\nSimple Metric Creator should be registered in (protocols.7) \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.SimpleMetricRegistrators.ihh and \n" \
" "+self.get_base_outdir()+"/"+"protocols/init/init.SimpleMetricCreators.ihh\n")
elif re.search("jd3", self.options.type):
print("\nDocs can be found out https://wiki.rosettacommons.org/index.php/JD3FAQ\n")
if self.options.type == "jd3_standard_job_queen":
print("A tutorial can be found here: \n"
"https://www.rosettacommons.org/docs/wiki/development_documentation/tutorials/jd3_derived_jq/jd3_derived_jq_home\n")
elif self.options.type == "jd3_job":
print("A tutorial can be found here: \n"
"https://www.rosettacommons.org/docs/latest/development_documentation/tutorials/jd3_derived_jq/tutorial_job\n")
elif self.options.type == "jd3_job_summary":
print("A tutorial can be found here: \n "
"https://www.rosettacommons.org/docs/latest/development_documentation/tutorials/jd3_derived_jq/completed_job_summary\n")
######## Replacement Functions#############
def get_option(self, option_name, fail_on_none = True):
opts = vars(self.options)
if option_name not in opts and fail_on_none:
sys.exit(option_name+" is necessary. Pass it as an argument.")
elif not opts[option_name] and fail_on_none:
sys.exit(option_name+" is necessary. Pass it as an argument.")
elif option_name in opts:
return opts[option_name]
else:
return None
def get_name(self):
name = subprocess.check_output("git config user.name", shell=True).strip()
if sys.version_info[0] > 2:
name = name.decode(encoding="utf-8", errors="replace")
return name
def get_email(self):
email = subprocess.check_output("git config user.email", shell=True).strip()
if sys.version_info[0] > 2:
email = email.decode(encoding="utf-8", errors="replace")
return email
def get_outname(self):
return self.get_option("class_name", fail_on_none= True)
def get_outfile_rel_path(self):
"""
Get the rel path line. Ex: protocols/antibody
:rtype: str
"""
return "/".join(self.get_path_list())
def get_path_underscore(self):
"""
Get the path with underscore for ifdefs
:rtype: str
"""
return "_".join(self.get_path_list())
def get_namespace_lines(self):
"""
Get the namespace declaration lines for hh and cc file
:rtype: str
"""
return "\n".join(["namespace "+n+" {" for n in self.get_option("namespace", fail_on_none=True) ] )
def get_namespace_char_concat(self, char):
return char.join(self.get_option("namespace", fail_on_none=True))
def get_end_namespace(self):
"""
Get the end of the namespace declaration at end of file.
:rtype: str
"""
return "\n".join(["} //"+n for n in self.get_option("namespace", fail_on_none=True)][::-1])
def get_path_list(self):
"""
Get a list of the path directory hierarchy.
:rtype: list
"""
if hasattr(self.options, "dir_override") and self.options.dir_override:
#Catching things like protocols/antibody
if len(self.options.dir_override) == 1:
self.options.dir_override = self.options.dir_override[0]
#Check to make sure someone did not try to use src/test to set directory. Helps new users.
if re.search('src', "/".join(self.options.dir_override)):
sys.exit("\nERROR. Please check your path. Path is relative, src directory not needed. ( Ex - protocols/antibody )")
#Not split. We don't fail here, but instead give it as a list.
if (re.search('/', self.options.dir_override)):
return self.options.dir_override.strip().split('/')
else:
return self.options.dir_override
elif hasattr(self.options, "namespace") and self.options.namespace:
return self.options.namespace
else:
sys.exit("Path not defined. Either set the path/outdirs or pass a namespace")
#def get_res_sel_creator_path(self):
# """
# Places the residue selector creator path in the template if namespace is not core
# For ResidueSelectors, Creators are in core are contained in one one file.
# :rtype: str
# """
# if self.options.namespace == residue_selector_namespace:
# return "<core/select/residue_selector/ResidueSelectorCreators.hh>\n"
# else:
# return "<"+self.replacement["--path--"]()+"/"+self.replacement["--class--"]()+"Creator.hh>\n"
class GenerateGeneralTemplates(GenerateRosettaTemplates):
"""
Template Generator specifically for general rosetta classes and files (movers, task_ops, etc.)
"""
def __init__(self, template_type_name = template_types[0]):
self.types = sorted( [os.path.basename(d) for d in glob.glob(os.path.join(template_type_name,"*")) if os.path.isdir(d)] )
print("Found template types: "+repr(self.types))
parser = ArgumentParser(description="This class is used to generate Rosetta templates for use in any IDE. "
"See the README for more detailed instructions. ")
required = parser.add_argument_group("Required")
required.add_argument("--type",
help = "The type of template you will be needing: "+", ".join(self.types),
required = True,
metavar = "class_type",
choices = self.types)
required.add_argument("--class_name", "-c",
help = "The name of the class you are creating if not creating util files.")
required.add_argument("--brief", "-b",
help = "A brief description of the class/file. Enclose in quotes.",
required = True)
required.add_argument("--namespace",
help = "Namespace needed for file. Separate by spaces. "
"Default is to place files in this directory. "
"Ex: --namespace protocols antibody",
nargs='*',
default = [],
required = True)
optional = parser.add_argument_group("Optional")
optional.add_argument("--dir_override",
help = "List of dir names if path to output file is different than namespace.",
nargs='*',
default = [])
GenerateRosettaTemplates.__init__(self, template_type_name, parser)
if __name__ == "__main__":
generate_files = GenerateGeneralTemplates()
generate_files.apply()
| [
"36790013+MedicaicloudLink@users.noreply.github.com"
] | 36790013+MedicaicloudLink@users.noreply.github.com |
fb493a7f9431ddc6e4e4707347757a429979cb9f | 6e0001fb880d83d1d3e305e42acba93b85631838 | /Point Diagrams/PD1V1/out1/v_EQEQLTprojectGT.py | 4e6410b1e9e99b3fdac62c13dafc2d7f714accb5 | [] | no_license | pjalagna/Aryc2019 | 97b6de8248006bf19af527e4c1e35763b1277aaa | ece8ccf18305e6c65a59bee650b47b8730904bd0 | refs/heads/master | 2021-06-21T23:14:55.527195 | 2021-03-18T08:02:31 | 2021-03-18T08:02:31 | 205,817,944 | 0 | 1 | null | 2020-10-16T00:13:31 | 2019-09-02T09:03:22 | Python | UTF-8 | Python | false | false | 594 | py |
# file v_EQEQLTprojectGT.py
def main(objj,trace):
local = {} # local architecture ; dies with action
local['ds'] = []
local['nds'] = {}
if(trace == 1):
xx = raw_input("begin v_EQEQLTprojectGT")
retbox = [0,objj,trace] # init by type
# set status to retbox[0] = 0 ok -1 ng or #
retbox[0] = 0 # default is ok
# process work goes here
t = objj['ds'].pop()
if (t == "<project>"):
retbox[0] = 0 # ok
else:
objj['ds'].push(t)
retbox[0] = -1
#endif
return(retbox)
#end v_EQEQLTprojectGT
| [
"PJAlagna@Gmail.com"
] | PJAlagna@Gmail.com |
efe564a24df6338dc3c130002c9e6e40ce2caf64 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_Lag1Trend_Seasonal_DayOfMonth_AR.py | 1f63aee2753363e8e382e17a46c09ec522a83da8 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 160 | py | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['Lag1Trend'] , ['Seasonal_DayOfMonth'] , ['AR'] ); | [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
24840e6649ffe2f0b8e59a92f4cc0b9bfca31034 | c94f888541c0c430331110818ed7f3d6b27b788a | /shuziwuliu/python/setup.py | e31004b6c7feda1ea8f494f74cff8d9f5bb07c6e | [
"Apache-2.0",
"MIT"
] | permissive | alipay/antchain-openapi-prod-sdk | 48534eb78878bd708a0c05f2fe280ba9c41d09ad | 5269b1f55f1fc19cf0584dc3ceea821d3f8f8632 | refs/heads/master | 2023-09-03T07:12:04.166131 | 2023-09-01T08:56:15 | 2023-09-01T08:56:15 | 275,521,177 | 9 | 10 | MIT | 2021-03-25T02:35:20 | 2020-06-28T06:22:14 | PHP | UTF-8 | Python | false | false | 2,523 | py | # -*- coding: utf-8 -*-
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import os
from setuptools import setup, find_packages
"""
setup module for antchain_shuziwuliu.
Created on 21/03/2023
@author: Ant Chain SDK
"""
PACKAGE = "antchain_sdk_shuziwuliu"
NAME = "antchain_shuziwuliu" or "alibabacloud-package"
DESCRIPTION = "Ant Chain SHUZIWULIU SDK Library for Python"
AUTHOR = "Ant Chain SDK"
AUTHOR_EMAIL = "sdk-team@alibabacloud.com"
URL = "https://github.com/alipay/antchain-openapi-prod-sdk"
VERSION = __import__(PACKAGE).__version__
REQUIRES = [
"antchain_alipay_util>=1.0.1, <2.0.0",
"alibabacloud_tea_util>=0.3.8, <1.0.0",
"alibabacloud_rpc_util>=0.0.4, <1.0.0"
]
LONG_DESCRIPTION = ''
if os.path.exists('./README.md'):
with open("README.md", encoding='utf-8') as fp:
LONG_DESCRIPTION = fp.read()
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type='text/markdown',
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license="Apache License 2.0",
url=URL,
keywords=["antchain","shuziwuliu"],
packages=find_packages(exclude=["tests*"]),
include_package_data=True,
platforms="any",
install_requires=REQUIRES,
python_requires=">=3.6",
classifiers=(
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
"Topic :: Software Development"
)
)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
dae5b42382e8e74d2b2b579db2cbab17d24e21ff | 12a42054b156383ebbe3ccc5de4150633c66da5d | /problems/longest-consecutive-sequence/solution.py | bd284b649e4f30eb39ed840b85cb0b4ee6dcbb38 | [] | no_license | cfoust/leetcode-problems | 93c33029f74f32c64caf8294292226d199d6e272 | f5ad7866906d0a2cf2250e5972ce910bf35ce526 | refs/heads/master | 2020-03-16T23:05:45.123781 | 2018-05-11T16:41:09 | 2018-05-11T16:41:09 | 133,064,772 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | class Solution:
def longestConsecutive(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
| [
"cfoust@sqweebloid.com"
] | cfoust@sqweebloid.com |
191648db5c4ce872041af42a900a4a429edc158e | 4eb2cac1b030a129a2ba4a156d5d1ccbf2d472bb | /fullcalendar/calendario/migrations/0003_auto_20161110_2030.py | 1f31a5bbe66d999a7be787fe6a483a72a7868238 | [] | no_license | CoutinhoElias/calendario | aba3899b10a31f1b6175c943480b4fe45398cb69 | 0d519f7fee242f8856a1d6cf55b03ba9b1bbbabf | refs/heads/master | 2020-12-22T09:05:20.403642 | 2016-11-11T19:00:00 | 2016-11-11T19:00:00 | 73,497,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-10 20:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('calendario', '0002_auto_20161110_2013'),
]
operations = [
migrations.RemoveField(
model_name='avaluo',
name='avaluo_id',
),
migrations.AddField(
model_name='avaluo',
name='Estatus',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='avaluo',
name='id',
field=models.AutoField(auto_created=True, default=1, primary_key=True, serialize=False, verbose_name='ID'),
preserve_default=False,
),
]
| [
"coutinho.elias@gmail.com"
] | coutinho.elias@gmail.com |
8d160ec784e313aa01c3b5a268ffa2e46ebb06d0 | 6c083d19a4e95a5b61637db8ec042ce4a338b6b0 | /python_mongo_crud/crud/positions.py | 8dbbfd1e0b18310219f42c2d4904b5c21c0830dc | [] | no_license | zeionara/barbershop | a0269e58bbd10a2f969f574f5c4296ba69aa09ca | e4600b7b5483b2692b0fbb6ba0f5b8dbbb839e01 | refs/heads/master | 2021-03-27T10:15:17.820736 | 2017-12-22T15:27:46 | 2017-12-22T15:27:46 | 110,250,691 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | import configparser
from connection import create_session
from ming import schema
from ming.odm import FieldProperty
from ming.odm.declarative import MappedClass
from commons import EnhancingClass
collection_name = 'positions'
config = configparser.ConfigParser()
config.read('C://Users//Zerbs//accounts.sec')
session = create_session(config['mongo']['login'], config['mongo']['password'], config['mongo']['path'])
class Position(MappedClass, EnhancingClass):
class __mongometa__:
session = session
name = collection_name
_id = FieldProperty(schema.ObjectId)
name = FieldProperty(schema.String(required=True))
description = FieldProperty(schema.String(if_missing = ''))
#position = Position(name = "General hairdresser")
session.flush_all()
| [
"zeionara@gmail.com"
] | zeionara@gmail.com |
1fea4438696f38ff7f96f2e09e7f8dd183048899 | eb64b799ff1d7ef3a244bf8e6f9f4e9118d5cfcd | /homeassistant/components/tuya/sensor.py | 3ee88d2d57be6c2a8bfe21f1401e9bcab3814537 | [
"Apache-2.0"
] | permissive | JeffLIrion/home-assistant | 53966b81b5d5816679f12fc761f79e8777c738d6 | 8f4ec89be6c2505d8a59eee44de335abe308ac9f | refs/heads/dev | 2023-08-22T09:42:02.399277 | 2022-02-16T01:26:13 | 2022-02-16T01:26:13 | 136,679,169 | 5 | 2 | Apache-2.0 | 2023-09-13T06:59:25 | 2018-06-09T00:58:35 | Python | UTF-8 | Python | false | false | 36,143 | py | """Support for Tuya sensors."""
from __future__ import annotations
from dataclasses import dataclass
from tuya_iot import TuyaDevice, TuyaDeviceManager
from tuya_iot.device import TuyaDeviceStatusRange
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ELECTRIC_CURRENT_AMPERE,
ELECTRIC_POTENTIAL_VOLT,
PERCENTAGE,
POWER_KILO_WATT,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from . import HomeAssistantTuyaData
from .base import ElectricityTypeData, EnumTypeData, IntegerTypeData, TuyaEntity
from .const import (
DEVICE_CLASS_UNITS,
DOMAIN,
TUYA_DISCOVERY_NEW,
DPCode,
DPType,
TuyaDeviceClass,
UnitOfMeasurement,
)
@dataclass
class TuyaSensorEntityDescription(SensorEntityDescription):
    """Describes Tuya sensor entity."""

    # Optional sub-field name to read out of a composite status value
    # (e.g. "voltage" / "power" / "electriccurrent" for DPCode.PHASE_A in the
    # electricity-meter descriptions below). None means the status value is
    # used directly. NOTE(review): presumably decoded via ElectricityTypeData
    # in the sensor entity implementation — confirm outside this chunk.
    subkey: str | None = None
# Commonly used battery sensor descriptions, reused across several device
# categories in the SENSORS mapping below. Different Tuya categories report
# battery information under different DP codes, so all variants are listed;
# only the codes actually present on a device produce entities.
BATTERY_SENSORS: tuple[TuyaSensorEntityDescription, ...] = (
    # Battery level reported as a percentage (0-100).
    TuyaSensorEntityDescription(
        key=DPCode.BATTERY_PERCENTAGE,
        name="Battery",
        native_unit_of_measurement=PERCENTAGE,
        device_class=SensorDeviceClass.BATTERY,
        state_class=SensorStateClass.MEASUREMENT,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    # Enumerated battery state (no device class / unit — the raw enum value
    # is exposed as the sensor state).
    TuyaSensorEntityDescription(
        key=DPCode.BATTERY_STATE,
        name="Battery State",
        icon="mdi:battery",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    # Numeric battery value; no unit is declared here — presumably taken
    # from the device's status range at runtime (TODO confirm in the entity
    # implementation outside this chunk).
    TuyaSensorEntityDescription(
        key=DPCode.BATTERY_VALUE,
        name="Battery",
        device_class=SensorDeviceClass.BATTERY,
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    # Alternate DP code used by some categories for the battery value.
    TuyaSensorEntityDescription(
        key=DPCode.VA_BATTERY,
        name="Battery",
        device_class=SensorDeviceClass.BATTERY,
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
    ),
)
# All descriptions can be found here. Mostly the Integer data types in the
# default status set of each category (that don't have a set instruction)
# end up being a sensor.
# https://developer.tuya.com/en/docs/iot/standarddescription?id=K9i5ql6waswzq
SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = {
# Smart Kettle
# https://developer.tuya.com/en/docs/iot/fbh?id=K9gf484m21yq7
"bh": (
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Current Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT_F,
name="Current Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.STATUS,
name="Status",
device_class=TuyaDeviceClass.STATUS,
),
),
# CO2 Detector
# https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy
"co2bj": (
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_VALUE,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CO2_VALUE,
name="Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# CO Detector
# https://developer.tuya.com/en/docs/iot/categorycobj?id=Kaiuz3u1j6q1v
"cobj": (
TuyaSensorEntityDescription(
key=DPCode.CO_VALUE,
name="Carbon Monoxide",
device_class=SensorDeviceClass.CO,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Smart Pet Feeder
# https://developer.tuya.com/en/docs/iot/categorycwwsq?id=Kaiuz2b6vydld
"cwwsq": (
TuyaSensorEntityDescription(
key=DPCode.FEED_REPORT,
name="Last Amount",
icon="mdi:counter",
state_class=SensorStateClass.MEASUREMENT,
),
),
# Air Quality Monitor
# No specification on Tuya portal
"hjjcy": (
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_VALUE,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CO2_VALUE,
name="Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CH2O_VALUE,
name="Formaldehyde",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VOC_VALUE,
name="Volatile Organic Compound",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.PM25_VALUE,
name="Particulate Matter 2.5 µm",
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
),
# Formaldehyde Detector
# Note: Not documented
"jqbj": (
TuyaSensorEntityDescription(
key=DPCode.CO2_VALUE,
name="Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VOC_VALUE,
name="Volatile Organic Compound",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.PM25_VALUE,
name="Particulate Matter 2.5 µm",
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VA_HUMIDITY,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VA_TEMPERATURE,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CH2O_VALUE,
name="Formaldehyde",
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Methane Detector
# https://developer.tuya.com/en/docs/iot/categoryjwbj?id=Kaiuz40u98lkm
"jwbj": (
TuyaSensorEntityDescription(
key=DPCode.CH4_SENSOR_VALUE,
name="Methane",
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Switch
# https://developer.tuya.com/en/docs/iot/s?id=K9gf7o5prgf7s
"kg": (
TuyaSensorEntityDescription(
key=DPCode.CUR_CURRENT,
name="Current",
device_class=SensorDeviceClass.CURRENT,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
TuyaSensorEntityDescription(
key=DPCode.CUR_POWER,
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
TuyaSensorEntityDescription(
key=DPCode.CUR_VOLTAGE,
name="Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
),
),
# Luminance Sensor
# https://developer.tuya.com/en/docs/iot/categoryldcg?id=Kaiuz3n7u69l8
"ldcg": (
TuyaSensorEntityDescription(
key=DPCode.BRIGHT_STATE,
name="Luminosity",
icon="mdi:brightness-6",
),
TuyaSensorEntityDescription(
key=DPCode.BRIGHT_VALUE,
name="Luminosity",
device_class=SensorDeviceClass.ILLUMINANCE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_VALUE,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CO2_VALUE,
name="Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Door and Window Controller
# https://developer.tuya.com/en/docs/iot/s?id=K9gf48r5zjsy9
"mc": BATTERY_SENSORS,
# Door Window Sensor
# https://developer.tuya.com/en/docs/iot/s?id=K9gf48hm02l8m
"mcs": BATTERY_SENSORS,
# PIR Detector
# https://developer.tuya.com/en/docs/iot/categorypir?id=Kaiuz3ss11b80
"pir": BATTERY_SENSORS,
# PM2.5 Sensor
# https://developer.tuya.com/en/docs/iot/categorypm25?id=Kaiuz3qof3yfu
"pm2.5": (
TuyaSensorEntityDescription(
key=DPCode.PM25_VALUE,
name="Particulate Matter 2.5 µm",
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CH2O_VALUE,
name="Formaldehyde",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VOC_VALUE,
name="Volatile Organic Compound",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CO2_VALUE,
name="Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_VALUE,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.PM1,
name="Particulate Matter 1.0 µm",
device_class=SensorDeviceClass.PM1,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.PM10,
name="Particulate Matter 10.0 µm",
device_class=SensorDeviceClass.PM10,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Heater
# https://developer.tuya.com/en/docs/iot/categoryqn?id=Kaiuz18kih0sm
"qn": (
TuyaSensorEntityDescription(
key=DPCode.WORK_POWER,
name="Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
),
# Gas Detector
# https://developer.tuya.com/en/docs/iot/categoryrqbj?id=Kaiuz3d162ubw
"rqbj": (
TuyaSensorEntityDescription(
key=DPCode.GAS_SENSOR_VALUE,
icon="mdi:gas-cylinder",
device_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Water Detector
# https://developer.tuya.com/en/docs/iot/categorysj?id=Kaiuz3iub2sli
"sj": BATTERY_SENSORS,
# Emergency Button
# https://developer.tuya.com/en/docs/iot/categorysos?id=Kaiuz3oi6agjy
"sos": BATTERY_SENSORS,
# Smart Camera
# https://developer.tuya.com/en/docs/iot/categorysp?id=Kaiuz35leyo12
"sp": (
TuyaSensorEntityDescription(
key=DPCode.SENSOR_TEMPERATURE,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.SENSOR_HUMIDITY,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.WIRELESS_ELECTRICITY,
name="Battery",
device_class=SensorDeviceClass.BATTERY,
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
),
),
# Fingerbot
"szjqr": BATTERY_SENSORS,
# Solar Light
# https://developer.tuya.com/en/docs/iot/tynd?id=Kaof8j02e1t98
"tyndj": BATTERY_SENSORS,
# Volatile Organic Compound Sensor
# Note: Undocumented in cloud API docs, based on test device
"voc": (
TuyaSensorEntityDescription(
key=DPCode.CO2_VALUE,
name="Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.PM25_VALUE,
name="Particulate Matter 2.5 µm",
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CH2O_VALUE,
name="Formaldehyde",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_VALUE,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VOC_VALUE,
name="Volatile Organic Compound",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Thermostatic Radiator Valve
# Not documented
"wkf": BATTERY_SENSORS,
# Temperature and Humidity Sensor
# https://developer.tuya.com/en/docs/iot/categorywsdcg?id=Kaiuz3hinij34
"wsdcg": (
TuyaSensorEntityDescription(
key=DPCode.VA_TEMPERATURE,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.VA_HUMIDITY,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_VALUE,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.BRIGHT_VALUE,
name="Luminosity",
device_class=SensorDeviceClass.ILLUMINANCE,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Pressure Sensor
# https://developer.tuya.com/en/docs/iot/categoryylcg?id=Kaiuz3kc2e4gm
"ylcg": (
TuyaSensorEntityDescription(
key=DPCode.PRESSURE_VALUE,
device_class=SensorDeviceClass.PRESSURE,
state_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Smoke Detector
# https://developer.tuya.com/en/docs/iot/categoryywbj?id=Kaiuz3f6sf952
"ywbj": (
TuyaSensorEntityDescription(
key=DPCode.SMOKE_SENSOR_VALUE,
name="Smoke Amount",
icon="mdi:smoke-detector",
entity_category=EntityCategory.DIAGNOSTIC,
device_class=SensorStateClass.MEASUREMENT,
),
*BATTERY_SENSORS,
),
# Vibration Sensor
# https://developer.tuya.com/en/docs/iot/categoryzd?id=Kaiuz3a5vrzno
"zd": BATTERY_SENSORS,
# Smart Electricity Meter
# https://developer.tuya.com/en/docs/iot/smart-meter?id=Kaiuz4gv6ack7
"zndb": (
TuyaSensorEntityDescription(
key=DPCode.FORWARD_ENERGY_TOTAL,
name="Total Energy",
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_A,
name="Phase A Current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=SensorStateClass.MEASUREMENT,
subkey="electriccurrent",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_A,
name="Phase A Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_KILO_WATT,
subkey="power",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_A,
name="Phase A Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
subkey="voltage",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_B,
name="Phase B Current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=SensorStateClass.MEASUREMENT,
subkey="electriccurrent",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_B,
name="Phase B Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_KILO_WATT,
subkey="power",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_B,
name="Phase B Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
subkey="voltage",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_C,
name="Phase C Current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=SensorStateClass.MEASUREMENT,
subkey="electriccurrent",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_C,
name="Phase C Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_KILO_WATT,
subkey="power",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_C,
name="Phase C Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
subkey="voltage",
),
),
# Circuit Breaker
# https://developer.tuya.com/en/docs/iot/dlq?id=Kb0kidk9enyh8
"dlq": (
TuyaSensorEntityDescription(
key=DPCode.TOTAL_FORWARD_ENERGY,
name="Total Energy",
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_A,
name="Phase A Current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=SensorStateClass.MEASUREMENT,
subkey="electriccurrent",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_A,
name="Phase A Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_KILO_WATT,
subkey="power",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_A,
name="Phase A Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
subkey="voltage",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_B,
name="Phase B Current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=SensorStateClass.MEASUREMENT,
subkey="electriccurrent",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_B,
name="Phase B Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_KILO_WATT,
subkey="power",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_B,
name="Phase B Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
subkey="voltage",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_C,
name="Phase C Current",
device_class=SensorDeviceClass.CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=SensorStateClass.MEASUREMENT,
subkey="electriccurrent",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_C,
name="Phase C Power",
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=POWER_KILO_WATT,
subkey="power",
),
TuyaSensorEntityDescription(
key=DPCode.PHASE_C,
name="Phase C Voltage",
device_class=SensorDeviceClass.VOLTAGE,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
subkey="voltage",
),
),
# Robot Vacuum
# https://developer.tuya.com/en/docs/iot/fsd?id=K9gf487ck1tlo
"sd": (
TuyaSensorEntityDescription(
key=DPCode.CLEAN_AREA,
name="Cleaning Area",
icon="mdi:texture-box",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.CLEAN_TIME,
name="Cleaning Time",
icon="mdi:progress-clock",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TOTAL_CLEAN_AREA,
name="Total Cleaning Area",
icon="mdi:texture-box",
state_class=SensorStateClass.TOTAL_INCREASING,
),
TuyaSensorEntityDescription(
key=DPCode.TOTAL_CLEAN_TIME,
name="Total Cleaning Time",
icon="mdi:history",
state_class=SensorStateClass.TOTAL_INCREASING,
),
TuyaSensorEntityDescription(
key=DPCode.TOTAL_CLEAN_COUNT,
name="Total Cleaning Times",
icon="mdi:counter",
state_class=SensorStateClass.TOTAL_INCREASING,
),
TuyaSensorEntityDescription(
key=DPCode.DUSTER_CLOTH,
name="Duster Cloth Life",
icon="mdi:ticket-percent-outline",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.EDGE_BRUSH,
name="Side Brush Life",
icon="mdi:ticket-percent-outline",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.FILTER_LIFE,
name="Filter Life",
icon="mdi:ticket-percent-outline",
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.ROLL_BRUSH,
name="Rolling Brush Life",
icon="mdi:ticket-percent-outline",
state_class=SensorStateClass.MEASUREMENT,
),
),
# Curtain
# https://developer.tuya.com/en/docs/iot/s?id=K9gf48qy7wkre
"cl": (
TuyaSensorEntityDescription(
key=DPCode.TIME_TOTAL,
name="Last Operation Duration",
entity_category=EntityCategory.DIAGNOSTIC,
icon="mdi:progress-clock",
),
),
# Humidifier
# https://developer.tuya.com/en/docs/iot/s?id=K9gf48qwjz0i3
"jsq": (
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY_CURRENT,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT_F,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.LEVEL_CURRENT,
name="Water Level",
entity_category=EntityCategory.DIAGNOSTIC,
icon="mdi:waves-arrow-up",
),
),
# Air Purifier
# https://developer.tuya.com/en/docs/iot/s?id=K9gf48r41mn81
"kj": (
TuyaSensorEntityDescription(
key=DPCode.FILTER,
name="Filter Utilization",
entity_category=EntityCategory.DIAGNOSTIC,
icon="mdi:ticket-percent-outline",
),
TuyaSensorEntityDescription(
key=DPCode.PM25,
name="Particulate Matter 2.5 µm",
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:molecule",
),
TuyaSensorEntityDescription(
key=DPCode.TEMP,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.HUMIDITY,
name="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TVOC,
name="Total Volatile Organic Compound",
device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.ECO2,
name="Concentration of Carbon Dioxide",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
TuyaSensorEntityDescription(
key=DPCode.TOTAL_TIME,
name="Total Operating Time",
icon="mdi:history",
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
TuyaSensorEntityDescription(
key=DPCode.TOTAL_PM,
name="Total Absorption of Particles",
icon="mdi:texture-box",
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
TuyaSensorEntityDescription(
key=DPCode.AIR_QUALITY,
name="Air quality",
icon="mdi:air-filter",
device_class=TuyaDeviceClass.AIR_QUALITY,
),
),
# Fan
# https://developer.tuya.com/en/docs/iot/s?id=K9gf48quojr54
"fs": (
TuyaSensorEntityDescription(
key=DPCode.TEMP_CURRENT,
name="Temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
),
}
# Socket (duplicate of `kg`)
# https://developer.tuya.com/en/docs/iot/s?id=K9gf7o5prgf7s
# NOTE: the aliases below share the exact same description tuple object as
# the "kg" (switch) category; they are not copies.
SENSORS["cz"] = SENSORS["kg"]
# Power Socket (duplicate of `kg`)
# https://developer.tuya.com/en/docs/iot/s?id=K9gf7o5prgf7s
SENSORS["pc"] = SENSORS["kg"]
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up Tuya sensor dynamically through Tuya discovery."""
    hass_data: HomeAssistantTuyaData = hass.data[DOMAIN][entry.entry_id]
    manager = hass_data.device_manager

    @callback
    def async_discover_device(device_ids: list[str]) -> None:
        """Create sensor entities for the given Tuya device ids."""
        new_entities: list[TuyaSensorEntity] = []
        for dev_id in device_ids:
            device = manager.device_map[dev_id]
            # A category without sensor descriptions contributes nothing;
            # only data points actually reported by the device become entities.
            descriptions = SENSORS.get(device.category) or ()
            new_entities.extend(
                TuyaSensorEntity(device, manager, description)
                for description in descriptions
                if description.key in device.status
            )
        async_add_entities(new_entities)

    # Add entities for every device known right now ...
    async_discover_device(list(manager.device_map))
    # ... and subscribe for devices discovered later.
    entry.async_on_unload(
        async_dispatcher_connect(hass, TUYA_DISCOVERY_NEW, async_discover_device)
    )
class TuyaSensorEntity(TuyaEntity, SensorEntity):
    """Tuya Sensor Entity.

    Resolves the backing data point's type (integer/enum/...) once in
    ``__init__`` and reconciles the Tuya-reported unit of measurement with
    Home Assistant's per-device-class unit validation table.
    """
    entity_description: TuyaSensorEntityDescription
    # Metadata about the backing data point, resolved in __init__.
    _status_range: TuyaDeviceStatusRange | None = None
    _type: DPType | None = None
    _type_data: IntegerTypeData | EnumTypeData | None = None
    _uom: UnitOfMeasurement | None = None
    def __init__(
        self,
        device: TuyaDevice,
        device_manager: TuyaDeviceManager,
        description: TuyaSensorEntityDescription,
    ) -> None:
        """Init Tuya sensor."""
        super().__init__(device, device_manager)
        self.entity_description = description
        # ``subkey`` distinguishes multiple entities built from a single DP
        # (e.g. phase_a current/power/voltage), so it is part of the unique id.
        self._attr_unique_id = (
            f"{super().unique_id}{description.key}{description.subkey or ''}"
        )
        # Determine the data point's type; integer takes precedence, then
        # enum, otherwise fall back to whatever the device reports.
        if int_type := self.find_dpcode(description.key, dptype=DPType.INTEGER):
            self._type_data = int_type
            self._type = DPType.INTEGER
            if description.native_unit_of_measurement is None:
                self._attr_native_unit_of_measurement = int_type.unit
        elif enum_type := self.find_dpcode(
            description.key, dptype=DPType.ENUM, prefer_function=True
        ):
            self._type_data = enum_type
            self._type = DPType.ENUM
        else:
            self._type = self.get_dptype(DPCode(description.key))
        # Logic to ensure the set device class and API received Unit Of Measurement
        # match Home Assistants requirements.
        if (
            self.device_class is not None
            and not self.device_class.startswith(DOMAIN)
            and description.native_unit_of_measurement is None
        ):
            # We cannot have a device class, if the UOM isn't set or the
            # device class cannot be found in the validation mapping.
            if (
                self.native_unit_of_measurement is None
                or self.device_class not in DEVICE_CLASS_UNITS
            ):
                self._attr_device_class = None
                return
            uoms = DEVICE_CLASS_UNITS[self.device_class]
            # Try the unit verbatim first, then lower-cased (Tuya casing varies).
            self._uom = uoms.get(self.native_unit_of_measurement) or uoms.get(
                self.native_unit_of_measurement.lower()
            )
            # Unknown unit of measurement, device class should not be used.
            if self._uom is None:
                self._attr_device_class = None
                return
            # If we still have a device class, we should not use an icon
            if self.device_class:
                self._attr_icon = None
            # Found unit of measurement, use the standardized Unit
            # Use the target conversion unit (if set)
            self._attr_native_unit_of_measurement = (
                self._uom.conversion_unit or self._uom.unit
            )
    @property
    def native_value(self) -> StateType:
        """Return the value reported by the sensor."""
        # Only continue if data type is known
        if self._type not in (
            DPType.INTEGER,
            DPType.STRING,
            DPType.ENUM,
            DPType.JSON,
            DPType.RAW,
        ):
            return None
        # Raw value
        value = self.device.status.get(self.entity_description.key)
        if value is None:
            return None
        # Scale integer/float value
        if isinstance(self._type_data, IntegerTypeData):
            scaled_value = self._type_data.scale_value(value)
            if self._uom and self._uom.conversion_fn is not None:
                return self._uom.conversion_fn(scaled_value)
            return scaled_value
        # Unexpected enum value
        if (
            isinstance(self._type_data, EnumTypeData)
            and value not in self._type_data.range
        ):
            return None
        # Get subkey value from Json string.
        if self._type is DPType.JSON:
            if self.entity_description.subkey is None:
                return None
            values = ElectricityTypeData.from_json(value)
            return getattr(values, self.entity_description.subkey)
        if self._type is DPType.RAW:
            if self.entity_description.subkey is None:
                return None
            values = ElectricityTypeData.from_raw(value)
            return getattr(values, self.entity_description.subkey)
        # Valid string or enum value
        return value
| [
"noreply@github.com"
] | JeffLIrion.noreply@github.com |
2330592ed088ee598023957ab4a8652c45eddcc4 | da4c40f8f21b0813c9a935479c56230eddf71dd3 | /simple_rl/ltl-old/experiments/run_experiments.py | cc1af01de47102559286607165c2b55b9ebd459b | [] | no_license | h2r/ltl-amdp | f804bf4ad083dc9d7f0f960af535411b1cbc6f9a | 5aa3a4d6fa060b0e9f151c0efee45eea61c799d7 | refs/heads/master | 2022-07-22T07:52:37.719118 | 2019-08-29T06:16:32 | 2019-08-29T06:16:32 | 187,088,228 | 4 | 1 | null | 2022-06-21T21:59:20 | 2019-05-16T19:21:08 | Python | UTF-8 | Python | false | false | 8,040 | py | import time
import os
from simple_rl.ltl.AMDP.RoomCubePlainMDPClass import RoomCubePlainMDP
from simple_rl.ltl.AMDP.LtlAMDPClass import LTLAMDP
from simple_rl.ltl.settings.build_cube_env_1 import build_cube_env
from simple_rl.planning import ValueIteration
def run_plain_pMDP(init_loc, ltl_formula, cube_env, ap_maps, verbose=False):
    """Plan for ``ltl_formula`` on the flat (non-hierarchical) product MDP.

    Runs value iteration on a ``RoomCubePlainMDP`` and extracts a plan
    from the initial state.

    Returns a tuple ``(computing_time, plan_length, flag_success,
    state_seq, action_seq, backup_count)`` where ``flag_success`` is
    1 on success, 0 on failure and -1 when no plan was produced.
    """
    t_start = time.time()
    mdp = RoomCubePlainMDP(init_loc=init_loc, ltl_formula=ltl_formula,
                           env_file=[cube_env], ap_maps=ap_maps)
    planner = ValueIteration(mdp, sample_rate=1, max_iterations=50)
    planner.run_vi()
    action_seq, state_seq = planner.plan(mdp.get_init_state())
    computing_time = time.time() - t_start
    if verbose:
        print("=====================================================")
        print("Plain: Plan for ", ltl_formula)
        for step, action in enumerate(action_seq):
            room_number, floor_number = mdp._get_abstract_number(state_seq[step])
            print(
                "\t {} in room {} on the floor {}, {}".format(state_seq[step], room_number, floor_number, action))
        room_number, floor_number = mdp._get_abstract_number(state_seq[-1])
        print("\t {} in room {} on the floor {}".format(state_seq[-1], room_number, floor_number))
    # -1: no plan found; 1: final automaton state is accepting; 0: otherwise.
    if len(state_seq) <= 1:
        flag_success = -1
    elif mdp.automata.aut_spot.state_is_accepting(state_seq[-1].q):
        flag_success = 1
    else:
        flag_success = 0
    return (computing_time, len(action_seq), flag_success, state_seq,
            action_seq, planner.get_num_backups_in_recent_run())
def run_aMDP(init_loc, ltl_formula, cube_env, ap_maps, verbose=False):
    """Plan for ``ltl_formula`` with the hierarchical AP-MDP solver.

    Returns ``(computing_time, plan_length, flag_success, state_seqs,
    action_seqs, backup_count)``; ``flag_success`` is 1 on success,
    0 on failure and -1 when no actions were produced.
    """
    t_start = time.time()
    solver = LTLAMDP(ltl_formula, ap_maps, env_file=[cube_env], slip_prob=0.0,
                     verbose=verbose)
    sseq, aseq, len_actions, backup_num = solver.solve(init_loc)
    computing_time = time.time() - t_start
    # Success iff the last reached automaton state has q == 1.
    if len_actions == 0:
        flag_success = -1
    elif sseq[-1][-1].q == 1:
        flag_success = 1
    else:
        flag_success = 0
    return computing_time, len_actions, flag_success, sseq, aseq, backup_num
def run_aMDP_lowest(init_loc, ltl_formula, cube_env, ap_maps, verbose=False):
    """Plan with the AP-MDP solver forced to operate at the lowest level.

    Same contract as :func:`run_aMDP`, but the decomposed LTL sub-tasks
    are all solved at level 0 (``FLAG_LOWEST=True``).
    """
    t_start = time.time()
    solver = LTLAMDP(ltl_formula, ap_maps, env_file=[cube_env], slip_prob=0.0,
                     verbose=verbose)
    sseq, aseq, len_actions, backup_num = solver.solve(init_loc, FLAG_LOWEST=True)
    computing_time = time.time() - t_start
    # Success iff the last reached automaton state has q == 1.
    if len_actions == 0:
        flag_success = -1
    elif sseq[-1][-1].q == 1:
        flag_success = 1
    else:
        flag_success = 0
    return computing_time, len_actions, flag_success, sseq, aseq, backup_num
if __name__ == '__main__':
    # Build the cube world once; it is shared by every planner below.
    cube_env1 = build_cube_env()
    # define scenarios for the small environment (num_env = 1)
    formula_set1 = ['Fa', 'F (a & F b)', 'F(a & F( b & Fc))', '~a U b', 'F (a & F b)','F(a & F( b & Fc))']
    # ap_maps: proposition -> [abstraction level, 'state', target]; level-0
    # targets look like (x, y, z) cells, higher levels use state ids — TODO confirm.
    ap_maps_set1 = {}
    ap_maps_set1[0] = {'a': [2, 'state', 3]}
    ap_maps_set1[1] = {'a': [0, 'state', (2,4,1)], 'b': [1,'state', 7]}
    ap_maps_set1[2] = {'a': [1, 'state', 9], 'b': [2, 'state', 3], 'c': [1, 'state', 17]}
    ap_maps_set1[3] = {'a': [1, 'state', 2], 'b': [2, 'state', 3]}
    ap_maps_set1[4] = {'a': [1, 'state', 9], 'b': [1, 'state', 17]}
    ap_maps_set1[5] = {'c': [0, 'state', (1, 4, 3)], 'a': [2, 'state', 1], 'b': [2, 'state', 2]}
    # define scenarios for a large environment (num_env = 2)
    formula_set2 = ['Fa', 'Fa', 'F (a & F b)', '~a U b', 'F(a & F( b & F c))']
    ap_maps_set2 = {}
    ap_maps_set2[0] = {'a': [1, 'state', 8]}
    ap_maps_set2[1] = {'a': [2, 'state', 6]}
    ap_maps_set2[2] = {'a': [2, 'state', 4], 'b': [1, 'state', 6]}
    ap_maps_set2[3] = {'a': [1, 'state', 11], 'b': [1, 'state', 12]}
    ap_maps_set2[4] = {'a': [1, 'state', 5], 'b': [2, 'state', 3], 'c': [0, 'state', (11, 11, 3)]}
    # example scenarios (num_env = 3)
    formula_set3 = ['Fa', '~a U b', 'F((a | b) & F c)', 'F (a & F b)']
    ap_maps_set3 = {}
    ap_maps_set3[0] = {'a': [2, 'state', 3]}
    ap_maps_set3[1] = {'a': [1, 'state', 2], 'b': [2, 'state', 3]}
    ap_maps_set3[2] = {'a': [2, 'state', 2], 'b': [1, 'state', 2], 'c': [2, 'state', 1]}
    ap_maps_set3[3] = {'a': [2, 'state', 2], 'b': [1, 'state', 8]}
    # simulation settings
    run_num = 1.0 # number of repetitions per case (float so the averages below divide cleanly)
    flag_verbose = False # Show result paths
    flag_save = False # append a summary to results/result_time.txt
    num_env = 1 #environment name : build_cube_env(num_env).py 3: for examples
    init_loc = (1,1,1)
    # select the world (1: small, 2: large cube world)
    # NOTE(review): eval picks formula_set{num_env}/ap_maps_set{num_env} by
    # name; num_env must be 1, 2 or 3 or this raises NameError.
    formula_set = eval("formula_set{}".format(num_env))
    ap_maps_set = eval("ap_maps_set{}".format(num_env))
    for num_case in [5]:
        print("+++++++++++++++++ Case: {} +++++++++++++++++++".format(num_case))
        if flag_save:
            # NOTE(review): `file` shadows the Python 2 builtin of the same name.
            file = open("{}/results/result_time.txt".format(os.getcwd()), "a")
        ltl_formula = formula_set[num_case]
        ap_maps = ap_maps_set[num_case]
        # initialize per-case accumulators (summed over trials, averaged below)
        run_time_plain = 0.0
        run_time_amdp = 0.0
        run_time_amdp_lowest = 0.0
        run_len_plain = 0.0
        run_len_amdp = 0.0
        run_len_amdp_lowest = 0.0
        for i in range(int(run_num)):
            print("* Trial {}".format(i))
            # Experiment: AMDP
            print("[Trial {}] AP-MDP ----------------------------------------".format(i))
            t, l, _, _,_, backup= run_aMDP(init_loc, ltl_formula, cube_env1, ap_maps, verbose=flag_verbose)
            run_time_amdp = run_time_amdp + t
            run_len_amdp = run_len_amdp + l
            print(" [AP-MDP] Time: {} seconds, the number of actions: {}, backup: {}"
            .format(round(t, 3), l, backup))
            # Experiment: decomposed LTL and solve it at the lowest level
            print("[Trial {}] AP-MDP at level 0 ----------------------------------------".format(i))
            t, l, _, _,_, backup = run_aMDP_lowest(init_loc, ltl_formula, cube_env1, ap_maps, verbose=flag_verbose)
            run_time_amdp_lowest = run_time_amdp_lowest + t
            run_len_amdp_lowest = run_len_amdp_lowest + l
            print(" [AP-MDP at level 0] Time: {} seconds, the number of actions: {}, backup: {}"
            .format(round(t, 3), l, backup))
            # Experiment: Plain MDP
            print("[Trial {}] Plain ----------------------------------------".format(i))
            t, l, _, _,_, backup = run_plain_pMDP(init_loc, ltl_formula, cube_env1, ap_maps, verbose=flag_verbose)
            run_time_plain = run_time_plain + t
            run_len_plain = run_len_plain + l
            print(" [Plain] Time: {} seconds, the number of actions: {}, backup: {}"
            .format(round(t, 3), l, backup))
        # Per-case averages over run_num trials
        print("* Summary: " + ltl_formula)
        print(" AP-MDP: {}s, {}".format(round(run_time_amdp / run_num, 3), run_len_amdp / run_num))
        print(" AP-MDP at level 0: {}s, {}".format(round(run_time_amdp_lowest / run_num, 3), run_len_amdp_lowest / run_num))
        print(" Product-MDP: {}s, {}".format(round(run_time_plain / run_num, 3), run_len_plain / run_num))
        if flag_save:
            file.write("=== Env {} ==============================================\n".format(num_env))
            file.write("Run {} times\n".format(run_num))
            file.write("Task:\t"+ltl_formula+"\n")
            file.write("AP:\t{}\n".format(ap_maps))
            file.write("AP-MDP:\t{}s, {}\n".format(round(run_time_amdp / run_num, 3), run_len_amdp / run_num))
            file.write("AP-MDP at level 0:\t{}s, {}\n".format(round(run_time_amdp_lowest / run_num, 3),
                                                             run_len_amdp_lowest / run_num))
            file.write("Product-MDP:\t{}s, {}\n".format(round(run_time_plain / run_num, 3), run_len_plain / run_num))
            file.close()
| [
"romapatel996@gmail.com"
] | romapatel996@gmail.com |
e5a1dedc7088d5109287a64cf316c8912507a8f1 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-dataartsstudio/huaweicloudsdkdataartsstudio/v1/model/show_apps_overview_response.py | 71c0824500634e0b099b139b6237cc2e18cf49b0 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 7,518 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowAppsOverviewResponse(SdkResponse):
    """Response model for the DataArts Studio "show apps overview" statistics API.

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    # Attributes masked as "****" in to_dict(); none for this model.
    sensitive_list = []
    openapi_types = {
        'apply_num': 'int',
        'call_num': 'int',
        'success_num': 'int',
        'fail_num': 'int',
        'legal_num': 'int',
        'illegal_num': 'int'
    }
    attribute_map = {
        'apply_num': 'apply_num',
        'call_num': 'call_num',
        'success_num': 'success_num',
        'fail_num': 'fail_num',
        'legal_num': 'legal_num',
        'illegal_num': 'illegal_num'
    }
    def __init__(self, apply_num=None, call_num=None, success_num=None, fail_num=None, legal_num=None, illegal_num=None):
        """ShowAppsOverviewResponse
        The model defined in huaweicloud sdk
        :param apply_num: Number of access applications
        :type apply_num: int
        :param call_num: Total number of calls
        :type call_num: int
        :param success_num: Number of successful calls (data retrieved successfully)
        :type success_num: int
        :param fail_num: Number of failed calls (data retrieval failed)
        :type fail_num: int
        :param legal_num: Number of valid calls (passed verification)
        :type legal_num: int
        :param illegal_num: Number of invalid calls (failed verification)
        :type illegal_num: int
        """
        super(ShowAppsOverviewResponse, self).__init__()
        self._apply_num = None
        self._call_num = None
        self._success_num = None
        self._fail_num = None
        self._legal_num = None
        self._illegal_num = None
        self.discriminator = None
        if apply_num is not None:
            self.apply_num = apply_num
        if call_num is not None:
            self.call_num = call_num
        if success_num is not None:
            self.success_num = success_num
        if fail_num is not None:
            self.fail_num = fail_num
        if legal_num is not None:
            self.legal_num = legal_num
        if illegal_num is not None:
            self.illegal_num = illegal_num
    @property
    def apply_num(self):
        """Gets the apply_num of this ShowAppsOverviewResponse.
        Number of access applications
        :return: The apply_num of this ShowAppsOverviewResponse.
        :rtype: int
        """
        return self._apply_num
    @apply_num.setter
    def apply_num(self, apply_num):
        """Sets the apply_num of this ShowAppsOverviewResponse.
        Number of access applications
        :param apply_num: The apply_num of this ShowAppsOverviewResponse.
        :type apply_num: int
        """
        self._apply_num = apply_num
    @property
    def call_num(self):
        """Gets the call_num of this ShowAppsOverviewResponse.
        Total number of calls
        :return: The call_num of this ShowAppsOverviewResponse.
        :rtype: int
        """
        return self._call_num
    @call_num.setter
    def call_num(self, call_num):
        """Sets the call_num of this ShowAppsOverviewResponse.
        Total number of calls
        :param call_num: The call_num of this ShowAppsOverviewResponse.
        :type call_num: int
        """
        self._call_num = call_num
    @property
    def success_num(self):
        """Gets the success_num of this ShowAppsOverviewResponse.
        Number of successful calls (data retrieved successfully)
        :return: The success_num of this ShowAppsOverviewResponse.
        :rtype: int
        """
        return self._success_num
    @success_num.setter
    def success_num(self, success_num):
        """Sets the success_num of this ShowAppsOverviewResponse.
        Number of successful calls (data retrieved successfully)
        :param success_num: The success_num of this ShowAppsOverviewResponse.
        :type success_num: int
        """
        self._success_num = success_num
    @property
    def fail_num(self):
        """Gets the fail_num of this ShowAppsOverviewResponse.
        Number of failed calls (data retrieval failed)
        :return: The fail_num of this ShowAppsOverviewResponse.
        :rtype: int
        """
        return self._fail_num
    @fail_num.setter
    def fail_num(self, fail_num):
        """Sets the fail_num of this ShowAppsOverviewResponse.
        Number of failed calls (data retrieval failed)
        :param fail_num: The fail_num of this ShowAppsOverviewResponse.
        :type fail_num: int
        """
        self._fail_num = fail_num
    @property
    def legal_num(self):
        """Gets the legal_num of this ShowAppsOverviewResponse.
        Number of valid calls (passed verification)
        :return: The legal_num of this ShowAppsOverviewResponse.
        :rtype: int
        """
        return self._legal_num
    @legal_num.setter
    def legal_num(self, legal_num):
        """Sets the legal_num of this ShowAppsOverviewResponse.
        Number of valid calls (passed verification)
        :param legal_num: The legal_num of this ShowAppsOverviewResponse.
        :type legal_num: int
        """
        self._legal_num = legal_num
    @property
    def illegal_num(self):
        """Gets the illegal_num of this ShowAppsOverviewResponse.
        Number of invalid calls (failed verification)
        :return: The illegal_num of this ShowAppsOverviewResponse.
        :rtype: int
        """
        return self._illegal_num
    @illegal_num.setter
    def illegal_num(self, illegal_num):
        """Sets the illegal_num of this ShowAppsOverviewResponse.
        Number of invalid calls (failed verification)
        :param illegal_num: The illegal_num of this ShowAppsOverviewResponse.
        :type illegal_num: int
        """
        self._illegal_num = illegal_num
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask values of attributes listed in sensitive_list.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        # NOTE(review): generated SDK code — relies on the third-party
        # `simplejson` package and a Python 2 encoding workaround.
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ShowAppsOverviewResponse):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
b6928ad9d96f86fb650c98569446ea9a1639c7cb | 2455062787d67535da8be051ac5e361a097cf66f | /Producers/BSUB/TrigProd_amumu_a5_dR5/trigger_amumu_producer_cfg_TrigProd_amumu_a5_dR5_833.py | 9a592b4e175bba45107c26d155b4e8bbc807753c | [] | no_license | kmtos/BBA-RecoLevel | 6e153c08d5ef579a42800f6c11995ee55eb54846 | 367adaa745fbdb43e875e5ce837c613d288738ab | refs/heads/master | 2021-01-10T08:33:45.509687 | 2015-12-04T09:20:14 | 2015-12-04T09:20:14 | 43,355,189 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,360 | py | import FWCore.ParameterSet.Config as cms
# CMSSW configuration: PAT trigger-object matching for di-muon (Onia) events.
process = cms.Process("PAT")
#process.load("BBA/Analyzer/bbaanalyzer_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.EventContent.EventContent_cff')
process.load("Configuration.Geometry.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff")
process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff")
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')
process.load("Configuration.StandardSequences.MagneticField_cff")
####################
#   Message Logger
####################
process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(100)
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
## switch to unscheduled mode
process.options.allowUnscheduled = cms.untracked.bool(True)
## NOTE: overrides the -1 (all events) setting above; only 500 events run.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(500)
)
####################
#  Input File List
####################
# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('root://eoscms//eos/cms/store/user/ktos/RECO_Step3_amumu_a5/RECO_Step3_amumu_a5_833.root'),
    secondaryFileNames = cms.untracked.vstring()
)
############################################################
# Defining matching in DeltaR, sorting by best DeltaR
############################################################
process.mOniaTrigMatch = cms.EDProducer("PATTriggerMatcherDRLessByR",
    src     = cms.InputTag( 'slimmedMuons' ),
    matched = cms.InputTag( 'patTrigger' ),    # selections of trigger objects
    matchedCuts = cms.string( 'type( "TriggerMuon" ) && path( "HLT_Mu16_TkMu0_dEta18_Onia*")' ),  # input does not yet have the 'saveTags' parameter in HLT
    maxDPtRel   = cms.double( 0.5 ),   # no effect here
    maxDeltaR   = cms.double( 0.3 ),   #### selection of matches
    maxDeltaEta = cms.double( 0.2 ),   # no effect here
    resolveAmbiguities    = cms.bool( True ),# definition of matcher output
    resolveByMatchQuality = cms.bool( True )# definition of matcher output
)
# talk to output module
process.out = cms.OutputModule("PoolOutputModule",
    fileName = cms.untracked.string("file:RECO_Step3_amumu_a5_TrigProd_833.root"),
    outputCommands = process.MINIAODSIMEventContent.outputCommands
)
# Keep only the collections needed downstream on top of a full drop.
process.out.outputCommands += [ 'drop *_*_*_*',
                                'keep *_*slimmed*_*_*',
                                'keep *_pfTausEI_*_*',
                                'keep *_hpsPFTauProducer_*_*',
                                'keep *_hltTriggerSummaryAOD_*_*',
                                'keep *_TriggerResults_*_HLT',
                                'keep *_patTrigger*_*_*',
                                'keep *_prunedGenParticles_*_*',
                                'keep *_mOniaTrigMatch_*_*'
                                ]
################################################################################
#   Running the matching and setting the trigger on
################################################################################
from PhysicsTools.PatAlgos.tools.trigTools import *
switchOnTrigger( process ) # This is optional and can be omitted.
switchOnTriggerMatching( process, triggerMatchers = [ 'mOniaTrigMatch'
                                                      ])
process.outpath = cms.EndPath(process.out)
| [
"kmtos@ucdavis.edu"
] | kmtos@ucdavis.edu |
3e82453e36d6430cadc7c3a2f20dea5381266dc3 | 5a5401e333bb3cc7e3f35bf08738f8fd0adb938f | /lib/python2.7/site-packages/twisted/words/test/test_msn.py | c700749d93560ff9eba01bef1fee22392d32948c | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | ndrberna/Google_crawler | 44b3be0f2a561e5df4ed7747bb0d485107411954 | b109db5edbfae79b1047b4798cf959dd3f3a5585 | refs/heads/master | 2016-09-05T17:27:28.031647 | 2015-01-30T12:46:59 | 2015-01-30T12:46:59 | 30,070,474 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 21,119 | py | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for L{twisted.words.protocols.msn}.
"""
import StringIO
from hashlib import md5
from twisted.internet.defer import Deferred
from twisted.protocols import loopback
from twisted.python.reflect import requireModule
from twisted.test.proto_helpers import StringTransport, StringIOWithoutClosing
from twisted.trial import unittest
# t.w.p.msn requires an HTTP client
try:
# So try to get one - do it directly instead of catching an ImportError
# from t.w.p.msn so that other problems which cause that module to fail
# to import don't cause the tests to be skipped.
requireModule('twisted.web.client')
except ImportError:
# If there isn't one, we're going to skip all the tests.
msn = None
else:
# Otherwise importing it should work, so do it.
from twisted.words.protocols import msn
def printError(f):
print f
class PassportTests(unittest.TestCase):
def setUp(self):
self.result = []
self.deferred = Deferred()
self.deferred.addCallback(lambda r: self.result.append(r))
self.deferred.addErrback(printError)
def test_nexus(self):
"""
When L{msn.PassportNexus} receives enough information to identify the
address of the login server, it fires the L{Deferred} passed to its
initializer with that address.
"""
protocol = msn.PassportNexus(self.deferred, 'https://foobar.com/somepage.quux')
headers = {
'Content-Length' : '0',
'Content-Type' : 'text/html',
'PassportURLs' : 'DARealm=Passport.Net,DALogin=login.myserver.com/,DAReg=reg.myserver.com'
}
transport = StringTransport()
protocol.makeConnection(transport)
protocol.dataReceived('HTTP/1.0 200 OK\r\n')
for (h, v) in headers.items():
protocol.dataReceived('%s: %s\r\n' % (h,v))
protocol.dataReceived('\r\n')
self.assertEqual(self.result[0], "https://login.myserver.com/")
def _doLoginTest(self, response, headers):
protocol = msn.PassportLogin(self.deferred,'foo@foo.com','testpass','https://foo.com/', 'a')
protocol.makeConnection(StringTransport())
protocol.dataReceived(response)
for (h,v) in headers.items(): protocol.dataReceived('%s: %s\r\n' % (h,v))
protocol.dataReceived('\r\n')
def testPassportLoginSuccess(self):
headers = {
'Content-Length' : '0',
'Content-Type' : 'text/html',
'Authentication-Info' : "Passport1.4 da-status=success,tname=MSPAuth," +
"tname=MSPProf,tname=MSPSec,from-PP='somekey'," +
"ru=http://messenger.msn.com"
}
self._doLoginTest('HTTP/1.1 200 OK\r\n', headers)
self.failUnless(self.result[0] == (msn.LOGIN_SUCCESS, 'somekey'))
def testPassportLoginFailure(self):
headers = {
'Content-Type' : 'text/html',
'WWW-Authenticate' : 'Passport1.4 da-status=failed,' +
'srealm=Passport.NET,ts=-3,prompt,cburl=http://host.com,' +
'cbtxt=the%20error%20message'
}
self._doLoginTest('HTTP/1.1 401 Unauthorized\r\n', headers)
self.failUnless(self.result[0] == (msn.LOGIN_FAILURE, 'the error message'))
def testPassportLoginRedirect(self):
headers = {
'Content-Type' : 'text/html',
'Authentication-Info' : 'Passport1.4 da-status=redir',
'Location' : 'https://newlogin.host.com/'
}
self._doLoginTest('HTTP/1.1 302 Found\r\n', headers)
self.failUnless(self.result[0] == (msn.LOGIN_REDIRECT, 'https://newlogin.host.com/', 'a'))
if msn is not None:
class DummySwitchboardClient(msn.SwitchboardClient):
def userTyping(self, message):
self.state = 'TYPING'
def gotSendRequest(self, fileName, fileSize, cookie, message):
if fileName == 'foobar.ext' and fileSize == 31337 and cookie == 1234: self.state = 'INVITATION'
class DummyNotificationClient(msn.NotificationClient):
def loggedIn(self, userHandle, screenName, verified):
if userHandle == 'foo@bar.com' and screenName == 'Test Screen Name' and verified:
self.state = 'LOGIN'
def gotProfile(self, message):
self.state = 'PROFILE'
def gotContactStatus(self, code, userHandle, screenName):
if code == msn.STATUS_AWAY and userHandle == "foo@bar.com" and screenName == "Test Screen Name":
self.state = 'INITSTATUS'
def contactStatusChanged(self, code, userHandle, screenName):
if code == msn.STATUS_LUNCH and userHandle == "foo@bar.com" and screenName == "Test Name":
self.state = 'NEWSTATUS'
def contactOffline(self, userHandle):
if userHandle == "foo@bar.com": self.state = 'OFFLINE'
def statusChanged(self, code):
if code == msn.STATUS_HIDDEN: self.state = 'MYSTATUS'
def listSynchronized(self, *args):
self.state = 'GOTLIST'
def gotPhoneNumber(self, listVersion, userHandle, phoneType, number):
msn.NotificationClient.gotPhoneNumber(self, listVersion, userHandle, phoneType, number)
self.state = 'GOTPHONE'
def userRemovedMe(self, userHandle, listVersion):
msn.NotificationClient.userRemovedMe(self, userHandle, listVersion)
c = self.factory.contacts.getContact(userHandle)
if not c and self.factory.contacts.version == listVersion: self.state = 'USERREMOVEDME'
def userAddedMe(self, userHandle, screenName, listVersion):
msn.NotificationClient.userAddedMe(self, userHandle, screenName, listVersion)
c = self.factory.contacts.getContact(userHandle)
if c and (c.lists | msn.REVERSE_LIST) and (self.factory.contacts.version == listVersion) and \
(screenName == 'Screen Name'):
self.state = 'USERADDEDME'
def gotSwitchboardInvitation(self, sessionID, host, port, key, userHandle, screenName):
if sessionID == 1234 and \
host == '192.168.1.1' and \
port == 1863 and \
key == '123.456' and \
userHandle == 'foo@foo.com' and \
screenName == 'Screen Name':
self.state = 'SBINVITED'
class DispatchTests(unittest.TestCase):
"""
Tests for L{DispatchClient}.
"""
def _versionTest(self, serverVersionResponse):
"""
Test L{DispatchClient} version negotiation.
"""
client = msn.DispatchClient()
client.userHandle = "foo"
transport = StringTransport()
client.makeConnection(transport)
self.assertEqual(
transport.value(), "VER 1 MSNP8 CVR0\r\n")
transport.clear()
client.dataReceived(serverVersionResponse)
self.assertEqual(
transport.value(),
"CVR 2 0x0409 win 4.10 i386 MSNMSGR 5.0.0544 MSMSGS foo\r\n")
def test_version(self):
"""
L{DispatchClient.connectionMade} greets the server with a I{VER}
(version) message and then L{NotificationClient.dataReceived}
handles the server's I{VER} response by sending a I{CVR} (client
version) message.
"""
self._versionTest("VER 1 MSNP8 CVR0\r\n")
def test_versionWithoutCVR0(self):
"""
If the server responds to a I{VER} command without including the
I{CVR0} protocol, L{DispatchClient} behaves in the same way as if
that protocol were included.
Starting in August 2008, CVR0 disappeared from the I{VER} response.
"""
self._versionTest("VER 1 MSNP8\r\n")
class NotificationTests(unittest.TestCase):
""" testing the various events in NotificationClient """
def setUp(self):
self.client = DummyNotificationClient()
self.client.factory = msn.NotificationFactory()
self.client.state = 'START'
def tearDown(self):
self.client = None
def _versionTest(self, serverVersionResponse):
"""
Test L{NotificationClient} version negotiation.
"""
self.client.factory.userHandle = "foo"
transport = StringTransport()
self.client.makeConnection(transport)
self.assertEqual(
transport.value(), "VER 1 MSNP8 CVR0\r\n")
transport.clear()
self.client.dataReceived(serverVersionResponse)
self.assertEqual(
transport.value(),
"CVR 2 0x0409 win 4.10 i386 MSNMSGR 5.0.0544 MSMSGS foo\r\n")
def test_version(self):
"""
L{NotificationClient.connectionMade} greets the server with a I{VER}
(version) message and then L{NotificationClient.dataReceived}
handles the server's I{VER} response by sending a I{CVR} (client
version) message.
"""
self._versionTest("VER 1 MSNP8 CVR0\r\n")
def test_versionWithoutCVR0(self):
"""
If the server responds to a I{VER} command without including the
I{CVR0} protocol, L{NotificationClient} behaves in the same way as
if that protocol were included.
Starting in August 2008, CVR0 disappeared from the I{VER} response.
"""
self._versionTest("VER 1 MSNP8\r\n")
def test_challenge(self):
"""
L{NotificationClient} responds to a I{CHL} message by sending a I{QRY}
back which included a hash based on the parameters of the I{CHL}.
"""
transport = StringTransport()
self.client.makeConnection(transport)
transport.clear()
challenge = "15570131571988941333"
self.client.dataReceived('CHL 0 ' + challenge + '\r\n')
# md5 of the challenge and a magic string defined by the protocol
response = "8f2f5a91b72102cd28355e9fc9000d6e"
# Sanity check - the response is what the comment above says it is.
self.assertEqual(
response, md5(challenge + "Q1P7W2E4J9R8U3S5").hexdigest())
self.assertEqual(
transport.value(),
# 2 is the next transaction identifier. 32 is the length of the
# response.
"QRY 2 msmsgs@msnmsgr.com 32\r\n" + response)
def testLogin(self):
self.client.lineReceived('USR 1 OK foo@bar.com Test%20Screen%20Name 1 0')
self.failUnless((self.client.state == 'LOGIN'), msg='Failed to detect successful login')
def test_loginWithoutSSLFailure(self):
"""
L{NotificationClient.loginFailure} is called if the necessary SSL APIs
are unavailable.
"""
self.patch(msn, 'ClientContextFactory', None)
success = []
self.client.loggedIn = lambda *args: success.append(args)
failure = []
self.client.loginFailure = failure.append
self.client.lineReceived('USR 6 TWN S opaque-string-goes-here')
self.assertEqual(success, [])
self.assertEqual(
failure,
["Exception while authenticating: "
"Connecting to the Passport server requires SSL, but SSL is "
"unavailable."])
def testProfile(self):
m = 'MSG Hotmail Hotmail 353\r\nMIME-Version: 1.0\r\nContent-Type: text/x-msmsgsprofile; charset=UTF-8\r\n'
m += 'LoginTime: 1016941010\r\nEmailEnabled: 1\r\nMemberIdHigh: 40000\r\nMemberIdLow: -600000000\r\nlang_preference: 1033\r\n'
m += 'preferredEmail: foo@bar.com\r\ncountry: AU\r\nPostalCode: 90210\r\nGender: M\r\nKid: 0\r\nAge:\r\nsid: 400\r\n'
m += 'kv: 2\r\nMSPAuth: 2CACCBCCADMoV8ORoz64BVwmjtksIg!kmR!Rj5tBBqEaW9hc4YnPHSOQ$$\r\n\r\n'
map(self.client.lineReceived, m.split('\r\n')[:-1])
self.failUnless((self.client.state == 'PROFILE'), msg='Failed to detect initial profile')
def testStatus(self):
t = [('ILN 1 AWY foo@bar.com Test%20Screen%20Name 0', 'INITSTATUS', 'Failed to detect initial status report'),
('NLN LUN foo@bar.com Test%20Name 0', 'NEWSTATUS', 'Failed to detect contact status change'),
('FLN foo@bar.com', 'OFFLINE', 'Failed to detect contact signing off'),
('CHG 1 HDN 0', 'MYSTATUS', 'Failed to detect my status changing')]
for i in t:
self.client.lineReceived(i[0])
self.failUnless((self.client.state == i[1]), msg=i[2])
def testListSync(self):
# currently this test does not take into account the fact
# that BPRs sent as part of the SYN reply may not be interpreted
# as such if they are for the last LST -- maybe I should
# factor this in later.
self.client.makeConnection(StringTransport())
msn.NotificationClient.loggedIn(self.client, 'foo@foo.com', 'foobar', 1)
lines = [
"SYN %s 100 1 1" % self.client.currentID,
"GTC A",
"BLP AL",
"LSG 0 Other%20Contacts 0",
"LST userHandle@email.com Some%20Name 11 0"
]
map(self.client.lineReceived, lines)
contacts = self.client.factory.contacts
contact = contacts.getContact('userHandle@email.com')
self.failUnless(contacts.version == 100, "Invalid contact list version")
self.failUnless(contact.screenName == 'Some Name', "Invalid screen-name for user")
self.failUnless(contacts.groups == {0 : 'Other Contacts'}, "Did not get proper group list")
self.failUnless(contact.groups == [0] and contact.lists == 11, "Invalid contact list/group info")
self.failUnless(self.client.state == 'GOTLIST', "Failed to call list sync handler")
def testAsyncPhoneChange(self):
c = msn.MSNContact(userHandle='userHandle@email.com')
self.client.factory.contacts = msn.MSNContactList()
self.client.factory.contacts.addContact(c)
self.client.makeConnection(StringTransport())
self.client.lineReceived("BPR 101 userHandle@email.com PHH 123%20456")
c = self.client.factory.contacts.getContact('userHandle@email.com')
self.failUnless(self.client.state == 'GOTPHONE', "Did not fire phone change callback")
self.failUnless(c.homePhone == '123 456', "Did not update the contact's phone number")
self.failUnless(self.client.factory.contacts.version == 101, "Did not update list version")
def testLateBPR(self):
"""
This test makes sure that if a BPR response that was meant
to be part of a SYN response (but came after the last LST)
is received, the correct contact is updated and all is well
"""
self.client.makeConnection(StringTransport())
msn.NotificationClient.loggedIn(self.client, 'foo@foo.com', 'foo', 1)
lines = [
"SYN %s 100 1 1" % self.client.currentID,
"GTC A",
"BLP AL",
"LSG 0 Other%20Contacts 0",
"LST userHandle@email.com Some%20Name 11 0",
"BPR PHH 123%20456"
]
map(self.client.lineReceived, lines)
contact = self.client.factory.contacts.getContact('userHandle@email.com')
self.failUnless(contact.homePhone == '123 456', "Did not update contact's phone number")
def testUserRemovedMe(self):
self.client.factory.contacts = msn.MSNContactList()
contact = msn.MSNContact(userHandle='foo@foo.com')
contact.addToList(msn.REVERSE_LIST)
self.client.factory.contacts.addContact(contact)
self.client.lineReceived("REM 0 RL 100 foo@foo.com")
self.failUnless(self.client.state == 'USERREMOVEDME', "Failed to remove user from reverse list")
def testUserAddedMe(self):
self.client.factory.contacts = msn.MSNContactList()
self.client.lineReceived("ADD 0 RL 100 foo@foo.com Screen%20Name")
self.failUnless(self.client.state == 'USERADDEDME', "Failed to add user to reverse lise")
def testAsyncSwitchboardInvitation(self):
self.client.lineReceived("RNG 1234 192.168.1.1:1863 CKI 123.456 foo@foo.com Screen%20Name")
self.failUnless(self.client.state == "SBINVITED")
def testCommandFailed(self):
"""
Ensures that error responses from the server fires an errback with
MSNCommandFailed.
"""
id, d = self.client._createIDMapping()
self.client.lineReceived("201 %s" % id)
d = self.assertFailure(d, msn.MSNCommandFailed)
def assertErrorCode(exception):
self.assertEqual(201, exception.errorCode)
return d.addCallback(assertErrorCode)
class MessageHandlingTests(unittest.TestCase):
""" testing various message handling methods from SwichboardClient """
def setUp(self):
self.client = DummySwitchboardClient()
self.client.state = 'START'
def tearDown(self):
self.client = None
def testClientCapabilitiesCheck(self):
m = msn.MSNMessage()
m.setHeader('Content-Type', 'text/x-clientcaps')
self.assertEqual(self.client.checkMessage(m), 0, 'Failed to detect client capability message')
def testTypingCheck(self):
m = msn.MSNMessage()
m.setHeader('Content-Type', 'text/x-msmsgscontrol')
m.setHeader('TypingUser', 'foo@bar')
self.client.checkMessage(m)
self.failUnless((self.client.state == 'TYPING'), msg='Failed to detect typing notification')
def testFileInvitation(self, lazyClient=False):
m = msn.MSNMessage()
m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
m.message += 'Application-Name: File Transfer\r\n'
if not lazyClient:
m.message += 'Application-GUID: {5D3E02AB-6190-11d3-BBBB-00C04F795683}\r\n'
m.message += 'Invitation-Command: Invite\r\n'
m.message += 'Invitation-Cookie: 1234\r\n'
m.message += 'Application-File: foobar.ext\r\n'
m.message += 'Application-FileSize: 31337\r\n\r\n'
self.client.checkMessage(m)
self.failUnless((self.client.state == 'INVITATION'), msg='Failed to detect file transfer invitation')
def testFileInvitationMissingGUID(self):
return self.testFileInvitation(True)
def testFileResponse(self):
d = Deferred()
d.addCallback(self.fileResponse)
self.client.cookies['iCookies'][1234] = (d, None)
m = msn.MSNMessage()
m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
m.message += 'Invitation-Command: ACCEPT\r\n'
m.message += 'Invitation-Cookie: 1234\r\n\r\n'
self.client.checkMessage(m)
self.failUnless((self.client.state == 'RESPONSE'), msg='Failed to detect file transfer response')
def testFileInfo(self):
d = Deferred()
d.addCallback(self.fileInfo)
self.client.cookies['external'][1234] = (d, None)
m = msn.MSNMessage()
m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
m.message += 'Invitation-Command: ACCEPT\r\n'
m.message += 'Invitation-Cookie: 1234\r\n'
m.message += 'IP-Address: 192.168.0.1\r\n'
m.message += 'Port: 6891\r\n'
m.message += 'AuthCookie: 4321\r\n\r\n'
self.client.checkMessage(m)
self.failUnless((self.client.state == 'INFO'), msg='Failed to detect file transfer info')
def fileResponse(self, (accept, cookie, info)):
if accept and cookie == 1234: self.client.state = 'RESPONSE'
def fileInfo(self, (accept, ip, port, aCookie, info)):
if accept and ip == '192.168.0.1' and port == 6891 and aCookie == 4321: self.client.state = 'INFO'
class FileTransferTests(unittest.TestCase):
"""
test FileSend against FileReceive
"""
def setUp(self):
self.input = 'a' * 7000
self.output = StringIOWithoutClosing()
def tearDown(self):
self.input = None
self.output = None
def test_fileTransfer(self):
"""
Test L{FileSend} against L{FileReceive} using a loopback transport.
"""
auth = 1234
sender = msn.FileSend(StringIO.StringIO(self.input))
sender.auth = auth
sender.fileSize = 7000
client = msn.FileReceive(auth, "foo@bar.com", self.output)
client.fileSize = 7000
def check(ignored):
self.assertTrue(
client.completed and sender.completed,
msg="send failed to complete")
self.assertEqual(
self.input, self.output.getvalue(),
msg="saved file does not match original")
d = loopback.loopbackAsync(sender, client)
d.addCallback(check)
return d
if msn is None:
for testClass in [DispatchTests, PassportTests, NotificationTests,
MessageHandlingTests, FileTransferTests]:
testClass.skip = (
"MSN requires an HTTP client but none is available, "
"skipping tests.")
| [
"bernardini.andrea@gmail.com"
] | bernardini.andrea@gmail.com |
0069f78905351db695975fbd72ebc1b5ae332d60 | 9b9b6a7aa1de1092a8480771f2b08ffa0972218d | /python/sklearn/linear-regression/workload-analysis/classify/online/params/param_counter.py | 1af00ac73271adc2c5101bc6c3e24af7064e2cb7 | [
"WTFPL"
] | permissive | lijiansong/lang | c42ca757306b38f37a26fef841b2460f05a13af6 | 27ffecd9afe67ddac003fc4d6333e06e2cc20434 | refs/heads/master | 2023-02-25T17:36:01.221720 | 2023-02-14T14:10:29 | 2023-02-14T14:10:29 | 149,586,739 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | import caffe
from numpy import prod, sum
from pprint import pprint
import os
def print_param_info(net_name, net_file):
net = caffe.Net(net_file, caffe.TEST)
print('Layer-wise params:')
pprint([(k, v[0].data.shape) for k, v in net.params.items()])
print('--> {} total number of params: {}'.format(net_name, sum([prod(v[0].data.shape) for k, v in net.params.items()])/1024.0/1024.0))
if __name__ == '__main__':
net_list = ['mobilenet_v2-github', 'mobilenet', 'squeezenet', 'densenet121', 'resnet50', 'ssd_mobilenetv1', 'ssd_vgg16']
for net in net_list:
net_file_blob = net + '/' + net + '_float16_dense_1batch.prototxt'
if not os.path.exists(net_file_blob):
print('{} NOT exists!!!'.format(net_file_blob))
exit(-1)
print_param_info(net, net_file_blob)
| [
"lijiansong@ict.ac.cn"
] | lijiansong@ict.ac.cn |
28691a0f6566ba2f43a980db581c0b15aacf756e | 34e44059f187d9064959842ef8b6a8526e0db431 | /pariba/settings.py | 25791709cfaaa0939a65be86a085476062311b53 | [] | no_license | SimeonYS/pariba | f682285d3a370468b2612fb02af3e2ef7b51be51 | af662fb20d5f33d45915062180f4481fa014d18a | refs/heads/main | 2023-03-07T07:26:18.513902 | 2021-02-26T11:39:15 | 2021-02-26T11:39:15 | 342,557,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | BOT_NAME = 'pariba'
SPIDER_MODULES = ['pariba.spiders']
NEWSPIDER_MODULE = 'pariba.spiders'
FEED_EXPORT_ENCODING = 'utf-8'
LOG_LEVEL = 'ERROR'
DOWNLOAD_DELAY = 0
USER_AGENT="Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36"
ROBOTSTXT_OBEY = True
ITEM_PIPELINES = {
'pariba.pipelines.ParibaPipeline': 300,
} | [
"simeon.simeonov@ADPVT.com"
] | simeon.simeonov@ADPVT.com |
21da94d8fe23dc14ad9e002afb37fa79fa9265db | 808eb8f2ae19d78531d2179d28aa1bc569b87a7d | /setup.py | 79722097e9aa60945975738c3bbfa496e741002f | [
"BSD-3-Clause"
] | permissive | huangjundashuaige/elastic | b59f5143fbc28291500e441bba824d532947ffc7 | 9e0b6abc87a5cb6c0ece2d630d3bdb812682ba45 | refs/heads/master | 2022-12-07T05:26:40.370489 | 2020-09-04T05:30:51 | 2020-09-04T05:32:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,241 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import os
import re
import sys
from setuptools import find_packages, setup
def get_version():
# get version string from version.py
# TODO: ideally the version.py should be generated when setup is run
version_file = os.path.join(os.path.dirname(__file__), "torchelastic/version.py")
version_regex = r"__version__ = ['\"]([^'\"]*)['\"]"
with open(version_file, "r") as f:
version = re.search(version_regex, f.read(), re.M).group(1)
return version
if __name__ == "__main__":
if sys.version_info < (3, 8):
sys.exit("python >= 3.8 required for torchelastic")
with open("README.md", encoding="utf8") as f:
readme = f.read()
with open("requirements.txt") as f:
reqs = f.read()
version = get_version()
print("-- Building version: " + version)
setup(
# Metadata
name="torchelastic",
version=version,
author="PyTorch Elastic Devs",
author_email="torchelastic@fb.com",
description="PyTorch Elastic Training",
long_description=readme,
long_description_content_type="text/markdown",
url="https://github.com/pytorch/elastic",
license="BSD-3",
keywords=["pytorch", "machine learning", "elastic", "distributed"],
python_requires=">=3.6",
install_requires=reqs.strip().split("\n"),
include_package_data=True,
packages=find_packages(exclude=("test", "test.*")),
test_suite="test.suites.unittests",
# PyPI package information.
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Topic :: System :: Distributed Computing",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
0631bc7566c1dfbeee8db0e84fcf39ae79453c92 | 181af10fcf40b824fe92d3b8f72fd15d6d1490c2 | /Medium/144. Binary Tree Preorder Traversal/Binary Tree Preorder Traversal.py | caf9c234794d2dbc9002cb5f15b5547ac17fe488 | [] | no_license | wangyendt/LeetCode | 402c59a0b7b7f5b3a672231ea5dad8056ade36af | 4a3ba15284c45b2d8bf38306c8c8526ae174615c | refs/heads/master | 2023-08-10T06:27:54.995152 | 2023-08-10T02:22:27 | 2023-08-10T02:22:27 | 176,651,399 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | #!/usr/bin/env python
# encoding: utf-8
"""
@author: Wayne
@contact: wangye.hope@gmail.com
@software: PyCharm
@file: Binary Tree Preorder Traversal
@time: 2019/8/22 17:55
"""
import sys
sys.path.append('..')
from Tools.BinaryTree import *
class Solution:
def preorderTraversal(self, root: TreeNode) -> list:
ret = []
if not root: return ret
stack = [root]
while stack:
s = stack.pop()
ret.append(s.val)
if s.right:
stack.append(s.right)
if s.left:
stack.append(s.left)
return ret
so = Solution()
tree = parseTreeNode([1, 2, 3, 4, 5, 6, 7])
print(so.preorderTraversal(tree))
| [
"905317742@qq.com"
] | 905317742@qq.com |
36cf221a113a5fbedd4fa3a7d3a6fff3f1f11dcb | d101a1499cb453364eaed7bc920f1bf0e01a3470 | /tests/onnx_matrix_conv2d_resblock/onnx_matrix_conv2d_resblock.py | 38199eff52c911fdce9f66aba9277e92279bfc4b | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | m1kit/nngen | 01cfdc373ffcb67e68d9069e1919b126745b501d | 5d71885a59b667b883a328f3cbd0d40579300b7f | refs/heads/master | 2020-09-23T13:45:57.949339 | 2019-11-27T04:25:16 | 2019-11-27T04:25:16 | 225,514,595 | 0 | 0 | Apache-2.0 | 2019-12-03T02:39:22 | 2019-12-03T02:39:21 | null | UTF-8 | Python | false | false | 10,477 | py | from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import functools
import math
import numpy as np
import torch
import torchvision
import torchvision.transforms as transforms
import torch.nn as nn
import torch.nn.functional as F
import torch.autograd
# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(
os.path.dirname(os.path.abspath(__file__)))))
import nngen as ng
from veriloggen import *
import veriloggen.thread as vthread
import veriloggen.types.axi as axi
class MatrixConv2dResblock(nn.Module):
def __init__(self, weight_shape, stride=1, padding=0,
with_batchnorm=False, act_func='relu'):
super(MatrixConv2dResblock, self).__init__()
self.conv = nn.Conv2d(weight_shape[3], weight_shape[0], weight_shape[1],
stride=stride, padding=padding, bias=not with_batchnorm)
if with_batchnorm:
self.bn = nn.BatchNorm2d(weight_shape[0])
else:
self.bn = None
if act_func == 'relu':
self.f = nn.ReLU(inplace=True)
elif act_func == 'leaky_relu':
self.f = nn.LeakyReLU(inplace=True)
else:
self.f = None
def forward(self, x):
y = self.conv(x)
if self.bn is not None:
y = self.bn(y)
if self.f is not None:
y = self.f(y)
y = torch.add(x, y)
return y
def run(act_shape=(1, 7, 7, 15), weight_shape=(15, 3, 3, 15),
act_dtype=ng.int32, weight_dtype=ng.int32,
stride=1, padding=1,
with_batchnorm=False, act_func='relu', disable_fusion=False,
par_ich=1, par_och=1, par_col=1, par_row=1,
concur_och=None, stationary='filter',
chunk_size=64,
axi_datawidth=32, silent=False,
filename=None, simtype='iverilog', outputfile=None):
if weight_shape[0] != weight_shape[3]:
raise ValueError('not supported shape: weight_shape[0] != weight_shape[3]')
# model definition
model = MatrixConv2dResblock(weight_shape, stride, padding,
with_batchnorm, act_func)
# overwrite weight values for test
# model.conv.weight.data = torch.from_numpy(np.ones_like(model.conv.weight.data.numpy()))
# model.conv.bias.data = torch.from_numpy(np.zeros_like(model.conv.bias.data.numpy()))
# Pytorch to ONNX
onnx_filename = 'onnx_matrix_conv2d_resblock.onnx'
dummy_input = torch.randn(*act_shape).transpose(1, 3)
input_names = ['act']
output_names = ['out']
model.eval()
torch.onnx.export(model, dummy_input, onnx_filename,
input_names=input_names, output_names=output_names)
# ONNX to NNgen
value_dtypes = {'act': act_dtype,
'0.weight': weight_dtype,
'out': act_dtype}
(outputs, placeholders, variables,
constants, operators) = ng.from_onnx(onnx_filename,
value_dtypes=value_dtypes,
default_placeholder_dtype=act_dtype,
default_variable_dtype=weight_dtype,
default_constant_dtype=weight_dtype,
default_operator_dtype=act_dtype,
default_scale_dtype=ng.int32,
default_bias_dtype=ng.int32,
disable_fusion=disable_fusion)
# default linear quantization
if act_dtype.width >= 8:
value_ranges = {'act': (-120, 120)}
else:
value_ranges = {'act': (-(2 ** (act_dtype.width - 1)), (2 ** (act_dtype.width - 1)))}
ng.quantize(outputs, value_ranges=value_ranges)
# set attribute
for op in operators.values():
if isinstance(op, ng.conv2d):
op.attribute(par_ich=par_ich, par_och=par_och,
par_row=par_row, par_col=par_col,
concur_och=concur_och)
# create target hardware
act = placeholders['act']
out = outputs['out']
targ = ng.to_veriloggen([out], 'onnx_matrix_conv2d', silent=silent,
config={'maxi_datawidth': axi_datawidth,
'chunk_size': chunk_size})
# verification data
# if act_dtype.width > 4:
# vact = np.arange(act.length, dtype=np.int64).reshape(act.shape) % [11] + [1]
# else:
# vact = np.arange(act.length, dtype=np.int64).reshape(act.shape) % [5] + [1]
#vact = np.ones(act.shape)
#vact = np.zeros(act.shape)
vact = np.random.normal(size=act.length).reshape(act.shape)
vact = np.clip(vact, -3.0, 3.0)
vact_min_val, vact_max_val = value_ranges['act']
vact_max_abs_range = max(abs(vact_min_val), abs(vact_max_val))
vact_width = vact_max_abs_range.bit_length() + 1
vact = vact * (1.0 * (2 ** (vact_width - 1) - 1)) / 3.0
vact = np.round(vact).astype(np.int64)
eval_outs = ng.eval([out], act=vact)
vout = eval_outs[0]
# exec on pytorch
model_input = vact.astype(np.float32)
if act.perm is not None:
model_input = np.transpose(model_input, act.reversed_perm)
model.eval()
model_out = model(torch.from_numpy(model_input)).detach().numpy()
if act.perm is not None:
model_out = np.transpose(model_out, act.perm)
scaled_model_out = model_out * out.scale_factor
out_diff = vout - scaled_model_out
out_err = out_diff / (scaled_model_out + 0.00000001)
max_out_err = np.max(np.abs(out_err))
# breakpoint()
# if max_out_err > 0.1:
# raise ValueError("too large output error: %f > 0.1" % max_out_err)
# to memory image
param_data = ng.export_ndarray([out], chunk_size)
param_bytes = len(param_data)
variable_addr = int(math.ceil((act.addr + act.memory_size) / chunk_size)) * chunk_size
check_addr = int(math.ceil((variable_addr + param_bytes) / chunk_size)) * chunk_size
tmp_addr = int(math.ceil((check_addr + out.memory_size) / chunk_size)) * chunk_size
memimg_datawidth = 32
mem = np.zeros([1024 * 1024 * 8 // (memimg_datawidth // 8)], dtype=np.int64)
mem = mem + [100]
# placeholder
axi.set_memory(mem, vact, memimg_datawidth,
act_dtype.width, act.addr,
max(int(math.ceil(axi_datawidth / act_dtype.width)), par_ich))
# parameters (variable and constant)
axi.set_memory(mem, param_data, memimg_datawidth,
8, variable_addr)
# verification data
axi.set_memory(mem, vout, memimg_datawidth,
act_dtype.width, check_addr,
max(int(math.ceil(axi_datawidth / act_dtype.width)), par_och))
# test controller
m = Module('test')
params = m.copy_params(targ)
ports = m.copy_sim_ports(targ)
clk = ports['CLK']
resetn = ports['RESETN']
rst = m.Wire('RST')
rst.assign(Not(resetn))
# AXI memory model
if outputfile is None:
outputfile = os.path.splitext(os.path.basename(__file__))[0] + '.out'
memimg_name = 'memimg_' + outputfile
memory = axi.AxiMemoryModel(m, 'memory', clk, rst,
datawidth=axi_datawidth,
memimg=mem, memimg_name=memimg_name,
memimg_datawidth=memimg_datawidth)
memory.connect(ports, 'maxi')
# AXI-Slave controller
_saxi = vthread.AXIMLite(m, '_saxi', clk, rst, noio=True)
_saxi.connect(ports, 'saxi')
# timer
time_counter = m.Reg('time_counter', 32, initval=0)
seq = Seq(m, 'seq', clk, rst)
seq(
time_counter.inc()
)
def ctrl():
for i in range(100):
pass
ng.sim.set_global_addrs(_saxi, tmp_addr)
start_time = time_counter.value
ng.sim.start(_saxi)
print('# start')
ng.sim.wait(_saxi)
end_time = time_counter.value
print('# end')
print('# execution cycles: %d' % (end_time - start_time))
# verify
ok = True
for bat in range(out.shape[0]):
for y in range(out.shape[1]):
for x in range(out.shape[2]):
for ch in range(out.shape[3]):
orig = memory.read_word(
bat * out.aligned_shape[1] * out.aligned_shape[2] * out.aligned_shape[3] +
y * out.aligned_shape[2] * out.aligned_shape[3] +
x * out.aligned_shape[3] + ch,
out.addr, act_dtype.width)
check = memory.read_word(
bat * out.aligned_shape[1] * out.aligned_shape[2] * out.aligned_shape[3] +
y * out.aligned_shape[2] * out.aligned_shape[3] +
x * out.aligned_shape[3] + ch,
check_addr, act_dtype.width)
if vthread.verilog.NotEql(orig, check):
print('NG (', bat, y, x, ch,
') orig: ', orig, ' check: ', check)
ok = False
# else:
# print('OK (', bat, y, x, ch,
# ') orig: ', orig, ' check: ', check)
if ok:
print('# verify: PASSED')
else:
print('# verify: FAILED')
vthread.finish()
th = vthread.Thread(m, 'th_ctrl', clk, rst, ctrl)
fsm = th.start()
uut = m.Instance(targ, 'uut',
params=m.connect_params(targ),
ports=m.connect_ports(targ))
# simulation.setup_waveform(m, uut)
simulation.setup_clock(m, clk, hperiod=5)
init = simulation.setup_reset(m, resetn, m.make_reset(), period=100, polarity='low')
init.add(
Delay(10000000),
Systask('finish'),
)
# output source code
if filename is not None:
m.to_verilog(filename)
# run simulation
sim = simulation.Simulator(m, sim=simtype)
rslt = sim.run(outputfile=outputfile)
lines = rslt.splitlines()
if simtype == 'verilator' and lines[-1].startswith('-'):
rslt = '\n'.join(lines[:-1])
return rslt
if __name__ == '__main__':
rslt = run(silent=False, filename='tmp.v')
print(rslt)
| [
"shta.ky1018@gmail.com"
] | shta.ky1018@gmail.com |
13c34b9b58229caa417302b1c34bd40e5e0b53b6 | 13bbcaf8713bc2da48907640ebd3b27860684079 | /opy/opy_main.py | 9e5ed01ef73d39ca22b45ec908c669ad7a7490d4 | [
"Apache-2.0"
] | permissive | tekknolagi/oil | 3f36034c0e2722562dd28805e1637f68905126b5 | a4c42abe13a7cc97c4fce0422371f127d7c78df6 | refs/heads/master | 2022-07-20T16:45:42.974107 | 2019-10-22T05:03:05 | 2019-10-22T05:03:05 | 157,894,333 | 0 | 0 | Apache-2.0 | 2018-11-16T16:35:34 | 2018-11-16T16:35:34 | null | UTF-8 | Python | false | false | 17,253 | py | """
opy_main.py
"""
from __future__ import print_function
import cStringIO
import hashlib
import optparse
import os
import sys
import marshal
import types
from . import pytree
from . import skeleton
from .compiler2 import consts
from .compiler2 import dis_tool
from .compiler2 import misc
from .compiler2 import transformer
# Disabled for now because byterun imports 'six', and that breaks the build.
from .byterun import execfile
from .byterun import ovm
from pgen2 import driver, parse, pgen, grammar
from pgen2 import token
from pgen2 import tokenize
from frontend import args
from core.util import log
from core import pyutil
from ovm2 import oheap2
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Dict
from pgen2.parse import PNode
# From lib2to3/pygram.py. This takes the place of the 'symbol' module.
# compiler/transformer module needs this.
class Symbols(object):
    """Namespace that exposes each grammar nonterminal as an attribute.

    Stands in for the stdlib 'symbol' module: every entry of
    gr.symbol2number becomes an attribute whose value is the symbol's
    numeric type (an int >= 256).
    """

    def __init__(self, gr):
        # Mirror every (name -> number) pair from the grammar onto this
        # instance, so callers can write e.g. symbols.file_input.
        for sym_name, sym_number in gr.symbol2number.items():
            setattr(self, sym_name, sym_number)

        # Reverse mapping, kept around for the transformer to use.
        self.number2symbol = gr.number2symbol
def HostStdlibNames():
    """Build a {number: name} table from the host Python's grammar tables.

    Merges the stdlib 'symbol' table (nonterminals) with the 'token' table
    (terminals); token entries win on any key collision, as before.
    NOTE: the 'symbol' module was removed in Python 3.10, so this helper
    only works on interpreters that still ship it.
    """
    import symbol
    import token
    names = dict(symbol.sym_name)
    names.update(token.tok_name)
    return names
def WriteGrammar(grammar_path, marshal_path):
    """Compile a textual pgen2 grammar into marshalled parser tables.

    Used for py27.grammar.
    oil_lang/grammar.pgen2 uses oil_lang/grammar_gen.py

    Args:
      grammar_path: path to the grammar description to read.
      marshal_path: destination path for the marshalled tables.
    """
    with open(grammar_path) as grammar_file:
        compiled = pgen.MakeGrammar(grammar_file)

    with open(marshal_path, 'wb') as marshal_file:
        compiled.dump(marshal_file)

    log('Compiled %s -> grammar tables in %s', grammar_path, marshal_path)
def CountTupleTree(tu):
    """Count the nodes in a tuple parse tree.

    Leaves (ints and strings) count as one node each; a tuple counts as
    the sum of its entries.  Any other type is a programming error.
    """
    if isinstance(tu, (int, str)):
        return 1
    if isinstance(tu, tuple):
        return sum(CountTupleTree(entry) for entry in tu)
    raise AssertionError(tu)
class TupleTreePrinter(object):
    """Pretty-printer for tuple-style parse trees.

    'names' maps type numbers to human-readable names.  Terminal tuples
    (type numbers below the largest token id) print on a single line with
    their value and position; nonterminal tuples recurse over children.
    """

    def __init__(self, names):
        self._names = names
        # TODO: parameterize by grammar.
        self.max_token_index = max(token.tok_name)

    def Print(self, tu, f=sys.stdout, indent=0):
        f.write(' ' * indent)
        if isinstance(tu, tuple):
            num = tu[0]
            f.write(self._names[num])
            if num < self.max_token_index:
                # Terminal: (type, value, lineno, col)
                f.write(' %s (%d, %d)\n' % (tu[1], tu[2], tu[3]))
            else:
                # Nonterminal: name on its own line, then children indented.
                f.write('\n')
                for entry in tu[1:]:
                    self.Print(entry, f, indent=indent + 1)
        elif isinstance(tu, (int, str)):
            f.write(str(tu))
            f.write('\n')
        else:
            raise AssertionError(tu)
class ParseTreePrinter(object):
    """Prints a tree of PNode instances."""

    def __init__(self, names):
        # type: (Dict[int, str]) -> None
        self.names = names  # maps pnode.typ (int) to a readable name
        # Kept for backward compatibility with callers that poke at it;
        # Print() now honors its 'f' argument instead of always using this.
        self.f = sys.stdout

    def Print(self, pnode, f=sys.stdout, indent=0, i=0):
        # type: (PNode, Any, int, int) -> None
        """Write one node, and recursively its children, to stream 'f'.

        'i' is the node's index among its siblings; 'indent' is the depth.
        """
        ind = ' ' * indent
        # NOTE:
        # - 'tok' used to be opaque context
        # - it's None for PRODUCTIONS (nonterminals)
        # - for terminals, it's (prefix, (lineno, column)), where lineno is
        #   1-based, and 'prefix' is a string of whitespace.
        #   e.g. for 'f(1, 3)', the "3" token has a prefix of ' '.
        if isinstance(pnode.tok, tuple):
            # Used for ParseWith
            v = pnode.tok[0]
        else:
            v = '-'
        # BUG FIX: write to the 'f' argument (it was previously accepted but
        # ignored in favor of self.f, which is always sys.stdout).
        f.write('%s%d %s %s\n' % (ind, i, self.names[pnode.typ], v))
        if pnode.children:  # could be None
            for child_i, child in enumerate(pnode.children):
                # Propagate 'f' so the whole tree goes to the same stream.
                self.Print(child, f, indent=indent + 1, i=child_i)
class TableOutput(object):
    """Sink for the five TSV tables produced by the 'dis-tables' action.

    Opens frames/names/consts/flags/ops .tsv2 files in out_dir and writes a
    header row to each; rows are appended through the Write*Row methods and
    the files are closed by Close().
    """

    def __init__(self, out_dir):
        # type: (str) -> None
        self.out_dir = out_dir
        self.frames_f = open(os.path.join(out_dir, 'frames.tsv2'), 'w')
        self.names_f = open(os.path.join(out_dir, 'names.tsv2'), 'w')
        self.consts_f = open(os.path.join(out_dir, 'consts.tsv2'), 'w')
        self.flags_f = open(os.path.join(out_dir, 'flags.tsv2'), 'w')
        self.ops_f = open(os.path.join(out_dir, 'ops.tsv2'), 'w')
        # NOTE: The opcode encoding is variable length, so bytecode_bytes is
        # different than the number of instructions.
        print('path\tcode_name\targcount\tnlocals\tstacksize\tbytecode_bytes',
              file=self.frames_f)
        print('path\tcode_name\tkind\tname', file=self.names_f)
        print('path\tcode_name\ttype\tlen_or_val', file=self.consts_f)
        print('path\tcode_name\tflag', file=self.flags_f)
        print('path\tcode_name\top_name\top_arg', file=self.ops_f)

    def WriteFrameRow(self, path, code_name, argcount, nlocals, stacksize,
                      bytecode_bytes):
        """One row per code object: its frame-level stats."""
        row = [path, code_name, str(argcount), str(nlocals), str(stacksize),
               str(bytecode_bytes)]
        print('\t'.join(row), file=self.frames_f)

    def WriteNameRow(self, path, code_name, kind, name):
        """One row per name; kind is 'name', 'var', 'cell' or 'free'."""
        row = [path, code_name, kind, name]
        print('\t'.join(row), file=self.names_f)

    def WriteConstRow(self, path, code_name, type_, len_or_val):
        """One row per constant: its type and its length (or value for ints)."""
        row = [path, code_name, type_, str(len_or_val)]
        print('\t'.join(row), file=self.consts_f)

    def WriteFlagRow(self, path, code_name, flag_name):
        """One row per code-object flag that is set (e.g. CO_GENERATOR)."""
        row = [path, code_name, flag_name]
        print('\t'.join(row), file=self.flags_f)

    def WriteOpRow(self, path, code_name, op_name, op_arg):
        """One row per bytecode instruction."""
        row = [path, code_name, op_name, str(op_arg)]
        print('\t'.join(row), file=self.ops_f)

    def Close(self):
        """Close all five output files and log a summary."""
        self.frames_f.close()
        self.names_f.close()
        self.consts_f.close()
        self.flags_f.close()
        self.ops_f.close()
        log('Wrote 5 files in %s', self.out_dir)
def WriteDisTables(pyc_path, co, out):
    """Append rows describing code object 'co' to the TSV tables in 'out'.

    Emits one frames row, plus names/ops/flags/consts rows, and recurses
    into any nested code objects found among the constants.
    (The original docstring said "3 TSV files"; 'out' actually holds five.)
    """
    #log('Disassembling %s in %s', co, pyc_path)
    out.WriteFrameRow(pyc_path, co.co_name, co.co_argcount, co.co_nlocals,
                      co.co_stacksize, len(co.co_code))

    # Write a row for every name
    for name in co.co_names:
        out.WriteNameRow(pyc_path, co.co_name, 'name', name)
    for name in co.co_varnames:
        out.WriteNameRow(pyc_path, co.co_name, 'var', name)
    for name in co.co_cellvars:
        out.WriteNameRow(pyc_path, co.co_name, 'cell', name)
    for name in co.co_freevars:
        out.WriteNameRow(pyc_path, co.co_name, 'free', name)

    # Write a row for every op.
    for op_name, op_arg in dis_tool.ParseOps(co.co_code):
        out.WriteOpRow(pyc_path, co.co_name, op_name, op_arg)

    # TODO: Write a row for every flag. OPy outputs these:
    # CO_VARARGS, CO_VAR_KEYWORDS, CO_GENERATOR, CO_NEWLOCALS (we only support
    # this?) FUTURE_DIVISION, FUTURE_ABSOLUTE_IMPORT, etc.
    for flag in sorted(consts.VALUE_TO_NAME):
        if co.co_flags & flag:
            flag_name = consts.VALUE_TO_NAME[flag]
            out.WriteFlagRow(pyc_path, co.co_name, flag_name)

    # Write a row for every constant.  Ints record their value; strings and
    # tuples record their length; everything else records 'NA'.
    for const in co.co_consts:
        if isinstance(const, int):
            len_or_val = const
        elif isinstance(const, (str, tuple)):
            len_or_val = len(const)
        else:
            len_or_val = 'NA'
        out.WriteConstRow(pyc_path, co.co_name, const.__class__.__name__, len_or_val)

        # Nested functions/classes live in co_consts; recurse into them.
        if isinstance(const, types.CodeType):
            WriteDisTables(pyc_path, const, out)
def Options():
    """Build and return the optparse parser for the opy front end."""
    parser = optparse.OptionParser()
    # NOTE: default command is None because empty string is valid.
    # NOTE: In 'opy run oil.pyc -c', -c is an arg to opy, and not a flag.
    parser.add_option('-c', dest='command', default=None,
                      help='Python command to run')
    return parser
# Made by the Makefile.  Marshalled pgen2 grammar tables, loaded at startup
# by OpyCommandMain() through the resource loader.
GRAMMAR_REL_PATH = '_build/opy/py27.grammar.marshal'
def OpyCommandMain(argv):
    """Dispatch to the right action.

    argv[0] selects the subcommand ('parse', 'compile', 'run', ...); the
    remaining arguments are the subcommand's own argv.  Grammar tables and
    a Compiler instance are set up first for the actions that need them.
    """
    # TODO: Use core/args.
    #opts, argv = Options().parse_args(argv)

    try:
        action = argv[0]
    except IndexError:
        raise args.UsageError('opy: Missing required subcommand.')

    argv = argv[1:]  # TODO: Should I do input.ReadRequiredArg()?
    # That will shift the input.

    # These actions parse or compile Python, so they need the grammar and a
    # Compiler; the rest (pgen2, lex, dis-tables, ...) do not.
    if action in (
        'parse', 'parse-with', 'compile', 'dis', 'ast', 'symbols', 'cfg',
        'compile-ovm', 'eval', 'repl', 'run', 'run-ovm'):
        loader = pyutil.GetResourceLoader()
        f = loader.open(GRAMMAR_REL_PATH)
        contents = f.read()
        f.close()

        gr = grammar.Grammar()
        gr.loads(contents)

        # In Python 2 code, always use from __future__ import print_function.
        try:
            del gr.keywords["print"]
        except KeyError:
            pass

        symbols = Symbols(gr)
        pytree.Init(symbols)  # for type_repr() pretty printing
        transformer.Init(symbols)  # for _names and other dicts

        compiler = skeleton.Compiler(gr)
    else:
        # e.g. pgen2 doesn't use any of these. Maybe we should make a different
        # tool.
        compiler = None

    # TODO: Also have a run_spec for 'opyc run'.
    compile_spec = args.OilFlags()
    compile_spec.Flag('-emit-docstring', args.Bool, default=True,
                      help='Whether to emit docstrings')
    compile_spec.Flag('-fast-ops', args.Bool, default=True,
                      help='Whether to emit LOAD_FAST, STORE_FAST, etc.')
    compile_spec.Flag('-oil-subset', args.Bool, default=False,
                      help='Only allow the constructs necessary to implement'
                      'Oil. Example: using multiple inheritance will abort '
                      'compilation.')

    #
    # Actions
    #

    if action == 'pgen2':
        # Compile a grammar file to marshalled tables.
        grammar_path = argv[0]
        marshal_path = argv[1]
        WriteGrammar(grammar_path, marshal_path)

    elif action == 'stdlib-parse':
        # This is what the compiler/ package was written against.
        import parser

        py_path = argv[1]
        with open(py_path) as f:
            st = parser.suite(f.read())

        tree = st.totuple()

        printer = TupleTreePrinter(HostStdlibNames())
        printer.Print(tree)
        n = CountTupleTree(tree)
        log('COUNT %d', n)

    elif action == 'lex':
        # Dump every token of one file with its position.
        py_path = argv[0]
        with open(py_path) as f:
            tokens = tokenize.generate_tokens(f.readline)
            for typ, val, start, end, unused_line in tokens:
                print('%10s %10s %-10s %r' % (start, end, token.tok_name[typ], val))

    elif action == 'lex-names':  # Print all the NAME tokens.
        for py_path in argv:
            log('Lexing %s', py_path)
            with open(py_path) as f:
                tokens = tokenize.generate_tokens(f.readline)
                for typ, val, start, end, unused_line in tokens:
                    if typ == token.NAME:
                        print(val)

    elif action == 'parse':
        py_path = argv[0]
        with open(py_path) as f:
            tokens = tokenize.generate_tokens(f.readline)
            p = parse.Parser(gr, convert=skeleton.py2st)
            parse_tree = driver.PushTokens(p, tokens, gr, 'file_input')

        if isinstance(parse_tree, tuple):
            n = CountTupleTree(parse_tree)
            log('COUNT %d', n)

            printer = TupleTreePrinter(transformer._names)
            printer.Print(parse_tree)
        else:
            # NOTE(review): 'tree' is not defined in this branch (only
            # 'parse_tree' is) — this path would raise NameError if reached.
            tree.PrettyPrint(sys.stdout)
            log('\tChildren: %d' % len(tree.children), file=sys.stderr)

    # Parse with an arbitrary grammar, but the Python lexer.
    elif action == 'parse-with':
        grammar_path = argv[0]
        start_symbol = argv[1]
        code_str = argv[2]

        with open(grammar_path) as f:
            gr = pgen.MakeGrammar(f)

        f = cStringIO.StringIO(code_str)
        tokens = tokenize.generate_tokens(f.readline)
        p = parse.Parser(gr)  # no convert=
        try:
            pnode = driver.PushTokens(p, tokens, gr, start_symbol)
        except parse.ParseError as e:
            # Extract location information and show it.
            _, _, (lineno, offset) = e.opaque
            # extra line needed for '\n' ?
            lines = code_str.splitlines() + ['']
            line = lines[lineno-1]
            log(' %s', line)
            log(' %s^', ' '*offset)
            log('Parse Error: %s', e)
            return 1
        printer = ParseTreePrinter(transformer._names)  # print raw nodes
        printer.Print(pnode)

    elif action == 'ast':  # output AST
        opt, i = compile_spec.ParseArgv(argv)
        py_path = argv[i]
        with open(py_path) as f:
            graph = compiler.Compile(f, opt, 'exec', print_action='ast')

    elif action == 'symbols':  # output symbols
        opt, i = compile_spec.ParseArgv(argv)
        py_path = argv[i]
        with open(py_path) as f:
            graph = compiler.Compile(f, opt, 'exec', print_action='symbols')

    elif action == 'cfg':  # output Control Flow Graph
        opt, i = compile_spec.ParseArgv(argv)
        py_path = argv[i]
        with open(py_path) as f:
            graph = compiler.Compile(f, opt, 'exec', print_action='cfg')

    elif action == 'compile':  # 'opyc compile' is pgen2 + compiler2
        # spec.Arg('action', ['foo', 'bar'])
        # But that leads to some duplication.
        opt, i = compile_spec.ParseArgv(argv)

        py_path = argv[i]
        out_path = argv[i+1]

        with open(py_path) as f:
            co = compiler.Compile(f, opt, 'exec')

        log("Compiled to %d bytes of top-level bytecode", len(co.co_code))

        # Write the .pyc file
        with open(out_path, 'wb') as out_f:
            h = misc.getPycHeader(py_path)
            out_f.write(h)
            marshal.dump(co, out_f)

    elif action == 'compile-ovm':
        opt, i = compile_spec.ParseArgv(argv)
        py_path = argv[i]
        out_path = argv[i+1]

        # Compile to Python bytecode (TODO: remove ovm_codegen.py)
        mode = 'exec'
        with open(py_path) as f:
            co = compiler.Compile(f, opt, mode)

        # 'if 1' short-circuits to the oheap2 encoding; the code below it is
        # a dead, older .pyc-writing path kept for reference.
        if 1:
            with open(out_path, 'wb') as out_f:
                oheap2.Write(co, out_f)
            return 0

        log("Compiled to %d bytes of top-level bytecode", len(co.co_code))
        # Write the .pyc file
        with open(out_path, 'wb') as out_f:
            if 1:
                out_f.write(co.co_code)
            else:
                h = misc.getPycHeader(py_path)
                out_f.write(h)
                marshal.dump(co, out_f)

        log('Wrote only the bytecode to %r', out_path)

    elif action == 'eval':  # Like compile, but parses to a code object and prints it
        opt, i = compile_spec.ParseArgv(argv)
        py_expr = argv[i]

        f = skeleton.StringInput(py_expr, '<eval input>')
        co = compiler.Compile(f, opt, 'eval')

        v = dis_tool.Visitor()
        v.show_code(co)
        print()
        print('RESULT:')
        print(eval(co))

    elif action == 'repl':  # Like eval in a loop
        while True:
            py_expr = raw_input('opy> ')
            f = skeleton.StringInput(py_expr, '<REPL input>')

            # TODO: change this to 'single input'? Why doesn't this work?
            # NOTE(review): 'opt' is not defined in this branch (no
            # ParseArgv call) — this would raise NameError; confirm intent.
            co = compiler.Compile(f, opt, 'eval')

            v = dis_tool.Visitor()
            v.show_code(co)
            print(eval(co))

    elif action == 'dis-tables':
        # Disassemble .pyc files into the five TSV tables (see TableOutput).
        out_dir = argv[0]
        pyc_paths = argv[1:]

        out = TableOutput(out_dir)

        for pyc_path in pyc_paths:
            with open(pyc_path) as f:
                magic, unixtime, timestamp, code = dis_tool.unpack_pyc(f)
                WriteDisTables(pyc_path, code, out)

        out.Close()

    elif action == 'dis':
        opt, i = compile_spec.ParseArgv(argv)
        path = argv[i]
        v = dis_tool.Visitor()

        if path.endswith('.py'):
            with open(path) as f:
                co = compiler.Compile(f, opt, 'exec')

            log("Compiled to %d bytes of top-level bytecode", len(co.co_code))
            v.show_code(co)

        else:  # assume pyc_path
            with open(path, 'rb') as f:
                v.Visit(f)

    elif action == 'dis-md5':
        # MD5 of each .pyc, skipping the 4-byte timestamp so the hash is
        # stable across rebuilds of identical bytecode.
        pyc_paths = argv
        if not pyc_paths:
            raise args.UsageError('dis-md5: At least one .pyc path is required.')

        for path in pyc_paths:
            h = hashlib.md5()
            with open(path) as f:
                magic = f.read(4)
                h.update(magic)
                ignored_timestamp = f.read(4)
                while True:
                    b = f.read(64 * 1024)
                    if not b:
                        break
                    h.update(b)
            print('%6d %s %s' % (os.path.getsize(path), h.hexdigest(), path))

    elif action == 'run':  # Compile and run, without writing pyc file
        # TODO: Add an option like -v in __main__
        #level = logging.DEBUG if args.verbose else logging.WARNING
        #logging.basicConfig(level=level)
        #logging.basicConfig(level=logging.DEBUG)

        opt, i = compile_spec.ParseArgv(argv)

        py_path = argv[i]
        # NOTE(review): this keeps the script path in its own argv
        # (argv[i:]), while 'run-ovm' below uses argv[i+1:] — confirm which
        # is intended.
        opy_argv = argv[i:]

        if py_path.endswith('.py'):
            with open(py_path) as f:
                co = compiler.Compile(f, opt, 'exec')
            num_ticks = execfile.run_code_object(co, opy_argv)

        elif py_path.endswith('.pyc') or py_path.endswith('.opyc'):
            with open(py_path) as f:
                f.seek(8)  # past header. TODO: validate it!
                co = marshal.load(f)
            num_ticks = execfile.run_code_object(co, opy_argv)

        else:
            raise args.UsageError('Invalid path %r' % py_path)

    elif action == 'run-ovm':  # Compile and run, without writing pyc file
        opt, i = compile_spec.ParseArgv(argv)
        py_path = argv[i]
        opy_argv = argv[i+1:]

        if py_path.endswith('.py'):
            #mode = 'exec'
            mode = 'ovm'  # OVM bytecode is different!
            with open(py_path) as f:
                co = compiler.Compile(f, opt, mode)
            log('Compiled to %d bytes of OVM code', len(co.co_code))
            num_ticks = ovm.run_code_object(co, opy_argv)

        elif py_path.endswith('.pyc') or py_path.endswith('.opyc'):
            with open(py_path) as f:
                f.seek(8)  # past header. TODO: validate it!
                co = marshal.load(f)
            num_ticks = ovm.run_code_object(co, opy_argv)

        else:
            raise args.UsageError('Invalid path %r' % py_path)

    else:
        raise args.UsageError('Invalid action %r' % action)
| [
"andy@oilshell.org"
] | andy@oilshell.org |
47f47d17d3d69a25285ce3e88fbb0277e9f7b490 | 5bd49cdbd0dd04af74a98b96aba3a936512f40c9 | /datasetCode/resize_dataset.py | 5885e7fab30c69cac2e8e36f2d7780e41ab09c94 | [] | no_license | gombru/instaEmotions | fd78ca7a27ccd868bea239b98d5a8db895d945f8 | 94c0b69096e6aeb0c187aac8d76db49e379f9bc7 | refs/heads/master | 2021-05-09T23:21:14.938278 | 2018-02-13T08:25:19 | 2018-02-13T08:25:19 | 118,780,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,965 | py | # Resizes the images in a folder and creates a resized datasetcd in another
# It also filters corrupted images
import glob
from PIL import Image
from joblib import Parallel, delayed
import os
from shutil import copyfile
import time
json_filtered_path = "../../../hd/datasets/instaEmotions/json_filtered/"
im_dest_path = "../../../hd/datasets/instaEmotions/img_resized/"
minSize = 256
def resize(file):
try:
im_file = file.replace('json_filtered','img').replace('json','jpg')
im = Image.open(im_file)
w = im.size[0]
h = im.size[1]
# print "Original w " + str(w)
# print "Original h " + str(h)
if w < h:
new_width = minSize
new_height = int(minSize * (float(h) / w))
if h <= w:
new_height = minSize
new_width = int(minSize * (float(w) / h))
# print "New width "+str(new_width)
# print "New height "+str(new_height)
im = im.resize((new_width, new_height), Image.ANTIALIAS)
if not os.path.exists(im_dest_path + im_file.split('/')[-2]):
os.makedirs(im_dest_path + im_file.split('/')[-2])
im.save(im_dest_path + im_file.split('/')[-2] + '/' + im_file.split('/')[-1])
except:
print "Failed copying image. Removing caption " + str(file)
try:
# os.remove(file.replace("img", "json").replace("jpg", "json"))
os.remove(file)
# os.remove(file.replace("img", "json_filtered").replace("jpg", "json"))
except:
print "Cannot remove " + str(file)
return
print "Removed"
return
if not os.path.exists(im_dest_path):
os.makedirs(im_dest_path)
dirs = [dI for dI in os.listdir(json_filtered_path) if os.path.isdir(os.path.join(json_filtered_path, dI))]
c = 0
for dir in dirs:
print dir
Parallel(n_jobs=12)(delayed(resize)(file) for file in glob.glob(json_filtered_path + dir + "/*.json")) | [
"raulgombru@gmail.com"
] | raulgombru@gmail.com |
76527e924164481f7266a0971135b8d4ce87e829 | d66aa4c77f65bb837e07626c696b6dc886c7b1c1 | /base/Chapter-9/Chapter-9-3/Chapter-9-3.py | b9b07535566362568d6657934bc76850d1527ebf | [] | no_license | silianpan/Excel_to_Python | 2a789aec0eb38d3178be6dd44205792624d0d4c4 | 1c5890988c99b2939c4d98bb6a881e15d6c3ad7d | refs/heads/master | 2021-07-09T00:25:54.665343 | 2021-05-04T11:25:18 | 2021-05-04T11:25:18 | 242,090,461 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | # 自定义函数。
def level(number,lv1,lv2,lv3):
if number>=90:
return lv1
elif number>=60:
return lv2
elif number>=0:
return lv3
# 自定义函数的调用。
for score in [95,63,58,69,41,88,96]:
print(score,level(score,'优','中','差')) | [
"liu.pan@silianpan.cn"
] | liu.pan@silianpan.cn |
d840c7184808da0922210c4d1bfc5af6e5d4d5d2 | 149c3220bd49460554868fc5752f21f5396cbd86 | /HW3/libs/miscellaneous/__init__.py | 744ef6f2b876d34cf00e197730b4d9915dfc2ee6 | [
"MIT"
] | permissive | Aminaba123/CMU10707_Assignments | 01879d2a84acbf9c5dda7765888d9d674465b1a9 | 9c331c471acc443773b27f0b1778676731284cfd | refs/heads/master | 2020-04-13T01:06:27.549010 | 2018-02-12T21:38:44 | 2018-02-12T21:38:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 56 | py | from .conversions import sort_dict, scalar_list2str_list | [
"xinshuo.weng@gmail.com"
] | xinshuo.weng@gmail.com |
77cebbb34a0206d63b09f11451825731950f316b | 90419da201cd4948a27d3612f0b482c68026c96f | /sdk/python/pulumi_azure_nextgen/scheduler/latest/get_job_collection.py | 01c4ede4ea20f274e912f4161ff2a78f9a16fe18 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | test-wiz-sec/pulumi-azure-nextgen | cd4bee5d70cb0d332c04f16bb54e17d016d2adaf | 20a695af0d020b34b0f1c336e1b69702755174cc | refs/heads/master | 2023-06-08T02:35:52.639773 | 2020-11-06T22:39:06 | 2020-11-06T22:39:06 | 312,993,761 | 0 | 0 | Apache-2.0 | 2023-06-02T06:47:28 | 2020-11-15T09:04:00 | null | UTF-8 | Python | false | false | 3,835 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetJobCollectionResult',
'AwaitableGetJobCollectionResult',
'get_job_collection',
]
@pulumi.output_type
class GetJobCollectionResult:
    """Result bag returned by get_job_collection().

    Auto-generated pulumi output type; each field mirrors one property of
    the Azure Scheduler job collection resource.
    """

    def __init__(__self__, location=None, name=None, properties=None, tags=None, type=None):
        # Defensive type checks: these values are deserialized from the
        # pulumi engine, so a mismatch indicates a provider/schema bug.
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if properties and not isinstance(properties, dict):
            raise TypeError("Expected argument 'properties' to be a dict")
        pulumi.set(__self__, "properties", properties)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        Gets or sets the storage account location.
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        Gets or sets the job collection resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def properties(self) -> 'outputs.JobCollectionPropertiesResponse':
        """
        Gets or sets the job collection properties.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Gets or sets the tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Gets the job collection resource type.
        """
        return pulumi.get(self, "type")
class AwaitableGetJobCollectionResult(GetJobCollectionResult):
    """Awaitable wrapper so callers may 'await' the invoke result.

    __await__ never actually suspends: the result is already resolved, so
    it immediately returns a plain GetJobCollectionResult (standard pulumi
    codegen pattern; the dead 'yield' only marks this as a generator).
    """
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetJobCollectionResult(
            location=self.location,
            name=self.name,
            properties=self.properties,
            tags=self.tags,
            type=self.type)
def get_job_collection(job_collection_name: Optional[str] = None,
                       resource_group_name: Optional[str] = None,
                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetJobCollectionResult:
    """
    Look up an existing Azure Scheduler job collection.

    Use this data source to access information about an existing resource.

    :param str job_collection_name: The job collection name.
    :param str resource_group_name: The resource group name.
    :param pulumi.InvokeOptions opts: Options for the invoke; when the
        provider version is unset it defaults to this SDK's pinned version.
    """
    __args__ = dict()
    __args__['jobCollectionName'] = job_collection_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:scheduler/latest:getJobCollection', __args__, opts=opts, typ=GetJobCollectionResult).value

    return AwaitableGetJobCollectionResult(
        location=__ret__.location,
        name=__ret__.name,
        properties=__ret__.properties,
        tags=__ret__.tags,
        type=__ret__.type)
| [
"public@paulstack.co.uk"
] | public@paulstack.co.uk |
4cd64a5900987513454fab8e412bc4c1b59e5930 | 9588581462e70c0fd8728da13771cc72dbb9691e | /src/python/interpret/test/test_interactive.py | 84126bedf8fa54bb79bee7d7aa223ce4414ea308 | [
"MIT"
] | permissive | anshuman-unilu/interpret | df88a34433e0fc82ea697109afbf56799c886068 | 79d4afe441e8f153ef13313d81b3dd0a09361be1 | refs/heads/master | 2020-05-24T07:40:18.372789 | 2019-05-16T23:31:44 | 2019-05-16T23:31:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | # Copyright (c) 2019 Microsoft Corporation
# Distributed under the MIT software license
from ..visual.interactive import set_show_addr, get_show_addr, shutdown_show_server
def test_shutdown():
    """A show server registered at a known address shuts down cleanly."""
    target_addr = ("127.0.0.1", 1337)
    set_show_addr(target_addr)
    actual_response = shutdown_show_server()
    expected_response = True
    assert actual_response == expected_response
def test_addr_assignment():
    """set_show_addr / get_show_addr round-trip the configured address."""
    target_addr = ("127.0.0.1", 1337)
    set_show_addr(target_addr)
    actual_addr = get_show_addr()
    assert target_addr == actual_addr
    # Clean up so later tests don't inherit a running show server.
    shutdown_show_server()
| [
"interpretml@outlook.com"
] | interpretml@outlook.com |
aeb1ba7b12cf83d1a171bfd95e019d04261e82be | bfe1d367f09969c5f99b66960d23bd23db2d1cfd | /migrations/versions/d7da7d3067a1_followers.py | 8862d09bb2ba3c2aaac8674c244ba8ca132e5f14 | [] | no_license | sockduct/myblog | f97ed3e97262ea2d175499fb3acb7eac98cd3567 | 1b0a30e21c119b6c494d2f6241ff346a3bbcd4e9 | refs/heads/master | 2022-12-08T22:35:08.921777 | 2018-12-27T11:31:32 | 2018-12-27T11:31:32 | 160,174,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | """followers
Revision ID: d7da7d3067a1
Revises: 470ad871bbde
Create Date: 2018-11-20 07:53:39.417383
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd7da7d3067a1'        # this migration's id
down_revision = '470ad871bbde'   # migration applied immediately before this one
branch_labels = None
depends_on = None
def upgrade():
    """Create the 'followers' association table.

    Both columns are foreign keys into user.id; a row records that
    follower_id follows followed_id.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('followers',
    sa.Column('follower_id', sa.Integer(), nullable=True),
    sa.Column('followed_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['followed_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['follower_id'], ['user.id'], )
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the 'followers' table, reverting upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('followers')
    # ### end Alembic commands ###
| [
"james.r.small@outlook.com"
] | james.r.small@outlook.com |
a4ff7a1359bc8e2211bd34192f2b5c15102c5918 | 061a223e2207d9d8743979be1c64cf4a1ffa7920 | /src/pretix/base/migrations/0002_auto_20151021_1412.py | 69380c7e0f47d1430bc6ca50835ae1d940525e41 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | cygery/pretix | 925bf8e612f305d87aa9acbef02fa7697917a15c | 232c42b265bdbcd535dca2dec98c73dbca9becd9 | refs/heads/master | 2021-01-18T02:50:20.297908 | 2015-10-22T15:45:19 | 2015-10-22T15:45:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 502 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Make quota.size nullable/blank so an empty value means 'unlimited'."""

    dependencies = [
        ('pretixbase', '0001_initial'),
    ]

    operations = [
        # Allow NULL/blank on Quota.size; the help text documents that an
        # empty capacity means an unlimited number of tickets.
        migrations.AlterField(
            model_name='quota',
            name='size',
            field=models.PositiveIntegerField(help_text='Leave empty for an unlimited number of tickets.', verbose_name='Total capacity', blank=True, null=True),
        ),
    ]
| [
"mail@raphaelmichel.de"
] | mail@raphaelmichel.de |
7c38e98199e0c1619b75fb425ef013bfde8a52dc | 205e2d038ad3fd01e4072e5f7fcc0d32dbea341f | /06函数/03/function_bmi_upgrade.py | 7ca38bf21a62d1d00a09f7f99cd38defea8627e4 | [] | no_license | xiaohema233/LearnPythonWithZeroFoundation | b35cd95a13389a2d97ab4a4eff73fddb18ab85c8 | da9f4579b76e3b951dd0870664744014538fdb14 | refs/heads/master | 2022-10-24T10:42:30.453209 | 2020-06-13T07:32:00 | 2020-06-13T07:32:00 | 271,948,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,445 | py | def fun_bmi_upgrade(*person):
"""功能:根据身高和体重计算BMI指数(升级版)
*person:可变参数该参数中需要传递带3个元素的列表,
分别为姓名、身高(单位:米)和体重(单位:千克)
"""
for list_person in person:
for item in list_person:
person = item[0] # 姓名
height = item[1] # 身高(单位:米)
weight = item[2] # 体重(单位:千克)
print("\n" + "=" * 13, person, "=" * 13)
print("身高:" + str(height) + "米 \t 体重:" + str(weight) + "千克")
bmi = weight / (height * height) # 用于计算BMI指数,公式为“体重/身高的平方”
print("BMI指数:" + str(bmi)) # 输出BMI指数
# 判断身材是否合理
if bmi < 18.5:
print("您的体重过轻 ~@_@~")
if 18.5 <= bmi < 24.9:
print("正常范围,注意保持 (-_-)")
if 24.9 <= bmi < 29.9:
print("您的体重过重 ~@_@~")
if bmi >= 29.9:
print("肥胖 ^@_@^")
# ***************************** call the function ***********************************#
# Two groups of (name, height, weight) tuples, passed as separate varargs.
list_w = [('绮梦', 1.70, 65), ('零语', 1.77, 50), ('黛兰', 1.72, 66)]
list_m = [('梓轩', 1.80, 75), ('冷伊一', 1.75, 70)]
fun_bmi_upgrade(list_w, list_m)  # call the function with variable arguments
| [
"33052287+xiaohema233@users.noreply.github.com"
] | 33052287+xiaohema233@users.noreply.github.com |
069972eef43625c5c870e7345f1562830ce1d413 | 112882b8d6c5071e7d2610c595bfca9210c79a0a | /tools/leetcode.200.Number of Islands/leetcode.200.Number of Islands.submission2.py | 02ab1888baec51062ca3339b9c5f160cca05b049 | [
"MIT"
] | permissive | tedye/leetcode | 193b1900d98e35d5c402013cbe3bc993d0235da2 | 975d7e3b8cb9b6be9e80e07febf4bcf6414acd46 | refs/heads/master | 2021-01-01T19:06:06.408135 | 2015-10-24T06:44:40 | 2015-10-24T06:44:40 | 41,804,923 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 727 | py | class Solution:
# @param {character[][]} grid
# @return {integer}
def numIslands(self, grid):
if not grid:
return 0
m = len(grid)
n = len(grid[0])
count = 0
for i in range(m):
for j in range(n):
if grid[i][j] == '1':
count += 1
self.touch(grid, i,j,m,n)
return count
def touch(self,grid,i,j,m,n):
if grid[i][j] == '0':
return
grid[i][j] = '0'
if i > 0: self.touch(grid, i-1,j,m,n)
if i < m -1: self.touch(grid, i+1,j,m,n)
if j > 0: self.touch(grid, i,j-1,m,n)
if j < n -1: self.touch(grid, i,j+1,m,n)
| [
"tedye@bu.edu"
] | tedye@bu.edu |
e295fde13f0aa807766ef22576da20670075e3a5 | 82074ba616918ede605dec64b038546a7b07bd7d | /app/reports/report_amp.py | 57d0c3ba657966103e6c93e3b2adfc42283e2a16 | [] | no_license | chetankhopade/EmpowerRM | b7ab639eafdfa57c054a0cf9da15c3d4b90bbd66 | 8d968592f5e0d160c56f31a4870e79c30240b514 | refs/heads/main | 2023-07-05T03:20:13.820049 | 2021-08-26T11:56:28 | 2021-08-26T11:56:28 | 399,354,317 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,515 | py | import datetime
from datetime import timedelta
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import render
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from app.management.utilities.exports import export_report_to_excel, export_report_to_csv
from app.management.utilities.functions import convert_string_to_date, datatable_handler, bad_json, dates_exceeds_range
from app.management.utilities.globals import addGlobalData
from app.reports.reports_structures import get_amp_report_structure
from erms.models import (ChargeBackLineHistory)
@login_required(redirect_field_name='ret', login_url='/login')
def view(request):
    """
    User's AMP Report

    Renders the AMP Data Report page.  Redirects to the company chooser
    when the session has no current company or the user lacks access to it
    (both flags are populated by addGlobalData).
    """
    data = {'title': 'AMP Data Report', 'header_title': 'AMP Data Report'}
    addGlobalData(request, data)
    if not data['company'] or not data['has_access_to_company']:
        return HttpResponseRedirect(reverse('companies'))
    # activate menu option
    data['menu_option'] = 'menu_reports'
    return render(request, "reports/amp.html", data)
@login_required(redirect_field_name='ret', login_url='/login')
@csrf_exempt
def load_data(request):
    """
    call DT Handler function with the required params: request, queryset and search_fields

    Date handling (POST 'start_date'/'end_date', format presumably
    %m/%d/%Y via convert_string_to_date — confirm against the frontend):
    - both given: validated against a 2-year maximum range (EA-1355);
    - only start: end defaults to today;
    - only end: start defaults to one year before end;
    - neither: no queryset, an empty DataTables payload is returned.
    """
    try:
        # all ChargeBackLineHistory
        start_date_str = request.POST.get('start_date', '')
        end_date_str = request.POST.get('end_date', '')
        start_date = None
        end_date = None
        if start_date_str and end_date_str:
            start_date = convert_string_to_date(start_date_str)
            end_date = convert_string_to_date(end_date_str)
            # EA-1355 - limit all reports to pull only 2 years of data at most
            date_range_exceeds = dates_exceeds_range(start_date, end_date, 2)
            if date_range_exceeds:
                return bad_json(message="Date range should not exceed beyond 2 years")
        if start_date_str and not end_date_str:
            start_date = convert_string_to_date(start_date_str)
            end_date = convert_string_to_date(datetime.datetime.now().date().strftime('%m/%d/%Y'))
        if end_date_str and not start_date_str:
            end_date = convert_string_to_date(end_date_str)
            start_date = end_date - timedelta(days=365)
        queryset = None
        if start_date and end_date:
            # Rows with a zero claim-amount issue are excluded from the report.
            queryset = ChargeBackLineHistory.objects.filter(updated_at__date__range=[start_date, end_date]).exclude(claim_amount_issue=0).order_by('cblnid')
        if not queryset:
            # Empty DataTables response (no dates supplied or nothing matched).
            return JsonResponse({
                'data': [],
                'recordsTotal': 0,
                'recordsFiltered': 0,
            })
        # Columns the DataTables server-side handler may search on.
        search_fields = ['cbtype', 'cb_cm_number', 'customer', 'distributor', 'distributor_city', 'distributor_state',
                         'distributor_zipcode', 'contract_name', 'contract_no', 'invoice_no', 'indirect_customer_name',
                         'indirect_customer_location_no', 'indirect_customer_address1', 'indirect_customer_address2',
                         'indirect_customer_city', 'indirect_customer_state', 'indirect_customer_zipcode', 'item_ndc',
                         'item_brand', 'item_description', 'item_uom', 'item_qty', 'wac_system', 'wac_submitted',
                         'claim_amount_system', 'claim_amount_submitted', 'cbnumber']
        response = datatable_handler(request=request, queryset=queryset, search_fields=search_fields)
        return JsonResponse(response)
    except Exception as ex:
        return bad_json(message=ex.__str__())
@login_required(redirect_field_name='ret', login_url='/login')
def export(request):
    """
    Export the chargeback-line history report to Excel or CSV.

    Date handling mirrors load_data, but reads GET params 'sd'/'ed';
    'export_to' selects the output format (default 'excel').
    """
    try:
        export_to = request.GET.get('export_to', 'excel')
        start_date_str = request.GET.get('sd', '')
        end_date_str = request.GET.get('ed', '')
        start_date = None
        end_date = None
        if start_date_str and end_date_str:
            start_date = convert_string_to_date(start_date_str)
            end_date = convert_string_to_date(end_date_str)
        # Only a start date: run up to today.
        if start_date_str and not end_date_str:
            start_date = convert_string_to_date(start_date_str)
            end_date = convert_string_to_date(datetime.datetime.now().date().strftime('%m/%d/%Y'))
        # Only an end date: cover the preceding 365 days.
        if end_date_str and not start_date_str:
            end_date = convert_string_to_date(end_date_str)
            start_date = end_date - timedelta(days=365)
        cblines_history = []
        if start_date and end_date:
            cblines_history = ChargeBackLineHistory.objects.filter(updated_at__date__range=[start_date, end_date]).exclude(claim_amount_issue=0).order_by('cblnid')
        time1 = datetime.datetime.now()
        # Structure
        structure = get_amp_report_structure()
        # Export to excel or csv
        if export_to == 'excel':
            filename = f"{datetime.datetime.now().strftime('%Y-%m-%d')}_amp_report.xlsx"
            response = export_report_to_excel(cblines_history, filename, structure)
        else:
            filename = f"{datetime.datetime.now().strftime('%Y-%m-%d')}_amp_report.csv"
            response = export_report_to_csv(cblines_history, filename, structure)
        time2 = datetime.datetime.now()
        delta = (time2 - time1).total_seconds()
        print(f"Delta Time Export: {delta} sec")
        return response
    except Exception as ex:
        # NOTE(review): the exception is only printed and the view implicitly
        # returns None (empty response to the caller) -- confirm this is intended.
        print(ex.__str__())
| [
"noreply@github.com"
] | chetankhopade.noreply@github.com |
39dd28a5b187c7e69548e7fb1a343b9abc4b6ff5 | dfeff5d7da852d8dfb4ab10cb9b6a902041eb313 | /website/forms.py | b7b4848a6ff8e8fac9c368ae8d34c1ecd967118b | [] | no_license | jdriscoll98/TorresLawPayments | adac6348e7e4dc0df01142f68b5ce91e1ecd5a8e | b9e59478866924a954f9c9d818b97ed1591d6a96 | refs/heads/master | 2021-06-21T05:30:18.019685 | 2020-01-22T02:46:14 | 2020-01-22T02:46:14 | 219,618,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,392 | py | from django import forms
from phonenumber_field.formfields import PhoneNumberField
class NewClientForm(forms.Form):
    """Intake form for a new client: contact details plus payment-plan amounts."""
    name = forms.CharField(max_length=200)
    email = forms.EmailField(max_length=254)
    phone_number = PhoneNumberField() # validators should be a list
    monthly_payment = forms.DecimalField(max_digits=10, decimal_places=2)
    total_amount_due = forms.DecimalField(max_digits=10, decimal_places=2)
    admin_fee = forms.DecimalField(max_digits=10, decimal_places=2)
    first_payment_date = forms.DateField(required=False)
    def clean_monthly_payment(self):
        """Reject negative monthly payment amounts."""
        monthly_payment = self.cleaned_data['monthly_payment']
        if monthly_payment < 0:
            raise forms.ValidationError("Please enter a positive value")
        return monthly_payment
    def clean_total_amount_due(self):
        """Reject negative total-due amounts."""
        total_amount_due = self.cleaned_data['total_amount_due']
        if total_amount_due < 0:
            raise forms.ValidationError("Please enter a positive value")
        return total_amount_due
    # def clean_phone_number(self, form):
    #     data = self.cleaned_data['phone_number']
    #     if "fred@example.com" not in data:
    #         raise forms.ValidationError("You have forgotten about Fred!")
    #     # Always return a value to use as the new cleaned data, even if
    #     # this method didn't change it.
    #     return data
| [
"41447166+jdriscoll98@users.noreply.github.com"
] | 41447166+jdriscoll98@users.noreply.github.com |
ca4b0d329a6fa697df06e68aa0d627f9e1bc4182 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/communication/azure-communication-phonenumbers/samples/get_purchased_phone_number_sample.py | 562eff620cef4cf89444f8caff02db237857b51b | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,558 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE:get_purchased_phone_number_sample.py
DESCRIPTION:
This sample demonstrates how to get the information from an acquired phone number using your connection string
USAGE:
python get_purchased_phone_number_sample.py
Set the environment variables with your own values before running the sample:
1) COMMUNICATION_SAMPLES_CONNECTION_STRING - The connection string including your endpoint and
access key of your Azure Communication Service
2) AZURE_PHONE_NUMBER - The phone number you want to get its information
"""
import os
from azure.communication.phonenumbers import (
PhoneNumbersClient
)
connection_str = os.getenv('COMMUNICATION_SAMPLES_CONNECTION_STRING')
phone_number = os.getenv("AZURE_PHONE_NUMBER") # e.g. "+18001234567"
phone_numbers_client = PhoneNumbersClient.from_connection_string(connection_str)
def get_purchased_phone_number_information():
    """Fetch the configured purchased number via the client and print its details."""
    purchased_phone_number_information = phone_numbers_client.get_purchased_phone_number(phone_number)
    print('Phone number: ' + purchased_phone_number_information.phone_number)
    print('Country code: ' + purchased_phone_number_information.country_code)
if __name__ == '__main__':
    get_purchased_phone_number_information()
| [
"noreply@github.com"
] | Azure.noreply@github.com |
dd976bc4291db029b6f0a14de8e8e7701759e5b5 | dd2c8e0a8895ffb217813efb24568c34921cbcf1 | /tests/test_field.py | c7b16627cc419c58e0489f094a5f2a2bf2992cef | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | saxix/django-strategy-field | c543ccf101f473994da407def7671f1c2d1dba41 | 0c92b47976e39a4e8c06e5abfa21033af5a3e806 | refs/heads/develop | 2022-09-10T01:10:01.135944 | 2022-08-23T14:01:07 | 2022-08-23T14:01:07 | 26,797,927 | 17 | 3 | NOASSERTION | 2021-12-09T17:19:48 | 2014-11-18T07:45:50 | Python | UTF-8 | Python | false | false | 929 | py | import logging
import pytest
from django.core.mail.backends.filebased import EmailBackend
from demoproject.demoapp.models import DemoModelNoRegistry
logger = logging.getLogger(__name__)
# @pytest.mark.django_db
# def test_valid_class():
# d = DemoModelNoRegistry(klass='a.b.c')
# with pytest.raises(ValidationError):
# d.clean_fields()
class Dummy:
    """Minimal stand-in class used as a strategy target in the tests below."""
    pass
@pytest.mark.django_db
def test_no_registry_assign_class():
    # Assigning a class object to a registry-less strategy field stores the class itself.
    d = DemoModelNoRegistry(klass=Dummy)
    d.save()
    assert d.klass == Dummy
@pytest.mark.django_db
def test_no_registry_assign_instance():
    # Assigning a class to the instance field yields an instance of that class.
    d = DemoModelNoRegistry(instance=Dummy)
    d.save()
    assert isinstance(d.instance, Dummy)
@pytest.mark.django_db
def test_no_registry_assign_string():
    # A dotted import path assigned as a string is resolved and instantiated.
    d = DemoModelNoRegistry(instance='django.core.mail.backends.filebased.EmailBackend')
    d.save()
    assert isinstance(d.instance, EmailBackend)
    assert d.instance.open()
| [
"s.apostolico@gmail.com"
] | s.apostolico@gmail.com |
39641467aa7ab39ab1fcd6172a25743cbdf618ba | 3d705ec48c94373817e5f61d3f839988910431e3 | /lib/platform/dataprocess/testdata/MonthlyPeerActivity.py | 0ae6c9885b06e65b5deb4586b28678c73cfdb018 | [] | no_license | namesuqi/zeus | 937d3a6849523ae931162cd02c5a09b7e37ebdd8 | 3445b59b29854b70f25da2950016f135aa2a5204 | refs/heads/master | 2022-07-24T14:42:28.600288 | 2018-03-29T08:03:09 | 2018-03-29T08:03:09 | 127,256,973 | 0 | 0 | null | 2022-07-07T22:57:57 | 2018-03-29T07:53:16 | Python | UTF-8 | Python | false | false | 907 | py | import sys
import os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/' + '..'))
def makeexpecteddata(hour=-1):
    """Build the expected-result file for the monthly peer-activity check.

    Reads MonthlyDistinctPeerID.txt, counts peer IDs per 8-character peer-id
    prefix, writes '<prefix>,<count>,' lines to ../inputdata/<module>.txt and
    returns that output path.  ``hour`` is kept for interface compatibility
    with the sibling test-data generators (it is unused here).
    """
    base_dir = os.path.abspath(os.path.dirname(__file__))
    input_path = base_dir + '/../inputdata/MonthlyDistinctPeerID.txt'
    # Output file is named after this module (e.g. MonthlyPeerActivity.txt).
    output_path = base_dir + "/../inputdata/%s.txt" % __name__.split('.')[-1]
    with open(input_path, 'r') as resultfile:
        orglines = resultfile.readlines()
    # Count how many peer IDs share each 8-char prefix.
    peeridprefix = {}
    for line in orglines:
        # split(',', 1) tolerates extra commas after the peer id
        # (the original 2-tuple unpack raised ValueError on such lines).
        peerid = line.split(',', 1)[0]
        key = peerid[:8]
        peeridprefix[key] = peeridprefix.get(key, 0) + 1
    # 'with' guarantees the output file is closed even if a write fails
    # (the original leaked the handle on exception).
    with open(output_path, "w") as expectedfile:
        for prefix, count in peeridprefix.items():
            expectedfile.write('%s,%d,%s\n' % (prefix, count, ''))
    return output_path
"suqi_name@163.com"
] | suqi_name@163.com |
e86e6c51e9f3fd1ff1426620e49ec00ce6d705e8 | 4a770290fe9201c39162cf0210d0d2b858f4b26a | /src/AuthTokenSerializer.py | 0783a415eeae394894ba3743fc42e60d7454765c | [] | no_license | FuckBrains/geekflex | 7e1d8783053313c301c8848d1aa23b03e8f5b0d6 | 72f048e336619f65c8558b7ec4bdf9ece53e71c3 | refs/heads/master | 2023-04-21T01:22:49.410096 | 2020-06-04T15:54:02 | 2020-06-04T15:54:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,680 | py | from django.contrib.auth import authenticate
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework import serializers
from custom_exception_message import *
class AuthTokenSerializer(serializers.Serializer):
    """Validates login credentials for several auth providers.

    'admin', 'kidsclub' and 'subchild' require username + password;
    'facebook' and 'google' authenticate by username only.
    On success the authenticated user is attached as attrs['user'].
    """
    username = serializers.CharField(label=_("Username"))
    password = serializers.CharField(
        label=_("Password"),
        style={'input_type': 'password'},
        trim_whitespace=False,
        required=False
    )
    auth_provider = serializers.CharField(required=False)
    def validate(self, attrs):
        """Authenticate against the backend selected by auth_provider.

        NOTE(review): auth_provider is optional, but .lower() below raises
        AttributeError when it is missing/None; and if the provider matches
        no branch, 'user' is unbound at the end (NameError) -- confirm these
        paths are unreachable in practice.
        """
        print("attrs",attrs)
        auth_provider = attrs.get('auth_provider')
        # print("auth_provider",auth_provider)
        #changes done by abhisek singh
        if auth_provider.lower()== 'admin':
            username = attrs.get('username')
            password = attrs.get('password')
            # if username is and password :
            print("username",username,'password',password)
            if username and password :
                user = authenticate(request=self.context.get('request'),
                                    username=username, password=password,auth_provider=auth_provider)
                # The authenticate call simply returns None for is_active=False
                # users. (Assuming the default ModelBackend authentication
                # backend.)
                if not user:
                    msg = _('Unable to log in with provided credentials.')
                    # raise serializers.ValidationError(msg, code='authorization')
                    raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
            else:
                msg = _('Provided credentials with username and password cannot be blank .')
                raise CustomAPIException(None,msg,status_code=status.HTTP_400_BAD_REQUEST)
        elif auth_provider.lower()== 'kidsclub':
            # Same flow as 'admin': password-based login.
            username = attrs.get('username')
            password = attrs.get('password')
            # if username is and password :
            print("username",username,'password',password,'auth_provider',auth_provider)
            if username and password :
                # if (username is not None or username is not "") and (password is not None or password is not ""):
                user = authenticate(request=self.context.get('request'),
                                    username=username, password=password,auth_provider=auth_provider)
                # The authenticate call simply returns None for is_active=False
                # users. (Assuming the default ModelBackend authentication
                # backend.)
                if not user:
                    msg = _('Unable to log in with provided credentials.')
                    # raise serializers.ValidationError(msg, code='authorization')
                    raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
            else:
                msg = _('Provided credentials with username and password cannot be blank .')
                raise CustomAPIException(None,msg,status_code=status.HTTP_400_BAD_REQUEST)
        elif auth_provider.lower()== 'subchild':
            # Same flow as 'admin': password-based login.
            username = attrs.get('username')
            password = attrs.get('password')
            # if username is and password :
            print("username",username,'password',password,'auth_provider',auth_provider)
            if username and password :
                # if (username is not None or username is not "") and (password is not None or password is not ""):
                user = authenticate(request=self.context.get('request'),
                                    username=username, password=password,auth_provider=auth_provider)
                print("user",user)
                # The authenticate call simply returns None for is_active=False
                # users. (Assuming the default ModelBackend authentication
                # backend.)
                if not user:
                    msg = _('Unable to log in with provided credentials.')
                    # raise serializers.ValidationError(msg, code='authorization')
                    raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
            else:
                msg = _('Provided credentials with username and password cannot be blank .')
                raise CustomAPIException(None,msg,status_code=status.HTTP_400_BAD_REQUEST)
        elif auth_provider.lower() == 'facebook' or auth_provider.lower() == 'google':
            # Social providers: authenticate by username only, no password.
            username = attrs.get('username')
            if username :
                user = authenticate(request=self.context.get('request'),
                                    username=username,auth_provider=auth_provider)
                # The authenticate call simply returns None for is_active=False
                # users. (Assuming the default ModelBackend authentication
                # backend.)
                if not user:
                    msg = _('Unable to log in with provided credentials.')
                    # raise serializers.ValidationError(msg, code='authorization')
                    raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
            else:
                msg = _('Must include "username" and "password".')
                # raise serializers.ValidationError(msg, code='authorization')
                raise ValidationError({
                    "error":{
                        'request_status': 0,
                        'msg': msg
                    }
                })
        attrs['user'] = user
        return attrs
"abhishek.singh@shyamfuture.com"
] | abhishek.singh@shyamfuture.com |
cc74541a939aef0c5ad8e6b75077aa486fe02deb | 5d573c2e3a90e8c4e3b884d54aacd4a44d5d0b87 | /Python/python_stack/Django/main/apps/vinmyMVC/urls.py | 21c8fbe20ab9cff1240f9595f9784c6c44aa3994 | [] | no_license | RubenDuran/DojoAssignments | 42d12088feabee09eb1874da010e594b0eb7da18 | aa691ae2c41a35f632fa082fbf2eae60ea1f4601 | refs/heads/master | 2021-01-19T20:57:31.962140 | 2018-02-13T01:40:07 | 2018-02-13T01:40:07 | 88,580,713 | 0 | 2 | null | 2017-06-07T22:01:32 | 2017-04-18T04:14:30 | Python | UTF-8 | Python | false | false | 134 | py | from django.conf.urls import url d
# URL routes for the vinmyMVC app.
from . import views
urlpatterns = [
    url(r'^$', views.index),      # site root -> index view
    url(r'^users$', views.show)   # /users -> show view
]
| [
"rd@rubenduran.net"
] | rd@rubenduran.net |
bf084a4d641ccb146774e29836ec3816e58686eb | 69cb95793176da6cc64c109028358184c04f0614 | /furniture_project/settings.py | 5722f6b28964c46f1ef0f7b26f6114b44ca1f5dd | [] | no_license | viralsir/furniture_project | e8b0275a175a058a3c49b841c53501ae0421b688 | 8823988eedf7d13844d429056cbebace12657790 | refs/heads/master | 2023-06-02T02:15:28.030895 | 2021-06-24T10:55:11 | 2021-06-24T10:55:11 | 379,879,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,417 | py | """
Django settings for furniture_project project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-e#0uv-2a7mtpse(9z9u=qg1ibcp9@b=&e_2!p0(lmiwb&=_b_*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'users.apps.UsersConfig',
'crispy_forms',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'furniture_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'furniture_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
CRISPY_TEMPLATE_PACK='bootstrap4'
LOGIN_REDIRECT_URL='user-home' | [
"viralsir2018@gmail.com"
] | viralsir2018@gmail.com |
80fe5968b58c6bc39a226d89c6a53f4b943bec80 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_117/1697.py | 2d533ed7e58f2a6b946b3996a912c5ea1547f56b | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,421 | py | #!/usr/bin/python
"""
lawnmower.py
google code jam
Date: April 13, 2013
"""
# Imports
import sys, os
__version__ = "0.0"
__copyright__ = "CopyRight (C) 2012-13 by Coding Assassin"
__license__ = "MIT"
__author__ = "Coding Assassin"
__author_email__ = "Coding Assassin, codingassassin@gmail.com"
USAGE = "%prog [options]"
VERSION = "%prog v" + __version__
AGENT = "%s/%s" % (__name__, __version__)
def main():
# Open files
w = open("output.txt", 'w')
f = open("workfile.txt", 'r')
T = int(f.readline())
for i in range(T):
buff = f.readline().split()
N = int(buff[0])
M = int(buff[1])
# Load into arr
arr = []
for n in range(N):
arr.append(f.readline().rstrip().split())
for a in arr:
print a
# check for maximum in row and column
possible = True
for n in range(N):
for m in range(M):
rowPos = True
colPos = True
# check for max in row
if max(arr[n]) > arr[n][m]:
rowPos = False
# check for max in column
for x in range(N):
if arr[x][m] > arr[n][m]:
colPos = False
break
if rowPos == False and colPos == False:
possible = False
break
if possible == False:
break
if possible == False:
w.write("Case #"+str(i+1)+": NO\n")
print "Case #"+str(i+1)+": NO"
else:
w.write("Case #"+str(i+1)+": YES\n")
print "Case #"+str(i+1)+": YES"
if __name__ == '__main__':
main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
4fac6b7a9628197b6fe387f186a7fc07623e4ca6 | 14b5679d88afa782dc5d6b35878ab043089a060a | /students/20171587wangyubin/house_class/vcap1.py | f0ffe88f3375d9602fec4d65ea1638ee0c2c9ff9 | [] | no_license | mutiangua/EIS2020 | c541ef32623f67f9277945cd39cff3c02f06e4dd | 92aa2711b763a2c93be238825c445bf2db8da391 | refs/heads/master | 2022-11-18T05:21:47.567342 | 2020-07-11T10:11:21 | 2020-07-11T10:11:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,737 | py | import cv2
import numpy as np
import os
import time
def movedetect():
    """Simple webcam motion detector: diff each frame against the first frame,
    box the changed regions, and play an alert when the number of boxes grows.
    Press 'q' to quit."""
    cap = cv2.VideoCapture(0)
    background = None
    temp = 0
    while True:
        # Read one frame from the video stream.
        ret, frame= cap.read()
        gray_lwpCV = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # Use the first frame as the background for the whole run.
        if background is None:
            background = gray_lwpCV
            continue
        # Every frame after the background is diffed against it to get a difference map.
        diff = cv2.absdiff(background, gray_lwpCV)
        diff = cv2.threshold(diff, 100, 255, cv2.THRESH_BINARY)[1] # binarize with a fixed threshold
        # Draw bounding rectangles around detected regions.
        contours, hierarchy = cv2.findContours(diff.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # find contours of objects in the image
        count = 0
        for c in contours:
            if cv2.contourArea(c) < 2000: # only show contours above a size threshold, so tiny changes (noise/lighting) are ignored
                continue
            (x, y, w, h) = cv2.boundingRect(c) # bounding box of the contour
            cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
            count += 1
        if temp != 0:
            if count > temp : # more boxes than last frame => treat as a moving object entering
                os.system("test.wav")
                time.sleep(6)
        temp = count
        cv2.imshow('contours', frame)
        cv2.imshow('dis', diff)
        key = cv2.waitKey(1) & 0xFF
        # Press 'q' to exit the loop.
        if key == ord('q'):
            cap.release()
            cv2.destroyAllWindows()
            break
# When everything done, release the capture
while True:
movedetect() | [
"noreply@github.com"
] | mutiangua.noreply@github.com |
02e26a493efe213d0a3b77f60ce46f37ef39a36e | 4910c0f3d03935fc8ee03f1e9dc20dfdb2c7c04b | /Codigos estudiantes por lenguaje/PY/Bryann Valderrama/Grafos/BipartiteGraph.py | d04bb54e45d392f35ce30a386bc046a34184b7b6 | [] | no_license | roca12/gpccodes | ab15eeedc0cadc0735651262887b44f1c2e65b93 | aa034a3014c6fb879ec5392c51f9714bdc5b50c2 | refs/heads/master | 2023-02-01T13:49:27.563662 | 2023-01-19T22:50:58 | 2023-01-19T22:50:58 | 270,723,328 | 3 | 5 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | V = 4
def isBipartite(G, src):
    """Return True if the component of ``src`` in the undirected graph ``G``
    (adjacency matrix, 1 = edge) is bipartite, i.e. 2-colorable.

    Generalized: the graph size is taken from len(G) instead of the
    module-level constant V, so any square matrix works.
    """
    n = len(G)
    colorArr = [-1] * n          # -1 = uncolored, colors are 0/1
    colorArr[src] = 1
    q = [src]
    while q:
        u = q.pop()
        if G[u][u] == 1:
            return False         # a self-loop can never be 2-colored
        for v in range(n):
            if G[u][v] == 1 and colorArr[v] == -1:
                # BUGFIX: the original tested G[u][u] and colorArr[v] == 1
                # here, so colors never propagated and non-bipartite graphs
                # could be reported as bipartite.
                colorArr[v] = 1 - colorArr[u]
                q.append(v)
            elif G[u][v] == 1 and colorArr[v] == colorArr[u]:
                return False     # adjacent vertices share a color -> odd cycle
    return True
if __name__ == '__main__':
    # Demo: a 4-cycle (even cycle) is two-colorable.
    G = [[0, 1, 0, 1],
         [1, 0, 1, 0],
         [0, 1, 0, 1],
         [1, 0, 1, 0]]
    verdict = 'Es bipartito' if isBipartite(G, 0) else 'No es Bipartito'
    print(verdict)
| [
"noreply@github.com"
] | roca12.noreply@github.com |
5285bc090689cadec3f1c61678c4e661e70d5554 | c9094a4ed256260bc026514a00f93f0b09a5d60c | /homeassistant/components/ovo_energy/config_flow.py | 0b2f7aac2d0dd332aa5b63b9f04288dab3f820d5 | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508713 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 3,440 | py | """Config flow to configure the OVO Energy integration."""
import aiohttp
from ovoenergy.ovoenergy import OVOEnergy
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import DOMAIN # pylint: disable=unused-import
REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str})
USER_SCHEMA = vol.Schema(
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
)
class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle a OVO Energy config flow."""
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
    def __init__(self):
        """Initialize the flow."""
        # Remembered between reauth steps so the password-only form can be used.
        self.username = None
    async def async_step_user(self, user_input=None):
        """Handle a flow initiated by the user.

        First call (user_input is None) just shows the form; subsequent calls
        try to authenticate and either create the entry or re-show the form
        with an error key.
        """
        errors = {}
        if user_input is not None:
            client = OVOEnergy()
            try:
                authenticated = await client.authenticate(
                    user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
                )
            except aiohttp.ClientError:
                errors["base"] = "cannot_connect"
            else:
                if authenticated:
                    # One config entry per username; abort on duplicates.
                    await self.async_set_unique_id(user_input[CONF_USERNAME])
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=client.username,
                        data={
                            CONF_USERNAME: user_input[CONF_USERNAME],
                            CONF_PASSWORD: user_input[CONF_PASSWORD],
                        },
                    )
                errors["base"] = "invalid_auth"
        return self.async_show_form(
            step_id="user", data_schema=USER_SCHEMA, errors=errors
        )
    async def async_step_reauth(self, user_input):
        """Handle configuration by re-auth."""
        errors = {}
        # First invocation carries the username of the entry being reauthed.
        if user_input and user_input.get(CONF_USERNAME):
            self.username = user_input[CONF_USERNAME]
            self.context["title_placeholders"] = {CONF_USERNAME: self.username}
        if user_input is not None and user_input.get(CONF_PASSWORD) is not None:
            client = OVOEnergy()
            try:
                authenticated = await client.authenticate(
                    self.username, user_input[CONF_PASSWORD]
                )
            except aiohttp.ClientError:
                errors["base"] = "connection_error"
            else:
                if authenticated:
                    await self.async_set_unique_id(self.username)
                    # Update the stored credentials on the matching entry.
                    for entry in self._async_current_entries():
                        if entry.unique_id == self.unique_id:
                            self.hass.config_entries.async_update_entry(
                                entry,
                                data={
                                    CONF_USERNAME: self.username,
                                    CONF_PASSWORD: user_input[CONF_PASSWORD],
                                },
                            )
                            return self.async_abort(reason="reauth_successful")
                errors["base"] = "authorization_error"
        return self.async_show_form(
            step_id="reauth", data_schema=REAUTH_SCHEMA, errors=errors
        )
| [
"noreply@github.com"
] | turbokongen.noreply@github.com |
d4b37b4be69c7bd3ef9bfd185f240b150b6a97cd | 9c268aa04ab8b359d11399f94a30c8f4fe171a0c | /tb/eth_mac_1g_gmii_fifo/test_eth_mac_1g_gmii_fifo.py | 1879de7188d98184a75f8464c03b1ceacfb43b1f | [
"MIT"
] | permissive | alexforencich/verilog-ethernet | e41586b9214e66341f3eace03da2baa9c004da89 | b316c6764e083823f95f52b3f324fccee4f12fa0 | refs/heads/master | 2023-09-03T00:58:09.380285 | 2023-08-26T19:44:50 | 2023-08-26T19:44:50 | 26,883,874 | 1,690 | 530 | MIT | 2023-08-25T05:59:58 | 2014-11-19T22:04:53 | Verilog | UTF-8 | Python | false | false | 7,694 | py | #!/usr/bin/env python
"""
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import cocotb_test.simulator
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory
from cocotbext.eth import GmiiFrame, GmiiPhy
from cocotbext.axi import AxiStreamBus, AxiStreamSource, AxiStreamSink
class TB:
    """Cocotb testbench wrapper: clocks, GMII PHY model and AXI-Stream
    source/sink endpoints around the eth_mac_1g_gmii_fifo DUT."""
    def __init__(self, dut, speed=1000e6):
        self.dut = dut
        self.log = logging.getLogger("cocotb.tb")
        self.log.setLevel(logging.DEBUG)
        # 8 ns period (125 MHz) clocks for the gtx and logic domains.
        cocotb.start_soon(Clock(dut.gtx_clk, 8, units="ns").start())
        cocotb.start_soon(Clock(dut.logic_clk, 8, units="ns").start())
        # PHY model driving/monitoring the DUT's GMII/MII pins.
        self.gmii_phy = GmiiPhy(dut.gmii_txd, dut.gmii_tx_er, dut.gmii_tx_en, dut.mii_tx_clk, dut.gmii_tx_clk,
                                dut.gmii_rxd, dut.gmii_rx_er, dut.gmii_rx_dv, dut.gmii_rx_clk, speed=speed)
        # AXI-Stream endpoints for the MAC's FIFO-side interfaces.
        self.axis_source = AxiStreamSource(AxiStreamBus.from_prefix(dut, "tx_axis"), dut.logic_clk, dut.logic_rst)
        self.axis_sink = AxiStreamSink(AxiStreamBus.from_prefix(dut, "rx_axis"), dut.logic_clk, dut.logic_rst)
        # Safe defaults: zero inter-frame gap config, both directions disabled.
        dut.cfg_ifg.setimmediatevalue(0)
        dut.cfg_tx_enable.setimmediatevalue(0)
        dut.cfg_rx_enable.setimmediatevalue(0)
    async def reset(self):
        """Pulse both resets for two tx_clk cycles, then release."""
        self.dut.gtx_rst.setimmediatevalue(0)
        self.dut.logic_rst.setimmediatevalue(0)
        await RisingEdge(self.dut.tx_clk)
        await RisingEdge(self.dut.tx_clk)
        self.dut.gtx_rst.value = 1
        self.dut.logic_rst.value = 1
        await RisingEdge(self.dut.tx_clk)
        await RisingEdge(self.dut.tx_clk)
        self.dut.gtx_rst.value = 0
        self.dut.logic_rst.value = 0
        await RisingEdge(self.dut.tx_clk)
        await RisingEdge(self.dut.tx_clk)
    def set_speed(self, speed):
        # No runtime speed switching needed for this DUT; kept for API parity.
        pass
async def run_test_rx(dut, payload_lengths=None, payload_data=None, ifg=12, speed=1000e6):
    """Drive Ethernet frames into the GMII RX side and check each payload
    arrives intact and error-free on the AXI-Stream sink."""
    tb = TB(dut, speed)
    tb.gmii_phy.rx.ifg = ifg
    tb.dut.cfg_ifg.value = ifg
    tb.dut.cfg_rx_enable.value = 1
    tb.set_speed(speed)
    await tb.reset()
    # Let the MAC's speed indication settle after reset.
    for k in range(100):
        await RisingEdge(dut.rx_clk)
    # DUT speed encoding: 0 = 10M, 1 = 100M, 2 = 1G.
    if speed == 10e6:
        assert dut.speed == 0
    elif speed == 100e6:
        assert dut.speed == 1
    else:
        assert dut.speed == 2
    test_frames = [payload_data(x) for x in payload_lengths()]
    for test_data in test_frames:
        test_frame = GmiiFrame.from_payload(test_data)
        await tb.gmii_phy.rx.send(test_frame)
    # Frames must come out in order, with clean tuser (no error flagged).
    for test_data in test_frames:
        rx_frame = await tb.axis_sink.recv()
        assert rx_frame.tdata == test_data
        assert rx_frame.tuser == 0
    assert tb.axis_sink.empty()
    await RisingEdge(dut.rx_clk)
    await RisingEdge(dut.rx_clk)
async def run_test_tx(dut, payload_lengths=None, payload_data=None, ifg=12, speed=1000e6):
    """Send payloads into the AXI-Stream source and check each frame emitted
    on the GMII TX side carries the payload with a valid FCS."""
    tb = TB(dut, speed)
    tb.gmii_phy.rx.ifg = ifg
    tb.dut.cfg_ifg.value = ifg
    tb.dut.cfg_tx_enable.value = 1
    tb.set_speed(speed)
    await tb.reset()
    # Let the MAC's speed indication settle after reset.
    for k in range(100):
        await RisingEdge(dut.rx_clk)
    # DUT speed encoding: 0 = 10M, 1 = 100M, 2 = 1G.
    if speed == 10e6:
        assert dut.speed == 0
    elif speed == 100e6:
        assert dut.speed == 1
    else:
        assert dut.speed == 2
    test_frames = [payload_data(x) for x in payload_lengths()]
    for test_data in test_frames:
        await tb.axis_source.send(test_data)
    # Frames must come out in order with a correct frame check sequence.
    for test_data in test_frames:
        rx_frame = await tb.gmii_phy.tx.recv()
        assert rx_frame.get_payload() == test_data
        assert rx_frame.check_fcs()
        assert rx_frame.error is None
    assert tb.gmii_phy.tx.empty()
    await RisingEdge(dut.tx_clk)
    await RisingEdge(dut.tx_clk)
def size_list():
    """Payload sizes under test: every length from 60-127, two large frames,
    and ten repeats of the 60-byte minimum."""
    sizes = [n for n in range(60, 128)]
    sizes.extend((512, 1514))
    sizes.extend([60] * 10)
    return sizes
def incrementing_payload(length):
    """Return *length* bytes counting upward 0..255 and wrapping around."""
    return bytearray(i % 256 for i in range(length))
def cycle_en():
    """Enable pattern asserting every fourth cycle: 0, 0, 0, 1 repeating forever."""
    pattern = (0, 0, 0, 1)
    return itertools.cycle(pattern)
if cocotb.SIM_NAME:
for test in [run_test_rx, run_test_tx]:
factory = TestFactory(test)
factory.add_option("payload_lengths", [size_list])
factory.add_option("payload_data", [incrementing_payload])
factory.add_option("ifg", [12])
factory.add_option("speed", [1000e6, 100e6, 10e6])
factory.generate_tests()
# cocotb-test
tests_dir = os.path.abspath(os.path.dirname(__file__))
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib'))
axis_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axis', 'rtl'))
def test_eth_mac_1g_gmii_fifo(request):
    """Pytest entry point: run the cocotb testbench in this module against the
    eth_mac_1g_gmii_fifo RTL via cocotb-test's simulator runner."""
    dut = "eth_mac_1g_gmii_fifo"
    module = os.path.splitext(os.path.basename(__file__))[0]
    toplevel = dut
    # RTL files required to elaborate the DUT.
    verilog_sources = [
        os.path.join(rtl_dir, f"{dut}.v"),
        os.path.join(rtl_dir, "eth_mac_1g_gmii.v"),
        os.path.join(rtl_dir, "gmii_phy_if.v"),
        os.path.join(rtl_dir, "ssio_sdr_in.v"),
        os.path.join(rtl_dir, "ssio_sdr_out.v"),
        os.path.join(rtl_dir, "oddr.v"),
        os.path.join(rtl_dir, "eth_mac_1g.v"),
        os.path.join(rtl_dir, "axis_gmii_rx.v"),
        os.path.join(rtl_dir, "axis_gmii_tx.v"),
        os.path.join(rtl_dir, "lfsr.v"),
        os.path.join(axis_rtl_dir, "axis_adapter.v"),
        os.path.join(axis_rtl_dir, "axis_async_fifo.v"),
        os.path.join(axis_rtl_dir, "axis_async_fifo_adapter.v"),
    ]
    # Module parameters; several are derived from others to keep them consistent.
    parameters = {}
    parameters['AXIS_DATA_WIDTH'] = 8
    parameters['AXIS_KEEP_ENABLE'] = int(parameters['AXIS_DATA_WIDTH'] > 8)
    parameters['AXIS_KEEP_WIDTH'] = parameters['AXIS_DATA_WIDTH'] // 8
    parameters['ENABLE_PADDING'] = 1
    parameters['MIN_FRAME_LENGTH'] = 64
    parameters['TX_FIFO_DEPTH'] = 16384
    parameters['TX_FRAME_FIFO'] = 1
    parameters['TX_DROP_OVERSIZE_FRAME'] = parameters['TX_FRAME_FIFO']
    parameters['TX_DROP_BAD_FRAME'] = parameters['TX_DROP_OVERSIZE_FRAME']
    parameters['TX_DROP_WHEN_FULL'] = 0
    parameters['RX_FIFO_DEPTH'] = 16384
    parameters['RX_FRAME_FIFO'] = 1
    parameters['RX_DROP_OVERSIZE_FRAME'] = parameters['RX_FRAME_FIFO']
    parameters['RX_DROP_BAD_FRAME'] = parameters['RX_DROP_OVERSIZE_FRAME']
    parameters['RX_DROP_WHEN_FULL'] = parameters['RX_DROP_OVERSIZE_FRAME']
    # Parameters are passed to the simulation through the environment.
    extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
    # Per-test build directory derived from the (sanitized) pytest test name.
    sim_build = os.path.join(tests_dir, "sim_build",
        request.node.name.replace('[', '-').replace(']', ''))
    cocotb_test.simulator.run(
        python_search=[tests_dir],
        verilog_sources=verilog_sources,
        toplevel=toplevel,
        module=module,
        parameters=parameters,
        sim_build=sim_build,
        extra_env=extra_env,
    )
| [
"alex@alexforencich.com"
] | alex@alexforencich.com |
ad0716b036a955c295028b8c16f49890ea6ae1bb | e96ffb6103e1e9396c19312710d523f1bada0595 | /system_program/upgrade_os.py | 78e448d3cb8af6635b75b0c09b0821bd5cfe2530 | [] | no_license | ktsdn/switch_application | 9e1e6558aa295b8bd7512d06d24fdeb2cf15d50a | 9e6b6f1186ef48320a5f21cc8a908fbfede54aba | refs/heads/master | 2016-09-06T19:48:42.092557 | 2014-06-16T17:45:43 | 2014-06-16T17:45:43 | 19,848,643 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,851 | py | #!/usr/bin/python
# Python 2 boot-time helper: downloads an Arista EOS image over HTTP and,
# on success, points /mnt/flash/boot-config at the new image.
# NOTE(review): re, subprocess, string, Tac and EntityManager are imported but
# never used below -- presumably leftovers from an earlier revision.
import os, re, subprocess, sys, urllib, string, Tac, EntityManager
sys.stdout  # no-op expression statement; has no effect (likely leftover debug)

# Require exactly one argument: the OS image file name to fetch.
if len(sys.argv) != 2:
    # NOTE(review): "requres" is a typo in the usage message ("requires").
    print sys.argv[0]+" requres [os image name]"
    sys.exit()
osimage_name=sys.argv[1]
configUrl = "http://20.0.2.224/os/%s"  # image server URL template
parsedUrl = configUrl % ( osimage_name )

# Look at the boot-config file and get the currently set EOS version
fd = open("/etc/swi-version", "r")
for item in fd:
    if "SWI_VERSION=" in item:
        # NOTE(review): str.strip() removes any of the listed *characters*
        # from both ends, not the literal prefix "SWI_VERSION=", so the
        # result can be mangled; swiversion is also never used afterwards.
        swiversion = item.strip('SWI_VERSION=|\n')
fd.close()

#This allows output to the console during boot time
def printLog( logmsg ):
    # Echo to stdout and mirror the message into syslog (local4.crit,
    # tagged "ZeroTouch") so it survives in the system log.
    print logmsg
    os.system( '/usr/bin/logger -p local4.crit -t ZeroTouch %s' % logmsg )

ret = urllib.urlopen(parsedUrl)
updateBootConfig = False
# Responses smaller than 2 KiB are treated as error pages, not real images.
if int(ret.info()['content-length']) < 2048:
    printLog('there is no os images or permission to download')
    sys.exit()
else:
    if ret.info()['content-type'] == 'application/vnd.aristanetworks.swi':
        # Skip the download when the image already exists on flash.
        if not os.path.isfile('/mnt/flash/%s' % osimage_name):
            download = True
        else:
            download = False
            updateBootConfig = True
            printLog(osimage_name+' already existed in /mnt/flash')
        # download processing
        if download == True:
            swiSize = ret.info()['content-length']
            urllib.urlretrieve(parsedUrl, '/mnt/flash/%s' % osimage_name)
            printLog('download url = %s' % parsedUrl)
            # Compare the reported content-length against the on-disk size
            # to detect a truncated download before switching boot images.
            localFileSize = str(os.stat('/mnt/flash/%s' % osimage_name).st_size)
            if swiSize == localFileSize:
                printLog ('Downloaded %s' % osimage_name)
                updateBootConfig = True
            else:
                printLog ('Download failed, exiting')
                updateBootConfig = False
    else:
        printLog('this image is not os image, content-type is wrong')
        sys.exit()
ret.close()
# Change the boot-config file to new version
if updateBootConfig:
    fd = open("/mnt/flash/boot-config", "w")
    fd.write("SWI=flash:%s\n\n" % osimage_name)
    fd.close()
sys.exit()
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
ce4d410eeb53f83ebb2c998716d80b0c3187ed27 | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_12_01/operations/_route_filters_operations.py | 6aa61e7e5b86a508969a0b6fd241056b48cc0b1a | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 27,222 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class RouteFiltersOperations(object):
    """RouteFiltersOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_12_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # Convenience alias so callers can reach the generated model classes
    # through the operations group.
    models = _models

    def __init__(self, client, config, serializer, deserializer):
        # Wire up the pipeline client and (de)serializers supplied by the
        # generated management client; not meant to be constructed directly.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        route_filter_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Issues the raw DELETE request once; LRO polling is layered on top
        # by begin_delete.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-12-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202/204 are the accepted outcomes for the initial DELETE call.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        route_filter_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified route filter.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial DELETE when not resuming from a saved poller.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # DELETE produces no body to deserialize; only the optional
            # 'cls' response hook runs on the final response.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        route_filter_name,  # type: str
        expand=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RouteFilter"
        """Gets the specified route filter.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param expand: Expands referenced express route bgp peering resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RouteFilter, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_12_01.models.RouteFilter
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-12-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is only sent when the caller asked for it.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('RouteFilter', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        route_filter_name,  # type: str
        route_filter_parameters,  # type: "_models.RouteFilter"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RouteFilter"
        # Issues the raw PUT request once; LRO polling is layered on top by
        # begin_create_or_update.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-12-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and send the PUT.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(route_filter_parameters, 'RouteFilter')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = existing filter updated, 201 = new filter created.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('RouteFilter', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('RouteFilter', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        route_filter_name,  # type: str
        route_filter_parameters,  # type: "_models.RouteFilter"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.RouteFilter"]
        """Creates or updates a route filter in a specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param route_filter_parameters: Parameters supplied to the create or update route filter
         operation.
        :type route_filter_parameters: ~azure.mgmt.network.v2019_12_01.models.RouteFilter
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either RouteFilter or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_12_01.models.RouteFilter]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only fire the initial PUT when not resuming from a saved poller.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                route_filter_name=route_filter_name,
                route_filter_parameters=route_filter_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final response into the RouteFilter model.
            deserialized = self._deserialize('RouteFilter', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def update_tags(
        self,
        resource_group_name,  # type: str
        route_filter_name,  # type: str
        parameters,  # type: "_models.TagsObject"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.RouteFilter"
        """Updates tags of a route filter.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param route_filter_name: The name of the route filter.
        :type route_filter_name: str
        :param parameters: Parameters supplied to update route filter tags.
        :type parameters: ~azure.mgmt.network.v2019_12_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: RouteFilter, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_12_01.models.RouteFilter
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilter"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-12-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_tags.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Tag-only updates go through PATCH and are synchronous (no LRO).
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('RouteFilter', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}'}  # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.RouteFilterListResult"]
        """Gets all route filters in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either RouteFilterListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.RouteFilterListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilterListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-12-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page is built from the operation URL; subsequent pages
            # reuse the service-provided nextLink verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_resource_group.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Split one page into (link to the next page, iterator of items).
            deserialized = self._deserialize('RouteFilterListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        # ItemPaged fetches pages lazily via get_next/extract_data.
        return ItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters'}  # type: ignore
    def list(
        self,
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.RouteFilterListResult"]
        """Gets all route filters in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either RouteFilterListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_12_01.models.RouteFilterListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.RouteFilterListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-12-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page is built from the operation URL; subsequent pages
            # reuse the service-provided nextLink verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Split one page into (link to the next page, iterator of items).
            deserialized = self._deserialize('RouteFilterListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        # ItemPaged fetches pages lazily via get_next/extract_data.
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeFilters'}  # type: ignore
| [
"noreply@github.com"
] | catchsrinivas.noreply@github.com |
4b8adfb2906855b5ff52615208c97650bd34b3ce | 383845f6cd8e2a32f95e5970d2f7e9fb755b6598 | /course/tasks/flip-number.py | cb67df395df46d19422f9495602d30f9b414e4b1 | [] | no_license | ash/python-tut | e553e9e3a4ecb866e87c7ce9f04d7f517244ac01 | f89f079f13dd29eef6ba293b074a0087272dc8a6 | refs/heads/master | 2022-12-17T03:20:40.633262 | 2018-03-21T15:04:09 | 2018-03-21T15:04:09 | 96,119,629 | 0 | 0 | null | 2022-12-08T00:55:59 | 2017-07-03T14:29:02 | Python | UTF-8 | Python | false | false | 53 | py | n = int(input('Your number: '))
# Echo the decimal digits of n in reverse order (e.g. 123 -> 321).
print("".join(reversed(str(n))))
| [
"andy@shitov.ru"
] | andy@shitov.ru |
d5b7faf942dad1ccd22553726dc85d68233b94b4 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r10p1/Gen/DecFiles/options/11166141.py | 278d2a652da2694d5480708b494ecc58d53edac3 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 897 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/11166141.py generated: Wed, 25 Jan 2017 15:25:27
#
# Event Type: 11166141
#
# ASCII decay Descriptor: [B0 -> (D*(2010)- -> (D~0 -> (KS0 -> pi+ pi-) pi+ pi-) pi-) pi+]cc
#
from Configurables import Generation
# Event type code for this sample (matches the header above: 11166141).
Generation().EventType = 11166141
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
# Underlying event production handled by Pythia.
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
# EvtGen user decay file implementing the descriptor in the header:
# [B0 -> (D*(2010)- -> (D~0 -> (KS0 -> pi+ pi-) pi+ pi-) pi-) pi+]cc
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bd_Dst-pi,KSpipi=DecProdCut.dec"
# "DecProdCut": require decay products inside the LHCb acceptance.
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
# Signal PDG ids for the B0 and its charge conjugate.
Generation().SignalRepeatedHadronization.SignalPIDList = [ 511,-511 ]
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
09d164a0cc83bb33cd140a93eb84a19fb6fb57f3 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/detection/YOLOX_ID2833_for_PyTorch/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py | e505116b2ee93d866a0249a0775faee4bf904044 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 871 | py |
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright (c) Open-MMLab. All rights reserved.
# Inherit the full R50-caffe Faster R-CNN baseline and override only the
# backbone: depth 101 with Detectron2's caffe-style ResNet-101 weights.
_base_ = './faster_rcnn_r50_caffe_fpn_1x_coco.py'
model = dict(
    backbone=dict(
        depth=101,  # ResNet depth overriding the base config
        init_cfg=dict(
            type='Pretrained',
            checkpoint='open-mmlab://detectron2/resnet101_caffe')))
| [
"wangjiangben@huawei.com"
] | wangjiangben@huawei.com |
0f62e49af163ce60e641517c8e094dc79f51ed77 | d842a95213e48e30139b9a8227fb7e757f834784 | /gcloud/google-cloud-sdk/.install/.backup/lib/googlecloudsdk/command_lib/secrets/args.py | 8263c671f43b9e8f6e2e916e137e2d0b4170bf43 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] | permissive | bopopescu/JobSniperRails | f37a15edb89f54916cc272884b36dcd83cdc868a | 39e7f871887176770de0f4fc6789e9ddc7f32b1f | refs/heads/master | 2022-11-22T18:12:37.972441 | 2019-09-20T22:43:14 | 2019-09-20T22:43:14 | 282,293,504 | 0 | 0 | MIT | 2020-07-24T18:47:35 | 2020-07-24T18:47:34 | null | UTF-8 | Python | false | false | 6,385 | py | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource arguments and flags."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import resources
# Args
def AddCreateIfMissing(parser, resource, positional=False, **kwargs):
  """Adds the create-if-missing boolean argument for the given resource.

  Args:
    parser: argparse parser to extend.
    resource (str): resource noun interpolated into the help text.
    positional (bool): register as a positional instead of a flag.
    **kwargs: forwarded to parser.add_argument.
  """
  help_text = ('Create the {resource} if it does not exist. If this flag is not '
               'set, the command will return an error when attempting to update a '
               '{resource} that does not exist.').format(resource=resource)
  arg_name = _ArgOrFlag('create-if-missing', positional)
  parser.add_argument(arg_name, action='store_true', help=help_text, **kwargs)
def AddDataFile(parser, positional=False, **kwargs):
  """Adds the data-file argument (use "-" to read secret data from stdin).

  Args:
    parser: argparse parser to extend.
    positional (bool): register as a positional instead of a flag.
    **kwargs: forwarded to parser.add_argument.
  """
  help_text = ('File path from which to read secret data. Set this to "-" to read '
               'the secret data from stdin.')
  arg_name = _ArgOrFlag('data-file', positional)
  parser.add_argument(arg_name, metavar='PATH', help=help_text, **kwargs)
def AddProject(parser, positional=False, **kwargs):
  """Adds a project resource argument to the parser.

  Args:
    parser: argparse parser to extend.
    positional (bool): register as a positional instead of a flag.
    **kwargs: forwarded to ConceptParser.ForResource.
  """
  presentation = concept_parsers.ConceptParser.ForResource(
      name=_ArgOrFlag('project', positional),
      resource_spec=GetProjectResourceSpec(),
      group_help='The project ID.',
      **kwargs)
  presentation.AddToParser(parser)
def AddLocation(parser, purpose, positional=False, **kwargs):
  """Adds a location resource argument to the parser.

  Args:
    parser: argparse parser to extend.
    purpose (str): suffix for the group help text ('The location {purpose}.').
    positional (bool): register as a positional instead of a flag.
    **kwargs: forwarded to ConceptParser.ForResource.
  """
  presentation = concept_parsers.ConceptParser.ForResource(
      name=_ArgOrFlag('location', positional),
      resource_spec=GetLocationResourceSpec(),
      group_help='The location {}.'.format(purpose),
      **kwargs)
  presentation.AddToParser(parser)
# TODO(b/135570696): may want to convert to resource arg & add fallthrough
def AddLocations(parser, resource, positional=False, **kwargs):
  """Adds a repeated locations argument for the given resource.

  Args:
    parser: argparse parser to extend.
    resource (str): resource noun interpolated into the help text.
    positional (bool): register as a positional instead of a flag.
    **kwargs: forwarded to parser.add_argument.
  """
  help_text = ('Comma-separated list of locations in which the {resource} should '
               'be replicated.').format(resource=resource)
  parser.add_argument(
      _ArgOrFlag('locations', positional),
      metavar='LOCATION',
      type=arg_parsers.ArgList(),
      action=arg_parsers.UpdateAction,
      help=help_text,
      **kwargs)
def AddSecret(parser, purpose, positional=False, **kwargs):
  """Registers a secret resource argument on the parser.

  Args:
    parser: argparse parser to add the argument to.
    purpose (str): phrase completing 'The secret ...' in the help text.
    positional (bool): render as a positional argument instead of a flag.
    **kwargs: forwarded to ConceptParser.ForResource.
  """
  secret_arg = concept_parsers.ConceptParser.ForResource(
      resource_spec=GetSecretResourceSpec(),
      name=_ArgOrFlag('secret', positional),
      group_help='The secret {}.'.format(purpose),
      **kwargs)
  secret_arg.AddToParser(parser)


def AddVersion(parser, purpose, positional=False, **kwargs):
  """Registers a secret-version resource argument on the parser.

  Args:
    parser: argparse parser to add the argument to.
    purpose (str): phrase completing 'Numeric secret version ...' in the help.
    positional (bool): render as a positional argument instead of a flag.
    **kwargs: forwarded to ConceptParser.ForResource.
  """
  version_arg = concept_parsers.ConceptParser.ForResource(
      resource_spec=GetVersionResourceSpec(),
      name=_ArgOrFlag('version', positional),
      group_help='Numeric secret version {}.'.format(purpose),
      **kwargs)
  version_arg.AddToParser(parser)
def _ArgOrFlag(name, positional):
"""Returns the argument name in resource argument format or flag format.
Args:
name (str): name of the argument
positional (bool): whether the argument is positional
Returns:
arg (str): the argument or flag
"""
if positional:
return name.upper().replace('-', '_')
return '--{}'.format(name)
### Attribute configurations
def GetProjectAttributeConfig():
  """Returns the default attribute config for the project resource."""
  return concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG


def GetLocationAttributeConfig():
  """Returns the attribute config describing a location."""
  return concepts.ResourceParameterAttributeConfig(
      name='location',
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name',
      help_text='The location of the {resource}.')


def GetSecretAttributeConfig():
  """Returns the attribute config describing a secret."""
  return concepts.ResourceParameterAttributeConfig(
      name='secret',
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name',
      help_text='The secret of the {resource}.')


def GetVersionAttributeConfig():
  """Returns the attribute config describing a secret version."""
  return concepts.ResourceParameterAttributeConfig(
      name='version',
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name',
      help_text='The version of the {resource}.')
# Resource specs
def GetProjectResourceSpec():
  """Returns the resource spec for a Secret Manager project."""
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects',
      resource_name='project',
      plural_name='projects',
      disable_auto_completers=False,
      projectsId=GetProjectAttributeConfig())


def GetLocationResourceSpec():
  """Returns the resource spec for a Secret Manager location."""
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects.locations',
      resource_name='location',
      plural_name='locations',
      disable_auto_completers=False,
      locationsId=GetLocationAttributeConfig(),
      projectsId=GetProjectAttributeConfig())


def GetSecretResourceSpec():
  """Returns the resource spec for a secret."""
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects.secrets',
      resource_name='secret',
      plural_name='secrets',
      disable_auto_completers=False,
      secretsId=GetSecretAttributeConfig(),
      projectsId=GetProjectAttributeConfig())


def GetVersionResourceSpec():
  """Returns the resource spec for a secret version."""
  # Consistency fix: pass the collection via the resource_collection keyword
  # like the three sibling Get*ResourceSpec helpers (it was positional here).
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects.secrets.versions',
      resource_name='version',
      # NOTE(review): plural_name is 'version' (singular), unlike the other
      # specs. Looks like a typo but is left unchanged in case display text
      # depends on it -- confirm before changing.
      plural_name='version',
      disable_auto_completers=False,
      versionsId=GetVersionAttributeConfig(),
      secretsId=GetSecretAttributeConfig(),
      projectsId=GetProjectAttributeConfig())
# Resource parsers
def ParseProjectRef(ref, **kwargs):
  """Parses *ref* as a secretmanager project resource reference."""
  kwargs = dict(kwargs, collection='secretmanager.projects')
  return resources.REGISTRY.Parse(ref, **kwargs)


def ParseLocationRef(ref, **kwargs):
  """Parses *ref* as a secretmanager location resource reference."""
  kwargs = dict(kwargs, collection='secretmanager.projects.locations')
  return resources.REGISTRY.Parse(ref, **kwargs)


def ParseSecretRef(ref, **kwargs):
  """Parses *ref* as a secret resource reference."""
  kwargs = dict(kwargs, collection='secretmanager.projects.secrets')
  return resources.REGISTRY.Parse(ref, **kwargs)


def ParseVersionRef(ref, **kwargs):
  """Parses *ref* as a secret version resource reference."""
  kwargs = dict(kwargs, collection='secretmanager.projects.secrets.versions')
  return resources.REGISTRY.Parse(ref, **kwargs)
| [
"luizfper@gmail.com"
] | luizfper@gmail.com |
d38e23d4fe46dcbd8af662da3ec32e0ddd091617 | e5ea52ee45b1938fdafee1b43e472cc7d7fbaed7 | /content/downloads/code/test_edward.py | 67cddc71179cfee70a615cc8fe0ed4facb909578 | [
"MIT"
] | permissive | mattpitkin/samplers-demo | fabe2735ba80706fc8688e462797cb1919e03109 | b511f891a30fb46c215c065ceb7e618c820d4d03 | refs/heads/master | 2022-07-31T05:33:59.252825 | 2022-07-18T12:16:06 | 2022-07-18T12:16:06 | 119,070,470 | 8 | 1 | MIT | 2021-05-20T09:47:35 | 2018-01-26T15:47:25 | Jupyter Notebook | UTF-8 | Python | false | false | 1,759 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Example of running edward to fit the parameters of a straight line.
"""
from __future__ import print_function, division
import os
import sys
import numpy as np
import matplotlib as mpl
mpl.use("Agg") # force Matplotlib backend to Agg
# import edward and TensorFlow
import edward as ed
import tensorflow as tf
from edward.models import Normal, Uniform, Empirical
# import model and data
from createdata import *
# set the priors
cmin = -10. # lower range of uniform distribution on c
cmax = 10. # upper range of uniform distribution on c
cp = Uniform(low=cmin, high=cmax)
mmu = 0. # mean of Gaussian distribution on m
msigma = 10. # standard deviation of Gaussian distribution on m
mp = Normal(loc=mmu, scale=msigma)
# set the likelihood containing the model
y = Normal(loc=mp*x + cp, scale=sigma*tf.ones(len(data)))
# set number of samples
Nsamples = 2000 # final number of samples
Ntune = 2000 # number of tuning samples
# set parameters to infer
qm = Empirical(params=tf.Variable(tf.zeros(Nsamples+Ntune)))
qc = Empirical(params=tf.Variable(tf.zeros(Nsamples+Ntune)))
# use Hamiltonian Monte Carlo
inference = ed.HMC({mp: qm, cp: qc}, data={y: data})
inference.run(step_size=1.5e-2) # higher steps sizes can lead to zero acceptance rates
# extract the samples
msamples = qm.params.eval()[Ntune:]
csamples = qc.params.eval()[Ntune:]
postsamples = np.vstack((msamples, csamples)).T
# plot posterior samples (if corner.py is installed)
try:
import corner # import corner.py
except ImportError:
sys.exit(1)
print('Number of posterior samples is {}'.format(postsamples.shape[0]))
fig = corner.corner(postsamples, labels=[r"$m$", r"$c$"], truths=[m, c])
fig.savefig('edward.png')
| [
"matthew.pitkin@ligo.org"
] | matthew.pitkin@ligo.org |
f89d60d03f53f6543a62034824916e850a8de360 | 51d05aa62e8c21d50bba116b795a003107d14e2a | /detection_app/object_detection_tf_multiprocessing.py | 8389dafb8c6ee25b1da5bc0830915e7e2414388b | [
"MIT"
] | permissive | Airyzf/object_detection_with_tensorflow | 7747659f4dcbca124ca9615872428152c1194204 | 301d1fe316aaa7579dae2b42babe4e8ace0a18e7 | refs/heads/master | 2021-08-24T15:20:33.349083 | 2017-12-10T06:45:03 | 2017-12-10T06:45:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,238 | py | import os
import tarfile
import tensorflow as tf
import multiprocessing
from multiprocessing import Queue
import time
import argparse
import logging
import numpy as np
import cv2
from myutil import downloadutil, fps_measure, queue_seq
from object_detection.utils import label_map_util
from object_detection.utils import visualization_utils as vis_util
# Command-line interface: the input video path is required; the number of
# detection worker processes defaults to 1.
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-v', '--video', type=str, required=True,
                        help="video file for detection")
arg_parser.add_argument('-p', "--process", type=int, default=1,
                        help="# of detection process")
args = arg_parser.parse_args()
# What model to download.
# MODEL_NAME = 'ssd_mobilenet_v1_coco_2017_11_08'
MODEL_NAME = 'ssd_mobilenet_v1_coco_11_06_2017'
def load_graph(model_name=MODEL_NAME):
    """Downloads (if needed) and loads a frozen object-detection graph.

    Args:
        model_name: name of the detection model archive (without .tar.gz).

    Returns:
        A tf.Graph with the frozen inference graph imported into it.
    """
    MODEL_FILE = model_name + '.tar.gz'
    DOWNLOAD_BASE = 'http://download.tensorflow.org/models/object_detection/'
    # Path to frozen detection graph. This is the actual model that is used for the object detection.
    PATH_TO_CKPT = model_name + '/frozen_inference_graph.pb'
    # Fetch the archive into the working directory unless it is already there.
    downloadutil.maybe_download(os.getcwd(), MODEL_FILE,
                                DOWNLOAD_BASE+MODEL_FILE)
    tar_file = tarfile.open(MODEL_FILE)
    # Extract only the frozen graph file from the archive.
    for file in tar_file.getmembers():
        file_name = os.path.basename(file.name)
        if 'frozen_inference_graph.pb' in file_name:
            tar_file.extract(file, os.getcwd())
    # load graph
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.GraphDef()
        with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
            serialized_graph = fid.read()
            od_graph_def.ParseFromString(serialized_graph)
            tf.import_graph_def(od_graph_def, name='')
    return detection_graph
NUM_CLASSES = 90  # number of classes in the COCO label map used below
def load_label_map(label_map_name, num_class):
    """Loads a label map file from ./data and returns its category index dict."""
    # List of the strings that is used to add correct label for each box.
    PATH_TO_LABELS = os.path.join('data', label_map_name)
    # load label map
    label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
    categories = label_map_util.convert_label_map_to_categories(label_map,
        max_num_classes=num_class, use_display_name=True)
    category_index = label_map_util.create_category_index(categories)
    return category_index
def detect_object(detection_graph, sess, image, category_index):
    """Runs one detection pass and draws the boxes/labels onto the image.

    Args:
        detection_graph: frozen graph produced by load_graph().
        sess: tf.Session bound to detection_graph.
        image: numpy image array (RGB) to run detection on.
        category_index: label map produced by load_label_map().

    Returns:
        The same image array with detections drawn on it.
    """
    with detection_graph.as_default():
        with sess.as_default() as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')
            # image = Image.open(image_path)
            # the array based representation of the image will be used later in order to prepare the
            # result image with boxes and labels on it.
            # image_np = load_image_into_numpy_array(image)
            image_np = image
            # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
            image_np_expanded = np.expand_dims(image_np, axis=0)
            # Actual detection.
            (boxes, scores, classes, num) = sess.run(
                [detection_boxes, detection_scores, detection_classes, num_detections],
                feed_dict={image_tensor: image_np_expanded})
            # Visualization of the results of a detection.
            # Detections scoring below 0.7 are not drawn (min_score_thresh).
            vis_util.visualize_boxes_and_labels_on_image_array(
                image_np,
                np.squeeze(boxes),
                np.squeeze(classes).astype(np.int32),
                np.squeeze(scores),
                category_index,
                use_normalized_coordinates=True,
                line_thickness=8,
                min_score_thresh = 0.7)
            return image_np
# Build the frozen graph and label map once in the parent process; the child
# processes receive them through the Process args below.
detection_graph = load_graph(model_name=MODEL_NAME)
category_index = load_label_map(label_map_name='mscoco_label_map.pbtxt', num_class=NUM_CLASSES)
# Bounded queues: raw frames in, annotated frames out (capped to limit memory).
image_q = Queue(maxsize=200)
processed_q = queue_seq.Queue_Seq(maxsize=200)
# Producer process: reads frames from the video file and puts them into image_q.
def image_worker(image_q, video_file):
    """Reads video_file frame by frame and enqueues (frame_index, RGB frame).

    Frames are converted BGR->RGB here (OpenCV reads BGR); they are converted
    back to BGR after annotation in object_detection_worker. put() blocks
    when image_q is full, which throttles the reader.
    """
    logging.info("image worker start")
    video_capture = cv2.VideoCapture(video_file)
    ret, frame = video_capture.read()
    if not ret:
        logging.error("Can not read video file, please check!!!!")
    frame_count = 0
    while ret:
        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        image_q.put((frame_count, frame))
        logging.debug("put image into queue")
        ret, frame = video_capture.read()
        frame_count += 1
    video_capture.release()
# Producer that feeds frames from the input video into image_q (started in main()).
input_process = multiprocessing.Process(target=image_worker, args=(image_q, args.video))
# Consumer process: runs object detection on frames pulled from image_q.
def object_detection_worker(image_q, processed_q, detection_graph, category_index, fps=None):
    """Annotates frames from image_q and pushes them onto processed_q.

    Pulls (frame_count, RGB frame) tuples from image_q, annotates each frame
    via detect_object, converts it back to BGR for OpenCV display, and puts
    (frame_count, annotated frame) onto processed_q. Loops forever until the
    parent terminates the process.

    Args:
        image_q: queue of (frame_count, RGB frame) tuples to process.
        processed_q: queue receiving (frame_count, annotated BGR frame).
        detection_graph: frozen TF graph produced by load_graph().
        category_index: label map produced by load_label_map().
        fps: optional FPS counter; each processed frame is registered on it.
    """
    print("detection worker start")
    # Grow GPU memory on demand so several worker processes can share the GPU.
    gpu_options = tf.GPUOptions(allow_growth=True)
    config = tf.ConfigProto(gpu_options=gpu_options)
    sess = tf.Session(graph=detection_graph, config=config)
    while True:
        frame_count, frame = image_q.get()
        # (Removed an unused `t = time.time()` left over from timing experiments.)
        ann_image = detect_object(detection_graph, sess, frame, category_index)
        ann_image = cv2.cvtColor(ann_image, cv2.COLOR_RGB2BGR)
        if fps:
            fps.add_frame()
        processed_q.put((frame_count, ann_image))
def main():
    """Wires the producer, detector pool and display loop together.

    Starts one frame-reader process and args.process detection workers, then
    displays annotated frames from processed_q until the user presses 'q',
    at which point all child processes are terminated and joined.
    """
    # configure logger
    logging.basicConfig(
        level=logging.INFO,
    )
    # setup fps counter
    # NOTE(review): fps is handed to the child processes below; with
    # multiprocessing each child works on its own copy, so add_frame() in the
    # workers may not be visible to get_fps() here unless fps_measure.FPS
    # uses shared state internally -- confirm.
    fps = fps_measure.FPS()
    fps.start_count()
    # One detection worker per --process; all share the input/output queues.
    detector_process = [multiprocessing.Process(target=object_detection_worker,
                        args=(image_q, processed_q, detection_graph, category_index, fps))
                        for i in range(args.process)]
    input_process.start()
    for p in detector_process:
        p.start()
    last_frame = -1
    while True:
        frame_count, ann_image = processed_q.get()
        font = cv2.FONT_HERSHEY_SIMPLEX
        cv2.putText(ann_image, 'FPS:{}'.format(int(fps.get_fps())), (50, 50), font, 2, (255, 255, 255), 2, cv2.LINE_AA)
        # check frame order
        # Overlay a warning when frames arrive out of sequence.
        if last_frame != -1:
            if last_frame +1 != frame_count:
                cv2.putText(ann_image, "Frame order error", (100,100), font, 2, (0, 0, 255), 2, cv2.LINE_AA)
        last_frame = frame_count
        cv2.imshow('frame', ann_image)
        # print("fps is:", fps.get_fps())
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
    # Tear down the children; terminate() is needed because the workers loop forever.
    input_process.terminate()
    for p in detector_process:
        p.terminate()
    input_process.join()
    for p in detector_process:
        p.join()
    cv2.destroyAllWindows()
if __name__ == "__main__":
    main()
| [
"scotthuang1989@163.com"
] | scotthuang1989@163.com |
9c48335f00e4ce934b4c7957642e0f09ec30604b | 7c47e106c9ec85a7239c84c55ad5f20972edefcf | /particles/DustParticle.py | b4fb7a915db500a8b4446eb8d19a8df6c7c4a800 | [] | no_license | anasthesia/pyBBN | 11813717ad5023a9b29f9594ccde93fbc2d5a0c9 | 0e88604b765eb5ce2f196909c65cf2af11a8cc2f | refs/heads/master | 2021-01-21T03:37:46.309318 | 2016-05-10T12:03:50 | 2016-05-10T12:03:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,055 | py | """
Non-relativistic simplifications of density, energy density and pressure calculations
"""
import numpy
import IntermediateParticle
name = 'dust'
def density(particle):
    """ ## Density

        Non-relativistic (Maxwell-Boltzmann) number density:

        \begin{equation}
            n = g \left(\frac{M T}{2 \pi}\right)^{3/2} e^{-\frac{M}{T}}
        \end{equation}
    """
    thermal_scale = numpy.sqrt(particle.mass * particle.T / (2. * numpy.pi))
    boltzmann_factor = numpy.exp(-particle.mass / particle.T)
    return particle.dof * thermal_scale**3 * boltzmann_factor
def energy_density(particle):
    """ ## Energy density

        \begin{equation}
            \rho = n \left(M + \frac32 T\right)
        \end{equation}
    """
    n = density(particle)
    return n * (particle.mass + 1.5 * particle.T)
def pressure(particle):
    """ ## Pressure

        Ideal-gas law:

        \begin{equation}
            p = n T
        \end{equation}
    """
    return particle.T * density(particle)
# ## Master equation terms
# Dust regime does not differ from intermediate regime here.
# Reuse the intermediate-particle implementations unchanged.
numerator = IntermediateParticle.numerator
denominator = IntermediateParticle.denominator
| [
"andrew.magalich@gmail.com"
] | andrew.magalich@gmail.com |
fcbdc26bdfbfc32229cd4c6209a56680ffbcab39 | 6e5ab77fee1fb4a0310213dd8c6dd8601828b1b9 | /Algorithm/Swea/D3_17937.py | c33b3d05a10f502e205aa35b54c042cc4e7d0f4a | [] | no_license | hongyong3/TIL | 36d031c0da9e3e6db3eebb977bd3e12df00a849f | 7f1492128e957a78fc95b255f4f7f2978161e471 | refs/heads/master | 2023-08-19T09:16:03.231757 | 2023-08-18T09:38:47 | 2023-08-18T09:38:47 | 162,100,258 | 1 | 0 | null | 2023-02-11T00:52:32 | 2018-12-17T08:42:42 | Jupyter Notebook | UTF-8 | Python | false | false | 198 | py | import sys
sys.stdin = open("D3_17937_input.txt", "r")
T = int(input())
for test_case in range(T):
A, B = map(int, input().split())
print("#{} {}".format(test_case + 1, A if A == B else 1)) | [
"chy66822495@gmail.com"
] | chy66822495@gmail.com |
6826c1f6a09f7b06ef7a2f2506da4f8233577248 | 880aeff2ae5d70c8a01c11f0c0d9f6154d390229 | /tests/test_scale_limits.py | 8a20d5b5de56b50c95df85b6a97b1f3ff8c79f99 | [
"CC0-1.0"
] | permissive | bellyfat/ccxt_rate_limiter | 8799c097589de876040521573f382f1615260609 | d37d675829eb2c1b89980c5001022b63c554ed90 | refs/heads/master | 2023-04-12T22:15:28.280063 | 2021-04-21T04:57:14 | 2021-04-21T04:57:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | from unittest import TestCase
from ccxt_rate_limiter import scale_limits
class TestScaleLimits(TestCase):
    """Tests for ccxt_rate_limiter.scale_limits."""

    def test_scale_limits(self):
        """Each limit's count is scaled by the given factor."""
        scaled = scale_limits([
            {
                'tag': 'tag1',
                'period_sec': 1,
                'count': 10,
            },
            {
                'tag': 'tag2',
                'period_sec': 1,
                'count': 20,
            },
        ], 0.1)
        self.assertEqual(scaled, [
            {
                'tag': 'tag1',
                'period_sec': 1,
                'count': 1,
            },
            {
                'tag': 'tag2',
                'period_sec': 1,
                'count': 2,
            },
        ])

    def test_input_not_changed(self):
        """scale_limits must not mutate the list it is given."""
        # Renamed the local from `input` to avoid shadowing the builtin.
        limits = [
            {
                'tag': 'tag1',
                'period_sec': 1,
                'count': 10,
            },
        ]
        scale_limits(limits, 0.1)
        self.assertEqual(limits, [
            {
                'tag': 'tag1',
                'period_sec': 1,
                'count': 10,
            },
        ])
| [
"a@example.com"
] | a@example.com |
0987bb7dede5a91338e72049a78083b1e8ac34ff | ef6229d281edecbea3faad37830cb1d452d03e5b | /ucsmsdk/methodmeta/PolicySetCentraleStorageMeta.py | e59ff3cebcf9e73fcfaa63776c0f7540eff10df3 | [
"Apache-2.0"
] | permissive | anoop1984/python_sdk | 0809be78de32350acc40701d6207631322851010 | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | refs/heads/master | 2020-12-31T00:18:57.415950 | 2016-04-26T17:39:38 | 2016-04-26T17:39:38 | 57,148,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 931 | py | """This module contains the meta information of PolicySetCentraleStorage ExternalMethod."""
import sys, os
from ..ucscoremeta import MethodMeta, MethodPropertyMeta
method_meta = MethodMeta("PolicySetCentraleStorage", "policySetCentraleStorage", "Version142b")
prop_meta = {
"cookie": MethodPropertyMeta("Cookie", "cookie", "Xs:string", "Version142b", "InputOutput", False),
"in_data": MethodPropertyMeta("InData", "inData", "Xs:string", "Version142b", "Input", False),
"in_oper": MethodPropertyMeta("InOper", "inOper", "Xs:unsignedInt", "Version142b", "Input", False),
"in_side": MethodPropertyMeta("InSide", "inSide", "Xs:string", "Version142b", "Input", False),
"out_data": MethodPropertyMeta("OutData", "outData", "Xs:string", "Version142b", "Output", False),
}
prop_map = {
"cookie": "cookie",
"inData": "in_data",
"inOper": "in_oper",
"inSide": "in_side",
"outData": "out_data",
}
| [
"test@cisco.com"
] | test@cisco.com |
8e7744daac894e64ea037091c80cf88a8cc3ce3e | 3d962ed6a8d35929b1f24b8654634abef957f0c9 | /src/qanda/views.py | a3ef2751e4208ff6d1b938a708963272d9a31004 | [] | no_license | Michaeltetteh/stackoverflowclone | 44a98ac9c809b5597bf30921166fcc66961a3a16 | 7d086f3aa3098eb6636145f2c3ba43168275cc8a | refs/heads/master | 2020-06-07T03:33:43.664983 | 2019-07-12T18:59:46 | 2019-07-12T18:59:46 | 192,911,917 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,176 | py | from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import (
CreateView,
DetailView,
RedirectView,
DayArchiveView,
UpdateView,
TemplateView,
)
from django.shortcuts import render
from .forms import (
QuestionForm,
AnswerForm,
AnswerAcceptanceForm,
)
from .models import Question,Answer
from django.http import HttpResponseBadRequest
from django.urls import reverse
from django.utils import timezone
from .service.elasticsearch import search_for_questions
class AskQuestionView(LoginRequiredMixin, CreateView):
    """Lets an authenticated user ask (or preview) a new question."""
    form_class = QuestionForm
    template_name = 'qanda/ask.html'

    def get_initial(self):
        """Pre-fill the hidden user field with the current user's id."""
        return {'user': self.request.user.id}

    def form_valid(self, form):
        """Save the question or render a preview, depending on the button pressed."""
        action = self.request.POST.get('action')
        if action == 'PREVIEW':
            preview = Question(
                question=form.cleaned_data['question'],
                title=form.cleaned_data['title'],
            )
            ctx = self.get_context_data(preview=preview)
            return self.render_to_response(context=ctx)
        if action == 'SAVE':
            return super().form_valid(form)
        return HttpResponseBadRequest()
class QuestionDetailView(DetailView):
    """Shows a question together with the answer and acceptance forms."""
    model = Question
    ACCEPT_FORM = AnswerAcceptanceForm(initial={'accepted': True})
    REJECTED_FORM = AnswerAcceptanceForm(initial={'accepted': False})

    def get_context_data(self, **kwargs):
        """Adds the answer form, plus accept/reject forms for the question owner."""
        ctx = super().get_context_data(**kwargs)
        ctx['answer_form'] = AnswerForm(initial={
            'user': self.request.user.id,
            'question': self.object.id,
        })
        if self.object.can_accept_answers(self.request.user):
            ctx['accept_form'] = self.ACCEPT_FORM
            ctx['reject_form'] = self.REJECTED_FORM
        return ctx
class CreateAnswerView(LoginRequiredMixin, CreateView):
    """Lets an authenticated user answer (or preview an answer to) a question."""
    form_class = AnswerForm
    template_name = 'qanda/create_answer.html'

    def get_question(self):
        """Fetches the Question referenced by the pk URL kwarg."""
        return Question.objects.get(pk=self.kwargs['pk'])

    def get_initial(self):
        """Binds the answer to the question from the URL and the current user."""
        return {
            'question': self.get_question().id,
            'user': self.request.user.id,
        }

    def get_context_data(self, **kwargs):
        """Exposes the question being answered to the template."""
        return super().get_context_data(question=self.get_question(), **kwargs)

    def get_success_url(self):
        """After saving, return to the question's detail page."""
        return self.object.question.get_absolute_url()

    def form_valid(self, form):
        """Save the answer or render a preview, depending on the button pressed."""
        action = self.request.POST.get('action')
        if action == 'PREVIEW':
            ctx = self.get_context_data(preview=form.cleaned_data['answer'])
            return self.render_to_response(context=ctx)
        if action == 'SAVE':
            return super().form_valid(form)
        return HttpResponseBadRequest()
class UpdateAnswerAccepetance(LoginRequiredMixin, UpdateView):
    """Marks an answer as accepted or rejected.

    NOTE: the class name keeps its historical misspelling ("Accepetance")
    because URL configuration elsewhere may reference it by name.
    """
    form_class = AnswerAcceptanceForm
    queryset = Answer.objects.all()
    template_name = "qanda/common/list_answers.html"

    def get_success_url(self):
        """After updating, return to the answer's question page."""
        return self.object.question.get_absolute_url()

    def form_invalid(self, form):
        # Bug fix: HttpResponseBadRequest does not accept a `redirect_to`
        # keyword, so the old code raised TypeError whenever the form was
        # invalid. Return a plain 400 response instead.
        return HttpResponseBadRequest()
class DailyQuestionList(DayArchiveView):
    """Archive view listing all questions created on a given day.

    The year/month/day come from URL kwargs; `month_format = '%m'` matches
    the month as a number, and `allow_empty` renders days with no questions
    instead of raising 404.
    """
    queryset = Question.objects.all()
    date_field = 'created'
    month_format = '%m'
    allow_empty = True
class TodaysQuestionList(RedirectView):
    """Redirects to the DailyQuestionList page for the current date."""

    def get_redirect_url(self, *args, **kwargs):
        now = timezone.now()
        return reverse(
            'qanda:daily_questions',
            kwargs={'year': now.year, 'month': now.month, 'day': now.day},
        )
class SearchView(TemplateView):
    """Runs a full-text search over questions and renders the hits."""
    template_name = 'qanda/search.html'

    def get_context_data(self, **kwargs):
        """Adds the raw query and, when present, its search hits to the context."""
        query = self.request.GET.get('q', None)
        ctx = super().get_context_data(query=query, **kwargs)
        if query:
            ctx['hits'] = search_for_questions(query)
        return ctx
| [
"miketetteh751@outlook.com"
] | miketetteh751@outlook.com |
127cf91a36cce18269d93f055cf62345174901b7 | 45870a80cbe343efe95eb9e8d0bd47c8c88353d1 | /特殊的函数/venv/Lib/site-packages/tensorflow/tools/api/generator/api/errors/__init__.py | ef90b8fb905ffd54a01f8504bd2d3b233f1a0383 | [] | no_license | pippichi/IntelliJ_PYTHON | 3af7fbb2c8a3c2ff4c44e66736bbfb7aed51fe88 | 0bc6ded6fb5b5d9450920e4ed5e90a2b82eae7ca | refs/heads/master | 2021-07-10T09:53:01.264372 | 2020-07-09T13:19:41 | 2020-07-09T13:19:41 | 159,319,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,608 | py | """Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python import OpError
from tensorflow.python.framework.errors import AbortedError
from tensorflow.python.framework.errors import AlreadyExistsError
from tensorflow.python.framework.errors import CancelledError
from tensorflow.python.framework.errors import DataLossError
from tensorflow.python.framework.errors import DeadlineExceededError
from tensorflow.python.framework.errors import FailedPreconditionError
from tensorflow.python.framework.errors import InternalError
from tensorflow.python.framework.errors import InvalidArgumentError
from tensorflow.python.framework.errors import NotFoundError
from tensorflow.python.framework.errors import OutOfRangeError
from tensorflow.python.framework.errors import PermissionDeniedError
from tensorflow.python.framework.errors import ResourceExhaustedError
from tensorflow.python.framework.errors import UnauthenticatedError
from tensorflow.python.framework.errors import UnavailableError
from tensorflow.python.framework.errors import UnimplementedError
from tensorflow.python.framework.errors import UnknownError
from tensorflow.python.framework.errors import error_code_from_exception_type
from tensorflow.python.framework.errors import exception_type_from_error_code
from tensorflow.python.framework.errors import raise_exception_on_not_ok_status
from tensorflow.python.framework.errors_impl import ABORTED
from tensorflow.python.framework.errors_impl import ALREADY_EXISTS
from tensorflow.python.framework.errors_impl import CANCELLED
from tensorflow.python.framework.errors_impl import DATA_LOSS
from tensorflow.python.framework.errors_impl import DEADLINE_EXCEEDED
from tensorflow.python.framework.errors_impl import FAILED_PRECONDITION
from tensorflow.python.framework.errors_impl import INTERNAL
from tensorflow.python.framework.errors_impl import INVALID_ARGUMENT
from tensorflow.python.framework.errors_impl import NOT_FOUND
from tensorflow.python.framework.errors_impl import OK
from tensorflow.python.framework.errors_impl import OUT_OF_RANGE
from tensorflow.python.framework.errors_impl import PERMISSION_DENIED
from tensorflow.python.framework.errors_impl import RESOURCE_EXHAUSTED
from tensorflow.python.framework.errors_impl import UNAUTHENTICATED
from tensorflow.python.framework.errors_impl import UNAVAILABLE
from tensorflow.python.framework.errors_impl import UNIMPLEMENTED
from tensorflow.python.framework.errors_impl import UNKNOWN | [
"874496049@qq.com"
] | 874496049@qq.com |
cfce7f8cc064960588d18c6795bf79c1dd97fe64 | 1734fd26a9adf7d2580f8bd981babda861944ebd | /location/knn3.py | def676e1aefe9ed988837a256d577a49d17692d9 | [] | no_license | tangzhuochen/Python_ML_Code | 420f4d80552a901b41e368e4e66a06f51ea1b29f | b418fd6a431a77838447ab4736bdf24019276309 | refs/heads/master | 2020-03-28T11:44:50.853941 | 2018-02-08T06:59:31 | 2018-02-08T06:59:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,491 | py | # -*- coding:utf-8 -*-
"""
import csv
import math
import datetime
#user_habit_dict:每个用户的乘车记录:起点,终点,距离
user_habit_dict={}
#start_end_dict:每条记录的起点,终点对
start_end_dict={}
#end_start_dict:每条记录的起点,终点对
end_start_dict={}
#user_habit_dict_test:test中每个用户的记录
user_habit_dict_test={}
#bike_dict:bike中的记录
bike_dict={}
#弧度转换
def rad(tude):
return (math.pi/180.0)*tude
#geohash模块提取的
__base32 = '0123456789bcdefghjkmnpqrstuvwxyz'
__decodemap = { }
for i in range(len(__base32)):
__decodemap[__base32[i]] = i
del i
#返回 精确的经纬度和误差
def decode_exactly(geohash):
lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0)
lat_err, lon_err = 90.0, 180.0
is_even = True
for c in geohash:
cd = __decodemap[c]
for mask in [16, 8, 4, 2, 1]:
if is_even: # adds longitude info
lon_err /= 2
if cd & mask:
lon_interval = ((lon_interval[0]+lon_interval[1])/2, lon_interval[1])
else:
lon_interval = (lon_interval[0], (lon_interval[0]+lon_interval[1])/2)
else: # adds latitude info
lat_err /= 2
if cd & mask:
lat_interval = ((lat_interval[0]+lat_interval[1])/2, lat_interval[1])
else:
lat_interval = (lat_interval[0], (lat_interval[0]+lat_interval[1])/2)
is_even = not is_even
lat = (lat_interval[0] + lat_interval[1]) / 2
lon = (lon_interval[0] + lon_interval[1]) / 2
return lat, lon, lat_err, lon_err
#返回 欧式距离 (其实还可以返回南北方向距离,东西方向距离,曼哈顿距离,方向(-0.5:0.5),但是删了,没啥吊用)
def produceLocationInfo(latitude1, longitude1,latitude2, longitude2):
radLat1 = rad(latitude1)
radLat2 = rad(latitude2)
a = radLat1-radLat2
b = rad(longitude1)-rad(longitude2)
R = 6378137
d = R*2*math.asin(math.sqrt(math.pow(math.sin(a/2),2)+math.cos(radLat1)*math.cos(radLat2)*math.pow(math.sin(b/2),2)))
detallat = abs(a)*R
detalLon = math.sqrt(d**2-detallat**2)
if b==0:
direction = 1/2 if a*b>0 else -1/2
else:
direction = math.atan(detallat/detalLon*(1 if a*b>0 else -1))/math.pi
return round(d)
#返回 欧式距离
def loc_2_dis(hotStartLocation,hotEndLocation):
StartLocation = decode_exactly(hotStartLocation[:7])
EndLocation = decode_exactly(hotEndLocation[:7])
latitude1 = StartLocation[0]
longitude1 = StartLocation[1]
latitude2 = EndLocation[0]
longitude2 = EndLocation[1]
return produceLocationInfo(latitude1, longitude1, latitude2, longitude2)
#返回 是否放假,距0点的分钟数,距5月1的天数
def produceTimeInfo(TimeData):
TimeData = TimeData.split(' ')
baseData = datetime.datetime(2017, 5, 1, 0, 0, 1)
mydata = TimeData[0].split('-')
mytime = TimeData[1].split(':')
mydata[0] = int(mydata[0])
mydata[1] = int(mydata[1])
mydata[2] = int(mydata[2])
mytime[0] = int(mytime[0])
mytime[1] = int(mytime[1])
mytime[2] = int(mytime[2].split('.')[0])
dt = datetime.datetime(mydata[0], mydata[1], mydata[2], mytime[0], mytime[1], mytime[2])
minute = mytime[1]+mytime[0]*60
# return int((dt-baseData).__str__().split(' ')[0]),miao,dt.weekday(),round(miao/900)
isHoliday = 0
if dt.weekday()in [5,6] or int((dt-baseData).__str__().split(' ')[0]) in [29,28]:
isHoliday=1
return isHoliday,minute,int((dt-baseData).__str__().split(' ')[0])
#模型之间的融合,粗暴的取了最值,这个可以再提升
def add2result(result1,result2):
for each in result2:
if each in result1:
result1[each] = min(result1[each] ,result2[each] )
else:
result1[each] = result2[each]
return result1
# 其实就是knn算法,结合了leak。一般的knn+leak应该是0.26分。这里主要有两点创新。一是给算出来的距离值除以频度的1.1次方,
# 这个加了很多分,二是对于新用户又使用了一个新的knn,其他算法在处理新用户的时候也可以参考下。
# knn算法产生的特征可以融合进xgb再训练,已实现,但内存不够弃赛
def training(trainfile = 'train.csv',testfile = 'test.csv',subfile = 'submission.csv' ,
leak1 = 0.01 ,leak2 = 4 ,leak3 = 20, #leak
qidianquan = 10,shijianquan = 10,jiejiaquan = 2,bikequan = 0.5, #都是拼音,字面意思,越大则这个特征比重越大
zhishu = 1.1 #对结果影响很大
):
tr = csv.DictReader(open(trainfile))
#利用train.csv建立user_habit_dict和start_end_dict
for rec in tr:
user = rec['userid']
start = rec['geohashed_start_loc']
end = rec['geohashed_end_loc']
rec['isHoliday'] , rec['minute'] , rec['data'] = produceTimeInfo(rec['starttime'])
if user in user_habit_dict:
user_habit_dict[user].append(rec)
else:
user_habit_dict[user] = [rec]
if start in start_end_dict:
start_end_dict[start].append(rec)
else:
start_end_dict[start] = [rec]
if end in end_start_dict:
end_start_dict[end].append(rec)
else:
end_start_dict[end] = [rec]
print('train done!')
# te是测试文件
te = csv.DictReader(open(testfile))
for rec in te:
user = rec['userid']
bike = rec['bikeid']
rec['isHoliday'], rec['minute'], rec['data'] = produceTimeInfo(rec['starttime'])
if user in user_habit_dict_test:
user_habit_dict_test[user].append(rec)
else:
user_habit_dict_test[user] = [rec]
if bike in bike_dict:
bike_dict[bike].append(rec)
else:
bike_dict[bike] = [rec]
print("test done!")
#sub是提交文件
sub = open(subfile, 'w')
iter1 = 0
# AllhotLocSort = sorted(end_start_dict.items(), key=lambda d: len(d[1]), reverse=True)
te1 = csv.DictReader(open(testfile))
for rec in te1:
iter1 += 1
if iter1 % 10000== 0:
print(iter1/20000,'%',sep='')
# testTime = timeSlipt(rec['minute'])
rec['isHoliday'], rec['minute'], rec['data'] = produceTimeInfo(rec['starttime'])
user1 = rec['userid']
bikeid1 = rec['bikeid']
order1 = rec['orderid']
start1 = rec['geohashed_start_loc']
hour1 = rec['minute']/60
minute1 = rec['minute']
isHoliday1 = rec['isHoliday']
biketype1 = rec['biketype']
data1 = rec['data']
result = {}
hotLoc = {}
#knn
if user1 in user_habit_dict:
for eachAct in user_habit_dict[user1]:
start2 = eachAct['geohashed_start_loc']
end2 = eachAct['geohashed_end_loc']
hour2 = eachAct['minute']/60
isHoliday2 = eachAct['isHoliday']
biketype2 = eachAct['biketype']
data2 = rec['data']
dis = loc_2_dis(start1, start2)
dis = min(dis, 1000) #1000
qidian= qidianquan * (dis / 100) ** 2
detalaTime = abs(hour2 - hour1) if abs(hour2 - hour1) < 12 else 24 - abs(hour2 - hour1)
shijian= shijianquan * (detalaTime / 12 * 10) ** 2
dayType = isHoliday2 - isHoliday1
jiejia= jiejiaquan * (dayType * 10) ** 2 #?
biType = int(biketype2) - int(biketype1)
bike= bikequan * (biType * 10) ** 2 #0.5
#利用终点预测
# return 欧式距离,南北方向距离,东西方向距离,曼哈顿距离,方向(-0.5:0.5)
# test2train_dis = loc_2_dis(start1,end2)
# train2train_dis = loc_2_dis(start2,end2)
# dis_detal = min(abs(test2train_dis[3]-train2train_dis[3]),1000) #1000
# direction_detal = abs(test2train_dis[4]-train2train_dis[4])
# direction_detal = direction_detal if direction_detal<0.5 else 1-direction_detal
# jvli = 4 * (dis_detal/100)**2
# fangxiang = 1 * (direction_detal/0.5*10)**2
score = qidian+shijian+jiejia+bike #jvli+fangxiang
# print(qidian,shijian,jiejia,bike,jvli,fangxiang)
if end2 in hotLoc:
hotLoc[end2] += 1
else:
hotLoc[end2] = 1
if end2 in result:
if result[end2] > score:
result[end2] = score
else:
result[end2] = score
for each in hotLoc:
result[each] = result[each] / (hotLoc[each]**zhishu) #0
for each in result:
result[each] = math.sqrt(result[each])
#利用test中的用户历史记录
if user1 in user_habit_dict_test:
resulttest = {}
user_habit_dict_test[user1].sort(key = lambda x:x['data']*60*24+x['minute'])
xuhao = 0
for i in range(len(user_habit_dict_test[user1])-1):
if user_habit_dict_test[user1][i]['orderid'] == order1:
xuhao = i
resulttest[user_habit_dict_test[user1][i+1]['geohashed_start_loc']] = 21
for i in range(len(user_habit_dict_test[user1])):
if i not in [xuhao,xuhao+1]:
resulttest[user_habit_dict_test[user1][i]['geohashed_start_loc']] = 21+abs(i-xuhao)
result = add2result(result, resulttest)
# leak
if bikeid1 in bike_dict:
resultleak = {}
bike_dict[bikeid1].sort(key = lambda x:x['data']*60*24+x['minute'])
for i in range(len(bike_dict[bikeid1])-1):
if bike_dict[bikeid1][i]['orderid'] == order1:
zhong = bike_dict[bikeid1][i+1]['data']*60*24+bike_dict[bikeid1][i+1]['minute']
qi = bike_dict[bikeid1][i]['data']*60*24+bike_dict[bikeid1][i]['minute']
detal = zhong-qi
if detal<30:
resultleak[bike_dict[bikeid1][i + 1]['geohashed_start_loc']] = leak1
elif detal<2*60:
resultleak[bike_dict[bikeid1][i + 1]['geohashed_start_loc']] = leak2 #4
else:
resultleak[bike_dict[bikeid1][i + 1]['geohashed_start_loc']] = leak3 #20
result = add2result(result,resultleak)
#起点终点对的knn
if start1 in start_end_dict:
endDict = {}
resultqizhong={}
for eachAct in start_end_dict[start1]:
score = 0
score += (24-abs(hour1-eachAct['minute']/60))/24
score += (1-abs(isHoliday1-eachAct['isHoliday']))*0.4
if eachAct['geohashed_end_loc'] in endDict:
endDict[eachAct['geohashed_end_loc']] += score
else:
endDict[eachAct['geohashed_end_loc']] = score
hotLoc = sorted(endDict.items(),key = lambda x:x[1],reverse=True)
if len(hotLoc)>=1:
resultqizhong[hotLoc[0][0]] = 1000
if len(hotLoc) >= 2:
resultqizhong[hotLoc[1][0]] = 1001
if len(hotLoc) >= 3:
resultqizhong[hotLoc[2][0]] = 1002
result = add2result(result, resultqizhong)
#剔除不合理结果
for each in result:
distance = loc_2_dis(each,start1)
if distance > 2500:
result[each] = 1999
if start1 in result:
result[start1] = min(2000, result[start1])
else:
result[start1]=2000
result['fuck2'] = 2001
result['fuck3'] = 2002
bestResult = sorted(result.items(), key=lambda d: d[1])
string = rec['orderid']
num = 0
for item in bestResult:
string += ',' + item[0]
# string += ':' + str(item[1]) + '\t'
num += 1
if num == 3:
break
sub.write(string + '\n')
sub.close()
print('ok')
# Script entry point: train on train.csv, generate predictions for test.csv,
# and write the top-3 destination candidates per order to submission.csv.
if __name__ =="__main__":
    training('train.csv', 'test.csv', 'submission.csv' )
"""
| [
"1002937942@qq.com"
] | 1002937942@qq.com |
93dc42a7298af52504ab1711a2face810ea772a6 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/5/mav.py | 7c92e4c55f7682fe3d2e676ed7917bb9b3828d60 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    """Print the tokens enclosed by a pair of lone double-quote tokens.

    `lineRemaining` is a list of whitespace-split tokens.  If the first and
    last tokens are each a lone '"', the tokens between them are printed
    joined by single spaces; an "empty string" (just the two quote tokens)
    prints a blank line.  Anything else prints nothing.
    """
    # Guard against an empty token list (original code raised IndexError).
    if not lineRemaining:
        return
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # Data to print: drop the opening and closing quote tokens.
            lineRemaining = lineRemaining[1:-1]
            # Python-3 print() call (original used Python-2 print statements).
            print(' '.join(lineRemaining))
        else:
            # Empty quoted string: print a blank line.
            print()
def main(fileName):
    """Interpret the script in `fileName` line by line.

    Every line must start with the keyword 'mAV'; the remaining tokens are
    handed to printFunction().  On the first line that does not start with
    'mAV', print 'ERROR' and stop processing the file.
    """
    with open(fileName) as f:
        for line in f:
            data = line.split()
            # Skip blank lines instead of crashing on data[0] (IndexError
            # in the original).
            if not data:
                continue
            if data[0] == 'mAV':
                printFunction(data[1:])
            else:
                # Python-3 print() (original used a Python-2 print statement).
                print('ERROR')
                return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
7723b40c756b824d50377be7a404363e77475980 | d3192b76f276d5102b231baf470f32d39a5e4854 | /test/StimulusSelector_tests.py | 2bf4a0c195b04eb5a0c9e5d1bc93d9af884989f2 | [] | no_license | drordotan/trajtracker | 874529e1c253f6d2c7527967616adf1c03977e05 | f76693c14d649899cfab5b2bbad4835dbf1cd15c | refs/heads/master | 2021-06-15T05:31:06.404147 | 2017-03-27T18:49:21 | 2017-03-27T18:49:21 | 81,454,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | import unittest
import trajtracker
from trajtracker.stimuli import StimulusSelector
from ttrk_testing import DummyStimulus
class StimulusSelectorTests(unittest.TestCase):
    """Unit tests for StimulusSelector.activate()."""

    def test_select(self):
        """Activating a known name exposes that stimulus as active_stimulus."""
        stim_a = DummyStimulus()
        stim_b = DummyStimulus()
        selector = StimulusSelector([["a", stim_a], ["b", stim_b]])
        # Nothing is active before the first activate() call.
        self.assertIsNone(selector.active_stimulus)
        selector.activate("a")
        self.assertEqual(stim_a, selector.active_stimulus)
        selector.activate("b")
        self.assertEqual(stim_b, selector.active_stimulus)

    def test_select_invalid(self):
        """Activating an unknown name raises ValueError."""
        selector = StimulusSelector([["a", DummyStimulus()]])
        with self.assertRaises(ValueError):
            selector.activate("c")
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| [
"dror.dotan@gmail.com"
] | dror.dotan@gmail.com |
529315e1618e7474cbfb659adba1527cede96ec3 | dae7646a7780d471c32ec9dbe637445aa039b082 | /cnc/flybox_178x133x10/slots/fixture/pocket.py | d19d779125c2ccc3438e9b9350e9f2da6e3e35d9 | [
"Apache-2.0"
] | permissive | iorodeo/flybox_two_chamber | 249dc805074a7c5d2d9d7a8ebec7c3e9c18a792d | d3e24b3fded55308fff8bb95abb2ed97cb2d4465 | refs/heads/master | 2022-11-09T20:31:37.968775 | 2015-04-23T00:56:58 | 2015-04-23T00:56:58 | 273,790,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,250 | py | from __future__ import print_function
import os
import sys
from py2gcode import gcode_cmd
from py2gcode import cnc_dxf
# Machining parameters for the fixture pocket cut.
# NOTE(review): units look like inches and inches/min (toolDiam 0.5,
# feedrate 150) -- confirm against the shop/CAM conventions.
feedrate = 150.0
fileName = 'flybox_178x133x10.dxf'
depth = 0.120
startZ = 0.0
safeZ = 0.5
overlap = 0.5
overlapFinish = 0.6
maxCutDepth = 0.15
toolDiam = 0.5
cornerCut = True
direction = 'ccw'
startDwell = 1.0
# Start a new g-code program with the standard preamble and feed rate.
prog = gcode_cmd.GCodeProg()
prog.add(gcode_cmd.GenericStart())
prog.add(gcode_cmd.Space())
prog.add(gcode_cmd.FeedRate(feedrate))
# Pocketing parameters handed to the DXF-driven pocket generator; the
# geometry comes from the 'POCKET' layer of the DXF file above.
param = {
    'fileName'       : fileName,
    'layers'         : ['POCKET'],
    'components'     : True,
    'depth'          : depth,
    'startZ'         : startZ,
    'safeZ'          : safeZ,
    'overlap'        : overlap,
    'overlapFinish'  : overlapFinish,
    'maxCutDepth'    : maxCutDepth,
    'toolDiam'       : toolDiam,
    'cornerCut'      : cornerCut,
    'direction'      : direction,
    'startDwell'     : startDwell,
    }
pocket = cnc_dxf.DxfRectPocketFromExtent(param)
prog.add(pocket)
prog.add(gcode_cmd.Space())
prog.add(gcode_cmd.End(),comment=True)
# Write the g-code next to this script, swapping the extension for .ngc.
baseName, dummy = os.path.splitext(__file__)
fileName = '{0}.ngc'.format(baseName)
print('generating: {0}'.format(fileName))
prog.write(fileName)
| [
"will@iorodeo.com"
] | will@iorodeo.com |
fbfc6ebef270cae196f812b58597f0589713b520 | 1610e03bc2b9b8419e11824079de3c8636f3a039 | /0x0A-python-inheritance/1-my_list.py | 4db9209230a10b8ff001d267f86007a96f27b635 | [] | no_license | Andresmelek/holbertonschool-higher_level_programming | 12afc1d94b1cd8dcdcfbeb1f9dc819999506afb8 | a1969506f346b808450e33f91d37790732ee7c57 | refs/heads/master | 2020-09-28T23:40:49.866556 | 2020-05-14T17:52:53 | 2020-05-14T17:52:53 | 226,893,592 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | #!/usr/bin/python3
"""
Subclas my list from list.
"""
class MyList(list):
    """List subclass that can print itself in ascending order."""

    def print_sorted(self):
        """Print a sorted copy of the list; the list itself is untouched."""
        ordered = sorted(self)
        print(ordered)
| [
"candres.isaza@gmail.com"
] | candres.isaza@gmail.com |
6e766d1791c25108dbb653d7258644de85a8c46a | 40b3028706b79b2c12603ec3d8c3731186ff054c | /template/project/views/__init__.py | 8ab71c6bb2795bf48c9ecf78989f3384a105c55c | [] | no_license | avara1986/python-ms | 565c3ddac46eaf8be2a7e7094b73122aebd5911b | 788943686c69ead7029253ff20d74b64fa122628 | refs/heads/master | 2021-09-10T10:11:05.703836 | 2018-03-24T14:07:42 | 2018-03-24T14:07:42 | 121,014,766 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | # coding=utf-8
from __future__ import unicode_literals, print_function, absolute_import, division
from flask import Blueprint
# Blueprint holding this package's view routes; static assets are served
# under the '/static' URL path.
views_bp = Blueprint('views', __name__, static_url_path='/static')
from project.views import views | [
"a.vara.1986@gmail.com"
] | a.vara.1986@gmail.com |
133dd666a8084cf7b442838d7798f039cbb32c14 | ff66dfb302dfdc5a519787cea8ad0ccfc2264334 | /python/ex2_logistic_regression/log_reg_funcs/map_feature.py | e18a6bc7d0f8ee1195718d8e67bf4de005bbca2a | [
"MIT"
] | permissive | ashu-vyas-github/AndrewNg_MachineLearning_Coursera | 1c2d50e6a44e8e673203bf06a3f0165cac0a240e | 1be5124b07df61f7295dd1c5151b86b061bf50fc | refs/heads/main | 2023-07-11T14:30:52.057125 | 2021-08-17T06:04:30 | 2021-08-17T06:04:30 | 388,360,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 837 | py | import numpy
def map_feature(X1, X2, num_examples, degree=6):
    """
    Feature mapping function to polynomial features.

    Maps the two input features to all polynomial terms up to the given
    total degree:
        1, x1, x2, x1^2, x1*x2, x2^2, x1^3, ..., x1*x2^(degree-1), x2^degree

    Args:
        X1, X2: 1-D arrays with the two original features (length
            num_examples each)
        num_examples: int, the number of examples (rows)
        degree: int, the maximum total polynomial degree

    Returns:
        mapped_features: array of shape (num_examples, n_terms); the first
        column is the intercept (all ones), followed by the polynomial
        terms in order of increasing total degree.
    """
    # Collect the columns in a list and stack once at the end; the original
    # repeatedly appended with numpy.c_, which copies the whole matrix on
    # every iteration (quadratic in the number of terms).
    columns = [numpy.ones(num_examples)]
    for total_degree in range(1, degree + 1):
        for j in range(total_degree + 1):
            columns.append(numpy.multiply(numpy.power(X1, total_degree - j),
                                          numpy.power(X2, j)))
    return numpy.column_stack(columns)
| [
"ashutoshavyas@gmail.com"
] | ashutoshavyas@gmail.com |
0a65abe66e6a1fdb564042336f4b90993b9c6ce4 | 4deda1b482534cbd8e9a8f638b8e89651251e62e | /2_Training/src/keras_yolo3/kmeans.py | ff6af5da5aca2c882996ae6045ec7d82841e91cb | [] | no_license | SIlvaMFPedro/train-your-own-yolo | 4cd92af5542a81caa6ce607bf6e487fc0aa43ef0 | f872f514dbc553ce7732b98c6d0f1b1134fa539e | refs/heads/master | 2022-12-30T11:26:59.462514 | 2020-10-09T13:32:17 | 2020-10-09T13:32:17 | 295,769,074 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,751 | py | # -----------------------------
# USAGE
# -----------------------------
# python kmeans.py
# -----------------------------
# IMPORTS
# -----------------------------
# Import the necessary packages
import numpy as np
# -----------------------------
# YOLO KMEANS
# -----------------------------
class YOLO_Kmeans:
    """K-means clustering of bounding boxes (by IoU) to find YOLO anchors."""

    def __init__(self, cluster_number, filename):
        """cluster_number: number of anchors; filename: annotation list file."""
        self.cluster_number = cluster_number
        # Bug fix: use the filename that was passed in; the original ignored
        # the parameter and hard-coded "2012_train.txt".
        self.filename = filename

    def iou(self, boxes, clusters):  # 1 box -> k clusters
        """Return the (n, k) IoU matrix between n boxes and k clusters.

        Boxes and clusters are (width, height) pairs assumed to share the
        same top-left corner, so the intersection is min(w)*min(h).
        """
        n = boxes.shape[0]
        k = self.cluster_number
        box_area = boxes[:, 0] * boxes[:, 1]
        box_area = box_area.repeat(k)
        box_area = np.reshape(box_area, (n, k))
        cluster_area = clusters[:, 0] * clusters[:, 1]
        cluster_area = np.tile(cluster_area, [1, n])
        cluster_area = np.reshape(cluster_area, (n, k))
        box_w_matrix = np.reshape(boxes[:, 0].repeat(k), (n, k))
        cluster_w_matrix = np.reshape(np.tile(clusters[:, 0], (1, n)), (n, k))
        min_w_matrix = np.minimum(cluster_w_matrix, box_w_matrix)
        box_h_matrix = np.reshape(boxes[:, 1].repeat(k), (n, k))
        cluster_h_matrix = np.reshape(np.tile(clusters[:, 1], (1, n)), (n, k))
        min_h_matrix = np.minimum(cluster_h_matrix, box_h_matrix)
        inter_area = np.multiply(min_w_matrix, min_h_matrix)
        result = inter_area / (box_area + cluster_area - inter_area)
        return result

    def avg_iou(self, boxes, clusters):
        """Mean best-IoU of each box against its closest cluster."""
        return np.mean([np.max(self.iou(boxes, clusters), axis=1)])

    def kmeans(self, boxes, k, dist=np.median):
        """Cluster boxes into k groups using 1 - IoU as the distance."""
        box_number = boxes.shape[0]
        last_nearest = np.zeros((box_number,))
        np.random.seed()
        # Initialize clusters with k distinct randomly-chosen boxes.
        clusters = boxes[np.random.choice(box_number, k, replace=False)]
        while True:
            distances = 1 - self.iou(boxes, clusters)
            current_nearest = np.argmin(distances, axis=1)
            if (last_nearest == current_nearest).all():
                break  # Assignments did not change: converged.
            for cluster in range(k):
                # Re-center each cluster on the median of its assigned boxes.
                clusters[cluster] = dist(boxes[current_nearest == cluster], axis=0)
            last_nearest = current_nearest
        return clusters

    def result2txt(self, data):
        """Write the anchors as "w,h, w,h, ..." to yolo_anchors.txt."""
        # Context manager replaces the original's manual open()/close().
        with open("yolo_anchors.txt", "w") as f:
            row = np.shape(data)[0]
            for i in range(row):
                if i == 0:
                    x_y = "%d,%d" % (data[i][0], data[i][1])
                else:
                    x_y = ", %d,%d" % (data[i][0], data[i][1])
                f.write(x_y)

    def txt2boxes(self):
        """Parse the annotation file into an (N, 2) array of box (w, h)."""
        dataset = []
        with open(self.filename, "r") as f:
            for line in f:
                infos = line.split(" ")
                # Tokens after the first are "x1,y1,x2,y2,..." box records.
                for i in range(1, len(infos)):
                    width = int(infos[i].split(",")[2]) - int(infos[i].split(",")[0])
                    height = int(infos[i].split(",")[3]) - int(infos[i].split(",")[1])
                    dataset.append([width, height])
        return np.array(dataset)

    def txt2clusters(self):
        """Full pipeline: parse boxes, cluster, save and report the anchors."""
        all_boxes = self.txt2boxes()
        result = self.kmeans(all_boxes, k=self.cluster_number)
        # Sort anchors by width (conventional YOLO anchor ordering).
        result = result[np.lexsort(result.T[0, None])]
        self.result2txt(result)
        print("K anchors:\n {}".format(result))
        print("Accuracy: {:.2f}%".format(self.avg_iou(all_boxes, result) * 100))
# -----------------------------
# MAIN
# -----------------------------
# Standard YOLOv3 setup: derive 9 anchor boxes from the training list file.
if __name__ == '__main__':
    cluster_number = 9
    filename = "2012_train.txt"
    kmeans = YOLO_Kmeans(cluster_number, filename)
    kmeans.txt2clusters()
| [
"silva.mfpedro@gmail.com"
] | silva.mfpedro@gmail.com |
94d9193a284802436208775ba4c528db0580ef80 | 5eef5390146a6a1a8502ffbeba5b3bc211060bf2 | /0x0F-python-object_relational_mapping/10-model_state_my_get.py | aac6c0c0c3b83628cc89f650ee0f63df3cfdddd8 | [] | no_license | sebastiancalleu/holbertonschool-higher_level_programming | 581b68fea5c5ea469a8abfddae9890cc8c9387e3 | 06b7a7f6481d01f37f0fa0a66073881cda76016f | refs/heads/master | 2023-04-22T15:22:18.981649 | 2021-05-13T04:18:16 | 2021-05-13T04:18:16 | 319,347,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 781 | py | #!/usr/bin/python3
'''
Select data from the states table and return the first row,
then print the state id of that first element;
the element to search for is given by the user.
'''
import sys
from model_state import Base, State
from sqlalchemy import (create_engine)
from sqlalchemy.orm import Session
if __name__ == "__main__":
engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.format
(sys.argv[1], sys.argv[2], sys.argv[3]),
pool_pre_ping=True)
Base.metadata.create_all(engine)
session = Session(engine)
state = (session.query(State).order_by(State.id).filter
(State.name == sys.argv[4]).first())
if state:
print(state.id)
else:
print("Not found")
session.close()
| [
"sebastian.calleu@gmail.com"
] | sebastian.calleu@gmail.com |
14adc39cbb8806bbd51cc0b927aa8060d305a1d9 | 3f100a1002a1f8ed453c8b81a9b403444d77b4c6 | /while_loops/loops5_b.py | 9bcb87471c46f1a13f53fed9b8089e0926fbe710 | [] | no_license | Kimuda/Phillip_Python | c19c85a43c5a13760239e4e94c08436c99787ebf | 59d56a0d45839656eb15dbe288bdb0d18cb7df2b | refs/heads/master | 2016-09-09T22:19:02.347744 | 2015-05-01T10:56:49 | 2015-05-01T10:56:49 | 32,330,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | i=1
b=int(input("enter a number "))
while i<=b:
print(i)
i=i+1
| [
"pjkanywa@gmail.com"
] | pjkanywa@gmail.com |
33034bddb04f8718ad796d8a5a5bbca8d501c310 | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /q_match/algorithms/dino_pretext_training.py | 83c1d32c2d7f7308bec3e50fbda26aba6c75ca5d | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 11,224 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DINO pretext.
Maintains a queue of embeddings. For each data in the batch, corrupt it
twice into two views: view1 and view2. Compute a list of similarities for each
view: dist1=sim(view1,queue) and dist2(view2,queue). The loss for this algo
is then the cross entropy between the two distributions.
"""
from absl import logging
from flax.core import freeze
import jax
import jax.numpy as jnp
from q_match.algorithms.training_algo import PretextTrainingAlgo
from q_match.algorithms.vime_pretext_training import vime_corruption
@jax.jit
def entropy(teacher_logits, student_logits,
            teacher_temperature, student_temperature):
  """Per-example cross-entropy between scaled teacher and student logits.

  Both logit sets are divided by their respective temperatures; the teacher
  distribution (softmax) supervises the student log-distribution.
  """
  teacher_probs = jax.nn.softmax(teacher_logits / teacher_temperature, axis=-1)
  student_log_probs = jax.nn.log_softmax(
      student_logits / student_temperature, axis=-1)
  return jnp.sum(-teacher_probs * student_log_probs, axis=-1)
@jax.jit
def dino_loss(teacher_embs_1,
              teacher_embs_2,
              student_embs_1,
              student_embs_2,
              center,
              teacher_temperature=0.04,
              student_temperature=0.1):
  """DINO loss: cross-view distribution matching of student and teacher.

  The (centered, gradient-stopped) teacher logits of one view provide the
  soft targets for the student logits of the *other* view; the two
  cross-view cross-entropies are averaged.  The teacher temperature is
  usually lower than the student temperature, which sharpens the targets.

  Args:
    teacher_embs_1: The embeddings to use as the target view 1 logits.
    teacher_embs_2: The embeddings to use as the target view 2 logits.
    student_embs_1: The embeddings to propagate gradients, view 1 logits.
    student_embs_2: The embeddings to propagate gradients, view 2 logits.
    center: Centering subtracted from the teacher embeddings (as in DINO,
      presumably to avoid collapse -- only the teacher side is centered).
    teacher_temperature: Scaling temp for the teacher.
    student_temperature: Scaling temp for the student.

  Returns:
    Scalar loss measuring how closely the student distributions match the
    teacher distributions.
  """
  # No gradients flow into the teacher targets.
  teacher_embs_1 = jax.lax.stop_gradient(teacher_embs_1 - center)
  teacher_embs_2 = jax.lax.stop_gradient(teacher_embs_2 - center)
  # Cross-view matching: teacher view 1 supervises student view 2 and
  # vice versa.
  entropy_1 = entropy(teacher_embs_1, student_embs_2,
                      teacher_temperature, student_temperature)
  entropy_2 = entropy(teacher_embs_2, student_embs_1,
                      teacher_temperature, student_temperature)
  return jnp.mean(entropy_1 + entropy_2) / 2
@jax.jit
def _update_ema_params(ema_params, new_params, tau):
"""Returns new EMA params."""
return jax.tree_map(lambda x, y: x * tau + (1. - tau) * y, ema_params,
new_params)
@jax.jit
def _update_center(center, new_center, tau):
"""Returns new Center params."""
return jax.tree_map(lambda x, y: x * tau + (1. - tau) * y, center,
new_center)
class DinoPretextTraining(PretextTrainingAlgo):
  """DINO-style pretext training algorithm.

  Trains a "student" network so that, for two corrupted views of each
  input batch, its output distribution matches the sharpened, centered
  distribution of a "teacher" network (the cross-view objective in
  `dino_loss`).  The teacher is either an exponential moving average (EMA)
  of the student or the student itself, depending on
  `use_momentum_encoder`; the output center is likewise EMA-updated.

  Attributes:
    logdir: location of the log directory.
    dataset: dataset to train on.
    batch_size: batch size for training.
    model: model to train.
    learning_rate: the learning rate for training.
    epochs: number of epochs to train for.
    params: Optional params to start training from.  If None, random params
      are initialized.
    state: Optional state to start training from.
    writer: Writer for writing to tensorboard.
    weight_decay: weight decay on pretext params.
    mask_key: PRNG key used for drawing the corruption masks.
    corruption_p: The probability of corrupting each feature for view 1
      (view 2 reuses the same probability with a fresh key).
    student_temperature: Student temperature in distribution match loss.
    teacher_temperature: Teacher temperature (typically lower than the
      student's, which sharpens the targets).
    use_momentum_encoder: If True, the teacher uses an EMA copy of the
      student parameters; otherwise the student parameters themselves.
    tau: EMA decay rate for the teacher parameters and the center.
  """
  def __init__(
      self,
      logdir,
      dataset,
      batch_size,
      model,
      eval_model,
      learning_rate,
      epochs,
      params=None,
      state=None,
      writer=None,
      weight_decay=0.,
      corruption_p=.3,
      student_temperature=0.1,
      teacher_temperature=0.04,
      patience=32,
      use_momentum_encoder=True,
      tau=.999,
      **kwargs
  ):
    # **kwargs absorbs (and ignores) unused configuration options.
    super(DinoPretextTraining,
          self).__init__(logdir, dataset, batch_size, model, eval_model,
                         learning_rate, epochs, params, state, writer,
                         weight_decay, patience=patience)
    # Fixed seed for the corruption masks; the key is re-split every step.
    self.mask_key = jax.random.PRNGKey(99)
    self.corruption_p = corruption_p
    self.student_temperature = student_temperature
    self.teacher_temperature = teacher_temperature
    self.use_momentum_encoder = use_momentum_encoder
    self.tau = tau
  def _loss(
      self, params, state, teacher_state, center, features, mask_key, ema_params
  ):
    """Loss with distribution match."""
    # Student variables carry gradients; teacher variables are a frozen copy
    # (EMA parameters, or the student's own parameters when the momentum
    # encoder is disabled).
    variables = freeze({'params': params, **state})
    if self.use_momentum_encoder:
      variables_2 = freeze({'params': ema_params, **teacher_state})
    else:
      variables_2 = freeze({'params': params, **state})
    variables_2 = jax.lax.stop_gradient(variables_2)
    ## View 1
    view_1_features, _ = vime_corruption(features, self.corruption_p,
                                         mask_key)
    # Student View 1
    output_s1, updated_state = self.model.apply(variables,
                                                view_1_features,
                                                mutable=['batch_stats'],
                                                rngs=self.rngs)
    encoded_s1 = output_s1['pretext']['protos']
    student_embs_1 = encoded_s1
    # Teacher View 1
    output_t1, _ = self.model.apply(
        variables_2, view_1_features, mutable=['batch_stats'], rngs=self.rngs)
    encoded_t1 = output_t1['pretext']['protos']
    teacher_embs_1 = encoded_t1
    ## View 2
    # Use the first key later, so pick second.
    _, new_mask_key = jax.random.split(self.mask_key)
    view_2_features, _ = vime_corruption(
        features, p=self.corruption_p, mask_key=new_mask_key)
    # Student View 2
    # NOTE(review): this overwrites `updated_state` from view 1, so only the
    # view-2 batch stats are returned -- confirm this is intended.
    output_s2, updated_state = self.model.apply(variables,
                                                view_2_features,
                                                mutable=['batch_stats'],
                                                rngs=self.rngs)
    encoded_s2 = output_s2['pretext']['protos']
    student_embs_2 = encoded_s2
    # Teacher View 2
    output_t2, updated_teacher_state_2 = self.model.apply(
        variables_2, view_2_features, mutable=['batch_stats'], rngs=self.rngs)
    encoded_t2 = output_t2['pretext']['protos']
    teacher_embs_2 = encoded_t2
    # Fresh center estimate: batch mean of the teacher outputs of both views.
    new_center = ((encoded_t2 + encoded_t1) / 2).mean(axis=0)
    pretext_loss = dino_loss(
        teacher_embs_1=teacher_embs_1,
        teacher_embs_2=teacher_embs_2,
        student_embs_1=student_embs_1,
        student_embs_2=student_embs_2,
        center=center,
        student_temperature=self.student_temperature,
        teacher_temperature=self.teacher_temperature)
    return pretext_loss, (updated_state, updated_teacher_state_2, new_center)
  def run(self,):
    """Runs a pretext training algo."""
    params = self.params
    state = self.state
    dataset = self.dataset
    model = self.model
    # Teacher starts as an exact copy of the student.
    ema_params = jax.tree_util.tree_map(jax.numpy.copy, params)
    teacher_state = jax.tree_util.tree_map(jax.numpy.copy, state)
    example_data = jax.numpy.array(dataset.get_example_features())
    variables = freeze({'params': params, **state})
    example_output, _ = model.apply(variables,
                                    example_data,
                                    mutable=['batch_stats'],
                                    rngs=self.rngs,
                                    )
    logging.debug(str(example_output))
    # initialize center from the model's outputs on the example batch
    center = example_output['pretext']['protos'].mean(axis=0)
    center = jax.tree_util.tree_map(jax.numpy.copy, center)
    optimizer_state = self.optimizer.init(params=params)
    # NOTE(review): `self.loss`/`get_grad_fn`/`update_model`/`update_rngs`
    # come from PretextTrainingAlgo; `self.loss` presumably wraps `_loss`.
    grad_fn = self.get_grad_fn()
    steps = 0
    for epoch in range(self.epochs):
      logging.info('Pretext Epoch: %d', epoch)
      for example in dataset.get_pretext_ds():
        features = jax.numpy.array(example['features'])
        # Log the training loss every 100 steps.
        if steps % 100 == 0:
          pretext_loss, _ = self.loss(
              params, state, teacher_state, center, features,
              self.mask_key,
              ema_params)
          log_train_loss_msg = f'pretext training loss {pretext_loss}'
          logging.info(log_train_loss_msg)
          metrics = {'pretext_train_loss': pretext_loss,}
          if self.writer is not None:
            self.writer.write_scalars(steps, metrics)
        gradients, (state, teacher_state,
                    new_center) = grad_fn(params, state, teacher_state,
                                          center, features, self.mask_key,
                                          ema_params)
        params, optimizer_state = self.update_model(params,
                                                    gradients,
                                                    optimizer_state)
        self.update_rngs()
        # EMA-update the teacher parameters and the center, then advance
        # the corruption-mask key.
        ema_params = _update_ema_params(ema_params, params, self.tau)
        center = _update_center(center, new_center, self.tau)
        self.mask_key, _ = jax.random.split(self.mask_key)
        steps += 1
      # # check validation pretext dataset if it exists
      pretext_validation_ds = dataset.get_pretext_validation_ds()
      if pretext_validation_ds is not None:
        # compute validation loss (weighted by per-batch example counts)
        validation_loss = 0.
        val_seen = 0
        val_mask_key = self.mask_key
        for example in pretext_validation_ds:
          features = jax.numpy.array(example['features'])
          seen = features.shape[0]
          validation_loss += self.loss(
              params,
              state,
              teacher_state,
              center,
              features,
              val_mask_key,
              ema_params)[0] * seen
          val_seen += seen
          val_mask_key, _ = jax.random.split(val_mask_key)
        validation_loss /= float(val_seen)
        self.writer.write_scalars(
            epoch,
            {'pretext_validation_loss': validation_loss})
        # Early stopping: keep the best params/state seen so far; stop after
        # `patience` epochs without improvement.
        if validation_loss < self.best_early_stop_loss:
          self.best_early_stop_loss = validation_loss
          self.early_stop_params = params
          self.early_stop_state = state
          self.patience_counter = 0
        else:
          self.patience_counter += 1
          if self.patience_counter > self.patience:
            break
      else:
        # No validation split: always keep the latest params/state.
        self.early_stop_params = params
        self.early_stop_state = state
    return self.early_stop_params, self.early_stop_state
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
514579e74860ed75d0f38480fbea5d0c3adc143e | e10a6d844a286db26ef56469e31dc8488a8c6f0e | /m_theory/m_theory_lib/ode/ode_hessian.py | 7a4d2de1fd85717b53c65fe357066402e686e45c | [
"Apache-2.0",
"CC-BY-4.0"
] | permissive | Jimmy-INL/google-research | 54ad5551f97977f01297abddbfc8a99a7900b791 | 5573d9c5822f4e866b6692769963ae819cb3f10d | refs/heads/master | 2023-04-07T19:43:54.483068 | 2023-03-24T16:27:28 | 2023-03-24T16:32:17 | 282,682,170 | 1 | 0 | Apache-2.0 | 2020-07-26T15:50:32 | 2020-07-26T15:50:31 | null | UTF-8 | Python | false | false | 28,642 | py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Backpropagating Hessians through ODEs."""
import functools
import numbers
import time
from typing import Callable, Optional
import numpy
import numpy.typing
import scipy.integrate
import scipy.optimize
import tensorflow as tf
# Internal variable names need to use terse abbreviations that
# closely follow the (somewhat involved) mathematics.
# pylint:disable=invalid-name
# Pylint is wrong when complaining about 'this is of type type'
# annotations where we precisely mean that. The type `type`
# is not a generic in such situations!
# pylint:disable=g-bare-generic
# Module-private switch to turn off autograph for debugging.
# This then simplifies inspecting tensors, as we can call
# .numpy() on intermediate quantities while in eager mode.
_DEFAULT_USE_TF_FUNCTION = True
def maybe_tf_function(use_tf_function):
  """Returns tf.function if use_tf_function is true, identity otherwise."""
  if use_tf_function:
    return tf.function
  # Identity decorator: leaves the function in eager mode for debugging.
  return lambda f: f
def fd_grad(f, x0, *, eps=1e-7):
  """Computes a gradient via finite-differencing.

  Args:
    f: The ArrayLike -> ArrayLike function to take the gradient of.
    x0: The position at which to take the gradient.
    eps: step size for symmetric differencing.

  Returns:
    The gradient of `f` at `x0` from central differences.  If `f(x0)` has
    shape `s` as an ndarray, the result has shape `s + (len(x0),)`; the
    last index selects the coordinate that was perturbed, with partial
    derivatives computed as `(f(x0 + delta_i) - f(x0 - delta_i)) / (2*eps)`.
  """
  x0 = numpy.asarray(x0)
  if x0.ndim != 1:
    raise ValueError(f'Need 1-index position-vector x0, got shape: {x0.shape}')
  # If adding eps cannot be represented in x0's element-type (e.g. an
  # integer array), promote the array accordingly.
  promoted_type = type(x0[0] + eps)
  if not isinstance(x0[0], promoted_type):
    x0 = x0.astype(promoted_type)
  dim = x0.size
  base_value = numpy.asarray(f(x0))
  grad = numpy.zeros(base_value.shape + (dim,), dtype=base_value.dtype)
  for i in range(dim):
    probe = numpy.array(x0)
    probe[i] = x0[i] + eps
    f_hi = numpy.asarray(f(probe))
    probe[i] = x0[i] - eps
    f_lo = numpy.asarray(f(probe))
    grad[..., i] = (f_hi - f_lo) / (2 * eps)
  return grad
def fd_hessian(f, x0, *, eps=1e-5):
  """Computes a hessian via iterated-finite-differencing."""
  # The Hessian is the finite-difference gradient of the finite-difference
  # gradient.
  def grad_of_f(x):
    return fd_grad(f, x, eps=eps)
  return fd_grad(grad_of_f, x0, eps=eps)
def tf_jacobian(t_vec_func,
                use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a TF vector-valued function to its TF Jacobian-function.

  Args:
    t_vec_func: tf.Tensor -> tf.Tensor vector-valued function.
    use_tf_function: Whether to @tf.function wrap the result.

  Returns:
    A function mapping a position-vector to the full Jacobian matrix of
    `t_vec_func` at that position.
  """
  # This is here only used to work out the hessian w.r.t. ODE initial-state
  # and final-state coordinates. Computing the costate-equation jacobian
  # with this would be wasteful.
  @maybe_tf_function(use_tf_function)
  def tf_j(t_xs):
    tape = tf.GradientTape()
    with tape:
      tape.watch(t_xs)
      v = t_vec_func(t_xs)
    # ZERO unconnected-gradients: components of v that do not depend on
    # t_xs contribute zero rows rather than None.
    ret = tape.jacobian(v, t_xs,
                        unconnected_gradients=tf.UnconnectedGradients.ZERO)
    return ret
  return tf_j
def tf_jac_vec(
    t_vec_func,
    use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a TF vector-function F to a "(x, sx) -> sx_j Fj,k(x)" function.

  The returned function computes a vector-Jacobian product: the sensitivity
  vector `sx` is fed in via `output_gradients`, so the full Jacobian is
  never materialized.
  """
  @maybe_tf_function(use_tf_function)
  def tf_j(t_xs, t_s_xs):
    tape = tf.GradientTape()
    with tape:
      tape.watch(t_xs)
      t_v = t_vec_func(t_xs)
    # Reverse-mode gradient seeded with t_s_xs yields sx_j * dF_j/dx_k.
    return tape.gradient(t_v, t_xs,
                         output_gradients=[t_s_xs],
                         unconnected_gradients=tf.UnconnectedGradients.ZERO)
  return tf_j
def tf_jac_vec_v1(
    t_vec_func,
    use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a TF vector-function F to a "(x, sx) -> sx_j Fj,k(x)" function.

  Variant of `tf_jac_vec` that forms the scalar sx_j F_j(x) explicitly via
  `tf.tensordot` and then takes its plain gradient; both compute the same
  vector-Jacobian product.
  """
  # See discussion in the accompanying article.
  @maybe_tf_function(use_tf_function)
  def tf_j(t_xs, t_s_xs):
    tape = tf.GradientTape()
    with tape:
      tape.watch(t_xs)
      t_v = tf.tensordot(t_s_xs, t_vec_func(t_xs), axes=1)
    return tape.gradient(t_v, t_xs,
                         unconnected_gradients=tf.UnconnectedGradients.ZERO)
  return tf_j
def tf_grad(tf_f,
            use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a TF scalar-function to its TF gradient-function.

  Args:
    tf_f: tf.Tensor -> scalar tf.Tensor function.
    use_tf_function: Whether to @tf.function wrap the result.

  Returns:
    A function mapping a position-vector to the gradient of `tf_f` there.
  """
  @maybe_tf_function(use_tf_function)
  def tf_grad_f(t_ys):
    tape = tf.GradientTape()
    with tape:
      tape.watch(t_ys)
      t_loss = tf_f(t_ys)
    return tape.gradient(t_loss, t_ys,
                         unconnected_gradients=tf.UnconnectedGradients.ZERO)
  return tf_grad_f
def tf_grad_hessian(tf_f,
                    want_hessian=True,
                    use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a TF scalar-function to its `gradient` and `hessian` functions.

  Returns:
    A pair `(gradient_fn, hessian_fn)`, where `hessian_fn` (the Jacobian
    of the gradient) is None when `want_hessian` is false.
  """
  tf_fprime = tf_grad(tf_f, use_tf_function=use_tf_function)
  return tf_fprime, (tf_jacobian(tf_fprime, use_tf_function=use_tf_function)
                     if want_hessian else None)
def tf_backprop_ode(tf_dy_dt,
                    use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a (y -> dy/dt) ODE to the 'doubled sensitivity-backprop' ODE.

  The extended state is the concatenation [y, s_y] of the position and its
  sensitivities.  The position evolves with `tf_dy_dt`; the sensitivity
  half evolves with minus the vector-Jacobian product (cf. the adjoint
  sensitivity method).
  """
  tf_jv = tf_jac_vec(tf_dy_dt, use_tf_function=use_tf_function)
  @maybe_tf_function(use_tf_function)
  def tf_back_dyext_dt(yext):
    # Split the doubled state-vector into position and sensitivity halves.
    ys, s_ys = tf.unstack(tf.reshape(yext, (2, -1)))
    return tf.concat([tf_dy_dt(ys), -tf_jv(ys, s_ys)], axis=0)
  return tf_back_dyext_dt
def tf_dp_backprop_ode(tf_dy_dt,
                       dim_y,
                       use_tf_function=_DEFAULT_USE_TF_FUNCTION):
  """Maps a (y -> dy/dt) ODE to the 'differential programming backprop' ODE.

  The returned callable cannot be used directly as a dy/dt for ODE-integration,
  but needs to be wrapped into a closure that sets the first bool argument
  to `True` if the sigma_i F_i,jk term is to be included, or `False` if not.
  The latter makes sense for backpropagating the hessian at a minimum.

  Args:
    tf_dy_dt: tf.Tensor -> tf.Tensor function that maps a state-space position
      to a velocity.
    dim_y: Dimension of state-space.
    use_tf_function: Whether to @tf.function wrap the result.

  Returns:
    A (bool, tf.Tensor) -> tf.Tensor function which, depending on whether the
    first argument is `True`, includes the sigma_i F_i,jk term in the
    computation of the rate-of-change of the extended state-space vector.
    This extended vector (always, irrespective of how the boolean is set)
    has structure tf.concat([ys, s_ys, tf.reshape(s2_ys, -1)], axis=0),
    where s_ys are the components of the gradient, and s2_ys are the
    components of the hessian.
  """
  tf_jv = tf_jac_vec(tf_dy_dt, use_tf_function=use_tf_function)
  # Jacobian of h_ij F_j, i.e. J_ik := h_ij F_j,k.
  @maybe_tf_function(use_tf_function)
  def tf_hF(t_h_ij, t_ys):
    tape = tf.GradientTape()
    with tape:
      tape.watch(t_ys)
      t_v = tf.linalg.matvec(t_h_ij, tf_dy_dt(t_ys))
    return tape.jacobian(t_v, t_ys,
                         unconnected_gradients=tf.UnconnectedGradients.ZERO)
  # sigma_i F_i,kl: second derivatives of F contracted with the
  # sensitivities, via two nested gradient tapes.
  @maybe_tf_function(use_tf_function)
  def tf_sF2(t_xs, t_s_xs):
    tape0 = tf.GradientTape()
    with tape0:
      tape0.watch(t_xs)
      tape1 = tf.GradientTape()
      with tape1:
        tape1.watch(t_xs)
        t_sF = tf.tensordot(t_s_xs, tf_dy_dt(t_xs), axes=1)
      t_grad_sF = tape1.gradient(
          t_sF, t_xs,
          unconnected_gradients=tf.UnconnectedGradients.ZERO)
    return tape0.jacobian(
        t_grad_sF, t_xs,
        unconnected_gradients=tf.UnconnectedGradients.ZERO)
  # Rate-of-change of the extended state [y, gradient, hessian].
  @maybe_tf_function(use_tf_function)
  def tf_back_dyext_dt(include_d2f, yext):
    # Unpack position, gradient, and (dim_y x dim_y) hessian blocks.
    ys = yext[:dim_y]
    s_ys = yext[dim_y: 2 * dim_y]
    s2_ys = tf.reshape(yext[2 * dim_y:], (dim_y, dim_y))
    ddt_ys = tf_dy_dt(ys)
    ddt_s_ys = tf_jv(ys, s_ys)
    # Symmetrized first-order contribution h.J + (h.J)^T.
    hF = tf_hF(s2_ys, ys)
    ddt_s2_ys_linear = hF + tf.transpose(hF)
    if include_d2f:
      ddt_s2_ys = ddt_s2_ys_linear + tf_sF2(ys, s_ys)
    else:
      ddt_s2_ys = ddt_s2_ys_linear
    return tf.concat([ddt_ys,
                      -ddt_s_ys,
                      -tf.reshape(ddt_s2_ys, (-1,))], axis=0)
  return tf_back_dyext_dt
def scipy_odeint(f_dy_dt, t0_t1, y0, *args, method=None, **kwargs):
  """Wraps scipy's ODE integrators to use a (y -> dy/dt) ODE-function.

  Args:
    f_dy_dt: Function mapping a state-vector numpy.ndarray y to its
      "velocity" dy/dt.
    t0_t1: Pair of `(starting_time, final_time)`.
    y0: State-vector at `starting_time`.
    *args: Further arguments, forwarded to the scipy ode-integrator.
    method: If `None`, integration uses `scipy.integrate.odeint()`;
      otherwise `scipy.integrate.solve_ivp()` is used and `method` is
      forwarded to it.
    **kwargs: Further keyword arguments, forwarded to the scipy
      ode-integrator.

  Returns:
    numpy.ndarray, state-vector at `final_time`.
  """
  def rhs(t, y):
    del t  # The dynamics are autonomous; time is ignored.
    return f_dy_dt(y)
  if method is not None:
    # solve_ivp() path; the final column holds the state at final_time.
    solution = scipy.integrate.solve_ivp(rhs, t0_t1, y0, method=method,
                                         *args, **kwargs)
    return solution.y[:, -1]
  # odeint() path: force the (t, y) argument order and plain-array output.
  kwargs['tfirst'] = True
  kwargs['full_output'] = False
  trajectory = scipy.integrate.odeint(rhs, y0, t0_t1, *args, **kwargs)
  return trajectory[1, :]
def _check_and_symmetrize(m, opt_rtol_atol):
"""Optionally symmetrizes `m`, with tolerance thresholds."""
if opt_rtol_atol is None:
return m
rtol, atol = opt_rtol_atol
if not numpy.allclose(m, m.T, rtol=rtol, atol=atol):
return None
return 0.5 * (m + m.T)
class ODEBackpropProblem:
  """An ODE problem on which we want to do back-propagation.

  Attributes:
    numpy_dy_dt: The {state} -> {rate of change} numpy-array-valued
      function describing the dynamics. Must not be modified.
  """
  # The deeper reason for this to be a class (rather than having just
  # some functions with interfaces similar to
  # scipy.integrate.odeint()) is that we want to use some intermediate
  # quantities, such as TensorFlow-functions that have been wrapped up
  # to hand them to an ODE-solver, across multiple calls. While such
  # quantities are not dynamical state (which is what class instances
  # are typically used for), they are cached optionally-computed
  # properties (generated when needed first), and since they are
  # needed/used across multiple calls, having a class instance to own
  # them and manage lazy creation simplifies the logic. Given that the
  # general structure of this code is rather functional (since we
  # naturally pass around functions describing various aspects of an
  # ODE problem), this is an example for object-oriented and
  # functional design properly complementing one another.

  def __init__(self,
               *,
               dim_y,
               with_timings=False,
               tf_dy_dt,
               tf_L_y0y1,
               tf_dtype=tf.float64,
               use_tf_function=_DEFAULT_USE_TF_FUNCTION):
    """Initializes the instance.

    Args:
      dim_y: state-space dimension.
      with_timings: whether callbacks should record timing measurements.
      tf_dy_dt: tf.Tensor -> tf.Tensor state-space-velocity function,
        as a function of state-space position.
      tf_L_y0y1: (tf.Tensor, tf.Tensor) -> tf.Tensor 'loss' as a function
        of initial and final state-space position.
      tf_dtype: Numerical type to use as dtype= for the calculation.
      use_tf_function: whether to wrap TensorFlow functions into @tf.function.
    """
    self._dim_y = dim_y
    self._tf_dy_dt = tf_dy_dt
    self._tf_L_y0y1 = tf_L_y0y1
    self._tf_dtype = tf_dtype
    self._use_tf_function = use_tf_function
    self._with_timings = with_timings
    # Timings: per-call wall-clock measurements, collected only when
    # `with_timings` is set; harvested via collect_timers().
    self._t_numpy_dy_dt = []
    self._t_numpy_back_ode = []
    self._t_numpy_back2_ode = []
    self._t_numpy_dp_back_ode = []
    #
    if with_timings:
      def numpy_dy_dt(ys):
        t0 = time.monotonic()
        result = tf_dy_dt(tf.constant(ys, dtype=tf_dtype)).numpy()
        self._t_numpy_dy_dt.append(time.monotonic() - t0)
        return result
    else:
      def numpy_dy_dt(ys):
        return tf_dy_dt(tf.constant(ys, dtype=tf_dtype)).numpy()
    self.numpy_dy_dt = numpy_dy_dt
    #
    # Loss as a function of the concatenated [y0; y1] vector, which is
    # the form needed for taking gradients/hessians w.r.t. all inputs.
    def tf_L_y01(tf_y01):
      y0, y1 = tf.unstack(tf.reshape(tf_y01, (2, -1)))
      return tf_L_y0y1(y0, y1)
    self._tf_L_y01 = tf_L_y01
    # Gradient and hessian of the loss, as compilation-cached callables:
    self._opt_tf_dL_dys_and_d2L_dys_2 = [None, None]

  @functools.cached_property
  def _loss_has_direct_y0_dependency(self):
    """Checks whether the loss has an explicit dependency on `y0`.

    If the TensorFlow-generated graph sees an unconnected gradient
    when backpropagating from the loss into the `y0` argument,
    this returns `False`.
    """
    tc_y0 = tf.constant([0.0] * self._dim_y, dtype=self._tf_dtype)
    tc_y1 = tf.constant([0.0] * self._dim_y, dtype=self._tf_dtype)
    tape_for_checking_y0_dependency = tf.GradientTape()
    with tape_for_checking_y0_dependency:
      tape_for_checking_y0_dependency.watch(tc_y0)
      t_loss_for_checking_y0_dependency = self._tf_L_y0y1(tc_y0, tc_y1)
    dL_dy0_or_None = (
        tape_for_checking_y0_dependency.gradient(
            t_loss_for_checking_y0_dependency, tc_y0,
            unconnected_gradients=tf.UnconnectedGradients.NONE))
    return dL_dy0_or_None is not None

  @functools.cached_property
  def _tf_back_ode(self):
    """The backpropagation-extended ODE."""
    return tf_backprop_ode(self._tf_dy_dt,
                           use_tf_function=self._use_tf_function)

  @functools.cached_property
  def _numpy_back_ode(self):
    """NumPy wrapper for tf_back_ode()."""
    tf_dtype = self._tf_dtype
    # Extended state here is [y; s], hence size 2 * dim_y.
    tf_back_ode = self._tf_back_ode.get_concrete_function(
        tf.zeros(2 * self._dim_y, dtype=tf_dtype))
    if self._with_timings:
      def fn_ydot(yexts):
        t0 = time.monotonic()
        result = tf_back_ode(tf.constant(yexts, dtype=tf_dtype)).numpy()
        self._t_numpy_back_ode.append(time.monotonic() - t0)
        return result
    else:
      def fn_ydot(yexts):
        return tf_back_ode(tf.constant(yexts, dtype=tf_dtype)).numpy()
    return fn_ydot

  @functools.cached_property
  def _tf_back2_ode(self):
    """The twice-backpropagation-extended ODE."""
    return tf_backprop_ode(self._tf_back_ode,
                           use_tf_function=self._use_tf_function)

  @functools.cached_property
  def _numpy_back2_ode(self):
    """NumPy wrapper for tf_back2_ode()."""
    tf_dtype = self._tf_dtype
    # Doubly-extended state has size 4 * dim_y.
    tf_back2_ode = self._tf_back2_ode.get_concrete_function(
        tf.zeros(4 * self._dim_y, dtype=tf_dtype))
    if self._with_timings:
      def fn_ydot(ye2):
        t0 = time.monotonic()
        result = tf_back2_ode(tf.constant(ye2, dtype=tf_dtype)).numpy()
        self._t_numpy_back2_ode.append(time.monotonic() - t0)
        return result
    else:
      def fn_ydot(ye2):
        return tf_back2_ode(tf.constant(ye2, dtype=tf_dtype)).numpy()
    return fn_ydot

  @functools.cached_property
  def _tf_dp_back_ode(self):
    """The differential-programming backpropagation-extended ODE."""
    return tf_dp_backprop_ode(self._tf_dy_dt,
                              self._dim_y,
                              use_tf_function=self._use_tf_function)

  def _numpy_dp_back_ode(self, include_sF_term):
    """NumPy wrapper for tf_dp_back_ode()."""
    tf_dtype = self._tf_dtype
    # Extended state is [y; gradient; flattened hessian],
    # hence size dim_y * (2 + dim_y).
    tf_dp_back_ode = self._tf_dp_back_ode.get_concrete_function(
        include_sF_term,
        tf.zeros(self._dim_y * (2 + self._dim_y),
                 dtype=tf_dtype))
    if self._with_timings:
      def fn_ydot(ye):
        t0 = time.monotonic()
        result = tf_dp_back_ode(tf.constant(ye, dtype=tf_dtype)).numpy()
        self._t_numpy_dp_back_ode.append(time.monotonic() - t0)
        return result
    else:
      def fn_ydot(ye):
        return tf_dp_back_ode(tf.constant(ye, dtype=tf_dtype)).numpy()
    return fn_ydot

  @functools.cached_property
  def _numpy_dp_back_ode_with_sF_term(self):
    """NumPy wrapper for tf_dp_back_ode(), including the s_i*F_i,jk-term."""
    return self._numpy_dp_back_ode(True)

  @functools.cached_property
  def _numpy_dp_back_ode_without_sF_term(self):
    """NumPy wrapper for tf_dp_back_ode(), without the s_i*F_i,jk-term."""
    return self._numpy_dp_back_ode(False)

  def collect_timers(self):
    """Resets and collects timers."""
    # Return the accumulated lists and start fresh ones, so the caller
    # owns the returned data.
    result = (
        self._t_numpy_dy_dt,
        self._t_numpy_back_ode,
        self._t_numpy_back2_ode,
        self._t_numpy_dp_back_ode)
    self._t_numpy_dy_dt = []
    self._t_numpy_back_ode = []
    self._t_numpy_back2_ode = []
    self._t_numpy_dp_back_ode = []
    return result

  def backprop(
      self,
      y0,
      t0_to_t1,
      *,
      odeint=scipy_odeint,
      odeint_args=(),
      odeint_kwargs=(),
      symmetrize_hessian_rtol_atol=None,
      want_order=2,
      use_reconstructed_y0=False):
    """Computes the loss-value, and optionally its gradient and hessian.

    Args:
      y0: array-like real vector, the starting position.
      t0_to_t1: array-like real vector, initial and final time.
      odeint: Callable to use for ODE-integration.
        Must have same calling signature as the default,
        which is `scipy_odeint`.
      odeint_args: Extra arguments to provide to `odeint` for
        each ODE-integration.
      odeint_kwargs: Extra keyword arguments to provide to `odeint` for
        each ODE-integration, will get converted to dict().
      symmetrize_hessian_rtol_atol: Optional pair `(rtol, atol)`. If absent,
        the un-symmetrized hessian will be returned. If present,
        the discrepancy between the computed hessian and its transpose
        is checked against these relative/absolute tolerance thresholds,
        and the symmetrized hessian is returned.
      want_order: If 0, only the function's value will be computed.
        If 1, the gradient will also be computed and returned.
        If 2, the hessian will be added.
      use_reconstructed_y0: Whether backpropagation-of-backpropagation
        should use the numerically-noisy reconstructed starting position y0
        from the first backpropagation. This parameter only exists to
        gauge the impact of this improvement vs. what autogenerated
        code that does not know about this optimization would do.

    Returns:
      A 4-tuple `(y1, val_loss, opt_grad, opt_hessian)` of numerical data,
      where `y1` is the ODE-integration final-state vector as a numpy.ndarray,
      `val_loss` is the corresponding value of the loss-function,
      `opt_grad` is `None` or the gradient as a numpy.ndarray,
      depending on `want_order`, and `opt_hessian` is `None` or the hessian
      as a numpy.ndarray, depending on `want_order`.

    Raises:
      ValueError, if symmetrization was asked for and the Hessian's degree
      of asymmetry violates thresholds.
    """
    if not 0 <= want_order <= 2:
      raise ValueError(f'Invalid {want_order=}')
    y0 = numpy.asarray(y0, dtype=self._tf_dtype.as_numpy_dtype())
    if y0.size != self._dim_y:
      raise ValueError(
          f'Expected y0 to have shape [{self._dim_y}], got: {list(y0.shape)}')
    t0_to_t1 = numpy.asarray(t0_to_t1)
    odeint_kwargs = dict(odeint_kwargs)
    dim = y0.size
    dim_zeros = numpy.zeros_like(y0)
    #
    # Forward-propagate ODE.
    #
    y1 = odeint(self.numpy_dy_dt, t0_to_t1, y0, *odeint_args, **odeint_kwargs)
    tc_y0 = tf.constant(y0, dtype=self._tf_dtype)
    tc_y1 = tf.constant(y1, dtype=self._tf_dtype)
    T_val = self._tf_L_y0y1(tc_y0, tc_y1).numpy()
    if want_order == 0:
      # We only want the function value.
      return y1, T_val, None, None
    #
    # Backprop ODE to get d {loss T} / d {y0}.
    #
    tc_y01 = tf.concat([tc_y0, tc_y1], axis=0)
    # Let TensorFlow work out the gradient and hessian w.r.t. inputs
    # of the loss-function, or take cached callables if this was already
    # done. Note that if we ask for a hessian and an earlier calculation
    # done on the same instance compiled the gradient-function only,
    # we redo that compilation. This little waste of effort is generally benign.
    opt_tf_dL_dys_and_d2L_dys_2 = self._opt_tf_dL_dys_and_d2L_dys_2
    if want_order >= 2 and opt_tf_dL_dys_and_d2L_dys_2[1] is None:
      opt_tf_dL_dys_and_d2L_dys_2[:] = tf_grad_hessian(self._tf_L_y01,
                                                       want_hessian=True)
    elif want_order >= 1 and opt_tf_dL_dys_and_d2L_dys_2[0] is None:
      opt_tf_dL_dys_and_d2L_dys_2[:] = tf_grad_hessian(self._tf_L_y01,
                                                       want_hessian=False)
    # pylint:disable=not-callable
    # We do know that these actually are callable now.
    opt_tf_dL_dys, opt_tf_d2L_dys_2 = opt_tf_dL_dys_and_d2L_dys_2
    sL_y01 = opt_tf_dL_dys(tc_y01).numpy()
    # From here on, variable naming and also tags `F{n}` align
    # with the text of the paper.
    s_T_y_start, s_T_y_final = sL_y01.reshape(2, -1)  # F3, F4
    obp1_start = numpy.concatenate([y1, s_T_y_final], axis=0)  # F5
    # The full vector for step F7 in the paper.
    # Retaining this allows us to do experiments that illustrate
    # the impact on result-quality of starting the 2nd
    # ODE-backpropagation from an unnecessarily noisy starting point.
    s_T_y_start_via_y_final_full = odeint(  # F6
        self._numpy_back_ode,
        t0_to_t1[::-1],  # Reversed!
        obp1_start,
        *odeint_args, **odeint_kwargs)
    s_T_y_start_via_y_final = s_T_y_start_via_y_final_full[dim:]
    s_T_y_start_total = s_T_y_start_via_y_final + s_T_y_start
    if want_order == 1:
      # We only want the function value and sensitivity w.r.t. y0.
      return y1, T_val, s_T_y_start_total, None
    #
    # Start of the actual backpropagation of the Hessian.
    #
    # The paper discusses computing an individual
    # row of the hessian. Here, we then have to assemble these rows
    # into a matrix.
    #
    result_hessian = numpy.zeros([dim, dim], dtype=y0.dtype)
    d2L_dys_2 = opt_tf_d2L_dys_2(tc_y01).numpy()
    # Blocks of the loss-hessian w.r.t. (y_start, y_final).
    T00 = d2L_dys_2[:dim, :dim]
    T01 = d2L_dys_2[:dim, dim:]
    T11 = d2L_dys_2[dim:, dim:]
    #
    if use_reconstructed_y0:
      y0_for_back2 = s_T_y_start_via_y_final_full[:dim]
    else:
      y0_for_back2 = y0
    for row_j in range(dim):
      onehot_j = numpy.arange(dim) == row_j
      zj_obp1_start = odeint(
          self._numpy_back2_ode,
          t0_to_t1,  # Reversed-reversed
          numpy.concatenate(
              [y0_for_back2,
               s_T_y_start_via_y_final,
               dim_zeros,
               onehot_j],
              axis=0),
          *odeint_args, **odeint_kwargs)[2*dim:]
      zj_s_T_y_final = zj_obp1_start[dim:]
      zj_y_final = (
          zj_obp1_start[:dim] +
          # Paper:
          ## # from F4
          ## &es(T11(pos0[:]=y_start[:], pos1[:]=y_final[:]) @ a, b;
          ##     z_s_T_y_final[:] @ b -> a) +
          # Python variant:
          T11.dot(zj_s_T_y_final) +
          # Paper:
          ## # from F3
          ## &es(T01(pos0[:]=y_start[:], pos1[:]=y_final[:]) @ a, b;
          ##     z_s_T_y_start[:] @ b -> a))
          # Python variant, using `zj_s_T_y_start = onehot_j`:
          T01[row_j, :])
      result_hessian[row_j, :] = (
          # Paper:
          ## # from F3
          ## &es(T00(pos0[:]=y_start[:], pos1[:]=y_final[:]) @ a, b;
          ##     z_s_T_y_start @ a -> b) +
          # Python variant, using `zj_s_T_y_start = onehot_j`:
          T00[row_j, :] +
          # Paper:
          ## # from F4
          ## &es(T01(pos0[:]=y_start[:], pos1[:]=y_final[:]) @ a, b;
          ##     z_s_T_y_start @ a -> b) +
          # Python variant:
          T01.dot(zj_s_T_y_final) +
          # Paper: ODE(...)
          odeint(
              self._numpy_back_ode,
              t0_to_t1[::-1],  # Reversed!
              numpy.concatenate([y1, zj_y_final], axis=0),
              *odeint_args, **odeint_kwargs)[dim:])
    opt_symmetrized_hessian = _check_and_symmetrize(
        result_hessian, symmetrize_hessian_rtol_atol)
    if opt_symmetrized_hessian is None:
      raise ValueError('Hessian violates symmetry expectations '
                       '(likely due to numerical noise).')
    return (y1, T_val,
            s_T_y_start_total,
            opt_symmetrized_hessian)

  def dp_backprop(self,
                  y0,
                  t0_to_t1,
                  *,
                  odeint=scipy_odeint,
                  odeint_args=(),
                  odeint_kwargs=(),
                  include_hessian_d2ydot_dy2_term=True):
    """ODE-backpropagates a hessian via 'differential programming'.

    Args:
      y0: array-like real vector, the starting position.
      t0_to_t1: array-like real vector, initial and final time.
      odeint: Callable to use for ODE-integration.
        Must have same calling signature as the default,
        which is `scipy_odeint`.
      odeint_args: Extra arguments to provide to `odeint` for
        each ODE-integration.
      odeint_kwargs: Extra keyword arguments to provide to `odeint` for
        each ODE-integration, will get converted to dict().
      include_hessian_d2ydot_dy2_term: whether the s_i F_i,kl term should
        be included in the calculation. When backpropagating
        a hessian around a critical point, the F_i,kl factor
        multiplies a zero, and so we can skip this computation.

    Returns:
      A 4-tuple `(y1, val_loss, grad, hessian)` of numerical data,
      where `y1` is the ODE-integration final-state vector as a numpy.ndarray,
      `val_loss` is the corresponding value of the loss-function,
      `grad` is the gradient as a numpy.ndarray, and
      `opt_hessian` is the hessian as a numpy.ndarray.

    Raises:
      NotImplementedError, if the loss has an actual dependency on the
      initial-state. The version of this code accompanying the publication
      illustrates the value of having a formalism to keep track of
      complicated dependencies via the generic `backprop` method, but
      tries to keep the corresponding complication out of the discussion of
      dynamic 'differential programming'.
    """
    if self._loss_has_direct_y0_dependency:
      raise NotImplementedError('Loss has a dependency on initial-state.')
    t0_to_t1 = numpy.asarray(t0_to_t1)
    odeint_kwargs = dict(odeint_kwargs)
    dim = self._dim_y
    y1 = odeint(self.numpy_dy_dt, t0_to_t1, y0, *odeint_args, **odeint_kwargs)
    tc_y01 = tf.concat([tf.constant(y0, dtype=self._tf_dtype),
                        tf.constant(y1, dtype=self._tf_dtype)], axis=0)
    loss = self._tf_L_y01(tc_y01).numpy()
    opt_tf_dL_dys_and_d2L_dys_2 = self._opt_tf_dL_dys_and_d2L_dys_2
    if opt_tf_dL_dys_and_d2L_dys_2[1] is None:
      opt_tf_dL_dys_and_d2L_dys_2[:] = tf_grad_hessian(self._tf_L_y01,
                                                       want_hessian=True)
    # pylint:disable=not-callable
    # We do know that these actually are callable now.
    opt_tf_dL_dys, opt_tf_d2L_dys_2 = opt_tf_dL_dys_and_d2L_dys_2
    d2L_dys_2 = opt_tf_d2L_dys_2(tc_y01).numpy()
    # Gradient and hessian of the loss w.r.t. the final position only.
    T1 = opt_tf_dL_dys(tc_y01).numpy()[dim:]
    T11 = d2L_dys_2[dim:, dim:]
    # Backward-integrate the extended state [y; gradient; hessian (flat)].
    y0_ext_re = odeint(
        self._numpy_dp_back_ode_with_sF_term if include_hessian_d2ydot_dy2_term
        else self._numpy_dp_back_ode_without_sF_term,
        t0_to_t1[::-1],
        numpy.concatenate([y1, T1, T11.ravel()], axis=0),
        *odeint_args, **odeint_kwargs)
    return (y1,
            loss,
            y0_ext_re[dim: 2 * dim],
            y0_ext_re[2 * dim:].reshape(dim, dim))
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
9bbbedff01056b88fabd955be7b0e926ef8759f1 | b3b68efa404a7034f0d5a1c10b281ef721f8321a | /Scripts/simulation/event_testing/resolver.py | e7c20b72ec4202bb888c21a913d2018159b881bc | [
"Apache-2.0"
] | permissive | velocist/TS4CheatsInfo | 62195f3333076c148b2a59f926c9fb5202f1c6fb | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | refs/heads/main | 2023-03-08T01:57:39.879485 | 2021-02-13T21:27:38 | 2021-02-13T21:27:38 | 337,543,310 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 81,129 | py | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\event_testing\resolver.py
# Compiled at: 2020-07-28 02:03:24
# Size of source mod 2**32: 61938 bytes
import itertools, random, sys, time
from event_testing.results import TestResult
from interactions import ParticipantType, ParticipantTypeSituationSims
from performance.test_profiling import TestProfileRecord, ProfileMetrics
from sims4.utils import classproperty
from singletons import DEFAULT
import caches, event_testing.test_constants, services, sims4.log, sims4.reload
logger = sims4.log.Logger('Resolver')
with sims4.reload.protected(globals()):
    # Sentinel participant-type that resolves to the resolver instance
    # itself (see Resolver._get_participants_base).
    RESOLVER_PARTICIPANT = 'resolver'
    # Global store for test-profiling records; `None` disables profiling
    # (see Resolver.__call__ / Resolver._record_test_profile_metrics).
    test_profile = None
# Participant types that resolve to a single value rather than a tuple of
# values (see Resolver.get_resolved_args and Resolver.__call__).
SINGLE_TYPES = frozenset((ParticipantType.Affordance,
 ParticipantType.InteractionContext,
 event_testing.test_constants.FROM_DATA_OBJECT,
 event_testing.test_constants.OBJECTIVE_GUID64,
 event_testing.test_constants.FROM_EVENT_DATA))
class Resolver:
    """Base class for resolving ParticipantType values into concrete
    participants (sims, objects, households, ...) for event tests.

    Subclasses implement get_participants(); the base class provides
    argument resolution, test invocation (with optional profiling), and
    resolution of globally-shared participant types.
    """

    def __init__(self, skip_safe_tests=False, search_for_tooltip=False):
        self._skip_safe_tests = skip_safe_tests
        self._search_for_tooltip = search_for_tooltip

    @property
    def skip_safe_tests(self):
        return self._skip_safe_tests

    @property
    def search_for_tooltip(self):
        return self._search_for_tooltip

    @property
    def interaction(self):
        # Base resolvers have no associated interaction; subclasses that
        # do (e.g. InteractionResolver) override this.
        pass

    def get_resolved_args(self, expected):
        """Resolves a {keyword-name: participant_type} mapping into concrete
        values; SINGLE_TYPES entries resolve to a single value, others to a
        tuple of participants."""
        if expected is None:
            raise ValueError('Expected arguments from test instance get_expected_args are undefined: {}'.format(expected))
        ret = {}
        for event_key, participant_type in expected.items():
            if participant_type in SINGLE_TYPES:
                value = self.get_participant(participant_type, event_key=event_key)
            else:
                value = self.get_participants(participant_type, event_key=event_key)
            ret[event_key] = value
        return ret

    @property
    def profile_metric_key(self):
        # Key under which profiling metrics are grouped; subclasses may
        # override to provide something more specific than 'Key'.
        pass

    def __call__(self, test):
        """Resolves the test's expected arguments and invokes the test.

        When the global `test_profile` store is active, also records how
        long argument-resolution and the test call itself took.
        """
        global test_profile
        if test.expected_kwargs is None:
            # Cache the expected-argument mapping on the test itself.
            expected_args = test.get_expected_args()
            if expected_args:
                test.expected_kwargs = tuple(expected_args.items())
            else:
                test.expected_kwargs = ()
        if test_profile is not None:
            start_time = time.perf_counter()
        resolved_args = {}
        for event_key, participant_type in test.expected_kwargs:
            if participant_type in SINGLE_TYPES:
                # Single-valued participant: unwrap the tuple (or None).
                value = self.get_participants(participant_type, event_key=event_key)
                resolved_args[event_key] = value[0] if value else None
            else:
                resolved_args[event_key] = self.get_participants(participant_type, event_key=event_key)
        if test_profile is not None:
            resolve_end_time = time.perf_counter()
        result = test(**resolved_args)
        if test_profile is not None:
            test_end_time = time.perf_counter()
            resolve_time = resolve_end_time - start_time
            test_time = test_end_time - resolve_end_time
            self._record_test_profile_metrics(test, resolve_time, test_time)
        return result

    def _record_test_profile_metrics(self, test, resolve_time, test_time):
        """Accumulates resolve/test timings into the global `test_profile`
        store, keyed by test name, resolver class and profile_metric_key.

        Any exception while recording disables profiling entirely by
        resetting `test_profile` to None.
        """
        global test_profile
        try:
            # Imported lazily here; importing at module scope would create
            # an import cycle with event_testing.tests.
            from event_testing.tests import TestSetInstance
            from event_testing.test_based_score_threshold import TestBasedScoreThresholdTest
            is_test_set = isinstance(test, type) and issubclass(test, TestSetInstance)
            test_name = '[TS]{}'.format(test.__name__) if is_test_set else test.__class__.__name__
            if isinstance(test, TestBasedScoreThresholdTest):
                is_test_set = True
            record = test_profile.get(test_name)
            if record is None:
                record = TestProfileRecord(is_test_set=is_test_set)
                test_profile[test_name] = record
            record.metrics.update(resolve_time, test_time)
            resolver_name = type(self).__name__
            resolver_dict = record.resolvers.get(resolver_name)
            if resolver_dict is None:
                resolver_dict = dict()
                record.resolvers[resolver_name] = resolver_dict
            key_name = self.profile_metric_key
            if key_name is None:
                key_name = 'Key'
            metrics = resolver_dict.get(key_name)
            if metrics is None:
                metrics = ProfileMetrics(is_test_set=is_test_set)
                resolver_dict[key_name] = metrics
            metrics.update(resolve_time, test_time)
        except Exception as e:
            try:
                logger.exception('Resetting test_profile due to an exception {}.', e, owner='manus')
                test_profile = None
            finally:
                # Decompiler artifact of `except ... as e` cleanup semantics.
                e = None
                del e

    def get_participant(self, participant_type, **kwargs):
        """Returns the single participant of the given type, or None when
        there is none; raises ValueError when more than one resolves."""
        participants = (self.get_participants)(participant_type, **kwargs)
        if not participants:
            return
        if len(participants) > 1:
            raise ValueError('Too many participants returned for {}!'.format(participant_type))
        return next(iter(participants))

    def get_participants(self, participant_type, **kwargs):
        raise NotImplementedError('Attempting to use the Resolver base class, use sub-classes instead.')

    def _get_participants_base(self, participant_type, **kwargs):
        # RESOLVER_PARTICIPANT resolves to the resolver instance itself;
        # everything else is delegated to the shared, state-independent
        # resolution.
        if participant_type == RESOLVER_PARTICIPANT:
            return self
        return Resolver.get_particpants_shared(participant_type)

    def get_target_id(self, test, id_type=None):
        expected_args = test.get_expected_args()
        resolved_args = self.get_resolved_args(expected_args)
        resolved_args['id_type'] = id_type
        return (test.get_target_id)(**resolved_args)

    def get_posture_id(self, test):
        expected_args = test.get_expected_args()
        resolved_args = self.get_resolved_args(expected_args)
        return (test.get_posture_id)(**resolved_args)

    def get_tags(self, test):
        expected_args = test.get_expected_args()
        resolved_args = self.get_resolved_args(expected_args)
        return (test.get_tags)(**resolved_args)

    def get_localization_tokens(self, *args, **kwargs):
        # Base resolvers provide no localization tokens.
        return ()

    @staticmethod
    def get_particpants_shared(participant_type):
        """Resolves participant types that do not depend on any resolver
        state (lot, households, zone-wide sims, street/venue providers).

        Returns a tuple of participants, or falls through (returning None)
        for participant types not handled here. (NOTE: the misspelled name
        is the established public API; callers reference it as-is.)
        """
        if participant_type == ParticipantType.Lot:
            return (
             services.active_lot(),)
        if participant_type == ParticipantType.LotOwners:
            owning_household = services.owning_household_of_active_lot()
            if owning_household is not None:
                return tuple((sim_info for sim_info in owning_household.sim_info_gen()))
            return ()
        if participant_type == ParticipantType.LotOwnersOrRenters:
            owning_household = services.owning_household_of_active_lot()
            if owning_household is not None:
                return tuple((sim_info for sim_info in owning_household.sim_info_gen()))
            # No owning household: fall back to the travel group renting
            # the current zone, if any.
            current_zone = services.current_zone()
            travel_group = services.travel_group_manager().get_travel_group_by_zone_id(current_zone.id)
            if travel_group is not None:
                return tuple((sim_info for sim_info in travel_group.sim_info_gen()))
            return ()
        if participant_type == ParticipantType.LotOwnerSingleAndInstanced:
            owning_household = services.owning_household_of_active_lot()
            if owning_household is not None:
                # First instanced owner wins.
                for sim_info in owning_household.sim_info_gen():
                    if sim_info.is_instanced():
                        return (
                         sim_info,)
            return ()
        if participant_type == ParticipantType.ActiveHousehold:
            active_household = services.active_household()
            if active_household is not None:
                return tuple(active_household.sim_info_gen())
            return ()
        if participant_type == ParticipantType.AllInstancedActiveHouseholdSims:
            active_household = services.active_household()
            if active_household is not None:
                return tuple(active_household.instanced_sims_gen())
            return ()
        if participant_type == ParticipantType.CareerEventSim:
            career = services.get_career_service().get_career_in_career_event()
            if career is not None:
                # Prefer the instanced sim; fall back to the sim-info.
                return (
                 career.sim_info.get_sim_instance() or career.sim_info,)
            return ()
        if participant_type == ParticipantType.AllInstancedSims:
            return tuple(services.sim_info_manager().instanced_sims_gen())
        if participant_type == ParticipantType.Street:
            street = services.current_zone().street
            street_service = services.street_service()
            if street_service is None:
                return ()
            street_civic_policy_provider = street_service.get_provider(street)
            if street_civic_policy_provider is None:
                return ()
            return (
             street_civic_policy_provider,)
        if participant_type == ParticipantType.VenuePolicyProvider:
            venue_service = services.venue_service()
            if venue_service.source_venue is None or venue_service.source_venue.civic_policy_provider is None:
                return ()
            return (
             venue_service.source_venue.civic_policy_provider,)
        if participant_type == ParticipantType.CurrentRegion:
            region_inst = services.current_region_instance()
            if region_inst is None:
                return ()
            return (
             region_inst,)
class GlobalResolver(Resolver):
    """Resolver with no sim/interaction context: only the globally-shared
    participant types (lot, households, street, venue, ...) resolve."""

    def get_participants(self, participant_type, **kwargs):
        shared = (self._get_participants_base)(participant_type, **kwargs)
        if shared is None:
            # FROM_EVENT_DATA legitimately has nothing to resolve here;
            # anything else reaching this point is an error worth logging.
            if participant_type != event_testing.test_constants.FROM_EVENT_DATA:
                logger.error('GlobalResolver unable to resolve {}', participant_type)
            return ()
        return shared
class AffordanceResolver(Resolver):
    """Resolver for tests run against a bare affordance plus acting sim,
    before any interaction instance exists."""

    def __init__(self, affordance, actor):
        super().__init__(skip_safe_tests=False, search_for_tooltip=False)
        self.affordance = affordance
        self.actor = actor

    def __repr__(self):
        return 'AffordanceResolver: affordance: {}, actor {}'.format(self.affordance, self.actor)

    def get_participants(self, participant_type, **kwargs):
        constants = event_testing.test_constants
        # Data-object/objective/event-data participants never resolve on a
        # bare affordance.
        if participant_type in (constants.FROM_DATA_OBJECT,
                                constants.OBJECTIVE_GUID64,
                                constants.FROM_EVENT_DATA):
            return ()
        if participant_type in (constants.SIM_INSTANCE, ParticipantType.Actor):
            if self.actor is None:
                return ()
            sim_info = _to_sim_info(self.actor)
            return (sim_info,) if sim_info else ()
        if participant_type == 0:
            logger.error('Calling get_participants with no flags on {}.', self)
            return ()
        if participant_type == ParticipantType.Affordance:
            return (self.affordance,)
        if participant_type == ParticipantType.AllRelationships:
            # Sentinel: tests interpret this as "all relationships".
            return (ParticipantType.AllRelationships,)
        return (self._get_participants_base)(participant_type, **kwargs)

    def __call__(self, test):
        # Tests that don't support early testing trivially pass here.
        if not test.supports_early_testing():
            return True
        cached_participants = test.participants_for_early_testing
        if cached_participants is None:
            cached_participants = tuple(test.get_expected_args().values())
            test.participants_for_early_testing = cached_participants
        # An unresolvable (None) participant means the test cannot be
        # meaningfully evaluated yet, so it passes early.
        if any(self.get_participants(participant) is None
               for participant in cached_participants):
            return TestResult.TRUE
        return super().__call__(test)
class InteractionResolver(Resolver):
def __init__(self, affordance, interaction, target=DEFAULT, context=DEFAULT, custom_sim=None, super_interaction=None, skip_safe_tests=False, search_for_tooltip=False, **interaction_parameters):
super().__init__(skip_safe_tests, search_for_tooltip)
self.affordance = affordance
self._interaction = interaction
self.target = interaction.target if target is DEFAULT else target
self.context = interaction.context if context is DEFAULT else context
self.custom_sim = custom_sim
self.super_interaction = super_interaction
self.interaction_parameters = interaction_parameters
def __repr__(self):
return 'InteractionResolver: affordance: {}, interaction:{}, target: {}, context: {}, si: {}'.format(self.affordance, self.interaction, self.target, self.context, self.super_interaction)
@property
def interaction(self):
return self._interaction
@property
def profile_metric_key(self):
if self.affordance is None:
return 'NoAffordance'
return self.affordance.__name__
def get_participants--- This code section failed: ---
L. 433 0 LOAD_FAST 'participant_type'
2 LOAD_GLOBAL event_testing
4 LOAD_ATTR test_constants
6 LOAD_ATTR SIM_INSTANCE
8 COMPARE_OP ==
10 POP_JUMP_IF_FALSE 18 'to 18'
L. 434 12 LOAD_GLOBAL ParticipantType
14 LOAD_ATTR Actor
16 STORE_FAST 'participant_type'
18_0 COME_FROM 10 '10'
L. 437 18 LOAD_FAST 'participant_type'
20 LOAD_GLOBAL ParticipantType
22 LOAD_ATTR Actor
24 COMPARE_OP ==
26 POP_JUMP_IF_FALSE 74 'to 74'
L. 438 28 LOAD_FAST 'self'
30 LOAD_ATTR context
32 LOAD_ATTR sim
34 STORE_FAST 'sim'
L. 443 36 LOAD_FAST 'sim'
38 LOAD_CONST None
40 COMPARE_OP is-not
42 POP_JUMP_IF_FALSE 70 'to 70'
L. 444 44 LOAD_GLOBAL _to_sim_info
46 LOAD_FAST 'sim'
48 CALL_FUNCTION_1 1 '1 positional argument'
50 STORE_FAST 'result'
L. 445 52 LOAD_FAST 'result'
54 LOAD_CONST None
56 COMPARE_OP is-not
58 POP_JUMP_IF_FALSE 66 'to 66'
L. 446 60 LOAD_FAST 'result'
62 BUILD_TUPLE_1 1
64 RETURN_VALUE
66_0 COME_FROM 58 '58'
L. 447 66 LOAD_CONST ()
68 RETURN_VALUE
70_0 COME_FROM 42 '42'
70_72 JUMP_FORWARD 672 'to 672'
74_0 COME_FROM 26 '26'
L. 448 74 LOAD_FAST 'participant_type'
76 LOAD_GLOBAL ParticipantType
78 LOAD_ATTR Object
80 COMPARE_OP ==
82 POP_JUMP_IF_FALSE 122 'to 122'
L. 449 84 LOAD_FAST 'self'
86 LOAD_ATTR target
88 LOAD_CONST None
90 COMPARE_OP is-not
92 POP_JUMP_IF_FALSE 118 'to 118'
L. 450 94 LOAD_GLOBAL _to_sim_info
96 LOAD_FAST 'self'
98 LOAD_ATTR target
100 CALL_FUNCTION_1 1 '1 positional argument'
102 STORE_FAST 'result'
L. 451 104 LOAD_FAST 'result'
106 LOAD_CONST None
108 COMPARE_OP is-not
110 POP_JUMP_IF_FALSE 118 'to 118'
L. 452 112 LOAD_FAST 'result'
114 BUILD_TUPLE_1 1
116 RETURN_VALUE
118_0 COME_FROM 110 '110'
118_1 COME_FROM 92 '92'
L. 453 118 LOAD_CONST ()
120 RETURN_VALUE
122_0 COME_FROM 82 '82'
L. 454 122 LOAD_FAST 'participant_type'
124 LOAD_GLOBAL ParticipantType
126 LOAD_ATTR ObjectIngredients
128 COMPARE_OP ==
130 POP_JUMP_IF_FALSE 184 'to 184'
L. 455 132 LOAD_FAST 'self'
134 LOAD_ATTR target
136 LOAD_CONST None
138 COMPARE_OP is-not
140 POP_JUMP_IF_FALSE 180 'to 180'
L. 456 142 LOAD_FAST 'self'
144 LOAD_ATTR target
146 LOAD_ATTR crafting_component
148 POP_JUMP_IF_FALSE 180 'to 180'
L. 457 150 LOAD_FAST 'self'
152 LOAD_ATTR target
154 LOAD_METHOD get_crafting_process
156 CALL_METHOD_0 0 '0 positional arguments'
158 STORE_FAST 'target_crafting_process'
L. 458 160 LOAD_FAST 'target_crafting_process'
162 LOAD_CONST None
164 COMPARE_OP is-not
166 POP_JUMP_IF_FALSE 180 'to 180'
L. 459 168 LOAD_GLOBAL tuple
170 LOAD_FAST 'target_crafting_process'
172 LOAD_METHOD get_ingredients_object_definitions
174 CALL_METHOD_0 0 '0 positional arguments'
176 CALL_FUNCTION_1 1 '1 positional argument'
178 RETURN_VALUE
180_0 COME_FROM 166 '166'
180_1 COME_FROM 148 '148'
180_2 COME_FROM 140 '140'
L. 460 180 LOAD_CONST ()
182 RETURN_VALUE
184_0 COME_FROM 130 '130'
L. 461 184 LOAD_FAST 'participant_type'
186 LOAD_GLOBAL ParticipantType
188 LOAD_ATTR TargetSim
190 COMPARE_OP ==
192 POP_JUMP_IF_FALSE 240 'to 240'
L. 462 194 LOAD_FAST 'self'
196 LOAD_ATTR target
198 LOAD_CONST None
200 COMPARE_OP is-not
202 POP_JUMP_IF_FALSE 236 'to 236'
204 LOAD_FAST 'self'
206 LOAD_ATTR target
208 LOAD_ATTR is_sim
210 POP_JUMP_IF_FALSE 236 'to 236'
L. 463 212 LOAD_GLOBAL _to_sim_info
214 LOAD_FAST 'self'
216 LOAD_ATTR target
218 CALL_FUNCTION_1 1 '1 positional argument'
220 STORE_FAST 'result'
L. 464 222 LOAD_FAST 'result'
224 LOAD_CONST None
226 COMPARE_OP is-not
228 POP_JUMP_IF_FALSE 236 'to 236'
L. 465 230 LOAD_FAST 'result'
232 BUILD_TUPLE_1 1
234 RETURN_VALUE
236_0 COME_FROM 228 '228'
236_1 COME_FROM 210 '210'
236_2 COME_FROM 202 '202'
L. 466 236 LOAD_CONST ()
238 RETURN_VALUE
240_0 COME_FROM 192 '192'
L. 467 240 LOAD_FAST 'participant_type'
242 LOAD_GLOBAL ParticipantType
244 LOAD_ATTR ActorPostureTarget
246 COMPARE_OP ==
248_250 POP_JUMP_IF_FALSE 308 'to 308'
L. 468 252 LOAD_FAST 'self'
254 LOAD_ATTR interaction
256 LOAD_CONST None
258 COMPARE_OP is-not
260_262 POP_JUMP_IF_FALSE 278 'to 278'
L. 469 264 LOAD_FAST 'self'
266 LOAD_ATTR interaction
268 LOAD_ATTR get_participants
270 LOAD_FAST 'participant_type'
272 LOAD_CONST ('participant_type',)
274 CALL_FUNCTION_KW_1 1 '1 total positional and keyword args'
276 RETURN_VALUE
278_0 COME_FROM 260 '260'
L. 470 278 LOAD_FAST 'self'
280 LOAD_ATTR super_interaction
282 LOAD_CONST None
284 COMPARE_OP is-not
286_288 POP_JUMP_IF_FALSE 672 'to 672'
L. 471 290 LOAD_FAST 'self'
292 LOAD_ATTR super_interaction
294 LOAD_ATTR get_participants
296 LOAD_FAST 'participant_type'
298 LOAD_CONST ('participant_type',)
300 CALL_FUNCTION_KW_1 1 '1 total positional and keyword args'
302 RETURN_VALUE
304_306 JUMP_FORWARD 672 'to 672'
308_0 COME_FROM 248 '248'
L. 472 308 LOAD_FAST 'participant_type'
310 LOAD_GLOBAL ParticipantType
312 LOAD_ATTR AssociatedClub
314 COMPARE_OP ==
316_318 POP_JUMP_IF_TRUE 344 'to 344'
L. 473 320 LOAD_FAST 'participant_type'
322 LOAD_GLOBAL ParticipantType
324 LOAD_ATTR AssociatedClubLeader
326 COMPARE_OP ==
328_330 POP_JUMP_IF_TRUE 344 'to 344'
L. 474 332 LOAD_FAST 'participant_type'
334 LOAD_GLOBAL ParticipantType
336 LOAD_ATTR AssociatedClubMembers
338 COMPARE_OP ==
340_342 POP_JUMP_IF_FALSE 452 'to 452'
344_0 COME_FROM 328 '328'
344_1 COME_FROM 316 '316'
L. 475 344 LOAD_FAST 'self'
346 LOAD_ATTR interaction_parameters
348 LOAD_METHOD get
350 LOAD_STR 'associated_club'
352 CALL_METHOD_1 1 '1 positional argument'
354 STORE_FAST 'associated_club'
L. 476 356 LOAD_FAST 'self'
358 LOAD_ATTR interaction
360 LOAD_CONST None
362 COMPARE_OP is
364_366 POP_JUMP_IF_FALSE 380 'to 380'
368 LOAD_FAST 'self'
370 LOAD_ATTR super_interaction
372 LOAD_CONST None
374 COMPARE_OP is
376_378 POP_JUMP_IF_TRUE 390 'to 390'
380_0 COME_FROM 364 '364'
L. 477 380 LOAD_FAST 'associated_club'
382 LOAD_CONST None
384 COMPARE_OP is-not
386_388 POP_JUMP_IF_FALSE 672 'to 672'
390_0 COME_FROM 376 '376'
L. 478 390 LOAD_FAST 'participant_type'
392 LOAD_GLOBAL ParticipantType
394 LOAD_ATTR AssociatedClubLeader
396 COMPARE_OP ==
398_400 POP_JUMP_IF_FALSE 410 'to 410'
L. 479 402 LOAD_FAST 'associated_club'
404 LOAD_ATTR leader
406 BUILD_TUPLE_1 1
408 RETURN_VALUE
410_0 COME_FROM 398 '398'
L. 480 410 LOAD_FAST 'participant_type'
412 LOAD_GLOBAL ParticipantType
414 LOAD_ATTR AssociatedClub
416 COMPARE_OP ==
418_420 POP_JUMP_IF_FALSE 428 'to 428'
L. 481 422 LOAD_FAST 'associated_club'
424 BUILD_TUPLE_1 1
426 RETURN_VALUE
428_0 COME_FROM 418 '418'
L. 482 428 LOAD_FAST 'participant_type'
430 LOAD_GLOBAL ParticipantType
432 LOAD_ATTR AssociatedClubMembers
434 COMPARE_OP ==
436_438 POP_JUMP_IF_FALSE 672 'to 672'
L. 483 440 LOAD_GLOBAL tuple
442 LOAD_FAST 'associated_club'
444 LOAD_ATTR members
446 CALL_FUNCTION_1 1 '1 positional argument'
448 RETURN_VALUE
450 JUMP_FORWARD 672 'to 672'
452_0 COME_FROM 340 '340'
L. 484 452 LOAD_FAST 'participant_type'
454 LOAD_GLOBAL ParticipantType
456 LOAD_ATTR ObjectCrafter
458 COMPARE_OP ==
460_462 POP_JUMP_IF_FALSE 546 'to 546'
L. 485 464 LOAD_FAST 'self'
466 LOAD_ATTR target
468 LOAD_CONST None
470 COMPARE_OP is
472_474 POP_JUMP_IF_TRUE 490 'to 490'
476 LOAD_FAST 'self'
478 LOAD_ATTR target
480 LOAD_ATTR crafting_component
482 LOAD_CONST None
484 COMPARE_OP is
486_488 POP_JUMP_IF_FALSE 494 'to 494'
490_0 COME_FROM 472 '472'
L. 486 490 LOAD_CONST ()
492 RETURN_VALUE
494_0 COME_FROM 486 '486'
L. 487 494 LOAD_FAST 'self'
496 LOAD_ATTR target
498 LOAD_METHOD get_crafting_process
500 CALL_METHOD_0 0 '0 positional arguments'
502 STORE_FAST 'crafting_process'
L. 488 504 LOAD_FAST 'crafting_process'
506 LOAD_CONST None
508 COMPARE_OP is
510_512 POP_JUMP_IF_FALSE 518 'to 518'
L. 489 514 LOAD_CONST ()
516 RETURN_VALUE
518_0 COME_FROM 510 '510'
L. 490 518 LOAD_FAST 'crafting_process'
520 LOAD_METHOD get_crafter_sim_info
522 CALL_METHOD_0 0 '0 positional arguments'
524 STORE_FAST 'crafter_sim_info'
L. 491 526 LOAD_FAST 'crafter_sim_info'
528 LOAD_CONST None
530 COMPARE_OP is
532_534 POP_JUMP_IF_FALSE 540 'to 540'
L. 492 536 LOAD_CONST ()
538 RETURN_VALUE
540_0 COME_FROM 532 '532'
L. 493 540 LOAD_FAST 'crafter_sim_info'
542 BUILD_TUPLE_1 1
544 RETURN_VALUE
546_0 COME_FROM 460 '460'
L. 494 546 LOAD_FAST 'participant_type'
548 LOAD_GLOBAL ParticipantTypeSituationSims
550 COMPARE_OP in
552_554 POP_JUMP_IF_FALSE 672 'to 672'
L. 495 556 LOAD_CONST None
558 STORE_FAST 'provider_source'
L. 496 560 LOAD_FAST 'self'
562 LOAD_ATTR _interaction
564 LOAD_CONST None
566 COMPARE_OP is-not
568_570 POP_JUMP_IF_FALSE 580 'to 580'
L. 497 572 LOAD_FAST 'self'
574 LOAD_ATTR _interaction
576 STORE_FAST 'provider_source'
578 JUMP_FORWARD 618 'to 618'
580_0 COME_FROM 568 '568'
L. 498 580 LOAD_FAST 'self'
582 LOAD_ATTR super_interaction
584 LOAD_CONST None
586 COMPARE_OP is-not
588_590 POP_JUMP_IF_FALSE 600 'to 600'
L. 499 592 LOAD_FAST 'self'
594 LOAD_ATTR super_interaction
596 STORE_FAST 'provider_source'
598 JUMP_FORWARD 618 'to 618'
600_0 COME_FROM 588 '588'
L. 500 600 LOAD_FAST 'self'
602 LOAD_ATTR affordance
604 LOAD_CONST None
606 COMPARE_OP is-not
608_610 POP_JUMP_IF_FALSE 618 'to 618'
L. 501 612 LOAD_FAST 'self'
614 LOAD_ATTR affordance
616 STORE_FAST 'provider_source'
618_0 COME_FROM 608 '608'
618_1 COME_FROM 598 '598'
618_2 COME_FROM 578 '578'
L. 503 618 LOAD_FAST 'provider_source'
620 LOAD_CONST None
622 COMPARE_OP is-not
624_626 POP_JUMP_IF_FALSE 672 'to 672'
L. 504 628 LOAD_FAST 'provider_source'
630 LOAD_METHOD get_situation_participant_provider
632 CALL_METHOD_0 0 '0 positional arguments'
634 STORE_FAST 'provider'
L. 505 636 LOAD_FAST 'provider'
638 LOAD_CONST None
640 COMPARE_OP is-not
642_644 POP_JUMP_IF_FALSE 658 'to 658'
L. 506 646 LOAD_FAST 'provider'
648 LOAD_METHOD get_participants
650 LOAD_FAST 'participant_type'
652 LOAD_FAST 'self'
654 CALL_METHOD_2 2 '2 positional arguments'
656 RETURN_VALUE
658_0 COME_FROM 642 '642'
L. 508 658 LOAD_GLOBAL logger
660 LOAD_METHOD error
662 LOAD_STR "Requesting {} in {} that doesn't have a SituationSimParticipantProviderLiability"
664 LOAD_FAST 'participant_type'
666 LOAD_FAST 'provider_source'
668 CALL_METHOD_3 3 '3 positional arguments'
670 POP_TOP
672_0 COME_FROM 624 '624'
672_1 COME_FROM 552 '552'
672_2 COME_FROM 450 '450'
672_3 COME_FROM 436 '436'
672_4 COME_FROM 386 '386'
672_5 COME_FROM 304 '304'
672_6 COME_FROM 286 '286'
672_7 COME_FROM 70 '70'
L. 510 672 LOAD_FAST 'participant_type'
674 LOAD_CONST 0
676 COMPARE_OP ==
678_680 POP_JUMP_IF_FALSE 698 'to 698'
L. 511 682 LOAD_GLOBAL logger
684 LOAD_METHOD error
686 LOAD_STR 'Calling get_participants with no flags on {}.'
688 LOAD_FAST 'self'
690 CALL_METHOD_2 2 '2 positional arguments'
692 POP_TOP
L. 512 694 LOAD_CONST ()
696 RETURN_VALUE
698_0 COME_FROM 678 '678'
L. 513 698 LOAD_FAST 'self'
700 LOAD_ATTR _get_participants_base
702 LOAD_FAST 'participant_type'
704 BUILD_TUPLE_1 1
706 LOAD_FAST 'kwargs'
708 CALL_FUNCTION_EX_KW 1 'keyword and positional arguments'
710 STORE_FAST 'result'
L. 514 712 LOAD_FAST 'result'
714 LOAD_CONST None
716 COMPARE_OP is-not
718_720 POP_JUMP_IF_FALSE 726 'to 726'
L. 515 722 LOAD_FAST 'result'
724 RETURN_VALUE
726_0 COME_FROM 718 '718'
L. 520 726 LOAD_FAST 'participant_type'
728 LOAD_GLOBAL event_testing
730 LOAD_ATTR test_constants
732 LOAD_ATTR FROM_DATA_OBJECT
734 COMPARE_OP ==
736_738 POP_JUMP_IF_FALSE 744 'to 744'
L. 521 740 LOAD_CONST ()
742 RETURN_VALUE
744_0 COME_FROM 736 '736'
L. 522 744 LOAD_FAST 'participant_type'
746 LOAD_GLOBAL event_testing
748 LOAD_ATTR test_constants
750 LOAD_ATTR OBJECTIVE_GUID64
752 COMPARE_OP ==
754_756 POP_JUMP_IF_FALSE 762 'to 762'
L. 523 758 LOAD_CONST ()
760 RETURN_VALUE
762_0 COME_FROM 754 '754'
L. 524 762 LOAD_FAST 'participant_type'
764 LOAD_GLOBAL event_testing
766 LOAD_ATTR test_constants
768 LOAD_ATTR FROM_EVENT_DATA
770 COMPARE_OP ==
772_774 POP_JUMP_IF_FALSE 780 'to 780'
L. 525 776 LOAD_CONST ()
778 RETURN_VALUE
780_0 COME_FROM 772 '772'
L. 527 780 LOAD_FAST 'participant_type'
782 LOAD_GLOBAL ParticipantType
784 LOAD_ATTR Affordance
786 COMPARE_OP ==
788_790 POP_JUMP_IF_FALSE 800 'to 800'
L. 528 792 LOAD_FAST 'self'
794 LOAD_ATTR affordance
796 BUILD_TUPLE_1 1
798 RETURN_VALUE
800_0 COME_FROM 788 '788'
L. 529 800 LOAD_FAST 'participant_type'
802 LOAD_GLOBAL ParticipantType
804 LOAD_ATTR InteractionContext
806 COMPARE_OP ==
808_810 POP_JUMP_IF_FALSE 820 'to 820'
L. 530 812 LOAD_FAST 'self'
814 LOAD_ATTR context
816 BUILD_TUPLE_1 1
818 RETURN_VALUE
820_0 COME_FROM 808 '808'
L. 531 820 LOAD_FAST 'participant_type'
822 LOAD_GLOBAL ParticipantType
824 LOAD_ATTR CustomSim
826 COMPARE_OP ==
828_830 POP_JUMP_IF_FALSE 864 'to 864'
L. 532 832 LOAD_FAST 'self'
834 LOAD_ATTR custom_sim
836 LOAD_CONST None
838 COMPARE_OP is-not
840_842 POP_JUMP_IF_FALSE 854 'to 854'
L. 533 844 LOAD_FAST 'self'
846 LOAD_ATTR custom_sim
848 LOAD_ATTR sim_info
850 BUILD_TUPLE_1 1
852 RETURN_VALUE
854_0 COME_FROM 840 '840'
L. 535 854 LOAD_GLOBAL ValueError
856 LOAD_STR 'Trying to use CustomSim without passing a custom_sim in InteractionResolver.'
858 CALL_FUNCTION_1 1 '1 positional argument'
860 POP_TOP
862 JUMP_FORWARD 884 'to 884'
864_0 COME_FROM 828 '828'
L. 536 864 LOAD_FAST 'participant_type'
866 LOAD_GLOBAL ParticipantType
868 LOAD_ATTR AllRelationships
870 COMPARE_OP ==
872_874 POP_JUMP_IF_FALSE 884 'to 884'
L. 540 876 LOAD_GLOBAL ParticipantType
878 LOAD_ATTR AllRelationships
880 BUILD_TUPLE_1 1
882 RETURN_VALUE
884_0 COME_FROM 872 '872'
884_1 COME_FROM 862 '862'
L. 542 884 LOAD_FAST 'participant_type'
886 LOAD_GLOBAL ParticipantType
888 LOAD_ATTR PickedItemId
890 COMPARE_OP ==
892_894 POP_JUMP_IF_FALSE 922 'to 922'
L. 543 896 LOAD_FAST 'self'
898 LOAD_ATTR interaction_parameters
900 LOAD_METHOD get
902 LOAD_STR 'picked_item_ids'
904 CALL_METHOD_1 1 '1 positional argument'
906 STORE_FAST 'picked_item_ids'
L. 544 908 LOAD_FAST 'picked_item_ids'
910 LOAD_CONST None
912 COMPARE_OP is-not
914_916 POP_JUMP_IF_FALSE 922 'to 922'
L. 545 918 LOAD_FAST 'picked_item_ids'
920 RETURN_VALUE
922_0 COME_FROM 914 '914'
922_1 COME_FROM 892 '892'
L. 548 922 LOAD_FAST 'self'
924 LOAD_ATTR interaction
926 LOAD_CONST None
928 COMPARE_OP is-not
930_932 POP_JUMP_IF_FALSE 972 'to 972'
L. 549 934 LOAD_FAST 'self'
936 LOAD_ATTR interaction
938 LOAD_ATTR get_participants
940 BUILD_TUPLE_0 0
942 LOAD_FAST 'participant_type'
L. 550 944 LOAD_FAST 'self'
946 LOAD_ATTR context
948 LOAD_ATTR sim
950 LOAD_FAST 'self'
952 LOAD_ATTR target
L. 551 954 LOAD_CONST False
956 LOAD_CONST ('participant_type', 'sim', 'target', 'listener_filtering_enabled')
958 BUILD_CONST_KEY_MAP_4 4
L. 552 960 LOAD_FAST 'self'
962 LOAD_ATTR interaction_parameters
964 BUILD_MAP_UNPACK_WITH_CALL_2 2
966 CALL_FUNCTION_EX_KW 1 'keyword and positional arguments'
968 STORE_FAST 'participants'
970 JUMP_FORWARD 1076 'to 1076'
972_0 COME_FROM 930 '930'
L. 553 972 LOAD_FAST 'self'
974 LOAD_ATTR super_interaction
976 LOAD_CONST None
978 COMPARE_OP is-not
980_982 POP_JUMP_IF_FALSE 1028 'to 1028'
L. 559 984 LOAD_FAST 'self'
986 LOAD_ATTR super_interaction
988 LOAD_ATTR get_participants
990 BUILD_TUPLE_0 0
992 LOAD_FAST 'participant_type'
L. 560 994 LOAD_FAST 'self'
996 LOAD_ATTR context
998 LOAD_ATTR sim
1000 LOAD_FAST 'self'
1002 LOAD_ATTR target
L. 561 1004 LOAD_CONST False
L. 562 1006 LOAD_FAST 'self'
1008 LOAD_ATTR affordance
1010 LOAD_ATTR target_type
1012 LOAD_CONST ('participant_type', 'sim', 'target', 'listener_filtering_enabled', 'target_type')
1014 BUILD_CONST_KEY_MAP_5 5
L. 563 1016 LOAD_FAST 'self'
1018 LOAD_ATTR interaction_parameters
1020 BUILD_MAP_UNPACK_WITH_CALL_2 2
1022 CALL_FUNCTION_EX_KW 1 'keyword and positional arguments'
1024 STORE_FAST 'participants'
1026 JUMP_FORWARD 1076 'to 1076'
1028_0 COME_FROM 980 '980'
L. 565 1028 LOAD_FAST 'self'
1030 LOAD_ATTR affordance
1032 LOAD_ATTR get_participants
1034 BUILD_TUPLE_0 0
1036 LOAD_FAST 'participant_type'
L. 566 1038 LOAD_FAST 'self'
1040 LOAD_ATTR context
1042 LOAD_ATTR sim
1044 LOAD_FAST 'self'
1046 LOAD_ATTR target
L. 567 1048 LOAD_FAST 'self'
1050 LOAD_ATTR context
1052 LOAD_ATTR carry_target
L. 568 1054 LOAD_CONST False
L. 569 1056 LOAD_FAST 'self'
1058 LOAD_ATTR affordance
1060 LOAD_ATTR target_type
1062 LOAD_CONST ('participant_type', 'sim', 'target', 'carry_target', 'listener_filtering_enabled', 'target_type')
1064 BUILD_CONST_KEY_MAP_6 6
L. 570 1066 LOAD_FAST 'self'
1068 LOAD_ATTR interaction_parameters
1070 BUILD_MAP_UNPACK_WITH_CALL_2 2
1072 CALL_FUNCTION_EX_KW 1 'keyword and positional arguments'
1074 STORE_FAST 'participants'
1076_0 COME_FROM 1026 '1026'
1076_1 COME_FROM 970 '970'
L. 571 1076 LOAD_GLOBAL set
1078 CALL_FUNCTION_0 0 '0 positional arguments'
1080 STORE_FAST 'resolved_participants'
L. 572 1082 SETUP_LOOP 1112 'to 1112'
1084 LOAD_FAST 'participants'
1086 GET_ITER
1088 FOR_ITER 1110 'to 1110'
1090 STORE_FAST 'participant'
L. 573 1092 LOAD_FAST 'resolved_participants'
1094 LOAD_METHOD add
1096 LOAD_GLOBAL _to_sim_info
1098 LOAD_FAST 'participant'
1100 CALL_FUNCTION_1 1 '1 positional argument'
1102 CALL_METHOD_1 1 '1 positional argument'
1104 POP_TOP
1106_1108 JUMP_BACK 1088 'to 1088'
1110 POP_BLOCK
1112_0 COME_FROM_LOOP 1082 '1082'
L. 575 1112 LOAD_GLOBAL tuple
1114 LOAD_FAST 'resolved_participants'
1116 CALL_FUNCTION_1 1 '1 positional argument'
1118 RETURN_VALUE
-1 RETURN_LAST
Parse error at or near `RETURN_VALUE' instruction at offset 1118
def get_localization_tokens(self, *args, **kwargs):
    """Delegate localization-token lookup to the owning interaction."""
    tokens_getter = self.interaction.get_localization_tokens
    return tokens_getter(*args, **kwargs)
@caches.clearable_barebones_cache
def _to_sim_info(participant):
    """Resolve a participant to its SimInfo where one is available.

    Anything without a ``sim_info`` attribute, and babies, are returned
    unchanged; every other participant resolves to its ``sim_info``.
    """
    info = getattr(participant, 'sim_info', None)
    if info is not None and not info.is_baby:
        return info
    return participant
class AwayActionResolver(Resolver):
    """Resolver used to run tests against an away action."""

    # Participant types an away action itself can provide.
    VALID_AWAY_ACTION_PARTICIPANTS = ParticipantType.Actor | ParticipantType.TargetSim | ParticipantType.Lot

    def __init__(self, away_action, skip_safe_tests=False, search_for_tooltip=False, **away_action_parameters):
        super().__init__(skip_safe_tests, search_for_tooltip)
        self.away_action = away_action
        self.away_action_parameters = away_action_parameters

    def __repr__(self):
        return 'AwayActionResolver: away_action: {}'.format(self.away_action)

    @property
    def sim(self):
        # The acting Sim is whatever resolves as the Actor participant.
        return self.get_participant(ParticipantType.Actor)

    def get_participants(self, participant_type, **kwargs):
        """Return the participants of *participant_type* for this away action.

        Raises ValueError for participant types an away action cannot supply.
        """
        if participant_type == 0:
            logger.error('Calling get_participants with no flags on {}.', self)
            return ()
        fetch_from_away_action = self.away_action.get_participants
        if participant_type == ParticipantType.Lot:
            return fetch_from_away_action(participant_type=participant_type, **self.away_action_parameters)
        base_result = self._get_participants_base(participant_type)
        if base_result is not None:
            return base_result
        # Event-data pseudo-participants are never provided by an away action.
        if participant_type in (event_testing.test_constants.FROM_DATA_OBJECT,
                                event_testing.test_constants.OBJECTIVE_GUID64,
                                event_testing.test_constants.FROM_EVENT_DATA):
            return ()
        if participant_type & AwayActionResolver.VALID_AWAY_ACTION_PARTICIPANTS:
            return fetch_from_away_action(participant_type=participant_type, **self.away_action_parameters)
        raise ValueError('Trying to use AwayActionResolver without a valid type: {}'.format(participant_type))

    def get_localization_tokens(self, *args, **kwargs):
        # NOTE(review): delegates to self.interaction, which an away-action
        # resolver may not define -- confirm before relying on tokens here.
        return self.interaction.get_localization_tokens(*args, **kwargs)
class SingleSimResolver(Resolver):
    """Resolver that tests against a single SimInfo with no interaction context."""

    def __init__(self, sim_info_to_test, additional_participants=None, additional_localization_tokens=(), additional_metric_key_data=None):
        """
        :param sim_info_to_test: SimInfo that acts as the Actor participant.
        :param additional_participants: optional mapping of ParticipantType to
            pre-resolved participants. BUGFIX: the previous default was a shared
            mutable dict (``{}``); set_additional_participant() mutated it, so
            entries leaked across every resolver built with the default.
        :param additional_localization_tokens: extra tokens appended to the
            localization token tuple.
        :param additional_metric_key_data: extra data appended to the profiling
            metric key when test profiling is enabled.
        """
        super().__init__()
        self.sim_info_to_test = sim_info_to_test
        self._additional_participants = {} if additional_participants is None else additional_participants
        self._additional_localization_tokens = additional_localization_tokens
        self._source = None
        if event_testing.resolver.test_profile is not None:
            # Record the caller's filename for profiling metrics.
            frame = sys._getframe(self.profile_metric_stack_depth)
            qualified_name = frame.f_code.co_filename
            # NOTE(review): splits on backslash only, i.e. a Windows basename;
            # posix-style paths pass through unchanged.
            unqualified_name = qualified_name.split('\\')[-1]
            self._source = unqualified_name
            if additional_metric_key_data is not None:
                self._source = '{}:{}'.format(self._source, additional_metric_key_data)

    def __repr__(self):
        return 'SingleSimResolver: sim_to_test: {}'.format(self.sim_info_to_test)

    @property
    def profile_metric_key(self):
        # Key used to aggregate profiling data for this resolver's tests.
        return 'source:{}'.format(self._source)

    @classproperty
    def profile_metric_stack_depth(cls):
        # How many frames up __init__ must look to find the caller.
        return 1

    def get_participants(self, participant_type, **kwargs):
        """Return the participants of *participant_type*, or raise ValueError
        for types a single-Sim resolver cannot supply."""
        if participant_type in (ParticipantType.Actor, ParticipantType.CustomSim):
            return (self.sim_info_to_test,)
        if participant_type == ParticipantType.SignificantOtherActor:
            significant_other = self.sim_info_to_test.get_significant_other_sim_info()
            if significant_other is not None:
                return (significant_other,)
            return ()
        if participant_type == ParticipantType.PregnancyPartnerActor:
            pregnancy_partner = self.sim_info_to_test.pregnancy_tracker.get_partner()
            if pregnancy_partner is not None:
                return (pregnancy_partner,)
            return ()
        if participant_type == ParticipantType.AllRelationships:
            # Sentinel: callers expand AllRelationships themselves.
            return ParticipantType.AllRelationships
        if participant_type == ParticipantType.ActorFeudTarget:
            feud_target = self.sim_info_to_test.get_feud_target()
            if feud_target is not None:
                return (feud_target,)
            return ()
        if participant_type == event_testing.test_constants.FROM_EVENT_DATA:
            return ()
        if participant_type in (ParticipantType.InteractionContext, ParticipantType.Affordance):
            # No interaction is involved, so these resolve to nothing.
            return ()
        if participant_type == event_testing.test_constants.SIM_INSTANCE:
            return (self.sim_info_to_test,)
        if participant_type == ParticipantType.Familiar:
            return self._get_familiar_for_sim_info(self.sim_info_to_test)
        if participant_type in self._additional_participants:
            return self._additional_participants[participant_type]
        if participant_type == ParticipantType.PickedZoneId:
            return frozenset()
        if participant_type == ParticipantType.ActorLot:
            sim_home_lot = self.sim_info_to_test.get_home_lot()
            if sim_home_lot is None:
                return ()
            return (sim_home_lot,)
        if participant_type == ParticipantType.RoutingSlaves:
            sim_inst = self.sim_info_to_test.get_sim_instance()
            routing_slave_data = sim_inst.get_routing_slave_data() if sim_inst is not None else None
            if routing_slave_data is None:
                return ()
            return tuple({data.slave for data in routing_slave_data})
        if participant_type == ParticipantType.StoredCASPartsOnObject:
            return ()
        result = self._get_participants_base(participant_type, **kwargs)
        if result is not None:
            return result
        raise ValueError('Trying to use {} with unsupported participant: {}'.format(type(self).__name__, participant_type))

    def _get_familiar_for_sim_info(self, sim_info):
        """Return the active familiar of *sim_info* as a 1-tuple, or ()."""
        # BUGFIX: previously read self.sim_info_to_test.familiar_tracker,
        # ignoring the sim_info argument, so DoubleSimResolver's
        # FamiliarOfTarget resolved to the actor's familiar instead of the
        # target's. TODO confirm no caller relied on the old behavior.
        familiar_tracker = sim_info.familiar_tracker
        if familiar_tracker is None:
            return ()
        familiar = familiar_tracker.get_active_familiar()
        if familiar is None:
            return ()
        if familiar.is_sim:
            return (familiar.sim_info,)
        return (familiar,)

    def get_localization_tokens(self, *args, **kwargs):
        """Tokens are the tested Sim followed by any additional tokens."""
        return (self.sim_info_to_test,) + self._additional_localization_tokens

    def set_additional_participant(self, participant_type, value):
        """Register a pre-resolved participant for *participant_type*."""
        self._additional_participants[participant_type] = value
class DoubleSimResolver(SingleSimResolver):
    """SingleSimResolver extended with a second, target SimInfo."""

    def __init__(self, sim_info, target_sim_info, **kwargs):
        super().__init__(sim_info, **kwargs)
        self.target_sim_info = target_sim_info

    def __repr__(self):
        return 'DoubleSimResolver: sim: {} target_sim: {}'.format(self.sim_info_to_test, self.target_sim_info)

    @classproperty
    def profile_metric_stack_depth(cls):
        # One frame deeper than SingleSimResolver because of this __init__.
        return 2

    def get_participants(self, participant_type, **kwargs):
        """Resolve target-Sim participant types; defer the rest to the base."""
        if participant_type == ParticipantType.TargetSim:
            return (self.target_sim_info,)
        if participant_type == ParticipantType.SignificantOtherTargetSim:
            # NOTE(review): unlike SignificantOtherActor on the base class,
            # None is not filtered out here -- confirm callers tolerate (None,).
            return (self.target_sim_info.get_significant_other_sim_info(),)
        if participant_type == ParticipantType.FamiliarOfTarget:
            return self._get_familiar_for_sim_info(self.target_sim_info)
        return super().get_participants(participant_type, **kwargs)

    def get_localization_tokens(self, *args, **kwargs):
        """Tokens are actor, target, then any additional tokens."""
        tokens = (self.sim_info_to_test, self.target_sim_info)
        return tokens + self._additional_localization_tokens
class DataResolver(Resolver):
    """Resolver used by the event/achievement system: tests run against a
    SimInfo plus the keyword data attached to a gameplay event."""

    def __init__(self, sim_info, event_kwargs=None, custom_keys=()):
        # sim_info: the Sim the event pertains to.
        # event_kwargs: raw keyword data from the event, if any; 'interaction'
        # and 'init' keys are pulled out eagerly below.
        # custom_keys: extra keys stored for later lookup by callers.
        super().__init__()
        self.sim_info = sim_info
        if event_kwargs is not None:
            self._interaction = event_kwargs.get('interaction', None)
            self.on_zone_load = event_kwargs.get('init', False)
        else:
            self._interaction = None
            self.on_zone_load = False
        self.event_kwargs = event_kwargs
        self.data_object = None
        self.objective_guid64 = None
        self.custom_keys = custom_keys

    def __repr__(self):
        return 'DataResolver: participant: {}'.format(self.sim_info)

    def __call__(self, test, data_object=None, objective_guid64=None):
        # Optionally bind the persistent data object and objective before
        # delegating the actual test run to the base resolver.
        if data_object is not None:
            self.data_object = data_object
            self.objective_guid64 = objective_guid64
        return super().__call__(test)

    @property
    def interaction(self):
        # Interaction attached to the event, or None.
        return self._interaction

    @property
    def profile_metric_key(self):
        # Profiling key: objective name plus the triggering interaction name.
        interaction_name = None
        if self._interaction is not None:
            interaction_name = self._interaction.aop.affordance.__name__
        objective_name = 'Invalid'
        if self.objective_guid64 is not None:
            objective_manager = services.objective_manager()
            objective = objective_manager.get(self.objective_guid64)
            objective_name = objective.__name__
        return 'objective:{} (interaction:{})'.format(objective_name, interaction_name)

    def get_resolved_arg(self, key):
        """Return the raw event argument stored under *key*, or None."""
        return self.event_kwargs.get(key, None)

    def get_participants(self, participant_type, event_key=None):
        """Return the participants of *participant_type*; event-data types are
        looked up in event_kwargs under *event_key*."""
        result = self._get_participants_base(participant_type, event_key=event_key)
        if result is not None:
            return result
        if participant_type == event_testing.test_constants.SIM_INSTANCE:
            return (self.sim_info,)
        if participant_type == event_testing.test_constants.FROM_DATA_OBJECT:
            return (self.data_object,)
        if participant_type == event_testing.test_constants.OBJECTIVE_GUID64:
            return (self.objective_guid64,)
        if participant_type == event_testing.test_constants.FROM_EVENT_DATA:
            if not self.event_kwargs:
                return ()
            return (self.event_kwargs.get(event_key),)
        if self._interaction is not None:
            # Delegate to the interaction, normalizing Sims to SimInfos.
            return tuple((getattr(participant, 'sim_info', participant) for participant in self._interaction.get_participants(participant_type)))
        if participant_type == ParticipantType.Actor:
            return (self.sim_info,)
        if participant_type == ParticipantType.AllRelationships:
            # NOTE: 'relations' is deliberately both the loop variable and the
            # result set name here (kept as-is from the original).
            sim_mgr = services.sim_info_manager()
            relations = set((sim_mgr.get(relations.get_other_sim_id(self.sim_info.sim_id)) for relations in self.sim_info.relationship_tracker))
            return tuple(relations)
        if participant_type == ParticipantType.TargetSim:
            if not self.event_kwargs:
                return ()
            target_sim_id = self.event_kwargs.get(event_testing.test_constants.TARGET_SIM_ID)
            if target_sim_id is None:
                return ()
            return (services.sim_info_manager().get(target_sim_id),)
        if participant_type == ParticipantType.ActiveHousehold:
            active_household = services.active_household()
            if active_household is not None:
                return tuple(active_household.sim_info_gen())
        # During zone load many participants are legitimately unavailable.
        if self.on_zone_load:
            return ()
        raise ValueError('Trying to use DataResolver with type that is not supported by DataResolver: {}'.format(participant_type))
class SingleObjectResolver(Resolver):
    """Resolver that tests against a single game object (no Sim context)."""

    def __init__(self, obj):
        super().__init__()
        self._obj = obj

    def __repr__(self):
        return 'SingleObjectResolver: object: {}'.format(self._obj)

    def get_participants(self, participant_type, **kwargs):
        """Return the participants of *participant_type* derived from the
        object, or raise ValueError for unsupported types."""
        if participant_type == ParticipantType.Object:
            return (self._obj,)
        if participant_type == ParticipantType.ObjectIngredients:
            if self._obj.crafting_component:
                crafting_process = self._obj.get_crafting_process()
                if crafting_process is not None:
                    return tuple(crafting_process.get_ingredients_object_definitions())
            return ()
        if participant_type == ParticipantType.Actor:
            # The object itself plays the Actor role in this resolver.
            return (self._obj,)
        if participant_type == ParticipantType.StoredSim:
            stored_sim_info = self._obj.get_stored_sim_info()
            return (stored_sim_info,)
        if participant_type == ParticipantType.StoredSimOrNameData:
            stored_sim_name_data = self._obj.get_stored_sim_info_or_name_data()
            return (stored_sim_name_data,)
        if participant_type == ParticipantType.OwnerSim:
            owner_sim_info_id = self._obj.get_sim_owner_id()
            owner_sim_info = services.sim_info_manager().get(owner_sim_info_id)
            return (owner_sim_info,)
        if participant_type == ParticipantType.ObjectParent and self._obj is not None:
            if self._obj.parent is None:
                return ()
            return (self._obj.parent,)
        if participant_type == ParticipantType.ObjectChildren:
            if self._obj is None:
                return ()
            # Parts delegate child enumeration to their owning object.
            if self._obj.is_part:
                return tuple(self._obj.part_owner.children_recursive_gen())
            return tuple(self._obj.children_recursive_gen())
        if participant_type == ParticipantType.RandomInventoryObject:
            return (random.choice(tuple(self._obj.inventory_component.visible_storage)),)
        if participant_type in (ParticipantType.PickedObject, ParticipantType.CarriedObject, ParticipantType.LiveDragActor):
            if self._obj.is_sim:
                return (self._obj.sim_info,)
            return (self._obj,)
        if participant_type == ParticipantType.RoutingOwner:
            # Hoisted: previously called get_routing_owner() up to three times.
            routing_owner = self._obj.get_routing_owner()
            if routing_owner.is_sim:
                return (routing_owner.sim_info,)
            return (routing_owner,)
        if participant_type == ParticipantType.RoutingTarget:
            routing_target = self._obj.get_routing_target()
            if routing_target.is_sim:
                return (routing_target.sim_info,)
            return (routing_target,)
        if participant_type == ParticipantType.StoredCASPartsOnObject:
            stored_cas_parts = self._obj.get_stored_cas_parts()
            if stored_cas_parts is None:
                return ()
            # BUGFIX: previously re-fetched via a second get_stored_cas_parts()
            # call wrapped in a redundant iter(); reuse the value checked above.
            return tuple(stored_cas_parts)
        result = self._get_participants_base(participant_type, **kwargs)
        if result is not None:
            return result
        if participant_type == event_testing.test_constants.FROM_EVENT_DATA:
            return ()
        raise ValueError('Trying to use SingleObjectResolver with something that is not an Object: {}'.format(participant_type))

    def get_localization_tokens(self, *args, **kwargs):
        """The only localization token is the object itself."""
        return (self._obj,)
class DoubleObjectResolver(Resolver):
def __init__(self, source_obj, target_obj):
    """Resolver over an acting object and a target object."""
    super().__init__()
    self._target_obj = target_obj
    self._source_obj = source_obj
def __repr__(self):
    """Debug representation naming both objects."""
    template = 'DoubleObjectResolver: actor_object: {}, target_object:{}'
    return template.format(self._source_obj, self._target_obj)
def get_participants--- This code section failed: ---
L.1059 0 LOAD_FAST 'self'
2 LOAD_ATTR _get_participants_base
4 LOAD_FAST 'participant_type'
6 BUILD_TUPLE_1 1
8 LOAD_FAST 'kwargs'
10 CALL_FUNCTION_EX_KW 1 'keyword and positional arguments'
12 STORE_FAST 'result'
L.1060 14 LOAD_FAST 'result'
16 LOAD_CONST None
18 COMPARE_OP is-not
20 POP_JUMP_IF_FALSE 26 'to 26'
L.1061 22 LOAD_FAST 'result'
24 RETURN_VALUE
26_0 COME_FROM 20 '20'
L.1063 26 LOAD_FAST 'participant_type'
28 LOAD_GLOBAL ParticipantType
30 LOAD_ATTR Actor
32 COMPARE_OP ==
34 POP_JUMP_IF_TRUE 66 'to 66'
L.1064 36 LOAD_FAST 'participant_type'
38 LOAD_GLOBAL ParticipantType
40 LOAD_ATTR PickedObject
42 COMPARE_OP ==
44 POP_JUMP_IF_TRUE 66 'to 66'
L.1065 46 LOAD_FAST 'participant_type'
48 LOAD_GLOBAL ParticipantType
50 LOAD_ATTR CarriedObject
52 COMPARE_OP ==
54 POP_JUMP_IF_TRUE 66 'to 66'
L.1066 56 LOAD_FAST 'participant_type'
58 LOAD_GLOBAL ParticipantType
60 LOAD_ATTR LiveDragActor
62 COMPARE_OP ==
64 POP_JUMP_IF_FALSE 92 'to 92'
66_0 COME_FROM 54 '54'
66_1 COME_FROM 44 '44'
66_2 COME_FROM 34 '34'
L.1067 66 LOAD_FAST 'self'
68 LOAD_ATTR _source_obj
70 LOAD_ATTR is_sim
72 POP_JUMP_IF_FALSE 84 'to 84'
L.1068 74 LOAD_FAST 'self'
76 LOAD_ATTR _source_obj
78 LOAD_ATTR sim_info
80 BUILD_TUPLE_1 1
82 RETURN_VALUE
84_0 COME_FROM 72 '72'
L.1069 84 LOAD_FAST 'self'
86 LOAD_ATTR _source_obj
88 BUILD_TUPLE_1 1
90 RETURN_VALUE
92_0 COME_FROM 64 '64'
L.1071 92 LOAD_FAST 'participant_type'
94 LOAD_GLOBAL ParticipantType
96 LOAD_ATTR Listeners
98 COMPARE_OP ==
100 POP_JUMP_IF_TRUE 132 'to 132'
L.1072 102 LOAD_FAST 'participant_type'
104 LOAD_GLOBAL ParticipantType
106 LOAD_ATTR Object
108 COMPARE_OP ==
110 POP_JUMP_IF_TRUE 132 'to 132'
L.1073 112 LOAD_FAST 'participant_type'
114 LOAD_GLOBAL ParticipantType
116 LOAD_ATTR TargetSim
118 COMPARE_OP ==
120 POP_JUMP_IF_TRUE 132 'to 132'
L.1074 122 LOAD_FAST 'participant_type'
124 LOAD_GLOBAL ParticipantType
126 LOAD_ATTR LiveDragTarget
128 COMPARE_OP ==
130 POP_JUMP_IF_FALSE 158 'to 158'
132_0 COME_FROM 120 '120'
132_1 COME_FROM 110 '110'
132_2 COME_FROM 100 '100'
L.1075 132 LOAD_FAST 'self'
134 LOAD_ATTR _target_obj
136 LOAD_ATTR is_sim
138 POP_JUMP_IF_FALSE 150 'to 150'
L.1076 140 LOAD_FAST 'self'
142 LOAD_ATTR _target_obj
144 LOAD_ATTR sim_info
146 BUILD_TUPLE_1 1
148 RETURN_VALUE
150_0 COME_FROM 138 '138'
L.1077 150 LOAD_FAST 'self'
152 LOAD_ATTR _target_obj
154 BUILD_TUPLE_1 1
156 RETURN_VALUE
158_0 COME_FROM 130 '130'
L.1079 158 LOAD_FAST 'participant_type'
160 LOAD_GLOBAL event_testing
162 LOAD_ATTR test_constants
164 LOAD_ATTR FROM_EVENT_DATA
166 COMPARE_OP ==
168 POP_JUMP_IF_FALSE 174 'to 174'
L.1080 170 LOAD_CONST ()
172 RETURN_VALUE
174_0 COME_FROM 168 '168'
L.1082 174 LOAD_FAST 'participant_type'
176 LOAD_GLOBAL ParticipantType
178 LOAD_ATTR LinkedPostureSim
180 COMPARE_OP ==
182 POP_JUMP_IF_FALSE 218 'to 218'
L.1083 184 LOAD_FAST 'self'
186 LOAD_ATTR _source_obj
188 LOAD_ATTR is_sim
190 POP_JUMP_IF_FALSE 218 'to 218'
L.1084 192 LOAD_FAST 'self'
194 LOAD_ATTR _source_obj
196 LOAD_ATTR posture
198 STORE_FAST 'posture'
L.1085 200 LOAD_FAST 'posture'
202 LOAD_ATTR multi_sim
204 POP_JUMP_IF_FALSE 218 'to 218'
L.1086 206 LOAD_FAST 'posture'
208 LOAD_ATTR linked_posture
210 LOAD_ATTR sim
212 LOAD_ATTR sim_info
214 BUILD_TUPLE_1 1
216 RETURN_VALUE
218_0 COME_FROM 204 '204'
218_1 COME_FROM 190 '190'
218_2 COME_FROM 182 '182'
L.1088 218 LOAD_FAST 'participant_type'
220 LOAD_GLOBAL ParticipantType
222 LOAD_ATTR ObjectParent
224 COMPARE_OP ==
226_228 POP_JUMP_IF_FALSE 268 'to 268'
L.1089 230 LOAD_FAST 'self'
232 LOAD_ATTR _target_obj
234 LOAD_CONST None
236 COMPARE_OP is
238 POP_JUMP_IF_TRUE 254 'to 254'
240 LOAD_FAST 'self'
242 LOAD_ATTR _target_obj
244 LOAD_ATTR parent
246 LOAD_CONST None
248 COMPARE_OP is
250_252 POP_JUMP_IF_FALSE 258 'to 258'
254_0 COME_FROM 238 '238'
L.1090 254 LOAD_CONST ()
256 RETURN_VALUE
258_0 COME_FROM 250 '250'
L.1091 258 LOAD_FAST 'self'
260 LOAD_ATTR _target_obj
262 LOAD_ATTR parent
264 BUILD_TUPLE_1 1
266 RETURN_VALUE
268_0 COME_FROM 226 '226'
L.1093 268 LOAD_FAST 'participant_type'
270 LOAD_GLOBAL ParticipantType
272 LOAD_ATTR RoutingOwner
274 COMPARE_OP ==
276_278 POP_JUMP_IF_FALSE 320 'to 320'
L.1094 280 LOAD_FAST 'self'
282 LOAD_ATTR _source_obj
284 LOAD_METHOD get_routing_owner
286 CALL_METHOD_0 0 '0 positional arguments'
288 LOAD_ATTR is_sim
290_292 POP_JUMP_IF_FALSE 308 'to 308'
L.1095 294 LOAD_FAST 'self'
296 LOAD_ATTR _source_obj
298 LOAD_METHOD get_routing_owner
300 CALL_METHOD_0 0 '0 positional arguments'
302 LOAD_ATTR sim_info
304 BUILD_TUPLE_1 1
306 RETURN_VALUE
308_0 COME_FROM 290 '290'
L.1097 308 LOAD_FAST 'self'
310 LOAD_ATTR _source_obj
312 LOAD_METHOD get_routing_owner
314 CALL_METHOD_0 0 '0 positional arguments'
316 BUILD_TUPLE_1 1
318 RETURN_VALUE
320_0 COME_FROM 276 '276'
L.1098 320 LOAD_FAST 'participant_type'
322 LOAD_GLOBAL ParticipantType
324 LOAD_ATTR RoutingTarget
326 COMPARE_OP ==
328_330 POP_JUMP_IF_FALSE 372 'to 372'
L.1099 332 LOAD_FAST 'self'
334 LOAD_ATTR _source_obj
336 LOAD_METHOD get_routing_target
338 CALL_METHOD_0 0 '0 positional arguments'
340 LOAD_ATTR is_sim
342_344 POP_JUMP_IF_FALSE 360 'to 360'
L.1100 346 LOAD_FAST 'self'
348 LOAD_ATTR _source_obj
350 LOAD_METHOD get_routing_target
352 CALL_METHOD_0 0 '0 positional arguments'
354 LOAD_ATTR sim_info
356 BUILD_TUPLE_1 1
358 RETURN_VALUE
360_0 COME_FROM 342 '342'
L.1102 360 LOAD_FAST 'self'
362 LOAD_ATTR _source_obj
364 LOAD_METHOD get_routing_target
366 CALL_METHOD_0 0 '0 positional arguments'
368 BUILD_TUPLE_1 1
370 RETURN_VALUE
372_0 COME_FROM 328 '328'
L.1104 372 LOAD_GLOBAL ValueError
374 LOAD_STR 'Trying to use DoubleObjectResolver with something that is not supported: Participant {}, Resolver {}'
376 LOAD_METHOD format
378 LOAD_FAST 'participant_type'
380 LOAD_FAST 'self'
382 CALL_METHOD_2 2 '2 positional arguments'
384 CALL_FUNCTION_1 1 '1 positional argument'
386 RAISE_VARARGS_1 1 'exception instance'
Parse error at or near `CALL_FUNCTION_1' instruction at offset 384
def get_localization_tokens(self, *args, **kwargs):
    """Localization tokens are the source object then the target object."""
    tokens = (self._source_obj, self._target_obj)
    return tokens
class SingleActorAndObjectResolver(Resolver):
    """Resolver that tests against one acting SimInfo and one object."""

    def __init__(self, actor_sim_info, obj, source):
        # source: debug/profiling tag describing where this resolver came from.
        super().__init__()
        self._sim_info = actor_sim_info
        self._obj = obj
        self._source = source

    def __repr__(self):
        return 'SingleActorAndObjectResolver: sim_info: {}, object: {}'.format(self._sim_info, self._obj)

    @property
    def profile_metric_key(self):
        # Profiling key: creation source plus the object under test.
        return 'source:{} object:{}'.format(self._source, self._obj)

    def get_participants(self, participant_type, **kwargs):
        """Return the participants of *participant_type* from the Sim/object
        pair, or raise ValueError for unsupported types."""
        result = self._get_participants_base(participant_type, **kwargs)
        if result is not None:
            return result
        if participant_type in (ParticipantType.Actor, ParticipantType.CustomSim, event_testing.test_constants.SIM_INSTANCE):
            return (self._sim_info,)
        if participant_type == ParticipantType.Object:
            return (self._obj,)
        if participant_type == ParticipantType.ObjectIngredients:
            if self._obj.crafting_component:
                crafting_process = self._obj.get_crafting_process()
                if crafting_process is not None:
                    return tuple(crafting_process.get_ingredients_object_definitions())
            return ()
        if participant_type == ParticipantType.ObjectParent and self._obj is not None:
            if self._obj.parent is None:
                return ()
            return (self._obj.parent,)
        if participant_type == ParticipantType.StoredSim:
            stored_sim_info = self._obj.get_stored_sim_info()
            return (stored_sim_info,)
        if participant_type == ParticipantType.StoredCASPartsOnObject:
            stored_cas_parts = self._obj.get_stored_cas_parts()
            if stored_cas_parts is None:
                return ()
            # BUGFIX: previously re-fetched via a second get_stored_cas_parts()
            # call wrapped in a redundant iter(); reuse the value checked above.
            return tuple(stored_cas_parts)
        if participant_type == ParticipantType.OwnerSim:
            owner_sim_info_id = self._obj.get_sim_owner_id()
            owner_sim_info = services.sim_info_manager().get(owner_sim_info_id)
            return (owner_sim_info,)
        if participant_type in (ParticipantType.Affordance, ParticipantType.InteractionContext, event_testing.test_constants.FROM_EVENT_DATA):
            # No interaction/event is attached to this resolver.
            return ()
        raise ValueError('Trying to use SingleActorAndObjectResolver with something that is not supported: {}'.format(participant_type))

    def get_localization_tokens(self, *args, **kwargs):
        """Tokens are the acting Sim followed by the object."""
        return (self._sim_info, self._obj)
class DoubleSimAndObjectResolver(Resolver):
    """Resolver for an actor Sim, a target Sim, and an object.

    Maps ParticipantType requests onto the (actor_sim_info, target_sim_info,
    obj) triple supplied at construction.
    """

    def __init__(self, actor_sim_info, target_sim_info, obj, source):
        """
        Args:
            actor_sim_info: the acting Sim's sim_info
            target_sim_info: the targeted Sim's sim_info
            obj: the object this resolver answers Object-style requests with
            source: string identifying the caller; used in the profiling key
        """
        super().__init__()
        self._actor_sim_info = actor_sim_info
        self._target_sim_info = target_sim_info
        self._obj = obj
        self._source = source

    def __repr__(self):
        return f"DoubleActorAndObjectResolver: actor_sim_info: {self._actor_sim_info}, target_sim_info: {self._target_sim_info}, object: {self._obj}"

    @property
    def profile_metric_key(self):
        # Key under which profiling metrics for this resolver are aggregated.
        return f"source:{self._source} object:{self._obj}"

    def get_participants(self, participant_type, **kwargs):
        """Return a tuple of participants matching `participant_type`.

        Raises:
            ValueError: if `participant_type` is not supported by this resolver.
        """
        # Give the base resolver first crack; it returns None when it can't answer.
        result = self._get_participants_base(participant_type, **kwargs)
        if result is not None:
            return result
        if participant_type == ParticipantType.Actor or participant_type == ParticipantType.CustomSim or participant_type == event_testing.test_constants.SIM_INSTANCE:
            return (self._actor_sim_info,)
        if participant_type == ParticipantType.TargetSim:
            return (self._target_sim_info,)
        if participant_type == ParticipantType.SignificantOtherTargetSim:
            return (self._target_sim_info.get_significant_other_sim_info(),)
        if participant_type == ParticipantType.Object:
            return (self._obj,)
        if participant_type == ParticipantType.ObjectIngredients:
            # Ingredient object definitions of a craftable; empty when the
            # object is not a crafting result.
            if self._obj.crafting_component:
                crafting_process = self._obj.get_crafting_process()
                if crafting_process is not None:
                    return tuple(crafting_process.get_ingredients_object_definitions())
            return ()
        if participant_type == ParticipantType.ObjectParent and self._obj is not None:
            if self._obj.parent is None:
                return ()
            return (self._obj.parent,)
        if participant_type == ParticipantType.StoredSim:
            stored_sim_info = self._obj.get_stored_sim_info()
            return (stored_sim_info,)
        if participant_type == ParticipantType.StoredCASPartsOnObject:
            stored_cas_parts = self._obj.get_stored_cas_parts()
            if stored_cas_parts is None:
                return ()
            return tuple(iter(self._obj.get_stored_cas_parts()))
        if participant_type == ParticipantType.OwnerSim:
            owner_sim_info_id = self._obj.get_sim_owner_id()
            owner_sim_info = services.sim_info_manager().get(owner_sim_info_id)
            return (owner_sim_info,)
        if participant_type == ParticipantType.Affordance:
            return ()
        if participant_type == ParticipantType.InteractionContext:
            return ()
        if participant_type == event_testing.test_constants.FROM_EVENT_DATA:
            return ()
        raise ValueError(f"Trying to use DoubleActorAndObjectResolver with something that is not supported: {participant_type}")

    def get_localization_tokens(self, *args, **kwargs):
        # BUG FIX: this class stores the actor as _actor_sim_info (see __init__);
        # the original read self._sim_info, which is never assigned by this
        # class and raised AttributeError at runtime.
        return (self._actor_sim_info, self._target_sim_info, self._obj)
class PhotoResolver(SingleActorAndObjectResolver):
    """Resolver for photography: a photographer Sim, the photo object, and the
    participants that appear in the photo."""

    def __init__(self, photographer, photo_object, photo_targets, source):
        super().__init__(photographer, photo_object, source)
        self._photo_targets = photo_targets

    def __repr__(self):
        return 'PhotoResolver: photographer: {}, photo_object:{}, photo_targets:{}'.format(self._sim_info, self._obj, self._photo_targets)

    def get_participants(self, participant_type, **kwargs):
        # Only the photography targets are answered here; everything else is
        # the standard single-actor-and-object behavior.
        if participant_type != ParticipantType.PhotographyTargets:
            return super().get_participants(participant_type, **kwargs)
        return self._photo_targets
class ZoneResolver(GlobalResolver):
    """Global resolver that can additionally answer PickedZoneId with a fixed
    zone id supplied at construction."""

    def __init__(self, zone_id, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._zone_id = zone_id

    def __repr__(self):
        return 'ZoneResolver: zone_id: {}'.format(self._zone_id)

    def get_participants(self, participant_type, **kwargs):
        # Everything except the picked zone id is handled by the global resolver.
        if participant_type != ParticipantType.PickedZoneId:
            return super().get_participants(participant_type, **kwargs)
        return (self._zone_id,)
class StreetResolver(GlobalResolver):
    """Global resolver that can additionally resolve ParticipantType.Street to
    the street's civic policy provider."""

    def __init__(self, street, **kwargs):
        super().__init__(**kwargs)
        self._street = street

    def get_participants(self, participant_type, **kwargs):
        # Non-street requests fall through to the global resolver.
        if participant_type != ParticipantType.Street:
            return super().get_participants(participant_type, **kwargs)
        service = services.street_service()
        if service is None:
            return ()
        provider = service.get_provider(self._street)
        if provider is None:
            return ()
        return (provider,)
class VenuePolicyProviderResolver(GlobalResolver):
    """Global resolver that can additionally answer VenuePolicyProvider with a
    provider supplied at construction."""

    def __init__(self, venue_policy_provider, **kwargs):
        super().__init__(**kwargs)
        self._venue_policy_provider = venue_policy_provider

    def get_participants(self, participant_type, **kwargs):
        # Only the venue policy provider is answered here.
        if participant_type != ParticipantType.VenuePolicyProvider:
            return super().get_participants(participant_type, **kwargs)
        return (self._venue_policy_provider,)
class LotResolver(GlobalResolver):
    """Global resolver that can additionally answer ParticipantType.Lot with a
    lot supplied at construction."""

    def __init__(self, lot, **kwargs):
        super().__init__(**kwargs)
        self._lot = lot

    def get_participants(self, participant_type, **kwargs):
        # Only the lot itself is answered here.
        if participant_type != ParticipantType.Lot:
            return super().get_participants(participant_type, **kwargs)
        return (self._lot,)
"cristina.caballero2406@gmail.com"
] | cristina.caballero2406@gmail.com |
c267b35781314c7896acf033300f5734509d3201 | 8205fe05169b8fd478f4f5e6b8d190e0378148b2 | /automationFramework/test_end2end.py | 2f79fbc24b084f97ee6b524e3d849ce565cc64df | [] | no_license | akashgkrishnan/test_automate | 04de6f0f3b3ee3ab92675897451ae53d44a37322 | 0e89e5759edeb6866be252d2d3c742ded0c81c3e | refs/heads/master | 2022-07-09T00:46:22.665077 | 2020-05-12T10:32:36 | 2020-05-12T10:32:36 | 263,295,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,578 | py | from time import sleep
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
import pytest
from automationFramework.utilities.BaseClass import BaseClass
#@pytest.mark.usefixtures('setup')
class TestOne(BaseClass):
    """End-to-end checkout flow test for the demo shop site.

    Relies on BaseClass to provide a configured ``self.driver`` WebDriver
    instance -- presumably via a pytest fixture (note the commented-out
    ``usefixtures`` marker above); confirm against BaseClass.
    """

    def test_case1(self,):
        """Add 'Blackberry' to the cart, check out, and verify the success alert."""
        # Open the shop page.
        self.driver.find_element_by_link_text('Shop').click()
        sleep(2)
        # Scan every product card; add the one named 'Blackberry' to the cart.
        products = self.driver.find_elements_by_xpath('//div[@class="card h-100"]')
        for product in products:
            product_name = product.find_element_by_xpath('div/h4/a').text
            if product_name == 'Blackberry':
                product.find_element_by_xpath('div/button').click()
        # Go to the cart ...
        self.driver.find_element_by_css_selector('a[class*="btn-primary"]').click()
        sleep(2)
        # ... and proceed to checkout.
        self.driver.find_element_by_xpath('//button[@class="btn btn-success"]').click()
        sleep(2)
        # Type a country prefix, wait for the suggestion list, pick 'India'.
        self.driver.find_element_by_id("country").send_keys('Ind')
        wait = WebDriverWait(self.driver, 7)
        wait.until(expected_conditions.presence_of_element_located((By.LINK_TEXT, 'India')))
        self.driver.find_element_by_link_text('India').click()
        # Accept the terms checkbox and submit the order.
        self.driver.find_element_by_xpath('//div[@class="checkbox checkbox-primary"]').click()
        self.driver.find_element_by_css_selector('[type="submit"]').click()
        # The page should confirm the purchase, then capture a screenshot.
        sucess_text = self.driver.find_element_by_class_name('alert-success').text
        assert "Success! Thank you!" in sucess_text
        self.driver.get_screenshot_as_file('suces_page.png')
| [
"krishnanag1996@gmail.com"
] | krishnanag1996@gmail.com |
0cb3dab001668911dcae3905f35bdb948c5b58c9 | bc2dae7b0874d760128ff4de79b659d45a2e5219 | /de_sim/examples/sirs.py | a4fea3c03d081d2ffaef7dcd97f6d2544f5dfaca | [
"MIT"
] | permissive | rmes-ai/de_sim | 5cb260bed25d4f49f09e72d3321a8b1a5ddf7cc5 | 6f189d8c8e850e092d816f6be3d6f87b4f983ac2 | refs/heads/master | 2023-02-20T21:45:38.728639 | 2020-12-17T14:34:51 | 2020-12-17T14:34:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,140 | py | """ Example DE-Sim implementations of stochastic Susceptible, Infectious, or Recovered (SIR) epidemic models
:Author: Arthur Goldberg <Arthur.Goldberg@mssm.edu>
:Date: 2020-07-08
:Copyright: 2020, Karr Lab
:License: MIT
"""
import enum
import numpy
from de_sim.checkpoint import AccessCheckpoints
from de_sim.simulation_checkpoint_object import (AccessStateObjectInterface,
CheckpointSimulationObject)
import de_sim
# Event message types for the version-1 SIR model: one message class per
# state transition.
class SusceptibleToInfectious(de_sim.EventMessage):
    "S -> I transition"


class InfectiousToRecovered(de_sim.EventMessage):
    "I -> R transition"


# Registered below as SIR.messages_sent.
MESSAGE_TYPES = [SusceptibleToInfectious, InfectiousToRecovered]
class SIR(de_sim.SimulationObject):
    """ Implement a Susceptible, Infectious, or Recovered (SIR) epidemic model

    This example uses DE-Sim to implement a continuous-time Markov chain (CTMC) SIR
    epidemic model, as described in section 3 of Allen (2017).

    Allen, L.J., 2017. A primer on stochastic epidemic models: Formulation, numerical simulation, and analysis.
    Infectious Disease Modelling, 2(2), pp.128-142.

    Attributes:
        s (:obj:`int`): number of susceptible subjects
        i (:obj:`int`): number of infectious subjects
        N (:obj:`int`): total number of subjects in the population, a constant
        beta (:obj:`float`): SIR beta parameter
        gamma (:obj:`float`): SIR gamma parameter
        recording_period (:obj:`float`): time step for recording state
        random_state (:obj:`numpy.random.RandomState`): a random state
        history (:obj:`list`): list of recorded states
    """

    def __init__(self, name, s, i, N, beta, gamma, recording_period):
        """ Initialize an SIR instance

        Args:
            name (:obj:`str`): the instance's name
            s (:obj:`int`): initial number of susceptible subjects, s(0)
            i (:obj:`int`): initial number of infectious subjects, i(0)
            N (:obj:`int`): total number of subjects, a constant
            beta (:obj:`float`): SIR beta parameter
            gamma (:obj:`float`): SIR gamma parameter
            recording_period (:obj:`float`): time step for recording state
        """
        self.s = s
        self.i = i
        self.N = N
        self.beta = beta
        self.gamma = gamma
        self.recording_period = recording_period
        self.random_state = numpy.random.RandomState()
        self.history = []
        super().__init__(name)

    def init_before_run(self):
        """ Schedule the first state-transition event before the simulation runs
        """
        self.schedule_next_event()

    def schedule_next_event(self):
        """ Schedule the next SIR event

        Draws the time to the next transition from an exponential distribution
        with rate lambda = rate(S->I) + rate(I->R), then picks which transition
        occurs with probability proportional to its rate. Schedules nothing
        when both rates are zero (the epidemic has ended).
        """
        rates = {'s_to_i': self.beta * self.s * self.i / self.N,
                 'i_to_r': self.gamma * self.i}
        lambda_val = rates['s_to_i'] + rates['i_to_r']
        if lambda_val == 0:
            return
        # numpy's exponential takes the scale (1/rate), not the rate
        tau = self.random_state.exponential(1.0/lambda_val)
        prob_s_to_i = rates['s_to_i'] / lambda_val
        if self.random_state.random_sample() < prob_s_to_i:
            self.send_event(tau, self, SusceptibleToInfectious())
        else:
            self.send_event(tau, self, InfectiousToRecovered())

    def handle_s_to_i(self, event):
        """ Handle a susceptible to infectious event

        Args:
            event (:obj:`~de_sim.event.Event`): simulation event; not used
        """
        del event  # Avoid PyLint warning W0613, unused-argument
        self.s -= 1
        self.i += 1
        self.schedule_next_event()

    def handle_i_to_r(self, event):
        """ Handle an infectious to recovered event

        Args:
            event (:obj:`~de_sim.event.Event`): simulation event; not used
        """
        del event  # Avoid PyLint warning W0613, unused-argument
        self.i -= 1
        self.schedule_next_event()

    # map each received message type to its handler method
    event_handlers = [(SusceptibleToInfectious, 'handle_s_to_i'),
                      (InfectiousToRecovered, 'handle_i_to_r')]

    # register the message types sent
    messages_sent = MESSAGE_TYPES
class StateTransitionType(enum.Enum):
    """ State transition types
    """
    s_to_i = 'Transition from Susceptible to Infectious'
    i_to_r = 'Transition from Infectious to Recovered'


### SIR epidemic model, version 2 ###

class TransitionMessage(de_sim.EventMessage):
    "Message for all model transitions"
    transition_type: StateTransitionType


# NOTE: rebinds the module-level MESSAGE_TYPES defined for the version-1
# model above; SIR2 below registers this list as its messages_sent.
MESSAGE_TYPES = [TransitionMessage]
class SIR2(SIR):
    """ Version 2 of an SIR epidemic model

    SIR2 is similar to SIR, but uses one event message type for both transitions, and a
    single message handler to process transition events.
    """

    def schedule_next_event(self):
        """ Schedule the next SIR event

        Same competing-rates logic as SIR.schedule_next_event, but sends one
        TransitionMessage tagged with the transition type instead of two
        distinct message classes.
        """
        rates = {'s_to_i': self.beta * self.s * self.i / self.N,
                 'i_to_r': self.gamma * self.i}
        lambda_val = rates['s_to_i'] + rates['i_to_r']
        if lambda_val == 0:
            return
        # numpy's exponential takes the scale (1/rate), not the rate
        tau = self.random_state.exponential(1.0/lambda_val)
        prob_s_to_i = rates['s_to_i'] / lambda_val
        if self.random_state.random_sample() < prob_s_to_i:
            self.send_event(tau, self, TransitionMessage(StateTransitionType.s_to_i))
        else:
            self.send_event(tau, self, TransitionMessage(StateTransitionType.i_to_r))

    def handle_state_transition(self, event):
        """ Handle an infectious state transition

        Args:
            event (:obj:`~de_sim.event.Event`): simulation event that contains the type of transition
        """
        transition_type = event.message.transition_type
        if transition_type is StateTransitionType.s_to_i:
            self.s -= 1
            self.i += 1
        elif transition_type is StateTransitionType.i_to_r:
            self.i -= 1
        self.schedule_next_event()

    # one handler dispatches on the transition type carried by the message
    event_handlers = [(TransitionMessage, 'handle_state_transition')]

    # register the message types sent
    messages_sent = MESSAGE_TYPES
class AccessSIRObjectState(AccessStateObjectInterface):
    """Adapter exposing an SIR object's state to the checkpointing machinery.

    Attributes:
        sir (:obj:`obj`): the SIR instance being observed
        random_state (:obj:`numpy.random.RandomState`): that instance's random state
    """

    def __init__(self, sir):
        self.sir = sir
        self.random_state = sir.random_state

    def get_checkpoint_state(self, time):
        """Return the SIR state to record; `time` is accepted but unused."""
        observed = self.sir
        return {'s': observed.s, 'i': observed.i}

    def get_random_state(self):
        """Return the SIR object's numpy random state."""
        return self.random_state.get_state()
class RunSIRs(object):
    """Driver that runs an SIR simulation with periodic state checkpointing.

    Attributes:
        checkpoint_dir (:obj:`str`): directory in which checkpoints are stored
        sir: the SIR instance created by the most recent `simulate` call
    """

    def __init__(self, checkpoint_dir):
        self.checkpoint_dir = checkpoint_dir

    def simulate(self, sir_class, max_time, **sir_args):
        """ Create and run an SIR simulation

        Args:
            sir_class (:obj:`type`): a type of SIR class to run
            max_time (:obj:`float`): simulation end time
            sir_args (:obj:`dict`): arguments for an SIR object; must include
                'recording_period', which also sets the checkpointing period
        """
        # create a simulator
        simulator = de_sim.Simulator()
        # create an SIR instance
        self.sir = sir = sir_class(**sir_args)
        simulator.add_object(sir)
        # create a checkpoint simulation object
        access_state_object = AccessSIRObjectState(sir)
        checkpointing_obj = CheckpointSimulationObject('checkpointing_obj', sir_args['recording_period'],
                                                      self.checkpoint_dir, access_state_object)
        simulator.add_object(checkpointing_obj)
        # initialize simulation, which sends the SIR instance an initial event message
        simulator.initialize()
        # run the simulation
        event_num = simulator.simulate(max_time).num_events
        print("Executed {} events.\n".format(event_num))

    def print_history(self):
        """ Print an SIR simulation's history
        """
        header = ['time', 's', 'i', 'r']
        print('\t'.join(header))
        access_checkpoints = AccessCheckpoints(self.checkpoint_dir)
        for checkpoint_time in access_checkpoints.list_checkpoints():
            chkpt = access_checkpoints.get_checkpoint(time=checkpoint_time)
            state = chkpt.state
            # r is not checkpointed; recover it from the constant population N
            state_as_list = [checkpoint_time, state['s'], state['i'], self.sir.N - state['s'] - state['i']]
            state_as_list = [str(v) for v in state_as_list]
            print('\t'.join(state_as_list))

    def last_checkpoint(self):
        """ Get the last checkpoint of the last simulation run

        Returns:
            :obj:`~de_sim.checkpoint.Checkpoint`: the last checkpoint of the last simulation run
        """
        access_checkpoints = AccessCheckpoints(self.checkpoint_dir)
        last_checkpoint_time = access_checkpoints.list_checkpoints()[-1]
        return access_checkpoints.get_checkpoint(time=last_checkpoint_time)
"artgoldberg@gmail.com"
] | artgoldberg@gmail.com |
def sumOfTwo(a, b, v):
    """Return True if some x in `a` and y in `b` satisfy x + y == v.

    Runs in O(len(a) + len(b)) average time: `b` is loaded into a set once,
    so each complement lookup is O(1) on average.

    Args:
        a: first sequence of numbers
        b: second sequence of numbers
        v: target sum

    Returns:
        bool: whether a pair summing to `v` exists.
    """
    # One lookup set suffices; converting `a` to a set as well (as the
    # original did) costs extra memory without changing the result.
    complements = set(b)
    return any(v - x in complements for x in a)
| [
"hallosputnik@gmail.com"
] | hallosputnik@gmail.com |
73e9f458e6dc0153719f1d6fd927b85b27897c0b | 854bf3649552aeda06573b7e7fea38e3a14332a1 | /thirdapp/wsgi.py | f6a77b8f75dcf0c9a423740289619093f9b83bfe | [] | no_license | mehulchopradev/charles-django-thirdapp | a90d88034c4233f175d3828f81811105f9eaeb56 | ea2ea37830393a0fbf437cb517129c13fddf696c | refs/heads/master | 2020-04-03T16:48:37.782132 | 2018-10-30T16:30:07 | 2018-10-30T16:30:07 | 155,420,092 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | """
WSGI config for thirdapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module unless the environment
# already specifies one.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'thirdapp.settings')

# The module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| [
"mehul.chopra.dev@gmail.com"
] | mehul.chopra.dev@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.