blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
48f895d6d0a9b8dfa8df8e2750cdb4c53e168f84 | 2a2ce1246252ef6f59e84dfea3888c5a98503eb8 | /examples/tutorials/09_flip.py | 9c8409019a6c692430f2e24ac884f78b28cd478d | [
"BSD-3-Clause"
] | permissive | royqh1979/PyEasyGraphics | c7f57c1fb5a829287e9c462418998dcc0463a772 | 842121e461be3273f845866cf1aa40c312112af3 | refs/heads/master | 2021-06-11T10:34:03.001842 | 2021-04-04T10:47:52 | 2021-04-04T10:47:52 | 161,438,503 | 8 | 4 | BSD-3-Clause | 2021-04-04T10:47:53 | 2018-12-12T05:43:31 | Python | UTF-8 | Python | false | false | 376 | py | """
Draw a bus without transformations
"""
from easygraphics import *
import draw_bus
def main():
    """Draw a bus and its mirror image across the window diagonal.

    Uses easygraphics; `reflect` presumably mirrors subsequent drawing
    across the line (0,300)-(500,0) -- TODO confirm against easygraphics docs.
    """
    init_graph(500, 300)
    draw_bus.draw_bus()
    # Mark the mirror axis with a gray dashed diagonal.
    set_color("gray")
    set_line_style(LineStyle.DASH_LINE)
    line(0, 300, 500, 0)
    set_line_style(LineStyle.SOLID_LINE)
    # Mirror across the same diagonal, then draw the bus again (reflected).
    reflect(0, 300, 500, 0)
    draw_bus.draw_bus()
    # Keep the window open until the user dismisses it, then clean up.
    pause()
    close_graph()
easy_run(main) | [
"royqh1979@gmail.com"
] | royqh1979@gmail.com |
e687132d147ed7ba43628a5af04b87969ca6ed6a | ed114d6111f9f75bdf74de9687f1ec6145ddcae2 | /25_file_IO.py | c1e548d18301d51ec1b2960066b36dee4f3a4281 | [] | no_license | tayyabmalik4/Python-With-Tayyab | 495b6074c7a75bcb5eced34bdec3053b2103c78c | 5f566e4036bfe17f2a7879a7a7a5d70f259ec6d0 | refs/heads/main | 2023-06-29T01:25:57.827350 | 2021-08-02T15:27:52 | 2021-08-02T15:27:52 | 387,209,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | # File IO Basics
"""
"r" => open file for reading----this is a default
"w" => open a file for writing
"x" => Creates a file if not exits
"a" => add more content to a file
"t" => text mode -----this is a defaut
"b" => binary mode
"r+" => read and write
"""
| [
"mtayyabmalik99@gmail.com"
] | mtayyabmalik99@gmail.com |
e90a761365c11c6ac9cab953da8b10fbd1b4b195 | 8a51e947e11d37fc2937a81cc02e9901e06b4291 | /envfh/bin/chardetect | 16d09ac1b0de412a80954ca6956a7e69b839da7d | [] | no_license | Deepakchawla/flask_mysql_heroku | 107fa4ad1074901cd5cdd4a762e8bc960b7b90ee | 5ff5186291a461f9eaedd6a009f4a5e1a08f3a62 | refs/heads/main | 2022-12-26T01:34:41.957981 | 2020-10-05T15:41:57 | 2020-10-05T15:41:57 | 301,443,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | #!/home/deepakchawala/PycharmProjects/flask_heroku/envfh/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"deepakchawla35@gmail.com"
] | deepakchawla35@gmail.com | |
9bcb3781b5e49ae5f845098c5dd06f91187d1435 | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-elb/setup.py | 78a2480f61627fbccd1b215889f9405423ab312f | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,654 | py | # coding: utf-8
from os import path
from setuptools import setup, find_packages
# Packaging metadata for the Huawei Cloud ELB SDK sub-package.
NAME = "huaweicloudsdkelb"
VERSION = "3.0.52"
AUTHOR = "HuaweiCloud SDK"
AUTHOR_EMAIL = "hwcloudsdk@huawei.com"
URL = "https://github.com/huaweicloud/huaweicloud-sdk-python-v3"

DESCRIPTION = "ELB"
# Use the PyPI-specific README as the long description shown on the
# project page (rendered as Markdown, see long_description_content_type).
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README_PYPI.md'), encoding='utf-8') as f:
    LONG_DESCRIPTION = f.read()

# Runtime dependency: the shared core of the Huawei Cloud SDK.
REQUIRES = ["huaweicloudsdkcore"]

OPTIONS = {
    'bdist_wheel': {
        # Build a single "universal" wheel valid for both Python 2 and 3.
        'universal': True
    }
}

setup(
    name=NAME,
    version=VERSION,
    options=OPTIONS,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    license="Apache LICENSE 2.0",
    url=URL,
    keywords=["huaweicloud", "sdk", "ELB"],
    # Ship every package except the test tree.
    packages=find_packages(exclude=["tests*"]),
    install_requires=REQUIRES,
    python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*",
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Topic :: Software Development'
    ]
)
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
63feb400bba4f58c6678073156ad61cd583474db | 3a891a79be468621aae43defd9a5516f9763f36e | /desktop/core/ext-py/defusedxml-0.5.0/other/python_genshi.py | 183d1fec94405e9cea10ac4388123eb295e70391 | [
"Apache-2.0",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | oyorooms/hue | b53eb87f805063a90f957fd2e1733f21406269aa | 4082346ef8d5e6a8365b05752be41186840dc868 | refs/heads/master | 2020-04-15T20:31:56.931218 | 2019-01-09T19:02:21 | 2019-01-09T19:05:36 | 164,998,117 | 4 | 2 | Apache-2.0 | 2019-01-10T05:47:36 | 2019-01-10T05:47:36 | null | UTF-8 | Python | false | false | 172 | py | #!/usr/bin/python
import sys
from pprint import pprint
from genshi.input import XMLParser
# Parse the XML file named on the command line with Genshi's stream
# parser and pretty-print the resulting parse-event list.
with open(sys.argv[1]) as f:
    parser = XMLParser(f)
    pprint(list(parser))
| [
"yingchen@cloudera.com"
] | yingchen@cloudera.com |
578dd5df4eda97c5cd637742a9711502dace842e | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/Database/AthenaPOOL/AthenaPoolExample/AthenaPoolExampleAlgorithms/share/AthenaPoolExample_WMetaJobOptions.py | 3af9e8794b0385996c0820cd6b3c8079ce383f32 | [] | no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,236 | py | ## @file AthenaPoolExample_WriteJobOptions.py
## @brief Example job options file to illustrate how to write event data to Pool.
## @author Peter van Gemmeren <gemmeren@anl.gov>
## $Id: AthenaPoolExample_WMetaJobOptions.py,v 1.8 2008-03-25 22:38:54 gemmeren Exp $
###############################################################
#
# This Job option:
# ----------------
# 1. Writes a SimplePoolFile5.root file with ExampleHit
# using WriteData algorithm
# ------------------------------------------------------------
# Expected output file (20 events):
# -rw-r--r-- 1 gemmeren zp 36266 Dec 8 19:08 SimplePoolFile5.root
#
#==============================================================
# Athena job options: configure a 20-event job that writes ExampleHit data
# (plus a pedestal metadata object) to SimplePoolFile5.root via POOL.
import AthenaCommon.AtlasUnixGeneratorJob

## get a handle on the default top-level algorithm sequence
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()

## get a handle on the ServiceManager
from AthenaCommon.AppMgr import ServiceMgr as svcMgr

#--------------------------------------------------------------
# Event related parameters
#--------------------------------------------------------------
from AthenaCommon.AppMgr import theApp
theApp.EvtMax = 20

#--------------------------------------------------------------
# Load POOL support
#--------------------------------------------------------------
import AthenaPoolCnvSvc.WriteAthenaPool
#Explicitly specify the output file catalog
svcMgr.PoolSvc.WriteCatalog = "xmlcatalog_file:Catalog2.xml"
# NOTE(review): trailing semicolon is redundant in Python.
svcMgr.AthenaPoolCnvSvc.CommitInterval = 10;

from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
Stream1 = AthenaPoolOutputStream( "Stream1", "ROOTTREE:SimplePoolFile5.root" )
Stream1.ItemList += [ "ExampleHitContainer#MyHits" ]
Stream1.MetadataItemList += [ "ExampleHitContainer#PedestalWriteData" ]

# The triple-quoted block below is deliberately disabled code that would
# add event-bookkeeping metadata; kept for reference.
"""
Stream1.MetadataItemList += [ "EventBookkeeperCollection#EventBookkeepers" ]
from EventBookkeeperTools.BookkeepingInfoWriter import EventBookkeepersWriter
EBWriter1 = EventBookkeepersWriter()
EBWriter1.setDoMC( False )
EBWriter1.setCycle( 0 )
EBWriter1.OutputCollectionName = "EventBookkeepers"
topSequence += EBWriter1
"""
#--------------------------------------------------------------
# Private Application Configuration options
#--------------------------------------------------------------
# Load "user algorithm" top algorithms to be run, and the libraries that house them
from AthenaPoolExampleAlgorithms.AthenaPoolExampleAlgorithmsConf import AthPoolEx__WriteData,AthPoolEx__WriteCond
topSequence += AthPoolEx__WriteData( "WriteData" )

# WriteCond stores its condition object in the MetaDataStore so it ends
# up in the file-level metadata rather than per-event payload.
from StoreGate.StoreGateConf import StoreGateSvc
topSequence += AthPoolEx__WriteCond( "WriteCond", DetStore = StoreGateSvc( "MetaDataStore" ),
                                     ConditionName = "PedestalWriteData" )

#--------------------------------------------------------------
# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL)
#--------------------------------------------------------------
svcMgr.MessageSvc.OutputLevel = 3
svcMgr.PoolSvc.OutputLevel = 2
svcMgr.AthenaPoolCnvSvc.OutputLevel = 2
topSequence.WriteData.OutputLevel = 2
Stream1.OutputLevel = 2

#
# End of job options file
#
###############################################################
| [
"rushioda@lxplus754.cern.ch"
] | rushioda@lxplus754.cern.ch |
5b52f7f306a5a2aaec29752ff685f73d5f23ce39 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_4/glnrus002/boxes.py | e4fe66cfc48ac2de962374281b9b70b12248e4d0 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | #boxes.p
#glnrus002
def print_square():
    """Print a fixed 5x5 hollow square of asterisks to stdout."""
    edge = "*" * 5
    print(edge)
    for _ in range(3):
        print("*   *")
    print(edge)
def print_rectangle(width, height):
    """Print a hollow width-by-height rectangle of asterisks to stdout.

    The first and last rows are solid; the height-2 middle rows have
    asterisks only at the two ends.
    """
    edge = "*" * width
    hollow = "*" + " " * (width - 2) + "*"
    print(edge)
    for _ in range(height - 2):
        print(hollow)
    print(edge)
def get_rectangle(width, height):
    """Return a hollow width-by-height asterisk rectangle as a string.

    Solid top and bottom edges, hollow middle rows; every row
    (including the last) is terminated by a newline.
    """
    edge = "*" * width
    hollow = "*" + " " * (width - 2) + "*"
    rows = [edge] + [hollow] * (height - 2) + [edge]
    return "\n".join(rows) + "\n"
| [
"jarr2000@gmail.com"
] | jarr2000@gmail.com |
230653d44e2ef94bab9b0d8fe6e66616e3e15c15 | 370c40babd60df6ef84b339a31fb7365ebe2400f | /toolkit/crashreporter/google-breakpad/src/tools/windows/dump_syms/moz.build | 6220ae3cf6b64ac4db3561aa3ca94cafa06230bd | [
"BSD-3-Clause",
"LicenseRef-scancode-unicode-mappings",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | roytam1/gecko-kaios | b676a1c9ae1c1412274a2327419660803f214ab9 | 2c1b5fe198e12edc4cb17a34ecbcceedc642c78e | refs/heads/master | 2023-05-10T16:50:43.590202 | 2019-01-11T07:55:14 | 2019-10-17T09:15:16 | 215,687,205 | 5 | 0 | NOASSERTION | 2023-05-07T02:37:27 | 2019-10-17T02:34:23 | null | UTF-8 | Python | false | false | 752 | build | # -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
HostProgram('dump_syms')
HOST_SOURCES += [
'../../../common/windows/dia_util.cc',
'../../../common/windows/guid_string.cc',
'../../../common/windows/omap.cc',
'../../../common/windows/pdb_source_line_writer.cc',
'../../../common/windows/string_utils.cc',
'dump_syms.cc',
]
HOST_CXXFLAGS += [
'-O2',
'-EHsc',
'-MD'
]
HOST_OS_LIBS += [
'diaguids',
'imagehlp'
]
LOCAL_INCLUDES += [
'../../..'
]
| [
"roytam@gmail.com"
] | roytam@gmail.com |
5a49851c993afa4acec0a0a96ccb39f922f00904 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_hazed.py | d38bc8abb99b6e274b895bcfa1cd01cd638c6422 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 224 | py |
from xai.brain.wordbase.verbs._haze import _HAZE
#calss header
class _HAZED(_HAZE, ):
	"""Word entry for "hazed", specialising the base verb entry _HAZE."""
	def __init__(self,):
		_HAZE.__init__(self)
		# name: this surface form; basic: the lemma it inflects from.
		self.name = "HAZED"
		self.specie = 'verbs'
		self.basic = "haze"
		# jsondata: extra per-word attributes; empty for this entry.
		self.jsondata = {}
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
8ac6d3cdb82c433b8d428c5e8a2d3cada473a505 | e982c42888da91e957aef4a67e339135918d25ec | /venv/Lib/site-packages/jinja2/testsuite/__init__.py | db816dfe9054cf235a1bc8af3306fa9a761cedb0 | [] | no_license | nikhil9856/kisan | c88f890d88e96dd718bd9cfaef41f3d40eb7b72d | 556e57427a2b9a91fcc4a44ca25706c49e790d73 | refs/heads/master | 2020-03-15T01:59:09.825264 | 2018-05-03T18:08:02 | 2018-05-03T18:08:02 | 131,904,962 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,797 | py | # -*- coding: utf-8 -*-
"""
jinja2.testsuite
~~~~~~~~~~~~~~~~
All the unittests of Jinja2. These tests can be executed by
either running run-tests.py using multiple Python versions at
the same time.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import os
import re
import sys
import unittest
from traceback import format_exception
from jinja2 import loaders
from jinja2._compat import PY2
# Directory of this test package, used to locate on-disk template fixtures.
here = os.path.dirname(os.path.abspath(__file__))

# Shared loader fixtures exercised by the individual test modules:
# one instance of every Jinja2 loader type, all exposing 'FOO'-style
# dummy templates.
dict_loader = loaders.DictLoader({
    'justdict.html': 'FOO'
})
package_loader = loaders.PackageLoader('jinja2.testsuite.res', 'templates')
filesystem_loader = loaders.FileSystemLoader(here + '/res/templates')
function_loader = loaders.FunctionLoader({'justfunction.html': 'FOO'}.get)
choice_loader = loaders.ChoiceLoader([dict_loader, package_loader])
prefix_loader = loaders.PrefixLoader({
    'a': filesystem_loader,
    'b': dict_loader
})
class JinjaTestCase(unittest.TestCase):
    """Base class for the Jinja2 test suite.

    Subclasses use the PEP8-style hooks and assertion aliases below
    instead of the camelCase unittest API.
    """

    def setup(self):
        """Per-test setup hook; override this instead of setUp."""
        pass

    def teardown(self):
        """Per-test teardown hook; override this instead of tearDown."""
        pass

    def setUp(self):
        self.setup()

    def tearDown(self):
        self.teardown()

    def assert_equal(self, a, b):
        """PEP8-style alias for assertEqual."""
        return self.assertEqual(a, b)

    def assert_raises(self, *args, **kwargs):
        """PEP8-style alias for assertRaises."""
        return self.assertRaises(*args, **kwargs)

    def assert_traceback_matches(self, callback, expected_tb):
        """Call *callback*, expect it to raise, and require the formatted
        traceback to match the regular expression *expected_tb*."""
        try:
            callback()
        except Exception as e:
            rendered = ''.join(format_exception(*sys.exc_info()))
            if re.search(expected_tb.strip(), rendered) is None:
                self.fail('Traceback did not match:\n\n%s\nexpected:\n%s'
                          % (rendered, expected_tb))
        else:
            self.fail('Expected exception')
def find_all_tests(suite):
    """Yields all the tests and their names from a given suite."""
    pending = [suite]
    while pending:
        node = pending.pop()
        try:
            # Suites are iterable; flatten them onto the work stack.
            pending.extend(node)
        except TypeError:
            # Leaf test case: not iterable, emit it with its dotted name.
            cls = node.__class__
            dotted = '.'.join((cls.__module__, cls.__name__,
                               node._testMethodName))
            yield node, dotted
class BetterLoader(unittest.TestLoader):
    """A nicer loader that solves two problems.  First of all we are setting
    up tests from different sources and we're doing this programmatically
    which breaks the default loading logic so this is required anyways.
    Secondly this loader has a nicer interpolation for test names than the
    default one so you can just do ``run-tests.py ViewTestCase`` and it
    will work.
    """

    def getRootSuite(self):
        # The complete suite assembled by this module's suite() factory.
        return suite()

    def loadTestsFromName(self, name, module=None):
        root = self.getRootSuite()
        if name == 'suite':
            return root

        # A test matches when `name` equals, prefixes, suffixes, or is an
        # interior component of the dotted test name.
        matches = [case for case, case_name in find_all_tests(root)
                   if case_name == name
                   or case_name.endswith('.' + name)
                   or ('.' + name + '.') in case_name
                   or case_name.startswith(name + '.')]

        if not matches:
            raise LookupError('could not find test case for "%s"' % name)
        if len(matches) == 1:
            return matches[0]
        combined = unittest.TestSuite()
        for case in matches:
            combined.addTest(case)
        return combined
def suite():
    """Assemble and return the full Jinja2 test suite."""
    from jinja2.testsuite import ext, filters, tests, core_tags, \
        loader, inheritance, imports, lexnparse, security, api, \
        regression, debug, utils, bytecode_cache, doctests

    # Order matters only for readability; each module contributes its
    # own sub-suite via a module-level suite() factory.
    modules = [ext, filters, tests, core_tags, loader, inheritance,
               imports, lexnparse, security, api, regression, debug,
               utils, bytecode_cache]

    result = unittest.TestSuite()
    for module in modules:
        result.addTest(module.suite())

    # doctests will not run on python 3 currently.  Too many issues
    # with that, do not test that on that platform.
    if PY2:
        result.addTest(doctests.suite())
    return result
def main():
    """Runs the testsuite as command line application."""
    try:
        # BetterLoader resolves shorthand test names; 'suite' runs everything.
        unittest.main(testLoader=BetterLoader(), defaultTest='suite')
    except Exception as e:
        # unittest.main exits via SystemExit (not caught here, as it is not
        # an Exception subclass); any other failure is reported instead.
        print('Error: %s' % e)
| [
"kumar.nikhil110@gmail.com"
] | kumar.nikhil110@gmail.com |
36f6b770385698e68c30bc1c36d2fb9bf80814a6 | 9f2445e9a00cc34eebcf3d3f60124d0388dcb613 | /2019-11-27-Parametersearch_again/MOOSEModel.py | 028bd96d9a3b9c8f7b6929e864f3aac0a67af60a | [] | no_license | analkumar2/Thesis-work | 7ee916d71f04a60afbd117325df588908518b7d2 | 75905427c2a78a101b4eed2c27a955867c04465c | refs/heads/master | 2022-01-02T02:33:35.864896 | 2021-12-18T03:34:04 | 2021-12-18T03:34:04 | 201,130,673 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,307 | py | #exec(open('MOOSEModel.py').read())
#Has been modified heavily to be used in Parametersearch_again folder. Do not use as an api
import moose
import rdesigneur as rd
import numpy as np
import matplotlib.pyplot as plt
import xmltodict
def parameterfile_parser(Parameterfile):
    """Parse an XML model parameter file into rdesigneur keyword arguments.

    Arguments:
    Parameterfile -- path to a valid model-parameter XML file, string

    Returns a dict with keys 'cellProto', 'chanProto', 'chanDistrib' and
    'passiveDistrib' (rdesigneur prototype/distribution lists) plus
    'Ca_B' (the calcium buffering constant, float).
    """
    # (Removed unused locals `depth` and `F` from the original version.)
    with open(Parameterfile) as fd:
        Model = xmltodict.parse(fd.read())

    segment = Model['Model']['segment']

    # Single-compartment soma prototype: [protoFunc, name, diameter, length].
    cellProto = [['somaProto', 'soma',
                  float(segment['Morphology']['@sm_diam']),
                  float(segment['Morphology']['@sm_len'])]]

    chanProto = []
    chanDistrib = []
    chd = segment['Channels']
    for channel in chd:
        # '@kinetics' ends with a two-character suffix that is stripped
        # and replaced by the channel name to form the prototype call.
        chanProto.append([chd[channel]['@kinetics'][:-2] + channel + '()',
                          channel])
        chanDistrib.append([channel, 'soma', 'Gbar', chd[channel]['@gbar']])

    # Calcium pool prototype and its distribution parameters.
    ca = segment['Ca_Conc']
    chanProto.append([ca['@kinetics'][:-2] + 'Ca_Conc()', 'Ca_conc'])
    chanDistrib.append(['Ca_conc', 'soma',
                        'CaBasal', ca['@Ca_inf'],
                        'tau', ca['@Ca_tau']])

    passive = segment['Passive']
    passiveDistrib = [['soma', 'RM', passive['@RM'], 'CM', passive['@CM'],
                       'initVm', str(-0.065), 'Em', passive['@Em']]]

    Parameters = {
        'cellProto': cellProto,
        'chanProto': chanProto,
        'chanDistrib': chanDistrib,
        'passiveDistrib': passiveDistrib,
        'Ca_B': float(ca['@Ca_B']),
    }
    return Parameters
def generateModel(Parameterfile, CurrInjection):
    """Build the single-compartment MOOSE model for a current-clamp run.

    Arguments:
    Parameterfile -- A valid parameterfile address, string
    CurrInjection -- Current clamp level, float (amperes, presumably -- TODO confirm)

    Returns the constructed rdesigneur object (model built, not yet run).
    """
    Parameters = parameterfile_parser(Parameterfile)
    # Solver/plot time steps and the pre-stim / inject / post-stim protocol
    # (these durations must match the ones used in runModel/plotModel).
    elecPlotDt = 0.0001
    elecDt = 0.00005
    depth = 0.1
    preStimTime = 1
    injectTime = 0.5
    postStimTime = 1
    # Best-effort teardown of any previously built model; the bare except
    # deliberately ignores the case where '/model' does not exist yet.
    try:
        # [moose.delete(x) for x in ['/model', '/library']]
        moose.delete('/model')
    except:
        pass
    rdes = rd.rdesigneur(
        elecPlotDt = elecPlotDt,
        elecDt = elecDt,
        cellProto = Parameters['cellProto'],
        chanProto = Parameters['chanProto'],
        passiveDistrib = Parameters['passiveDistrib'],
        chanDistrib = Parameters['chanDistrib'],
        # Square current pulse between preStimTime and preStimTime+injectTime.
        stimList = [['soma', '1', '.', 'inject', f'(t>={preStimTime} && t<={preStimTime+injectTime}) ? {CurrInjection} : 0']],
        plotList = [
            ['soma', '1', '.', 'Vm', 'Soma Membrane potential MOOSE'],
        ],
    )
    rdes.buildModel()
    #Setup clock table to record time
    clk = moose.element('/clock')
    plott = moose.Table('/model/graphs/plott')
    moose.connect(plott, 'requestOut', clk, 'getCurrentTime')
    #Setting Ca_conc B value; best-effort because the model may have no
    #calcium pool (bare except deliberately swallows that case).
    try:
        moose.element('/model/elec/soma/Ca_conc').B = Parameters['Ca_B']
        # moose.element('/model/elec/soma/Ca_conc').B *= 2
        # moose.element('/model/elec/soma/Ca_conc').B = 0
    except:
        pass
    print('MOOSE Model generated')
    return rdes
def runModel(Parameterfile, CurrInjection):
    """Build and simulate the model; return [time_vector, Vm_vector].

    The protocol is 1 s pre-stim, 0.5 s injection, 1 s post-stim, matching
    the stimList expression set up in generateModel.
    """
    preStimTime = 1
    injectTime = 0.5
    postStimTime = 1
    generateModel(Parameterfile, CurrInjection)
    moose.reinit()
    moose.start(preStimTime+injectTime+postStimTime)

    # plot0 holds soma Vm; plott holds the recorded simulation time axis.
    Vmvec=moose.element('/model/graphs/plot0').vector
    tvec=moose.element('/model/graphs/plott').vector
    return [tvec, Vmvec]
def plotModel(Parameterfile, CurrInjection):
    """Build, simulate, and display the model's plots; return the rdesigneur.

    Arguments:
    Parameterfile -- A valid parameterfile address, string
    CurrInjection -- Current clamp level, float
    """
    preStimTime = 1
    injectTime = 0.5
    postStimTime = 1
    rdes = generateModel(Parameterfile, CurrInjection)
    moose.reinit()
    moose.start(preStimTime+injectTime+postStimTime)
    # rdesigneur renders everything registered in plotList.
    rdes.display()
    return rdes
if __name__ == '__main__':
    # Demo: display and then re-run a 150 pA current clamp on the dummy model.
    rdes = plotModel('Modelparameters/dummyModel.xml', 150e-12)
    tvec, Vmvec = runModel('Modelparameters/dummyModel.xml', 150e-12)
| [
"analkumar2@gmail.com"
] | analkumar2@gmail.com |
80dd01870ac9e112e4604f30ffbae9f5908ff053 | 0b9622c6d67ddcb252a7a4dd9b38d493dfc9a25f | /HackerRank/30daysChallenge/Day17.py | 8901c15a41dedb01041ed65199ea7db65b6992e5 | [] | no_license | d80b2t/python | eff2b19a69b55d73c4734fb9bc115be1d2193e2d | 73603b90996221e0bcd239f9b9f0458b99c6dc44 | refs/heads/master | 2020-05-21T20:43:54.501991 | 2017-12-24T12:55:59 | 2017-12-24T12:55:59 | 61,330,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,767 | py | """
Objective: Yesterday's challenge taught you to manage exceptional situations by using try and catch blocks. In today's challenge, you're going to practice throwing and propagating an exception. Check out the Tutorial tab for learning materials and an instructional video!
Task: Write a Calculator class with a single method: int power(int,int). The power method takes two integers, n and p, as parameters and returns the integer result of n^p. If either n or p is negative, then the method must throw an exception with the message: n and p should be non-negative.
Note: Do not use an access modifier (e.g.: public) in the declaration for your Calculator class.
Input Format: Input from stdin is handled for you by the locked stub code in your editor. The first line contains an integer, , the number of test cases. Each of the subsequent lines describes a test case in space-separated integers denoting and , respectively.
Constraints: No Test Case will result in overflow for correctly written code.
Output Format: Output to stdout is handled for you by the locked stub code in your editor. There are lines of output, where each line contains the result of as calculated by your Calculator class' power method.
Sample Input:
4
3 5
2 4
-1 -2
-1 3
Sample Output
243
16
n and p should be non-negative
n and p should be non-negative
"""
class Calculator:
    """Computes integer powers, rejecting negative operands."""

    def power(self, n, p):
        """Return n ** p; raise Exception when n or p is negative."""
        self.n = n
        self.p = p
        if n >= 0 and p >= 0:
            return n ** p
        raise Exception("n and p should be non-negative")
# HackerRank driver (locked-stub style): the first stdin line gives the
# number of test cases; each following line holds n and p. Print n**p,
# or the validation message when power() raises.
myCalculator=Calculator()
T=int(input())
for i in range(T):
    n,p = map(int, input().split())
    try:
        ans=myCalculator.power(n,p)
        print(ans)
    except Exception as e:
        print(e)
| [
"npross@lbl.gov"
] | npross@lbl.gov |
7a5991c57222dafdfee32c8d59345b334061e4ce | 423f5eb4cf319ea11701ad2c84c045eeeb4e261c | /class-29/demo/custom_user/users/views.py | 643b390ee821ab3200991c48ec7ea530854d7bea | [] | no_license | MsDiala/amman-python-401d2 | 4031899d0a8d70f1ecd509e491b2cb0c63c23a06 | bb9d102da172f51f6df7371d2208146bbbee72fb | refs/heads/master | 2023-06-14T18:46:04.192199 | 2021-07-12T12:48:44 | 2021-07-12T12:48:44 | 315,765,421 | 1 | 0 | null | 2020-11-24T22:16:10 | 2020-11-24T22:16:09 | null | UTF-8 | Python | false | false | 350 | py | from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic.edit import CreateView
from .forms import CustomUserCreationForm
# Create your views here.
class SignUpView(CreateView):
    """Render and process the sign-up form for the custom user model."""
    form_class = CustomUserCreationForm
    # reverse_lazy: the URLconf is not loaded yet when this module imports.
    success_url = reverse_lazy('login')
    template_name = 'registration/signup.html'
| [
"ahmad.alawad.sf@gmail.com"
] | ahmad.alawad.sf@gmail.com |
1f81c51bf9b448533ea259031e5a3032efc5929b | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/tags/2009.1/kernel/default/drivers/module-alsa-driver/actions.py | fad2a69a934d6867aa230a8c09e48cc680620014 | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,133 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2005-2009 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
from pisi.actionsapi import kerneltools
# Version string of the kernel the modules are built against.
KDIR = kerneltools.getKernelVersion()

# Paths excluded from binary stripping; "/" presumably covers all
# installed files (pisi actionsapi convention -- TODO confirm).
NoStrip = ["/"]

# Snapshot tarballs unpack to a bare "alsa-driver" directory; upstream
# releases unpack to a versioned one.
if "_" in get.srcVERSION():
    # Snapshot
    WorkDir = "alsa-driver"
else:
    # Upstream tarball
    WorkDir = "alsa-driver-%s" % get.srcVERSION()
def setup():
    """Configure the ALSA driver build against the current kernel tree."""
    autotools.configure("--with-oss \
                         --with-kernel=/lib/modules/%s/build \
                         --with-isapnp=yes \
                         --with-sequencer=yes \
                         --with-card-options=all \
                         --disable-verbose-printk \
                         --with-cards=all" % KDIR)

    # Needed for V4L stuff
    shelltools.sym("%s/alsa-driver/include/config.h" % get.workDIR(), "%s/alsa-driver/sound/include/config.h" % get.workDIR())
    shelltools.sym("%s/alsa-driver/include/config1.h" % get.workDIR(), "%s/alsa-driver/sound/include/config1.h" % get.workDIR())
shelltools.sym("%s/alsa-driver/include/config1.h" % get.workDIR(), "%s/alsa-driver/sound/include/config1.h" % get.workDIR())
def build():
    """Build the ALSA modules, then the V4L drivers as out-of-tree modules."""
    autotools.make()

    # Build v4l drivers (needs the freshly generated Module.symvers).
    shelltools.copy("Module.symvers", "v4l/")
    autotools.make("-C /lib/modules/%s/build M=%s/v4l V=1 modules" % (KDIR, get.curDIR()))
def install():
    """Install kernel modules, V4L drivers, symvers, alsa-info and docs."""
    autotools.rawInstall("DESTDIR=%s" % get.installDIR(), "install-modules")

    # Install v4l drivers
    for d in ["saa7134", "cx88", "cx231xx", "em28xx"]:
        pisitools.insinto("/lib/modules/%s/kernel/sound/drivers" % KDIR, "v4l/%s/*.ko" % d)

    # Copy symvers file for external module building like saa7134-alsa, cx2388-alsa, etc.
    pisitools.insinto("/lib/modules/%s/kernel/sound" % KDIR, "Module.symvers", "Module.symvers.alsa")

    # Install alsa-info
    pisitools.insinto("/usr/bin", "utils/alsa-info.sh", "alsa-info")

    for f in shelltools.ls("alsa-kernel/Documentation/*txt"):
        pisitools.dodoc(f)
    pisitools.dodoc("doc/serialmidi.txt")
| [
"yusuf.aydemir@istanbul.com"
] | yusuf.aydemir@istanbul.com |
dfcf294e0b1aa993aa455bf04d608367573ead1d | 9dea14a0818dfd318b238b6c98c0e753d162896d | /venv/Scripts/pip-script.py | 8bf9870477e1fde5d687039f19bd2a794435a9c4 | [] | no_license | CatchTheDog/data_visualization | a665094ef2e64502992d0de65ddd09859afb756b | 326ecd421a7945e9566bec17bd4db18b86a6e5a4 | refs/heads/master | 2020-03-31T08:08:22.544009 | 2018-10-09T05:36:02 | 2018-10-09T05:36:02 | 152,047,286 | 0 | 0 | null | null | null | null | GB18030 | Python | false | false | 438 | py | #!C:\马俊强\软件安装\pycharm\workspace\data_visualization\venv\Scripts\python.exe -x
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py(w)"/".exe" wrapper suffix so pip reports its
    # plain program name, then invoke pip's console-script entry point and
    # exit with its return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
    )
| [
"1924528451@qq.com"
] | 1924528451@qq.com |
4e8bf5c534c923a03e4af6dd7d56b15d1dc3a6cb | 6710c52d04e17facbc9fb35a7df313f7a2a7bd53 | /0137. Single Number II.py | b870ddfce211866e34fd0523d542c8aa1ff14c5a | [] | no_license | pwang867/LeetCode-Solutions-Python | 535088fbe747a453360457728cc22cf336020bd2 | 188befbfb7080ba1053ee1f7187b177b64cf42d2 | refs/heads/master | 2022-11-13T16:20:28.211707 | 2020-06-28T06:01:14 | 2020-06-28T06:01:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 866 | py | # https://leetcode.com/problems/single-number-ii/discuss/43295
# /Detailed-explanation-and-generalization-of-the-bitwise-operation-method-for-single-numbers
class Solution(object):
    """LeetCode 137: every element appears three times except one."""

    def singleNumber(self, nums):
        """Return the element of nums that appears exactly once.

        :type nums: List[int]
        :rtype: int

        Bit-counting with two accumulators: `ones` holds bits seen
        1 (mod 3) times, `twos` bits seen 2 (mod 3) times; a bit seen a
        third time is cleared from both, so only the singleton survives.
        """
        ones = twos = 0
        for value in nums:
            twos ^= ones & value
            ones ^= value
            thrice = ~(ones & twos)
            ones &= thrice
            twos &= thrice
        return ones | twos
"""
Given a non-empty array of integers, every element appears three times except for one, which appears exactly once. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
Example 1:
Input: [2,2,3,2]
Output: 3
Example 2:
Input: [0,1,0,1,0,1,99]
Output: 99
"""
| [
"wzhou007@ucr.edu"
] | wzhou007@ucr.edu |
d9dcba660bcd03f1948e39ddf267d4040d3cd0c9 | 0eeeb14c1f5952a9d9c7b3bc13e708d2bf2a17f9 | /algos/make_graph.py | 5a5e1869f8eb8d02385257c0dcb4982bf31b0611 | [] | no_license | mbhushan/pydev | 6cc90140e1103c5d5e52e55287d02ed79d1a5c36 | bdf84710da87f58253cfec408c728f6a9134a2ea | refs/heads/master | 2020-04-25T07:22:03.281719 | 2015-12-22T16:28:42 | 2015-12-22T16:28:42 | 26,382,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 428 | py |
def make_link(graph, n1, n2):
    """Record an undirected edge n1--n2 in an adjacency-dict graph.

    graph maps node -> {neighbor: 1}; the edge is stored in both
    directions. Mutates graph in place and also returns it.
    """
    graph.setdefault(n1, {})[n2] = 1
    graph.setdefault(n2, {})[n1] = 1
    return graph
def main():
    # Build a 5-node ring: node i links to its successor (i+1) mod n.
    # NOTE(review): the print statements below are Python 2 syntax.
    aring = {}
    n = 5
    for i in range(n):
        make_link(aring, i, (i+1) % n)
    # Node count, edge count (each edge is stored twice, once per
    # endpoint, hence the division by 2), then the adjacency dict itself.
    print len(aring)
    print sum([len(aring[node]) for node in aring.keys()])/2
    print aring

if __name__ == '__main__':
    main()
| [
"manibhushan.cs@gmail.com"
] | manibhushan.cs@gmail.com |
67af6746f4006cdd0dbaf50e42e3b97229cf2e3d | f016dd6fd77bb2b135636f904748dbbab117d78b | /day9/异常处理.py | c6560f0dae933a2be1434adeda9c293a01f7fbb5 | [
"Apache-2.0"
] | permissive | w7374520/Coursepy | b3eddfbeeb475ce213b6f627d24547a1d36909d8 | ac13f8c87b4c503135da51ad84c35c745345df20 | refs/heads/master | 2020-04-26T23:57:42.882813 | 2018-05-24T07:54:13 | 2018-05-24T07:54:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | #!/usr/bin/python
# -*- coding utf8 -*-
# Demonstrates try/except/else/finally ordering: handlers are tested
# top to bottom, so the broad Exception clause must come last.
try:
    int('xx')
except ValueError as e:
    # int('xx') raises ValueError, so this handler runs.
    print(e)
except NameError:
    print('NameError')
# catch-all handler: Exception matches (nearly) every error
except Exception as e:
    print(e)
else:
    # runs only when the try block raised nothing
    print('木有异常')
finally:
    print('有无异常都会执行,finally通常用于回收资源')

'''

'''
| [
"windfishing5@gmail.com"
] | windfishing5@gmail.com |
f4e017906f223cb2f03aaf41d50c5683a986308a | 48832d27da16256ee62c364add45f21b968ee669 | /res_bw/scripts/common/lib/encodings/undefined.py | 033836eb5d544ca83ab1beb3ac718d1751bd755f | [] | no_license | webiumsk/WOT-0.9.15.1 | 0752d5bbd7c6fafdd7f714af939ae7bcf654faf7 | 17ca3550fef25e430534d079876a14fbbcccb9b4 | refs/heads/master | 2021-01-20T18:24:10.349144 | 2016-08-04T18:08:34 | 2016-08-04T18:08:34 | 64,955,694 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,550 | py | # 2016.08.04 19:59:24 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/encodings/undefined.py
""" Python 'undefined' Codec
This codec will always raise a ValueError exception when being
used. It is intended for use by the site.py file to switch off
automatic string to Unicode coercion.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
class Codec(codecs.Codec):
def encode(self, input, errors = 'strict'):
raise UnicodeError('undefined encoding')
def decode(self, input, errors = 'strict'):
raise UnicodeError('undefined encoding')
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final = False):
raise UnicodeError('undefined encoding')
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final = False):
raise UnicodeError('undefined encoding')
class StreamWriter(Codec, codecs.StreamWriter):
pass
class StreamReader(Codec, codecs.StreamReader):
pass
def getregentry():
return codecs.CodecInfo(name='undefined', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamwriter=StreamWriter, streamreader=StreamReader)
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\encodings\undefined.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.08.04 19:59:24 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
62564c3fc592a5f84af5bf01c6625f58d819f220 | 153c943de6095aaf5803b8f452af362c4a9c66e8 | /blogger-business/business/migrations/0002_business_image.py | 2ed7550f54199dd1a44e6e2e81c8d315fb5b8d5d | [] | no_license | 3asyPe/blogger-business | 428427a0832f77c45914e737d6408d9051b57b42 | 6b9b1edefd2700b554a5a26d29dfe5158ca4861b | refs/heads/master | 2023-03-03T11:09:39.813603 | 2021-01-29T11:29:45 | 2021-01-29T11:29:45 | 304,942,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | # Generated by Django 3.1 on 2020-10-18 15:03
import business.utils
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('business', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='business',
name='image',
field=models.ImageField(blank=True, null=True, upload_to=business.utils.upload_image_path_business),
),
]
| [
"alex.kvasha228@gmail.com"
] | alex.kvasha228@gmail.com |
79ef43737c6204f12ad8e05708929563a5c2dca0 | cb6b1aa2d61b80cba29490dfe8755d02c7b9a79f | /sakura/scripting/mathops.py | 34da3e965be7c239e56fd4fa19d2a3961dbc79dc | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | piotrmaslanka/Ninja-Tower | c127a64888bc3306046e4b400ce3a8c6764b5481 | 7eca86e23513a8805dd42c3c542b7fae0499576b | refs/heads/master | 2021-12-06T07:56:13.796922 | 2015-10-15T08:10:35 | 2015-10-15T08:10:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from __future__ import division
from math import hypot
def vector_towards(sx, sy, tx, ty, ln):
vdif = tx - sx, ty - sy
vlen = hypot(*vdif)
if vlen == 0: return # Cannot accelerate nowhere!
return vdif[0]*ln/vlen, vdif[1]*ln/vlen
| [
"piotr.maslanka@henrietta.com.pl"
] | piotr.maslanka@henrietta.com.pl |
29f06f178db222814e1a5e134ac9f3129211a536 | 8683aed87d25177f9a0a026c5c48fb412745ff89 | /ledger/payments/bpoint/models.py | ad000b68b8caecf613dc23fca0b703ea2b22e7b5 | [
"Apache-2.0"
] | permissive | gaiaresources/ledger | 2b1e9bfee14abbfbc993c5c62f60589a6eaf0a86 | b87c4bf4226e3e2b2f0bc24303f5d107f94f134e | refs/heads/master | 2020-04-05T20:50:06.727970 | 2017-07-18T02:16:33 | 2017-07-18T02:16:33 | 51,276,638 | 2 | 0 | null | 2017-07-13T07:09:15 | 2016-02-08T02:51:55 | Python | UTF-8 | Python | false | false | 4,127 | py | from __future__ import unicode_literals
import datetime
from django.db import models
from ledger.payments.bpoint import settings as bpoint_settings
from django.utils.encoding import python_2_unicode_compatible
from oscar.apps.order.models import Order
from ledger.accounts.models import EmailUser
class BpointTransaction(models.Model):
ACTION_TYPES = (
('payment','payment'),
('refund','refund'),
('reversal','reversal'),
('preauth', 'preauth'),
('capture','capture')
)
CARD_TYPES = (
('AX','American Express'),
('DC','Diners Club'),
('JC','JCB Card'),
('MC','MasterCard'),
('VC','Visa')
)
SUB_TYPES = (
('single','single'),
('recurring','recurring')
)
TRANSACTION_TYPES = (
('callcentre','callcentre'),
('cardpresent','cardpresent'),
('ecommerce','ecommerce'),
('internet', 'internet'),
('ivr','ivr'),
('mailorder','mailorder'),
('telephoneorder','telephoneorder')
)
created = models.DateTimeField(auto_now_add=True)
action = models.CharField(max_length=20, choices=ACTION_TYPES)
amount = models.DecimalField(decimal_places=2,max_digits=12)
amount_original = models.DecimalField(decimal_places=2,max_digits=12)
amount_surcharge = models.DecimalField(default=0,decimal_places=2,max_digits=12)
cardtype = models.CharField(max_length=2, choices=CARD_TYPES, blank=True, null=True)
crn1 = models.CharField(max_length=50, help_text='Reference for the order that the transaction was made for')
response_code = models.CharField(max_length=50)
response_txt = models.CharField(max_length=128)
receipt_number = models.CharField(max_length=50)
processed = models.DateTimeField()
settlement_date = models.DateField(blank=True, null=True)
type = models.CharField(max_length=50, choices=TRANSACTION_TYPES)
# store the txn number from Bpoint
txn_number = models.CharField(unique=True, max_length=128, help_text='Transaction number used by BPOINT to identify a transaction')
original_txn = models.ForeignKey('self', to_field='txn_number', blank=True, null=True, help_text='Transaction number stored \
if current transaction depends on a previous transaction \
in the case where the action is a refund, reversal or capture')
class Meta:
ordering = ('-created',)
def __unicode__(self):
return self.txn_number
@property
def approved(self):
return self.response_code == "0"
@property
def order(self):
from ledger.payments.models import Invoice
return Order.objects.get(number=Invoice.objects.get(reference=self.crn1).order_number)
class TempBankCard(object):
def __init__(self,card_number,expiry_date,ccv=None):
self.number=card_number
self.expiry_date=datetime.datetime.strptime(expiry_date, '%m%y').date()
self.ccv=ccv
class BpointToken(models.Model):
CARD_TYPES = (
('AX','American Express'),
('DC','Diners Club'),
('JC','JCB Card'),
('MC','MasterCard'),
('VC','Visa')
)
user = models.ForeignKey(EmailUser, related_name='stored_cards')
DVToken = models.CharField(max_length=128)
masked_card = models.CharField(max_length=50)
expiry_date = models.DateField()
card_type = models.CharField(max_length=2, choices=CARD_TYPES, blank=True, null=True)
class Meta:
unique_together = ('user', 'masked_card','expiry_date','card_type')
@property
def last_digits(self):
return self.masked_card[-4:]
@property
def bankcard(self):
return TempBankCard(
self.DVToken,
self.expiry_date.strftime("%m%y")
)
def delete(self):
UsedBpointToken.objects.create(DVToken=self.DVToken)
super(BpointToken,self).delete()
class UsedBpointToken(models.Model):
added = models.DateTimeField(auto_now_add=True)
DVToken = models.CharField(max_length=128) | [
"ndwigabrian@gmail.com"
] | ndwigabrian@gmail.com |
620633147ca9c1b734abc81c4bb02677d7ee96b3 | 6ef4df3f0ecdfd4b880c49e0fc057a98931f9b2e | /marketsim/gen/_out/observable/Cumulative/_StdDev.py | 95ae75ca2166e6b0d23dd434bec57674b4d5975e | [] | no_license | Courvoisier13/marketsimulator | b8be5e25613d8c4f3ede1c47276cb6ad94637ca8 | 3ab938e2b3c980eaba8fc0dba58a141041125291 | refs/heads/master | 2021-01-18T02:17:54.153078 | 2013-12-19T15:09:32 | 2013-12-19T15:09:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,206 | py | from marketsim import registry
from marketsim.ops._function import Function
from marketsim import IObservable
from marketsim.gen._out.mathops._Sqrt import Sqrt
from marketsim.gen._out.observable.Cumulative._Var import Var
from marketsim import context
@registry.expose(["Statistics", "StdDev"])
class StdDev(Function[float]):
"""
"""
def __init__(self, source = None):
from marketsim.gen._out._const import const
self.source = source if source is not None else const()
self.impl = self.getImpl()
@property
def label(self):
return repr(self)
_properties = {
'source' : IObservable
}
def __repr__(self):
return "\\sqrt{\\sigma^2_{cumul}_{%(source)s}}" % self.__dict__
_internals = ['impl']
@property
def attributes(self):
return {}
def getImpl(self):
return Sqrt(Var(self.source))
def bind(self, ctx):
self._ctx = ctx.clone()
def reset(self):
self.impl = self.getImpl()
ctx = getattr(self, '_ctx', None)
if ctx: context.bind(self.impl, ctx)
def __call__(self, *args, **kwargs):
return self.impl()
| [
"anton.kolotaev@gmail.com"
] | anton.kolotaev@gmail.com |
89bbdcd7dd4f1ba5699d55df1f50ae760994fedd | 45c3624f0fd45167357c37aaf3912d77e83aaffc | /baseApp/migrations/0007_auto_20200112_1937.py | 2f97576659fbd3cffc1de19653ec90f8106d4d61 | [] | no_license | kaustubh619/AllianceDjango | c15b959d3362b4f2e7fec7bb68b09e86d7fc9e1f | 61d33e8b8ee8a8245d1a9227d5f8ff8d39507450 | refs/heads/master | 2020-12-10T14:58:50.785990 | 2020-01-13T17:27:17 | 2020-01-13T17:27:17 | 233,626,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | # Generated by Django 2.2.4 on 2020-01-12 14:07
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('baseApp', '0006_packagequery'),
]
operations = [
migrations.AlterField(
model_name='packagequery',
name='package',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='baseApp.Packages'),
),
]
| [
"kaustubhkrishna9031@gmail.com"
] | kaustubhkrishna9031@gmail.com |
9bbca24acbdb33b693c0cfe366ca75c6eda5c2cb | ecd2aa3d12a5375498c88cfaf540e6e601b613b3 | /Facebook/Pro102. Binary Tree Level Order Traversal.py | f3281376de4d813a8b2c1a50aaab5967a6e63165 | [] | no_license | YoyinZyc/Leetcode_Python | abd5d90f874af5cd05dbed87f76885a1ca480173 | 9eb44afa4233fdedc2e5c72be0fdf54b25d1c45c | refs/heads/master | 2021-09-05T17:08:31.937689 | 2018-01-29T21:57:44 | 2018-01-29T21:57:44 | 103,157,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
from collections import deque
class Solution(object):
def levelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
"""
ans = []
q1 = deque()
q2 = deque()
q1.append(root)
l = []
while q1:
node = q1.popleft()
if node:
l.append(node.val)
q2.append(node.left)
q2.append(node.right)
if not q1:
temp = q2
q2 = q1
q1 = temp
if l:
ans.append(l)
l = []
return ans
| [
"yoyinzyc@gmail.com"
] | yoyinzyc@gmail.com |
a5664b4f4645595f91b6599c4663995f72bf40e4 | 553b34a101c54090e68f540d96369ac7d5774d95 | /python/python_koans/python2/koans/about_class_attributes.py | 7764eabd335da8b297a8638233b84f4d0ea2f652 | [
"MIT"
] | permissive | topliceanu/learn | fd124e1885b5c0bfea8587510b5eab79da629099 | 1c5b1433c3d6bfd834df35dee08607fcbdd9f4e3 | refs/heads/master | 2022-07-16T19:50:40.939933 | 2022-06-12T15:40:20 | 2022-06-12T15:40:20 | 21,684,180 | 26 | 12 | MIT | 2020-03-26T20:51:35 | 2014-07-10T07:22:17 | JavaScript | UTF-8 | Python | false | false | 5,195 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutClassMethods in the Ruby Koans
#
from runner.koan import *
class AboutClassAttributes(Koan):
class Dog(object):
pass
def test_new_style_class_objects_are_objects(self):
# Note: Old style class instances are not objects but they are being
# phased out in Python 3.
fido = self.Dog()
self.assertEqual(True, isinstance(fido, object))
def test_classes_are_types(self):
self.assertEqual(True, self.Dog.__class__ == type)
def test_classes_are_objects_too(self):
self.assertEqual(True, issubclass(self.Dog, object))
def test_objects_have_methods(self):
fido = self.Dog()
self.assertEqual(18, len(dir(fido)))
def test_classes_have_methods(self):
self.assertEqual(18, len(dir(self.Dog)))
def test_creating_objects_without_defining_a_class(self):
singularity = object()
self.assertEqual(15, len(dir(singularity)))
def test_defining_attributes_on_individual_objects(self):
fido = self.Dog()
fido.legs = 4
self.assertEqual(4, fido.legs)
def test_defining_functions_on_individual_objects(self):
fido = self.Dog()
fido.wag = lambda: 'fidos wag'
self.assertEqual('fidos wag', fido.wag())
def test_other_objects_are_not_affected_by_these_singleton_functions(self):
fido = self.Dog()
rover = self.Dog()
def wag():
return 'fidos wag'
fido.wag = wag
try:
rover.wag()
except Exception as ex:
self.assertMatch("'Dog' object has no attribute 'wag'", ex[0])
# ------------------------------------------------------------------
class Dog2(object):
def wag(self):
return 'instance wag'
def bark(self):
return "instance bark"
def growl(self):
return "instance growl"
@staticmethod
def bark():
return "staticmethod bark, arg: None"
@classmethod
def growl(cls):
return "classmethod growl, arg: cls=" + cls.__name__
def test_like_all_objects_classes_can_have_singleton_methods(self):
self.assertMatch("classmethod growl, arg: cls=Dog2", self.Dog2.growl())
def test_classmethods_are_not_independent_of_instance_methods(self):
fido = self.Dog2()
self.assertMatch("classmethod growl, arg: cls=Dog2", fido.growl())
self.assertMatch("classmethod growl, arg: cls=Dog2", self.Dog2.growl())
def test_staticmethods_are_unbound_functions_housed_in_a_class(self):
self.assertMatch("staticmethod bark, arg: None", self.Dog2.bark())
def test_staticmethods_also_overshadow_instance_methods(self):
fido = self.Dog2()
self.assertMatch("staticmethod bark, arg: None", fido.bark())
# ------------------------------------------------------------------
class Dog3(object):
def __init__(self):
self._name = None
def get_name_from_instance(self):
return self._name
def set_name_from_instance(self, name):
self._name = name
@classmethod
def get_name(cls):
return cls._name
@classmethod
def set_name(cls, name):
cls._name = name
name = property(get_name, set_name)
name_from_instance = property(
get_name_from_instance, set_name_from_instance)
def test_classmethods_can_not_be_used_as_properties(self):
fido = self.Dog3()
try:
fido.name = "Fido"
except Exception as ex:
self.assertMatch("'classmethod' object is not callable", ex[0])
def test_classes_and_instances_do_not_share_instance_attributes(self):
fido = self.Dog3()
fido.set_name_from_instance("Fido")
fido.set_name("Rover")
self.assertEqual('Fido', fido.get_name_from_instance())
self.assertEqual('Rover', self.Dog3.get_name())
def test_classes_and_instances_do_share_class_attributes(self):
fido = self.Dog3()
fido.set_name("Fido")
self.assertEqual('Fido', fido.get_name())
self.assertEqual('Fido', self.Dog3.get_name())
# ------------------------------------------------------------------
class Dog4(object):
def a_class_method(cls):
return 'dogs class method'
def a_static_method():
return 'dogs static method'
a_class_method = classmethod(a_class_method)
a_static_method = staticmethod(a_static_method)
def test_you_can_define_class_methods_without_using_a_decorator(self):
self.assertEqual('dogs class method', self.Dog4.a_class_method())
def test_you_can_define_static_methods_without_using_a_decorator(self):
self.assertEqual('dogs static method', self.Dog4.a_static_method())
# ------------------------------------------------------------------
def test_you_can_explicitly_call_class_methods_from_instance_methods(self):
fido = self.Dog4()
self.assertEqual('dogs class method', fido.__class__.a_class_method())
| [
"alexandru.topliceanu@gmail.com"
] | alexandru.topliceanu@gmail.com |
82c9712218f271eea11ef452affd3917de8e6229 | 52a32a93942b7923b7c0c6ca5a4d5930bbba384b | /unittests/tools/test_mobsfscan_parser.py | 76e805852a9ecf97e3cb18161944979530a92095 | [
"MIT-open-group",
"GCC-exception-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LGPL-3.0-only",
"GPL-3.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-3.0-or-later",
"IJG",
"Zlib",
"LicenseRef-scancode-proprietary-license",
"PSF-2.0",
"LicenseRef-scancode-python-cwi... | permissive | DefectDojo/django-DefectDojo | 43bfb1c728451335661dadc741be732a50cd2a12 | b98093dcb966ffe972f8719337de2209bf3989ec | refs/heads/master | 2023-08-21T13:42:07.238370 | 2023-08-14T18:00:34 | 2023-08-14T18:00:34 | 31,028,375 | 2,719 | 1,666 | BSD-3-Clause | 2023-09-14T19:46:49 | 2015-02-19T17:53:47 | HTML | UTF-8 | Python | false | false | 6,947 | py | from ..dojo_test_case import DojoTestCase
from dojo.tools.mobsfscan.parser import MobsfscanParser
from dojo.models import Test
class TestMobsfscanParser(DojoTestCase):
def test_parse_no_findings(self):
testfile = open("unittests/scans/mobsfscan/no_findings.json")
parser = MobsfscanParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(0, len(findings))
def test_parse_many_findings(self):
testfile = open("unittests/scans/mobsfscan/many_findings.json")
parser = MobsfscanParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(7, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("android_certificate_transparency", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(295, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=1):
finding = findings[1]
self.assertEqual("android_kotlin_hardcoded", finding.title)
self.assertEqual("Medium", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(798, finding.cwe)
self.assertIsNotNone(finding.references)
self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", finding.file_path)
self.assertEqual(10, finding.line)
with self.subTest(i=2):
finding = findings[2]
self.assertEqual("android_prevent_screenshot", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(200, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=3):
finding = findings[3]
self.assertEqual("android_root_detection", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(919, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=4):
finding = findings[4]
self.assertEqual("android_safetynet", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(353, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=5):
finding = findings[5]
self.assertEqual("android_ssl_pinning", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(295, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=6):
finding = findings[6]
self.assertEqual("android_tapjacking", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(200, finding.cwe)
self.assertIsNotNone(finding.references)
def test_parse_many_findings_cwe_lower(self):
testfile = open("unittests/scans/mobsfscan/many_findings_cwe_lower.json")
parser = MobsfscanParser()
findings = parser.get_findings(testfile, Test())
testfile.close()
self.assertEqual(7, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("android_certificate_transparency", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(295, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=1):
finding = findings[1]
self.assertEqual("android_kotlin_hardcoded", finding.title)
self.assertEqual("Medium", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(798, finding.cwe)
self.assertIsNotNone(finding.references)
self.assertEqual("app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", finding.file_path)
self.assertEqual(10, finding.line)
with self.subTest(i=2):
finding = findings[2]
self.assertEqual("android_prevent_screenshot", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(200, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=3):
finding = findings[3]
self.assertEqual("android_root_detection", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(919, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=4):
finding = findings[4]
self.assertEqual("android_safetynet", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(353, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=5):
finding = findings[5]
self.assertEqual("android_ssl_pinning", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(295, finding.cwe)
self.assertIsNotNone(finding.references)
with self.subTest(i=6):
finding = findings[6]
self.assertEqual("android_tapjacking", finding.title)
self.assertEqual("Low", finding.severity)
self.assertEqual(1, finding.nb_occurences)
self.assertIsNotNone(finding.description)
self.assertEqual(200, finding.cwe)
self.assertIsNotNone(finding.references)
| [
"noreply@github.com"
] | DefectDojo.noreply@github.com |
a7b04ac6451fd8e623987f821b95ad4564fed2a5 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /travelport/models/booking_end_req_session_activity.py | ed703010be55ccae9fe7bcbab08770bacac0b6cd | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 238 | py | from __future__ import annotations
from enum import Enum
__NAMESPACE__ = "http://www.travelport.com/schema/sharedBooking_v52_0"
class BookingEndReqSessionActivity(Enum):
END = "End"
END_QUEUE = "EndQueue"
IGNORE = "Ignore"
| [
"chris@komposta.net"
] | chris@komposta.net |
66940ed8359ce0693b72954371aec6f4d8f3b6d7 | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-2/dn9 - minobot/M-17105-1054.py | ba11db24d90c1085ae689b63e7fddc49161b02db | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,785 | py | def premik(ukaz, x, y, smer):
smeri = "NESW"
premiki = [(0, -1), (1, 0), (0, 1), (-1, 0)]
ismer = smeri.index(smer)
if ukaz == "R":
smer = smeri[(ismer + 1) % 4]
elif ukaz == "L":
smer = smeri[(ismer - 1) % 4]
else:
dx, dy = premiki[ismer]
x += dx * ukaz
y += dy * ukaz
return x, y, smer
def izvedi(ime_datoteke):
koord = [(0, 0, 'N')]
smer = {"DESNO": "R", "LEVO": "L"}
with open(ime_datoteke) as d:
for vrstica in d:
x, y, s = koord[-1]
vrstica = vrstica.rstrip()
if len(vrstica.split()) == 1:
koord.append((premik(smer[vrstica], x, y, s)))
else:
koord.append(premik(int(vrstica.split()[1]), x, y, s))
return koord
def opisi_stanje(x, y, smer):
smeri = "NESW"
premiki = ['^', '>', 'v', '<']
ismer = premiki[smeri.index(smer)]
return "{0:3}:{1:<3} {2}".format(x,y,ismer)
def prevedi(ime_vhoda, ime_izhoda):
koord = izvedi(ime_vhoda)
seznam = [opisi_stanje(x,y,s) for x,y,s in koord]
with open(ime_izhoda, 'w') as d:
for vnos in seznam:
d.write(vnos+"\n")
def opisi_stanje_2(x, y, smer):
return "{stanje[1]}{xk:>5}:{yk}".format(stanje=opisi_stanje(x,y,smer).split(), xk = "("+str(x), yk=str(y)+")")
import unittest
class TestObvezna(unittest.TestCase):
def test_branje(self):
self.assertEqual(
izvedi("primer.txt"),
[(0, 0, 'N'), (0, 0, 'E'), (12, 0, 'E'), (12, 0, 'S'), (12, 2, 'S'),
(12, 2, 'E'), (15, 2, 'E'), (15, 2, 'N'), (15, 2, 'W')]
)
self.assertEqual(
izvedi("ukazi.txt"),
[(0, 0, 'N'), (0, 0, 'E'), (1, 0, 'E'), (1, 0, 'S'), (1, 0, 'W'),
(0, 0, 'W'), (0, 0, 'S'), (0, 0, 'E'), (1, 0, 'E'), (1, 0, 'S'),
(1, 3, 'S'), (1, 3, 'E'), (2, 3, 'E'), (2, 3, 'S'), (2, 3, 'W')]
)
def test_opisi_stanje(self):
self.assertEqual(opisi_stanje(0, 12, "N"), " 0:12 ^")
self.assertEqual(opisi_stanje(111, 0, "E"), "111:0 >")
self.assertEqual(opisi_stanje(-2, 111, "S"), " -2:111 v")
self.assertEqual(opisi_stanje(0, 0, "W"), " 0:0 <")
def test_prevedi(self):
from random import randint
import os
ime = "izhod{:05}.txt".format(randint(0, 99999))
try:
self.assertIsNone(prevedi("primer.txt", ime))
self.assertEqual(open(ime).read().rstrip(), """ 0:0 ^
0:0 >
12:0 >
12:0 v
12:2 v
12:2 >
15:2 >
15:2 ^
15:2 <""")
self.assertIsNone(prevedi("ukazi.txt", ime))
self.assertEqual(open(ime).read().rstrip(), """ 0:0 ^
0:0 >
1:0 >
1:0 v
1:0 <
0:0 <
0:0 v
0:0 >
1:0 >
1:0 v
1:3 v
1:3 >
2:3 >
2:3 v
2:3 <""")
finally:
os.remove(ime)
vime = "vhod{:05}.txt".format(randint(0, 99999))
open(vime, "wt").write("NAPREJ 23\nLEVO\nNAPREJ 17\n")
try:
self.assertIsNone(prevedi(vime, ime))
self.assertEqual(open(ime).read().rstrip(), """ 0:0 ^
0:-23 ^
0:-23 <
-17:-23 <""")
finally:
os.remove(ime)
os.remove(vime)
class TestDodatna(unittest.TestCase):
def test_opisi_stanje(self):
self.assertEqual(opisi_stanje_2(0, 12, "N"), "^ (0:12)")
self.assertEqual(opisi_stanje_2(111, 0, "E"), "> (111:0)")
self.assertEqual(opisi_stanje_2(-2, 111, "S"), "v (-2:111)")
self.assertEqual(opisi_stanje_2(0, 0, "W"), "< (0:0)")
if __name__ == "__main__":
unittest.main()
| [
"benjamin.fele@gmail.com"
] | benjamin.fele@gmail.com |
868d9325e4d3169cb0219450aed0e49a34dc2b90 | 851f7fde684774ca0388a28cb7035aa1e95f5de0 | /Ercess/settings.py | 57b1e7e01b4a74cf330b883c7aad6860f19d7c65 | [] | no_license | aditya2222/django-tickets | 01451a724cf97c8f2f338ba85a704e85ae57b008 | 3c2ecd252479fc5821873823cdbbb4641268a2d2 | refs/heads/master | 2022-12-16T17:17:07.821446 | 2019-05-12T02:58:05 | 2019-05-12T02:58:05 | 186,204,071 | 0 | 0 | null | 2022-11-22T03:22:25 | 2019-05-12T02:55:47 | JavaScript | UTF-8 | Python | false | false | 6,147 | py | """
Django settings for Ercess project.
Generated by 'django-admin startproject' using Django 2.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATES_DIR1 = os.path.join(BASE_DIR , 'templates/dashboard')
TEMPLATES_DIR2 = os.path.join(BASE_DIR , 'templates/Ercesscorp')
TEMPLATES_DIR3 = os.path.join(BASE_DIR , 'templates')
STATIC_DIR = os.path.join(BASE_DIR, 'templates/static')
MEDIA_DIR = os.path.join(BASE_DIR ,'media')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$%kn8r7jbv#*&)=vyq$q9dg=*kwj!zhuvcu#re@v$%y*g6elc$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['178.128.11.7', '127.0.0.1']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'rest_framework',
'ckeditor',
'rest_framework.authtoken',
'dashboard',
'Ercesscorp.apps.ErcesscorpConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
ROOT_URLCONF = 'Ercess.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATES_DIR1,TEMPLATES_DIR2,TEMPLATES_DIR3,] ,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Ercess.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
#
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'testSchema',
'USER': 'root' ,
'PASSWORD': 'tiktik123',
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
#SESSION_COOKIE_SECURE = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
#CSRF_COOKIE_SECURE = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [STATIC_DIR,]
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_USE_SSL = True
EMAIL_PORT = 465
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD')
INTERNAL_IPS = ('127.0.0.1',)
MEDIA_ROOT = 'media/'
MEDIA_URL = '/media/'
'''
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
) ,
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
)
}
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
)
}
'''
#cors-headers
#CORS_ORIGIN_ALLOW_ALL = True
#CORS_ALLOW_CREDENTIALS = True
CKEDITOR_CONFIGS = {
'default': {
'skin': 'moono-lisa',
#'skin': 'office2013',
'toolbar':'custom',
'width':700,
'height': 300,
'toolbar_custom': [
['Preview'],
['Bold', 'Italic', 'Underline', 'Strike', 'Subscript', 'Superscript', '-', 'RemoveFormat'],
['Cut', 'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Undo', 'Redo'],
['TextColor', 'BGColor'],
['Link', 'Unlink', 'Anchor'],
['Styles', 'Format', 'Font', 'FontSize'],
['Flash', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak', 'Iframe'],
['Find', 'Replace', '-', 'SelectAll'],
['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'Blockquote', 'CreateDiv', '-',
'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl',
'Language'],
],
}
}
| [
"adityasingh222247@gmail.com"
] | adityasingh222247@gmail.com |
0e2200d7cc05f631a298302b7d0012d4fc312b33 | 7e9bfbcd31faa30cdad64dff947e80cfe3f3a526 | /Backend/apps/users/adminx.py | 3aa920ca8f60c5eaeb3da689a605c911d57a03f2 | [
"Apache-2.0"
] | permissive | skyroot/Dailyfresh-B2C | 6c02578b5b39b8746c4bf9ebb2288775ffeabf33 | 7c94e9a4428e5116c91bf27cf696e6eee430748a | refs/heads/master | 2023-01-04T09:47:26.834763 | 2019-02-25T02:38:37 | 2019-02-25T02:38:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 551 | py | # encoding: utf-8
import xadmin
from xadmin import views
from .models import VerifyCode
class BaseSetting(object):
    # xadmin view setting: enable theme switching and pull in the
    # Bootswatch theme collection for the admin UI.
    enable_themes = True
    use_bootswatch = True
class GlobalSettings(object):
    # Site-wide xadmin chrome: page title and footer text.
    site_title = "天天生鲜"
    site_footer = "github.com/BeanWei"
    # menu_style = "accordion"
class VerifyCodeAdmin(object):
    # Columns shown on the VerifyCode change-list page in xadmin.
    list_display = ['code', 'mobile', "add_time"]
# Hook everything into xadmin: the VerifyCode model admin, the theme
# settings (BaseAdminView) and the global chrome (CommAdminView).
xadmin.site.register(VerifyCode, VerifyCodeAdmin)
xadmin.site.register(views.BaseAdminView, BaseSetting)
xadmin.site.register(views.CommAdminView, GlobalSettings)
"157318439@qq.com"
] | 157318439@qq.com |
0b2a2046c0731579d937a1254246088e4dca3a45 | 94d70c1c19cf115aa415a04cd85ff687a17f5eca | /exp_classif.py | 4f8a6d76d95e2f51fec470bb4570b20bb22a9af1 | [
"MIT"
] | permissive | RandallBalestriero/PMASO | 50ca98b2ea918f38fed4cc442562a6322c80409f | 780b06f8d8496000f3ecda04a49c8eda72393b5d | refs/heads/master | 2021-04-15T12:20:19.250244 | 2019-01-28T21:16:33 | 2019-01-28T21:16:33 | 126,507,496 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,710 | py | from pylab import *
import tensorflow as tf
from sklearn.datasets import make_moons
from sklearn.datasets import load_digits
import tensorflow as tf
from layers import *
from utils import *
import cPickle
# Experiment configuration (Python 2 script): dataset name, output size,
# and two CLI arguments — sigma and a width multiplier for hidden layers.
DATASET = 'MNIST'
# neuronsss — presumably the number of output classes (MNIST has 10); confirm.
neuronsss=10
sigmass=sys.argv[-2]
nonlinearity = int(sys.argv[-1])
print nonlinearity
# Load MNIST and move channels last: (N, H, W, C).
x_train,y_train,x_test,y_test = load_data(DATASET)
x_train = transpose(x_train,[0,2,3,1])
x_test = transpose(x_test,[0,2,3,1])
# Shuffle the training set and add small Gaussian pixel noise.
pp = permutation(x_train.shape[0])
x_train = x_train[pp]+randn(len(pp),28,28,1)*0.05
y_train = y_train[pp]
# Train on the first 2000 shuffled examples only.
XX = x_train[:2000]
YY = y_train[:2000]
input_shape = XX.shape
# Two dense hidden layers (widths scaled by the CLI multiplier) followed
# by the final classification layer.
layers1 = [InputLayer(input_shape)]
layers1.append(DenseLayer(layers1[-1],K=32*nonlinearity,R=2,nonlinearity=None,sparsity_prior=0.,sigma=sigmass,learn_pi=1,p_drop=0.,bn=BN(0,0),U=0))
layers1.append(DenseLayer(layers1[-1],K=16*nonlinearity,R=2,nonlinearity=None,sparsity_prior=0.,sigma=sigmass,learn_pi=1,p_drop=0.,bn=BN(0,0),U=0))
layers1.append(FinalLayer(layers1[-1],R=neuronsss,sparsity_prior=0.00,sigma=sigmass,bn=BN(0,0)))
model1 = model(layers1)
model1.init_dataset(XX,YY)
LOSSE=train_layer_model(model1,rcoeff_schedule=schedule(0.000000000001,'linear'),CPT=132,random=0,fineloss=0)
# Evaluate on the remaining 2000-example chunks, re-running the E-step
# with values 10, 1, 0.001 and printing accuracy after each pass.
for i in xrange(1,10):
    model1.init_dataset(x_train[2000*i:2000*(i+1)])
    model1.init_thetaq()
    model1.E_step(10)
    y_hat = argmax(model1.predict(),1)
    print mean((y_hat==y_train[2000*i:2000*(i+1)]).astype('float32'))
    model1.E_step(1)
    y_hat = argmax(model1.predict(),1)
    print mean((y_hat==y_train[2000*i:2000*(i+1)]).astype('float32'))
    model1.E_step(0.001)
    y_hat = argmax(model1.predict(),1)
    print mean((y_hat==y_train[2000*i:2000*(i+1)]).astype('float32'))
| [
"randallbalestriero@gmail.com"
] | randallbalestriero@gmail.com |
62b067d9e926d110db5569bf37336152e61ec68f | 9867cb1c684aa1087f6320ad28b4e718b2879a70 | /examples/basics/location.py | 24066f16dd61b7b576f3e0b01007bc856f289836 | [
"MIT"
] | permissive | mcauser/Python_MQTT | 0c90a2264e523129ea1db15904dd2b66c85aaa28 | c5795d64f12c576433a3d59b528351f02aa84036 | refs/heads/master | 2023-01-22T03:14:49.070321 | 2018-07-15T19:58:00 | 2018-07-15T19:58:00 | 319,333,029 | 0 | 0 | NOASSERTION | 2020-12-07T13:45:36 | 2020-12-07T13:45:36 | null | UTF-8 | Python | false | false | 1,930 | py | """
'location.py'
====================================
Example of sending GPS data points
to an Adafruit IO Feed using the API
Author(s): Brent Rubell, Todd Treece
"""
# Import standard python modules
import time
# Import Adafruit IO REST client.
from Adafruit_IO import Client, Feed, RequestError
# Set to your Adafruit IO key.
# Remember, your key is a secret,
# so make sure not to publish it when you publish this code!
ADAFRUIT_IO_KEY = 'YOUR_AIO_KEY'
# Set to your Adafruit IO username.
# (go to https://accounts.adafruit.com to find your username)
ADAFRUIT_IO_USERNAME = 'YOUR_AIO_USERNAME'
# Create an instance of the REST client.
aio = Client(ADAFRUIT_IO_USERNAME, ADAFRUIT_IO_KEY)
# Assign a location feed, if one exists already
try:
    location = aio.feeds('location')
except RequestError: # Doesn't exist, create a new feed
    feed = Feed(name="location")
    location = aio.create_feed(feed)
# Seconds between loop iterations, to stay under the IO rate limit.
loop_delay = 5
# We don't have a GPS hooked up, but let's fake it for the example/test:
# (replace this data with values from a GPS hardware module)
value = 0
lat = 40.726190
lon = -74.005334
ele = 6 # elevation above sea level (meters)
while True:
    print('\nSending Values to location feed...\n')
    print('\tValue: ', value)
    print('\tLat: ', lat)
    print('\tLon: ', lon)
    print('\tEle: ', ele)
    # Send location data to Adafruit IO
    aio.send_location_data(location.key, value, lat, lon, ele)
    # shift all values (for test/demo purposes)
    value += 1
    lat -= 0.01
    lon += -0.02
    ele += 1
    # Read the location data back from IO
    print('\nData Received by Adafruit IO Feed:\n')
    data = aio.receive(location.key)
    print('\tValue: {0}\n\tLat: {1}\n\tLon: {2}\n\tEle: {3}'
          .format(data.value, data.lat, data.lon, data.ele))
    # wait loop_delay seconds to avoid api throttle
    time.sleep(loop_delay)
| [
"robots199@me.com"
] | robots199@me.com |
afca54c190d92048b7f0dc6a287b5d5f3d152993 | c47d0e1a9e256a7cade218ccf4173c82183f67b4 | /codedining/urls.py | 8b35f409571998b5bec9e4657e0754f8a5e2def0 | [] | no_license | nagyistge/FoodDuk | 9e85c3e077a24ad8daa3e2f587bf40913d4eebcc | 2c89b2e99e3ec1d8833f422a4a9f1c372a350b5e | refs/heads/master | 2021-01-20T11:30:03.271597 | 2014-09-30T14:44:50 | 2014-09-30T14:44:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 356 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
# Old-style (Django < 1.10) patterns() URLconf: every non-admin path is
# delegated to the 'core' app; /admin/ serves the Django admin site.
# NOTE: the catch-all r'^' must stay after more specific routes, if added.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'codedining.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^', include('core.urls', namespace='core')),
    url(r'^admin/', include(admin.site.urls)),
)
| [
"carpedm20@gmail.com"
] | carpedm20@gmail.com |
d4dd0cc6c611da8ad78ddfa152f67b1c6c957144 | 7434ef0a0840da62c449b73a3810d11bcf300644 | /fms/migrations/0005_auto_20171019_1201.py | af961ed9df24aca650ddd579956428bce93c07ec | [] | no_license | webmalc/maxibooking-billing-django | 40f497dc794e0b29868a01b482b5865764b35fd3 | d5ca86c3701b86e359b0648a5b76b0b71faa7810 | refs/heads/master | 2022-12-17T21:50:42.210125 | 2019-08-14T09:33:02 | 2019-08-14T09:33:02 | 202,347,569 | 2 | 0 | null | 2022-12-09T22:25:58 | 2019-08-14T12:36:22 | Python | UTF-8 | Python | false | false | 779 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-10-19 12:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make ``end_date`` a required, indexed CharField on
    both the ``fms.Fms`` and ``fms.Kpp`` models.

    The ``default`` values (1 and 'test') are one-off backfills for rows
    that already exist; ``preserve_default=False`` removes them from the
    field definition after the migration runs.
    """
    dependencies = [
        ('fms', '0004_auto_20171019_1145'),
    ]
    operations = [
        migrations.AlterField(
            model_name='fms',
            name='end_date',
            field=models.CharField(db_index=True, default=1, max_length=100, verbose_name='end date'),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='kpp',
            name='end_date',
            field=models.CharField(db_index=True, default='test', max_length=100, verbose_name='end date'),
            preserve_default=False,
        ),
    ]
| [
"m@webmalc.pw"
] | m@webmalc.pw |
27b07d65edf49577d7ee69214610b12ac1107ae3 | dd1fa9020beb9b0205a5d05e0026ccae1556d14b | /gongmo/smote.py | b9cce4e206cdacd3f801b0f1984e2c4bf7a26b53 | [] | no_license | kimjieun6307/itwill | 5a10250b6c13e6be41290e37320b15681af9ad9a | 71e427bccd82af9f19a2a032f3a08ff3e1f5911d | refs/heads/master | 2022-11-13T11:55:12.502959 | 2020-07-15T08:14:21 | 2020-07-15T08:14:21 | 267,373,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,817 | py | # -*- coding: utf-8 -*-
"""
Created on Thu May 28 18:01:47 2020
@author: user
"""
# -*- coding: utf-8 -*-
"""
Created on Wed May 27 11:23:29 2020
@author: user
"""
#################
## plant_a_df.csv
#################
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression # 로지스틱 회귀분석
from sklearn.naive_bayes import GaussianNB, MultinomialNB
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier, export_graphviz
from sklearn.ensemble import RandomForestClassifier
from xgboost import XGBClassifier
from xgboost import plot_importance # 중요변수 시각화
from sklearn.preprocessing import MinMaxScaler
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split, cross_validate
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.metrics import accuracy_score, confusion_matrix, f1_score , classification_report
# 히트맵 : 분류정확도 결과를 시각화
import seaborn as sn # heatmap - Accuracy Score
import matplotlib.pyplot as plt
# tree 시각화 관련
from sklearn.tree.export import export_text # ==> print(export_text(model))
from sklearn import tree # ==> tree.plot_tree(model)
plant_a_df = pd.read_csv('plant_a_df.csv')
plant_a_df=plant_a_df.iloc[:, 1:8]
plant_a_df.info()
'''
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 18815 entries, 0 to 18814
Data columns (total 7 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 plant1_train_mea_ddhr 18815 non-null object
1 plant1_train_tem_in_loc1 18815 non-null float64
2 plant1_train_hum_in_loc1 18815 non-null float64
3 plant1_train_tem_coil_loc1 18815 non-null float64
4 plant1_train_tem_out_loc1 18815 non-null float64
5 plant1_train_hum_out_loc1 18815 non-null float64
6 24hour_cond_loc1
'''
plant_a_df.columns = ['ddhr', 'tem_in', 'hum_in', 'tem_coil', 'tem_out', 'hum_out', '24hour_cond']
col = plant_a_df.columns
x_col = col[1:6]
y_col = col[-1]
X=plant_a_df[x_col]
y=plant_a_df[y_col]
X.shape # (18815, 5)
y.value_counts()
'''
0.0 18700
1.0 115
'''
###############################
###불균형 데이터 처리 SMOTE 함수
#############################
# pip install -U imbalanced-learn # Anaconda Promt에서 설치
from imblearn.over_sampling import SMOTE
## auto##
sm = SMOTE(k_neighbors=5, random_state=71)
X_data, y_data = sm.fit_sample(X, y)
X_data.shape # (37400, 5)
y_data.shape # (37400,)
18815-37400 # -18585
y_data.value_counts()
'''
1.0 18700
0.0 18700
'''
x_train, x_test, y_train, y_test = train_test_split(X_data, y_data, test_size =0.3)
##########
# XGB
##########
xgb = XGBClassifier(random_state=123)
model_xgb = xgb.fit(x_train, y_train)
y_pred = model_xgb.predict(x_test)
acc = accuracy_score(y_test, y_pred)
acc # 0.9884135472370766
report = classification_report(y_test, y_pred)
print(report)
'''
precision recall f1-score support
0.0 1.00 0.98 0.99 5558
1.0 0.98 1.00 0.99 5662
accuracy 0.99 11220
macro avg 0.99 0.99 0.99 11220
weighted avg 0.99 0.99 0.99 11220
'''
pd.crosstab(y_pred, y_test)
'''
24hour_cond 0.0 1.0
row_0
0.0 5443 15
1.0 115 5647
'''
y_test.value_counts() #11,220
'''
1.0 5662
0.0 5558
'''
plant_b_df = pd.read_csv('plant_b_df.csv')
plant_b_df=plant_b_df.iloc[:, 1:8]
plant_b_df.info()
'''
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 19406 entries, 0 to 19405
Data columns (total 7 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 plant1_train_mea_ddhr 19406 non-null object
1 plant1_train_tem_in_loc2 19406 non-null float64
2 plant1_train_hum_in_loc2 19406 non-null float64
3 plant1_train_tem_coil_loc2 19406 non-null float64
4 plant1_train_tem_out_loc1 19406 non-null float64
5 plant1_train_hum_out_loc1 19406 non-null float64
6 24hour_cond_loc2 19406 non-null float64
'''
plant_b_df.columns = ['ddhr', 'tem_in', 'hum_in', 'tem_coil', 'tem_out', 'hum_out', '24hour_cond']
col
'''
Index(['ddhr', 'tem_in', 'hum_in', 'tem_coil', 'tem_out', 'hum_out',
'24hour_cond'],
'''
X_b = plant_b_df[x_col]
y_b = plant_b_df[y_col]
X_b.shape # (19406, 5)
y_b.value_counts()
'''
0.0 19199
1.0 207
'''
y_b_pred = model_xgb.predict(X_b)
acc = accuracy_score(y_b, y_b_pred)
acc # 0.9685148923013501
report=classification_report(y_b, y_b_pred)
print(report)
#########
# svm
##########
params = [0.001, 0.01, 0.1, 1, 10, 100]
kernel = ['linear', 'rbf']
best_score = 0
best_params={}
for k in kernel:
for g in params :
for c in params:
svc = SVC(kernel=k, gamma=g, C=c)
model = svc.fit(x_train, y_train)
score = model.score(x_test, y_test)
if score > best_score :
best_score = score
best_params = {'kernel': k, 'gamma' : g, 'C' : c}
print('best score : ', best_score)
print('best parameter : ', best_params)
svc = SVC( C =10, gamma=0.01, kernel ='rbf')
model = svc.fit(x_train, y_train)
y_pred = model.predict(x_test)
y_true = y_test
acc = accuracy_score(y_true, y_pred)
acc #0.9834224598930481
report = classification_report(y_test, y_pred)
print(report)
##################
### RandomForest
##################
rf = RandomForestClassifier()
model = rf.fit(X=x_train, y=y_train)
y_pred = model.predict(x_test)
y_true = y_test
acc = accuracy_score(y_true, y_pred)
acc # 0.9939393939393939
report = classification_report(y_true, y_pred)
print(report)
'''
precision recall f1-score support
0.0 1.00 0.99 0.99 5597
1.0 0.99 1.00 0.99 5623
accuracy 0.99 11220
macro avg 0.99 0.99 0.99 11220
weighted avg 0.99 0.99 0.99 11220
'''
model.feature_importances_
# array([0.20234667, 0.13226774, 0.35807944, 0.20498742, 0.10231872])
import matplotlib.pyplot as plt
plt.barh(x_col, model.feature_importances_ )
'''
plant1_train_tem_coil_loc1 >> 0.35807944
plant1_train_tem_out_loc1 >> 0.20498742
plant1_train_tem_in_loc1 >> 0.20234667
'''
#######################################################
## 0.5 ##
sm2 = SMOTE(0.5, k_neighbors=5, random_state=71)
X_data2, y_data2 = sm2.fit_sample(X, y)
X_data2.shape # (28050, 5)
y_data2.shape
28050-18815 #9235
y_data2.value_counts()
'''
0.0 18700
1.0 9350
'''
| [
"kofj2000@gmail.com"
] | kofj2000@gmail.com |
e4a7ea7735a2385fa442e00ebb8e4cf93689e97b | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_212/ch50_2020_04_12_18_47_48_906877.py | ced60abc8d98108d8be1a72830cb9967001ff5bf | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | def junta_nome_sobrenome (a,b):
    # Pair each first name in ``a`` with the surname at the same index
    # in ``b`` (a hand-rolled zip producing two-element lists).
    nomes=[]
    i=0
    while i< len(a):
        junto=[a[i], b[i]]
        nomes.append(junto)
        i += 1
    # NOTE(review): assumes len(b) >= len(a); extra surnames are ignored.
    return nomes
| [
"you@example.com"
] | you@example.com |
f8551e4ac31de6d0d2cc5bd77cce8f2222ffd7f3 | 1bf9f6b0ef85b6ccad8cb029703f89039f74cedc | /src/spring/azext_spring/vendored_sdks/appplatform/v2022_03_01_preview/aio/_configuration.py | 3d2b5f478c605e719aca28c70de7f710418b88c2 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | VSChina/azure-cli-extensions | a1f4bf2ea4dc1b507618617e299263ad45213add | 10b7bfef62cb080c74b1d59aadc4286bd9406841 | refs/heads/master | 2022-11-14T03:40:26.009692 | 2022-11-09T01:09:53 | 2022-11-09T01:09:53 | 199,810,654 | 4 | 2 | MIT | 2020-07-13T05:51:27 | 2019-07-31T08:10:50 | Python | UTF-8 | Python | false | false | 3,692 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class AppPlatformManagementClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Configuration for AppPlatformManagementClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Gets subscription ID which uniquely identify the Microsoft Azure
     subscription. The subscription ID forms part of the URI for every service call. Required.
    :type subscription_id: str
    :keyword api_version: Api Version. Default value is "2022-03-01-preview". Note that overriding
     this default value may result in unsupported behavior.
    :paramtype api_version: str
    """
    def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
        super(AppPlatformManagementClientConfiguration, self).__init__(**kwargs)
        # Pinned service API version; callers may override at their own risk.
        api_version = kwargs.pop("api_version", "2022-03-01-preview")  # type: str
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = api_version
        # AAD scope used when requesting tokens for Azure Resource Manager.
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", "mgmt-appplatform/{}".format(VERSION))
        self._configure(**kwargs)
    def _configure(self, **kwargs: Any) -> None:
        # Each pipeline policy can be injected by the caller via kwargs;
        # otherwise the azure-core / azure-mgmt-core default is installed.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        # Build the ARM challenge-auth policy only when a credential exists
        # and the caller did not supply a policy of their own.
        if self.credential and not self.authentication_policy:
            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(
                self.credential, *self.credential_scopes, **kwargs
            )
| [
"noreply@github.com"
] | VSChina.noreply@github.com |
6ba35c246673106ae94ae50591a6cd0547b333af | 0178e6a705ee8aa6bb0b0a8512bf5184a9d00ded | /Sungjin/Math/n17626/17626.py | ce114cdc116413798b61bd317e01f0b80f045652 | [] | no_license | comojin1994/Algorithm_Study | 0379d513abf30e3f55d6a013e90329bfdfa5adcc | 965c97a9b858565c68ac029f852a1c2218369e0b | refs/heads/master | 2021-08-08T14:55:15.220412 | 2021-07-06T11:54:33 | 2021-07-06T11:54:33 | 206,978,984 | 0 | 1 | null | 2020-05-14T14:06:46 | 2019-09-07T14:23:31 | Python | UTF-8 | Python | false | false | 590 | py | import sys
# Faster stdin reads; NOTE(review): this shadows the builtin ``input``.
input = sys.stdin.readline
if __name__ == '__main__':
    # Baekjoon 17626: minimum number of perfect squares summing to n.
    # Lagrange's four-square theorem guarantees the answer is <= 4,
    # hence the default of 4.
    n = int(input())
    min_ = 4
    # Try the largest candidate square first; the lower loop bounds prune
    # values too small to belong to a better decomposition.
    # NOTE(review): ``temp`` is rebound inside the j-loop below; the j-range
    # was computed once from the original value, so only the k-range uses
    # the updated remainder — verify the pruning bounds are exhaustive.
    for i in range(int(n**0.5), int((n//4)**0.5), -1):
        if i*i ==n: min_ = 1; break
        else:
            temp = n - i*i
            for j in range(int(temp**0.5), int((temp//3)**0.5), -1):
                if i*i + j*j == n: min_ = min(min_, 2); continue
                else:
                    temp = n - i*i - j*j
                    for k in range(int(temp**0.5), int((temp//2)**0.5), -1):
                        if i*i + j*j + k*k == n: min_ = min(min_, 3)
    print(min_)
"comojin1994@gmail.com"
] | comojin1994@gmail.com |
a723e981b85752bf424cb2f10500a05148ebd07f | 76e931912629c37beedf7c9b112b53e7de5babd7 | /1-mouth01/day06/exe03.py | 2edcd5342b7f4bc8001dc0ad5d233dcae9b416ca | [
"Apache-2.0"
] | permissive | gary-gggggg/gary | c59ac21d8e065f296ff986d11a0e4cbf186a1bc4 | d8ba30ea4bc2b662a2d6a87d247f813e5680d63e | refs/heads/main | 2023-02-23T06:54:34.500683 | 2021-02-01T10:17:02 | 2021-02-01T10:17:02 | 334,905,744 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | """在终端中打印香港的现有人数
在终端中打印上海的新增和现有人数
新疆新增人数增加 1
"""
# Per-region epidemic stats: new cases, currently infected ("now have"),
# cumulative total, cured, and deaths ("死亡").
# NOTE(review): "region" is "hk" in all three dicts — looks like a
# copy-paste slip for shinfo (Shanghai) and xjinfo (Xinjiang); confirm.
hkinfo={"region":"hk","new":15,"now have":39,"total":4801,\
    "cured":4320,"死亡":88}
shinfo={"region":"hk","new":6,"now have":61,"total":903,\
    "cured":835,"死亡":7}
xjinfo={"region":"hk","new":0,"now have":49,"total":902,\
    "cured":850,"死亡":3}
# Hong Kong: currently infected count.
print(hkinfo["now have"])
# Shanghai: new cases and currently infected counts.
print(shinfo["new"])
print(shinfo["now have"])
# Xinjiang: bump new cases by 1, then dump the whole record.
xjinfo["new"]=1
print(xjinfo)
"673248932@qq.com"
] | 673248932@qq.com |
2fa9bde1a5bb7a7c232b3647aeecd183a2c840f8 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_299/ch39_2020_04_19_01_30_42_281299.py | 02bfb76937bd86b9878c58fa8cf5282fe1d1fcc3 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | m = 2
# Find the starting value below 1001 with the longest Collatz chain
# (``m`` is initialised to 2 on the line above).
#
# Fixes vs the original:
#  * ``termos`` was reset to 1 on every *inner* iteration, so every chain
#    length came out as 2 and the first start value always won; it must be
#    initialised once per starting value.
#  * ``n / 2`` produces a float in Python 3; ``//`` keeps n an integer.
termomax = 1          # longest chain length seen so far
numrespon = 1         # starting value that produced it
termos_n_p_m = {}     # start value -> chain length (start and final 1 included)
while m < 1001:
    n = m
    termos = 1        # count the starting term itself
    while n != 1:
        if n % 2 == 0:
            n = n // 2
        else:
            n = 3*n + 1
        termos += 1
    termos_n_p_m[m] = termos
    m += 1
for num, termos in termos_n_p_m.items():
    if termos > termomax:
        termomax = termos
        numrespon = num
print(numrespon)
"you@example.com"
] | you@example.com |
6060e0d04713e19612bcae4d6aefb8ec0cc87fd5 | 0aa0f7c36cab7580a33383ff07a1f1434811ea96 | /gzbj/optimus_2.1/optimus/backend/myBluePrint/ericic_v2/base_dao/cpu_layout_dao.py | b2ee6d955f879b4e41400b5a7e03ef4d36c1ebbc | [] | no_license | yanliangchen/ware_house | a1146b23e76bce39be67e9d65a240270e0f08c10 | 44780726924f9a398fb6197645b04cdcfb0305e7 | refs/heads/main | 2023-06-15T13:52:45.890814 | 2021-07-20T10:09:22 | 2021-07-20T10:09:22 | 387,731,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,473 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : cpu_layout_dao.py
# @Author: gaofzhan
# @Email: gaofeng.a.zhang@ericssoin.com
# @Date : 2020/12/21 11:04
# @Desc :
from sqlalchemy import and_
from backend.Model.connection import SESSION
from backend.myBluePrint.ericic_v2.model.nova_service_table import NovaServiceModel
from backend.myBluePrint.ericic_v2.model.nova_table import NovaModel
from backend.myBluePrint.ericic_v2.model.data_center_table import DataCenterModel
class CpuLayoutDao:
    """Read-only DAO for CPU-layout lookups on data-center, host and VM tables."""

    @classmethod
    def get_dc_host_info(cls, cid, host_id):
        """Return {'dc_entity': ..., 'host_entity': ...} for data center *cid*
        and host *host_id* within it (each value may be None)."""
        session = SESSION()
        try:
            dc_row = session.query(DataCenterModel).filter(
                DataCenterModel.id == cid).one_or_none()
            host_row = session.query(NovaServiceModel).filter(
                and_(NovaServiceModel.dc_id == cid,
                     NovaServiceModel.id == host_id)).one_or_none()
        finally:
            # Always hand the connection back, even when the query raises.
            session.close()
        return dict(dc_entity=dc_row, host_entity=host_row)

    @classmethod
    def get_vm_info(cls, cid, instance_name_list):
        """Map each instance name to its Nova row (or None) for data center *cid*."""
        session = SESSION()
        result = dict()
        try:
            for instance_name in instance_name_list:
                cond = and_(NovaModel.dc_id == cid,
                            NovaModel.instance_name == instance_name)
                result[instance_name] = session.query(NovaModel).filter(cond).one_or_none()
        finally:
            session.close()
        return result
| [
"yanliang.li@ericsson.com"
] | yanliang.li@ericsson.com |
a2b0931d4d13460355caaeefdee16c122ece5713 | 46ae8264edb9098c9875d2a0a508bc071201ec8b | /res/scripts/client/gui/scaleform/daapi/view/metaretraincrewwindowmeta.py | 6fbc9747b2f0ba83a498c613a60f73314c7e2603 | [] | no_license | Difrex/wotsdk | 1fc6156e07e3a5302e6f78eafdea9bec4c897cfb | 510a34c67b8f4c02168a9830d23f5b00068d155b | refs/heads/master | 2021-01-01T19:12:03.592888 | 2016-10-08T12:06:04 | 2016-10-08T12:06:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,437 | py | # Embedded file name: scripts/client/gui/Scaleform/daapi/view/meta/RetrainCrewWindowMeta.py
from gui.Scaleform.framework.entities.abstract.AbstractWindowView import AbstractWindowView
class RetrainCrewWindowMeta(AbstractWindowView):
    """
    DO NOT MODIFY!
    Generated with yaml.
    __author__ = 'yaml_processor'
    @extends AbstractWindowView
    """
    # Python <-> ActionScript (DAAPI) bridge for the crew-retraining window.
    # ``submit``/``changeRetrainType`` are invoked from Flash and must be
    # overridden by the concrete view; the as_*S methods push data into the
    # Flash object once the DAAPI link is initialised.
    def submit(self, operationId):
        self._printOverrideError('submit')
    def changeRetrainType(self, retrainTypeIndex):
        self._printOverrideError('changeRetrainType')
    def as_setCrewDataS(self, data):
        """
        :param data: Represented by RetrainCrewBlockVO (AS)
        """
        if self._isDAAPIInited():
            return self.flashObject.as_setCrewData(data)
    def as_setVehicleDataS(self, data):
        """
        :param data: Represented by RetrainVehicleBlockVO (AS)
        """
        if self._isDAAPIInited():
            return self.flashObject.as_setVehicleData(data)
    def as_setCrewOperationDataS(self, data):
        """
        :param data: Represented by RetrainCrewOperationVO (AS)
        """
        if self._isDAAPIInited():
            return self.flashObject.as_setCrewOperationData(data)
    def as_setAllCrewDataS(self, data):
        """
        :param data: Represented by RetrainCrewBlockVOBase (AS)
        """
        if self._isDAAPIInited():
            return self.flashObject.as_setAllCrewData(data)
"m4rtijn@gmail.com"
] | m4rtijn@gmail.com |
b0586cf5ec5ec204813d6e8f17ba8d4d356447cc | 314d628f165973451977f31e3860d015af00b4de | /dictionary/BaseSearcher.py | b1384d90228eae3dd638e19df05ad6f53ef08ef5 | [] | no_license | michaelliu03/py-seg | c76a2788ea1dbbf415825bb094cc0776d1873ea4 | 0230e32f0c066ad329a7e972166a61a7a7979569 | refs/heads/master | 2020-06-20T01:45:53.060309 | 2019-08-29T00:14:03 | 2019-08-29T00:14:03 | 196,948,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | #!/usr/bin/env python
#-*-coding:utf-8-*-
# @File:Segment.py
# @Author: Michael.liu
# @Date:2019/2/12
# @Desc: NLP Segmentation ToolKit - Hanlp Python Version
from abc import ABCMeta,abstractmethod
class BaseSearcher(object):
    """Base class for dictionary-driven segmentation searchers (Python 2 file).

    NOTE(review): ``ABCMeta`` is imported above but never installed as the
    metaclass, so ``@abstractmethod`` is not actually enforced — confirm
    whether ``__metaclass__ = ABCMeta`` was intended.
    """
    def __init__(self):
        # Characters of the text being segmented.
        self.c = []
        # Offset into ``c`` where the unprocessed remainder begins
        # (everything before it has already been segmented).
        self.offset = int()
    def init1(self, c):
        # Initialise from an already-split character sequence; returns self
        # so calls can be chained.
        self.c = c
        return self
    def init2(self, text):
        # Initialise from raw text (Python 2: decode bytes to unicode first).
        return self.init1(text.decode())
    @abstractmethod
    def next_item(self):
        """Produce the next segmented token; must be overridden by subclasses."""
        pass
    def getOffset(self):
        """Return the current offset into the text."""
        return self.offset
"liuyu5@liepin.com"
] | liuyu5@liepin.com |
a167fe9589155c1ba39e2ee2c0e267543a62272a | 5d4f50e3b2e4043af8e625d5eb68c318612b0e1e | /79. Word Search.py | 01acb65b957fecb1c952176897c925d49edd02e5 | [] | no_license | Iansdfg/Leetcode2ndTime | 86b77204915286c66a3e2d95036f2577f1c85665 | 483ef12002a5b12bd5df6586a2133ed2bb7ae7e8 | refs/heads/master | 2020-06-04T17:39:57.137246 | 2019-08-01T04:44:52 | 2019-08-01T04:44:52 | 192,127,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | class Solution(object):
def exist(self, board, word):
"""
:type board: List[List[str]]
:type word: str
:rtype: bool
"""
for i in range(len(board)):
for j in range(len(board[0])):
if self.findNext(i,j, board, word):return True
return False
def findNext(self, i,j, board, word):
if len(word)==0: return True
if i<0 or j<0 or i>= len(board) or j>= len(board[0]) or board[i][j]!=word[0]:return False
tmp = board[i][j]
board[i][j] = "#"
res = self.findNext(i+1,j,board, word[1:]) or self.findNext(i-1,j,board, word[1:]) or self.findNext(i,j+1,board, word[1:]) or self.findNext(i,j-1,board, word[1:])
board[i][j] = tmp
return res
| [
"noreply@github.com"
] | Iansdfg.noreply@github.com |
9d7f2e04641bfbd7f08ff5845fc6ef11252821f2 | bf25182c7288ed020d323635619d801f6544a196 | /GreatestExampleusingelif.py | a7ef9658c068c1775f0e151f1f063eb417528e5f | [] | no_license | shivaconceptsolution/pythononlineclass | d84c19f9730e29738f788099a72c1b0bdf0f2f30 | 03b6b729140450c0ea5f6849b2882334b1f8546b | refs/heads/master | 2020-05-23T22:21:44.122672 | 2019-06-01T07:38:26 | 2019-06-01T07:38:26 | 186,972,067 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | a= int(input("enter first number")) #5
b= int(input("enter second number")) #7
c= int(input("enter third number")) #19
d = int(input("enter fourth number")) #45
if a>b and a>c and a>d:
print("a")
elif b>c and b>d:
print("b")
elif c>d:
print("c")
else:
print("d")
| [
"noreply@github.com"
] | shivaconceptsolution.noreply@github.com |
33d60371f26492ac09a1c7f81da016ec409e6b30 | b11fd2f3a6ba376810c772253342f6a8b552b114 | /triplaner/apps.py | e754f461ffa0ea8221b6c493b5465a36162a31ee | [] | no_license | ivanurban/travel_friend | 0561432906dd505fddeedbc4c020547763b5e0b1 | 0407d48f4cc6c68e4c7a8b2ae880adf828c6b681 | refs/heads/main | 2023-07-15T13:38:44.500529 | 2021-09-01T15:35:51 | 2021-09-01T15:35:51 | 400,756,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 150 | py | from django.apps import AppConfig
class TriplanerConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'triplaner'
| [
"ivanurban_bg@yahoo.com"
] | ivanurban_bg@yahoo.com |
c960b5ea19915710566ba4ed4f56ebae8e6d0b55 | 0f0f8b3b027f412930ca1890b0666538358a2807 | /dotop/report/misc.py | 092d9d28dca2a5301ad1ce3d46249d5ded5828e9 | [] | no_license | konsoar/dotop_pos_v11 | 741bd5ca944dfd52eb886cab6f4b17b6d646e131 | 576c860917edd25661a72726d0729c769977f39a | refs/heads/master | 2021-09-06T13:25:34.783729 | 2018-02-07T02:11:12 | 2018-02-07T02:11:12 | 111,168,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 474 | py | # -*- coding: utf-8 -*-
# Part of dotop. See LICENSE file for full copyright and licensing details.
from pychart import color
colorline = [color.T(r=((r+3) % 11)/10.0,
g=((g+6) % 11)/10.0,
b=((b+9) % 11)/10.0)
for r in range(11) for g in range(11) for b in range(11)]
def choice_colors(n):
if n:
return colorline[0:-1:len(colorline)/n]
return []
if __name__=='__main__':
print choice_colors(10)
| [
"Administrator@20nuo003-PC"
] | Administrator@20nuo003-PC |
0617eb08c5186a7388550723544e56e4be446d72 | 53784d3746eccb6d8fca540be9087a12f3713d1c | /res/packages/scripts/scripts/client/gui/shared/gui_items/dossier/achievements/EFC2016Achievement.py | b513c72d778bf55a792d7a7eebf2af952f9ba98b | [] | no_license | webiumsk/WOT-0.9.17.1-CT | 736666d53cbd0da6745b970e90a8bac6ea80813d | d7c3cf340ae40318933e7205bf9a17c7e53bac52 | refs/heads/master | 2021-01-09T06:00:33.898009 | 2017-02-03T21:40:17 | 2017-02-03T21:40:17 | 80,870,824 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 868 | py | # 2017.02.03 21:52:24 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/shared/gui_items/dossier/achievements/EFC2016Achievement.py
from dossiers2.ui.achievements import ACHIEVEMENT_BLOCK as _AB
from abstract import SeriesAchievement
from abstract.mixins import Quest
class EFC2016Achievement(Quest, SeriesAchievement):
def __init__(self, dossier, value = None):
SeriesAchievement.__init__(self, 'EFC2016', _AB.SINGLE, dossier, value)
def _getCounterRecordNames(self):
return ((_AB.TOTAL, 'EFC2016WinSeries'), (_AB.TOTAL, 'maxEFC2016WinSeries'))
# okay decompyling c:\Users\PC\wotsources\files\originals\res\packages\scripts\scripts\client\gui\shared\gui_items\dossier\achievements\EFC2016Achievement.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.02.03 21:52:24 Střední Evropa (běžný čas)
| [
"info@webium.sk"
] | info@webium.sk |
c04bbb0eb706ec5ab8cdc77b6a58b1419e7041c4 | 3e5150447a2c90c26354500f1df9660ef35c990b | /classes/Counter.py | ae21ce6d972dc8ad0f5ac0205cdf0a4f87eb96d0 | [] | no_license | kilirobbs/python-fiddle | 8d6417ebff9d6530e713b6724f8416da86c24c65 | 9c2f320bd2391433288cd4971c2993f1dd5ff464 | refs/heads/master | 2016-09-11T03:56:39.808358 | 2013-03-19T19:26:19 | 2013-03-19T19:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | # http://docs.python.org/library/collections.html#collections.Counter
from collections import Counter
cnt = Counter()
for word in ['red', 'blue', 'red', 'green', 'blue', 'blue']:
cnt[word] += 1
print cnt
print "cnt['not_existing']=",cnt["not_existing"] | [
"cancerhermit@gmail.com"
] | cancerhermit@gmail.com |
2f9ed4891d301d514f4c66ab498b59a0be403743 | f6078890ba792d5734d289d7a0b1d429d945a03a | /hw1/submissions/duongmatthew/duongmatthew_24972_1251114_HW_1_2_area_polygon.py | 447f9be0617ad268c945c78e34e5542e35139e5e | [] | no_license | huazhige/EART119_Lab | 1c3d0b986a0f59727ee4ce11ded1bc7a87f5b7c0 | 47931d6f6a2c7bc053cd15cef662eb2f2027712c | refs/heads/master | 2020-05-04T23:40:53.709217 | 2019-06-11T18:30:45 | 2019-06-11T18:30:45 | 179,552,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | #python2.7
"""
Created on Sat April 13, 2019
This script does the following:
Solve for the area of an irrgular polygon using for loops.
@author: maduong
"""
#==============================================================================
# Parameters
#==============================================================================
x = [1,3,4,3.5,2]
y = [1,1,2,5,4]
#inputs set up as a list
#==============================================================================
# Define functions
#==============================================================================
def X(x,y):
for i in range(5):
B = x[i-5]*y[i-4]+x[i-4]*y[i-3]+x[i-3]*y[i-2]+x[i-2]*y[i-1]+x[i-1]*y[i]
return B
#the first set of terms added together and defined
def Y(x,y):
for i in range(5):
C = y[i-5]*x[i-4]+y[i-4]*x[i-3]+y[i-3]*x[i-2]+y[i-2]*x[i-1]+y[i-1]*x[i]
return C
#the second set of terms added together and defined
A=.5*abs((X(x,y)-Y(x,y))) #the given equation for the area
#==============================================================================
# Print
#==============================================================================
print ('Area of the polygon', A) | [
"hge2@ucsc.edu"
] | hge2@ucsc.edu |
52cdea1bfb1375132c60044aec9f7e58288c58e3 | dc5dbd961188e301c1245f8b8c035d65ad48bf6f | /backend/location/migrations/0001_initial.py | 9f42e448c5f81bccfb773c8f4cb919f82328e615 | [] | no_license | crowdbotics-apps/knoto-fy-22184 | 1f580eb86812acd1b700b3927ae5ec515df03a35 | 3fb321895f22333db8cafd4d3cd6fced68c741ce | refs/heads/master | 2023-01-06T03:48:45.190344 | 2020-11-01T15:44:27 | 2020-11-01T15:44:27 | 309,118,753 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,032 | py | # Generated by Django 2.2.16 on 2020-11-01 15:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('vehicle', '0001_initial'),
('taxi_profile', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='MapLocation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('latitude', models.DecimalField(decimal_places=8, max_digits=12)),
('longitude', models.DecimalField(decimal_places=8, max_digits=12)),
],
),
migrations.CreateModel(
name='VehicleLocation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('latitude', models.DecimalField(decimal_places=8, max_digits=12)),
('longitude', models.DecimalField(decimal_places=8, max_digits=12)),
('last_updated', models.DateTimeField(auto_now=True)),
('vehicle', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='vehiclelocation_vehicle', to='vehicle.Vehicle')),
],
),
migrations.CreateModel(
name='ProfileLocation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('latitude', models.DecimalField(decimal_places=8, max_digits=12)),
('longitude', models.DecimalField(decimal_places=8, max_digits=12)),
('last_updated', models.DateTimeField(auto_now=True)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profilelocation_user', to='taxi_profile.UserProfile')),
],
),
]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
b17128c90d064ae3ff053cd3a4029c861cbf4f1e | e10422c540b3199cc5663c1c226ae2b8f24fd5cf | /RPA/rpa.py | b01cbe15d6b3dbaf791268ebf4edd47b9db91e1e | [] | no_license | cccccsf/single_point | f014a9f0a18eb30ddd4a967a822eba3bd26ed53a | 61cc11b0c40e082b45c5458c8435dbea001af466 | refs/heads/master | 2020-05-09T10:10:05.035435 | 2019-05-07T12:44:30 | 2019-05-07T12:44:30 | 181,030,692 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,854 | py | #!/usr/bin/python3
import os
from Components import IniReader
from Components import Job
from Components import read_record_results
from OsComponents import record
import RPA
def rpa(path):
rec = 'LRPA begins.\n'
rec += '---'*25
print(rec)
record(path, rec)
# read infos from input.ini file
ini = IniReader()
rpa_nodes_b, memory_b, rpa_nodes_s, memory_s, molpro_path, molpro_key = ini.get_rpa()
# generate Input file and scr file
bilayer_path = os.path.join(path, 'rpa')
upper_path = os.path.join(bilayer_path, 'upperlayer')
under_path = os.path.join(bilayer_path, 'underlayer')
bilayer_job = Job(bilayer_path)
upper_job = Job(upper_path)
under_job = Job(under_path)
rpa_jobs_finished, rpa_jobs_new = [], []
if not RPA.if_cal_finish(bilayer_job):
Inp = RPA.RPAInput(bilayer_job, memory_b)
Inp.generate_input()
Scr = RPA.Scr(bilayer_job, rpa_nodes_b, molpro_key, molpro_path)
Scr.gen_scr()
rpa_jobs_new.append(bilayer_job)
else:
bilayer_job.status = 'finished'
rpa_jobs_finished.append(bilayer_job)
for job in [upper_job, under_job]:
if not RPA.if_cal_finish(job):
Inp = RPA.RPAInput(job, memory_s)
Inp.generate_input()
Scr = RPA.Scr(job, rpa_nodes_s, molpro_key, molpro_path)
Scr.gen_scr()
rpa_jobs_new.append(job)
else:
job.status = 'finished'
rpa_jobs_finished.append(job)
# submit jobs
if len(rpa_jobs_new) > 0:
new_finished_jobs = RPA.submit(rpa_jobs_new)
rpa_jobs_finished += new_finished_jobs
# read and record results
read_record_results(path, rpa_jobs_finished, RPA.get_energy, method='rpa')
rec = 'LRPA finished!\n'
rec += '***'*25
print(rec)
record(path, rec)
| [
"cccccsf@hotmail.com"
] | cccccsf@hotmail.com |
fa6aabd605cb14a55b6c372b2c4b27dfcd568106 | a2a6138df4d1b52b8ca0b62a16a0d6de1439d156 | /ansible/library/check_fabric_links.py | b11a4a452f839727393172ea2bb49e250720d745 | [] | no_license | yijxiang/preso_abstract_all_the_things | bc7a203d1b6b862aaa0646c5746762e2429f2167 | ce4e98a8d63f59992c33b04e3af53d6cc4c101b6 | refs/heads/master | 2021-06-01T02:10:54.966996 | 2016-06-18T14:01:25 | 2016-06-18T14:01:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,809 | py | from ansible.module_utils.basic import * #noqa
def main():
"""Main module"""
module = AnsibleModule(
argument_spec=dict(
device_name=dict(type='str', required=True),
fabric=dict(type='list', required=True),
fabric_link_prefix=dict(type='str', required=True),
bgp_neighbors=dict(type='dict', required=True),
),
)
device_name = module.params['device_name']
fabric = module.params['fabric']
fabric_link_prefix = module.params['fabric_link_prefix']
bgp_neighbors = module.params['bgp_neighbors']
result = []
good = bad = 0
healthy_fabric = True
for index, link in enumerate(fabric):
if device_name == link['left']:
peer = "{}{}::1".format(fabric_link_prefix, index+1)
elif device_name == link['right']:
peer = "{}{}::".format(fabric_link_prefix, index+1)
else:
continue
try:
is_up = bgp_neighbors['global']['peers'][peer]['is_up']
if not is_up:
msg="{l[left]}:{l[left_port]} --- {l[right]}:{l[right_port]} is down".format(l=link)
result.append(msg)
healthy_fabric = False
bad += 1
else:
good += 1
except KeyError:
bad += 1
msg="{l[left]}:{l[left_port]} --- {l[right]}:{l[right_port]} is not configured".format(l=link)
result.append(msg)
healthy_fabric = False
link_health = "Good links: {}, bad links: {}".format(good, bad)
if healthy_fabric:
module.exit_json(msg=link_health)
else:
result.insert(0, link_health)
module.fail_json(msg='\n'.join(result), healthy_fabric=healthy_fabric)
if __name__ == '__main__':
main()
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
720e006c5f1ea63c2f941264693787f9831072e8 | 69f8906371f6c5d66dd526157139368ce2b287f9 | /jschon/vocabulary/format.py | d6ee988750dedfeeec39f9e4ab82bc8334ff3e20 | [
"MIT"
] | permissive | jdewells/jschon-shamelessdowngrade | 1636ce0a1d2ddd974eb4cea231ecedec5a9763e6 | 14c5415c77b2f1e531bedd8aeeb8051fde7efb3e | refs/heads/main | 2023-06-22T19:45:55.318613 | 2021-07-16T11:33:13 | 2021-07-16T11:54:27 | 386,568,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | from typing import Callable
from jschon.exceptions import CatalogueError
from jschon.json import AnyJSONCompatible, JSON
from jschon.jsonschema import JSONSchema, Scope
from jschon.vocabulary import Keyword
__all__ = [
'FormatKeyword',
'FormatValidator',
]
FormatValidator = Callable[[AnyJSONCompatible], None]
"""The type of a ``"format"`` keyword validator.
A :class:`~jschon.vocabulary.format.FormatValidator` is a
callable accepting a JSON-compatible Python object as its
only argument. It must raise a :exc:`ValueError` if the
argument is invalid per the applicable format specification.
"""
class FormatKeyword(Keyword):
key = "format"
def __init__(self, parentschema: JSONSchema, value: str):
super().__init__(parentschema, value)
try:
self.validator: FormatValidator = parentschema.catalogue.get_format_validator(value)
except CatalogueError:
self.validator = None
def evaluate(self, instance: JSON, scope: Scope) -> None:
scope.annotate(self.json.value)
if self.validator is not None:
try:
self.validator(instance.value)
except ValueError as e:
scope.fail(f'The instance is invalid against the "{self.json.value}" format: {e}')
else:
scope.noassert()
| [
"52427991+marksparkza@users.noreply.github.com"
] | 52427991+marksparkza@users.noreply.github.com |
2f238397bee22b9e1b4ff60009fe43d31d56a93c | 03a22b3c00dc5188da3ed1a19077874e3ad786c5 | /futoin/cid/tool/rusttool.py | 596ed4482d5011959c021f2fc2e12dcf86854d05 | [
"Apache-2.0"
] | permissive | iforgotband/cid-tool | fdf050169e5aa895ded9d9efb2741860ecd91a34 | f7d0e53057ecff156cf52c8dcae80c6408fb37d8 | refs/heads/master | 2021-08-15T02:51:17.717986 | 2017-11-17T07:59:24 | 2017-11-17T07:59:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,481 | py | #
# Copyright 2015-2017 (c) Andrey Galkin
#
from ..runenvtool import RunEnvTool
class rustTool(RunEnvTool):
"""Rust is a systems programming language.
Home: https://www.rust-lang.org
"""
__slots__ = ()
def getDeps(self):
if self._isGlobalRust():
return []
return ['rustup']
def getVersionParts(self):
return 3
def _isGlobalRust(self):
return self._detect.isAlpineLinux()
def _installTool(self, env):
if self._isGlobalRust():
self._install.apk('rust')
return
self._executil.callExternal([
env['rustupBin'], 'toolchain', 'install', env['rustVer']
])
def _updateTool(self, env):
self._installTool(env)
def uninstallTool(self, env):
if self._isGlobalRust():
return
self._executil.callExternal([
env['rustupBin'], 'toolchain', 'uninstall', env['rustVer']
])
self._have_tool = False
def envNames(self):
return ['rustBin', 'rustVer']
def initEnv(self, env):
if not self._isGlobalRust():
ver = env.setdefault('rustVer', 'stable')
self._environ['RUSTUP_TOOLCHAIN'] = ver
try:
res = self._executil.callExternal([
env['rustupBin'], 'which', 'rustc'
], verbose=False)
except:
return
super(rustTool, self).initEnv(env, 'rustc')
| [
"andrey@futoin.org"
] | andrey@futoin.org |
11f3ec41cdc453ce54cc8f179f787c754acfd76a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_118/1693.py | e9fddbbbf5b5b476c5d099c47e0503b858d39c30 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | ps = open("palindromicSquares.dat")
psList = []
for line in ps:
psList.append(int(line))
file = open("ups")
T = int(file.readline())
hh = 1
for x in range(T):
lin = file.readline()
line = lin.split()
A = int(line[0])
B = int(line[1])
count = 0
for pSq in psList:
if pSq >= A and pSq <= B:
count+=1
print "Case #" + str(hh) + ": " + str(count)
hh+=1 | [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
5dc54f6be27233e336ce2a0450bfe642158977e2 | e3a6939a2faaf9bfd7ed21e7a877d2bd2125b130 | /projects/migrations/0005_portfolio_main_image.py | 5a6cde616e5720954f3d6ab8324edf2cd9f82846 | [] | no_license | furkalokbu/PortfolioTime | b133a64ec1472a12b878b87cf8e0706fdf39a16a | c4233930cd0988a80c65edf2079d4a560987d225 | refs/heads/main | 2023-04-29T21:24:30.629206 | 2021-05-14T15:49:14 | 2021-05-14T15:49:14 | 367,320,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | # Generated by Django 3.2.2 on 2021-05-13 18:21
from django.db import migrations, models
import djlime.utils
class Migration(migrations.Migration):
dependencies = [
("projects", "0004_alter_portfolio_options"),
]
operations = [
migrations.AddField(
model_name="portfolio",
name="main_image",
field=models.ImageField(
blank=True,
help_text="recommended size 1000x665",
null=True,
upload_to=djlime.utils.get_file_path,
verbose_name="image",
),
),
]
| [
"furkalokbu@gmail.com"
] | furkalokbu@gmail.com |
281722303c3bde8dbe4c5ce15c76da40eb564416 | 19486e71422fdded91d8139d7ee1ab6b4cc8d271 | /app/core/admin.py | b3df02c45e6c82517c80ed7ad095eece1a1196b8 | [
"MIT"
] | permissive | EugenDaniel/recipe-app-api | ebe221efb1e6a4209c24169cfe9ccc3554b958dd | 7eea36ab76437ab58bdf06604612fd4abb95d2df | refs/heads/main | 2023-06-15T14:30:03.135398 | 2021-06-30T18:22:05 | 2021-06-30T18:22:05 | 342,641,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext as _
from core import models
class UserAdmin(BaseUserAdmin):
ordering = ["id"]
list_display = ["email", "name"]
fieldsets = (
(None, {'fields': ('email', 'password')}),
(_('Personal Info'), {'fields': ('name',)}),
(
_('Permissions'),
{'fields': ('is_active', 'is_staff', 'is_superuser')}
),
(_('Important dates'), {'fields': ('last_login',)})
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'password1', 'password2')
}),
)
admin.site.register(models.User, UserAdmin)
admin.site.register(models.Tag)
admin.site.register(models.Ingredient)
admin.site.register(models.Recipe)
| [
"="
] | = |
25b7cf86b988ac5ca4c8a5a21e13b136aa73138d | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_1/345.py | 3d6976a800e4a4c6ede3fadece3f71775d28e8e8 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | import sys
def longestdist(engines, queries, index): # returns new index, or len(queries) on end
arr = []
for eng in engines:
i = index
while (i < len(queries)) and (eng != queries[i]):
i += 1
if i == len(queries):
return i # the end
else:
arr.append(i)
return max(arr)
def findmain(engines, queries):
iter = 0
i = longestdist(engines, queries, 0)
while i < len(queries):
iter += 1
i = longestdist(engines, queries, i)
return iter
def main():
try:
inp = open(sys.argv[1], 'rt')
except IOError:
print 'Invalid input file name'
sys.exit(2)
out = open(sys.argv[2], 'wt')
ncases = int(inp.readline())
print ncases
for i in xrange(ncases):
print i
nengines = int(inp.readline())
engines = []
for j in xrange(nengines):
engines.append(inp.readline()[:-1])
nqueries = int(inp.readline())
queries = []
for k in xrange(nqueries):
queries.append(inp.readline()[:-1])
out.write('Case #%d: %d\n' % (i + 1, findmain(engines, queries)))
inp.close()
out.close()
if __name__ == '__main__':
main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
106eb5f57a11be2db0edad76db3b88818a4e1e00 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=2.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=5/sched.py | d5d1750c2c337e17c713d1a3b3c2ae173fab9e54 | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | -X FMLP -Q 0 -L 5 105 400
-X FMLP -Q 0 -L 5 84 250
-X FMLP -Q 1 -L 2 68 300
-X FMLP -Q 1 -L 2 35 125
-X FMLP -Q 2 -L 1 34 175
-X FMLP -Q 2 -L 1 31 400
-X FMLP -Q 3 -L 1 28 125
28 125
26 150
25 175
23 250
17 100
12 125
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
fa6352da3f57c640866d89be3d153cb0602e1f48 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_sittings.py | 500a3f748d08d50008c6da0ff4b4761549958d02 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
#calss header
class _SITTINGS():
def __init__(self,):
self.name = "SITTINGS"
self.definitions = sitting
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['sitting']
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
716f5f51ef5e8c139c1eac1521eda1097a2cbf49 | 8b39cec9c5dfca96ad2c68f5cd65c4740fd922de | /python-flask-api/api.py | 00cffbf8b63a9ba805badef1a16bdc4366cdf24b | [] | no_license | evry-ace/google-esp-azad-auth-examples | 12d78f0ab439de4611c558d51e641a0646a77c7d | acdb6d0fc22bd3de0a0ba1acebe7a8ea4de76577 | refs/heads/master | 2021-07-07T07:05:47.813386 | 2019-11-25T18:52:50 | 2019-11-25T18:52:50 | 223,805,084 | 0 | 0 | null | 2021-01-05T13:46:11 | 2019-11-24T20:24:39 | TypeScript | UTF-8 | Python | false | false | 4,784 | py | # /server.py
import json
import logging
import os
from six.moves.urllib.request import urlopen
from functools import wraps
from flask import Flask, request, jsonify, _request_ctx_stack
from flask_cors import cross_origin
from jose import jwt
logging.basicConfig(level="DEBUG")
TENANT_ID = os.environ.get("TENANT_ID")
ISSUER = 'https://sts.windows.net/%s/' % TENANT_ID
DISCOVERY_URL = "%s/v2.0/.well-known/openid-configuration" % ISSUER
AUDIENCE = os.environ.get("AUDIENCE")
ALGORITHMS = ["RS256"]
APP = Flask(__name__)
print(ISSUER)
# Error handler
class AuthError(Exception):
def __init__(self, error, status_code):
self.error = error
self.status_code = status_code
@APP.errorhandler(AuthError)
def handle_auth_error(ex):
response = jsonify(ex.error)
response.status_code = ex.status_code
return response
# Format error response and append status code
def get_token_auth_header():
"""Obtains the Access Token from the Authorization Header
"""
auth = request.headers.get("Authorization", None)
if not auth:
raise AuthError({"code": "authorization_header_missing",
"description":
"Authorization header is expected"}, 401)
parts = auth.split()
if parts[0].lower() != "bearer":
raise AuthError({"code": "invalid_header",
"description":
"Authorization header must start with"
" Bearer"}, 401)
elif len(parts) == 1:
raise AuthError({"code": "invalid_header",
"description": "Token not found"}, 401)
elif len(parts) > 2:
raise AuthError({"code": "invalid_header",
"description":
"Authorization header must be"
" Bearer token"}, 401)
token = parts[1]
return token
def requires_auth(f):
"""Determines if the Access Token is valid
"""
@wraps(f)
def decorated(*args, **kwargs):
token = get_token_auth_header()
url = DISCOVERY_URL
discovery_h = urlopen(url)
discovery_doc = json.loads(discovery_h.read())
jwks_url = discovery_doc["jwks_uri"]
jwks_h = urlopen(jwks_url)
jwks = json.loads(jwks_h.read())
unverified_header = jwt.get_unverified_header(token)
rsa_key = {}
for key in jwks["keys"]:
if key["kid"] == unverified_header["kid"]:
rsa_key = {
"kty": key["kty"],
"kid": key["kid"],
"use": key["use"],
"n": key["n"],
"e": key["e"]
}
if rsa_key:
try:
payload = jwt.decode(
token,
rsa_key,
algorithms=ALGORITHMS,
audience=AUDIENCE,
issuer=ISSUER
)
except jwt.ExpiredSignatureError:
raise AuthError({"code": "token_expired",
"description": "token is expired"}, 401)
except jwt.JWTClaimsError:
raise AuthError({"code": "invalid_claims",
"description":
"incorrect claims,"
"please check the audience and issuer"}, 401)
except Exception:
raise AuthError({"code": "invalid_header",
"description":
"Unable to parse authentication"
" token."}, 401)
_request_ctx_stack.top.current_user = payload
return f(*args, **kwargs)
raise AuthError({"code": "invalid_header",
"description": "Unable to find appropriate key"}, 401)
return decorated
def requires_scope(required_scope):
"""Determines if the required scope is present in the Access Token
Args:
required_scope (str): The scope required to access the resource
"""
token = get_token_auth_header()
unverified_claims = jwt.get_unverified_claims(token)
if unverified_claims.get("scope"):
token_scopes = unverified_claims["scope"].split()
for token_scope in token_scopes:
if token_scope == required_scope:
return True
return False
# This needs authentication
@APP.route("/api")
@cross_origin(headers=["Content-Type", "Authorization"])
@requires_auth
def private():
response = "Hello from a private endpoint! You need to be authenticated to see this."
return jsonify(message=response)
APP.run()
| [
"endre.karlson@gmail.com"
] | endre.karlson@gmail.com |
e4c8593a81daa132dd2f7d09206b302b6af3fecf | 67ddedc825a4852349bb3e54f7d31cdeb34c64aa | /test/functional/wallet_txn_clone.py | b459a8048556463da75fd9e4d8bd93832a6aa2ed | [
"MIT"
] | permissive | geranium-coin/geranium | 3500632ed8e666d30d1b28494b1b7b5003c18ecc | 93c08aa10ea151f4efd8337c1d5599ee7e8d58ea | refs/heads/master | 2022-07-28T21:28:55.717800 | 2022-01-10T17:30:13 | 2022-01-10T17:30:13 | 440,774,432 | 2 | 0 | MIT | 2022-01-04T08:33:10 | 2021-12-22T07:39:53 | C++ | UTF-8 | Python | false | false | 6,392 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Geranium Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
import io
from test_framework.test_framework import GeraniumTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
disconnect_nodes,
)
from test_framework.messages import CTransaction, COIN
class TxnMallTest(GeraniumTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
parser.add_argument("--segwit", dest="segwit", default=False, action="store_true",
help="Test behaviour with SegWit txn (which should fail")
def setup_network(self):
# Start with split network:
super(TxnMallTest, self).setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
def run_test(self):
if self.options.segwit:
output_type = "p2sh-segwit"
else:
output_type = "legacy"
# All nodes should start with 1,250 GEAM:
starting_balance = 1250
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress() # bug workaround, coins generated assigned to first getnewaddress!
self.nodes[0].settxfee(.001)
node0_address1 = self.nodes[0].getnewaddress(address_type=output_type)
node0_txid1 = self.nodes[0].sendtoaddress(node0_address1, 1219)
node0_tx1 = self.nodes[0].gettransaction(node0_txid1)
node0_address2 = self.nodes[0].getnewaddress(address_type=output_type)
node0_txid2 = self.nodes[0].sendtoaddress(node0_address2, 29)
node0_tx2 = self.nodes[0].gettransaction(node0_txid2)
assert_equal(self.nodes[0].getbalance(),
starting_balance + node0_tx1["fee"] + node0_tx2["fee"])
# Coins are sent to node1_address
node1_address = self.nodes[1].getnewaddress()
# Send tx1, and another transaction tx2 that won't be cloned
txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1, 1)
clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"], "sequence": rawtx1["vin"][0]["sequence"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][1]["value"]}
clone_locktime = rawtx1["locktime"]
clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
# createrawtransaction randomizes the order of its outputs, so swap them if necessary.
clone_tx = CTransaction()
clone_tx.deserialize(io.BytesIO(bytes.fromhex(clone_raw)))
if (rawtx1["vout"][0]["value"] == 40 and clone_tx.vout[0].nValue != 40*COIN or rawtx1["vout"][0]["value"] != 40 and clone_tx.vout[0].nValue == 40*COIN):
(clone_tx.vout[0], clone_tx.vout[1]) = (clone_tx.vout[1], clone_tx.vout[0])
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
tx1_clone = self.nodes[0].signrawtransactionwithwallet(clone_tx.serialize().hex(), None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
if (self.options.mine_block):
self.nodes[0].generate(1)
self.sync_blocks(self.nodes[0:2])
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
# Node0's balance should be starting balance, plus 50GEAM for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + node0_tx1["fee"] + node0_tx2["fee"]
if self.options.mine_block:
expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
if self.options.mine_block:
assert_equal(tx1["confirmations"], 1)
assert_equal(tx2["confirmations"], 1)
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
# Send clone and its parent to miner
self.nodes[2].sendrawtransaction(node0_tx1["hex"])
txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
if self.options.segwit:
assert_equal(txid1, txid1_clone)
return
# ... mine a block...
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
connect_nodes(self.nodes[1], 2)
self.nodes[2].sendrawtransaction(node0_tx2["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
self.sync_blocks()
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
# Check node0's total balance; should be same as before the clone, + 100 GEAM for 2 matured,
# less possible orphaned matured subsidy
expected += 100
if (self.options.mine_block):
expected -= 50
assert_equal(self.nodes[0].getbalance(), expected)
if __name__ == '__main__':
TxnMallTest().main()
| [
"manomay.jyotish.vadhuvar@gmail.com"
] | manomay.jyotish.vadhuvar@gmail.com |
a501755543715e2df21dcbe6ce7a7d3361bc3253 | 9edaf93c833ba90ae9a903aa3c44c407a7e55198 | /travelport/models/queue_next_modifiers.py | b169011c7b5de88ca57fdc2ee7b46de95122d1f9 | [] | no_license | tefra/xsdata-samples | c50aab4828b8c7c4448dbdab9c67d1ebc519e292 | ef027fe02e6a075d8ed676c86a80e9647d944571 | refs/heads/main | 2023-08-14T10:31:12.152696 | 2023-07-25T18:01:22 | 2023-07-25T18:01:22 | 222,543,692 | 6 | 1 | null | 2023-06-25T07:21:04 | 2019-11-18T21:00:37 | Python | UTF-8 | Python | false | false | 1,891 | py | from __future__ import annotations
from dataclasses import dataclass, field
__NAMESPACE__ = "http://www.travelport.com/schema/universal_v52_0"
@dataclass
class QueueNextModifiers:
"""
Can only be used when modifying an Universal Record in Queue mode.If not
specified along with ReturnRecord as false then current PNR in queue context
will be removed.
Parameters
----------
next_on_queue
Set to true to retrieve the next PNR on Queue ,if not set or set to
false system would return the current PNR.NextOnQueue cannot be
combined with Provider Locator Code and ReturnRecord as true
provider_locator_code
If providerLocatorCode is specified then system would return the
specified locator code in Queue mode .Provider Locator Code cannot
be combined with NextOnQueue and ReturnRecord as true
re_queue_current
Set to true to place the current PNR back on Queue
queue_session_token
Queue Session Token to hold session token for multiple queue
"""
class Meta:
namespace = "http://www.travelport.com/schema/universal_v52_0"
next_on_queue: None | bool = field(
default=None,
metadata={
"name": "NextOnQueue",
"type": "Attribute",
}
)
provider_locator_code: None | str = field(
default=None,
metadata={
"name": "ProviderLocatorCode",
"type": "Attribute",
"min_length": 5,
"max_length": 8,
}
)
re_queue_current: None | bool = field(
default=None,
metadata={
"name": "ReQueueCurrent",
"type": "Attribute",
}
)
queue_session_token: None | str = field(
default=None,
metadata={
"name": "QueueSessionToken",
"type": "Attribute",
}
)
| [
"chris@komposta.net"
] | chris@komposta.net |
e1df17b36f740b65145bfe2161d142a384266c2c | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467562/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_32/run_cfg.py | 6f5e245d271a325f3008125e0b1b592b4bfdab69 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,157 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467562/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_94_1_Iwx.root',
'/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_95_1_TN8.root',
'/store/cmst3/group/cmgtools/CMG/SUSYBBHToTauTau_M-900_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_96_1_QFN.root')
)
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
f2319ad1d29f92d6852e79f02e086e2527932504 | a222e2999251ba7f0d62c428ba8cc170b6d0b3b7 | /AtC_Beg_Con_111-120/ABC119/B.py | 32789877af7db37a5feed433bec40b59dc19e4a1 | [
"MIT"
] | permissive | yosho-18/AtCoder | 3e1f3070c5eb44f154c8104fbd5449f47446ce14 | 50f6d5c92a01792552c31ac912ce1cd557b06fb0 | refs/heads/master | 2020-06-02T10:21:29.458365 | 2020-05-29T12:40:48 | 2020-05-29T12:40:48 | 188,795,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | n = int(input())
c = []
for i in range(n):#h:高さ
c.append([str(m) for m in input().split()])
for i in range(n):
c[i][0] = float(c[i][0])
ans = 0
for i in range(n):
if c[i][1] == "JPY":
ans += c[i][0]
else:
ans += c[i][0] * 3.8 * (10 ** 5)
print(ans) | [
"44283410+wato18@users.noreply.github.com"
] | 44283410+wato18@users.noreply.github.com |
fde2acc46aaca99e0ba37b7d748d936de64221f9 | b264ef874513cd4bbb6081bc133eb85f05738bdc | /google-cloud-sdk/lib/googlecloudsdk/command_lib/redis/util.py | 630093f4587be7b0b86b435bae777abe39f370c7 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | smith701/terraform-automation | ab7a638c9da79fdd196a7c4332cfc2af67938f00 | 0b2ffbdc2da5eb807bc9c741543d95a962520f05 | refs/heads/master | 2021-06-15T06:37:42.057049 | 2019-05-30T11:02:14 | 2019-05-30T11:02:14 | 189,398,187 | 0 | 1 | null | 2021-04-30T20:46:46 | 2019-05-30T10:59:50 | Python | UTF-8 | Python | false | false | 3,369 | py | # -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Flag utilities for `gcloud redis`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.api_lib import redis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.util.args import labels_util
import six
VALID_REDIS_CONFIG_KEYS = ('maxmemory-policy', 'notify-keyspace-events',
'activedefrag', 'lfu-log-factor', 'lfu-decay-time')
def GetClientForResource(resource_ref):
api_version = resource_ref.GetCollectionInfo().api_version
client = redis.Client(api_version)
return client
def GetMessagesForResource(resource_ref):
api_version = resource_ref.GetCollectionInfo().api_version
messages = redis.Messages(api_version)
return messages
def InstanceRedisConfigArgDictSpec():
return {k: six.text_type for k in VALID_REDIS_CONFIG_KEYS}
def InstanceRedisConfigArgType(value):
return arg_parsers.ArgDict(spec=InstanceRedisConfigArgDictSpec())(value)
def InstanceLabelsArgType(value):
return arg_parsers.ArgDict(
key_type=labels_util.KEY_FORMAT_VALIDATOR,
value_type=labels_util.VALUE_FORMAT_VALIDATOR)(
value)
def AdditionalInstanceUpdateArguments():
return InstanceUpdateLabelsFlags() + [
InstanceUpdateRedisConfigFlag(),
InstanceRemoveRedisConfigFlag()
]
def InstanceUpdateLabelsFlags():
remove_group = base.ArgumentGroup(mutex=True)
remove_group.AddArgument(labels_util.GetClearLabelsFlag())
remove_group.AddArgument(labels_util.GetRemoveLabelsFlag(''))
return [labels_util.GetUpdateLabelsFlag(''), remove_group]
def InstanceUpdateRedisConfigFlag():
return base.Argument(
'--update-redis-config',
metavar='KEY=VALUE',
type=InstanceRedisConfigArgType,
action=arg_parsers.UpdateAction,
help="""\
A list of Redis config KEY=VALUE pairs to update according to
http://redis.io/topics/config. If a config parameter is already set,
its value is modified; otherwise a new Redis config parameter is added.
Currently, the only supported parameters are: {}.
""".format(', '.join(VALID_REDIS_CONFIG_KEYS)))
def InstanceRemoveRedisConfigFlag():
return base.Argument(
'--remove-redis-config',
metavar='KEY',
type=arg_parsers.ArgList(),
action=arg_parsers.UpdateAction,
help="""\
A list of Redis config parameters to remove. Removing a non-existent
config parameter is silently ignored.""")
def PackageInstanceRedisConfig(config, messages):
return encoding.DictToAdditionalPropertyMessage(
config, messages.Instance.RedisConfigsValue, sort_items=True)
| [
"root@kube-cluster.us-east4-c.c.applied-fusion-241610.internal"
] | root@kube-cluster.us-east4-c.c.applied-fusion-241610.internal |
44307155e7e4797e0cfd6e146e90dd0337886fd4 | 08a07136d6a65a0afdcf8bdda7db6de2562f8a2b | /yawdadmin/templatetags/yawdadmin_filters.py | 1940f40b12115c5f92f3c809276881952c877a94 | [
"BSD-3-Clause"
] | permissive | osiloke/yawd-admin | 8206b0e8baad3a6d5d1eec343c3f4e5f101011d0 | 0df1d64503ae30b907e6f785ae31da9182a10338 | refs/heads/master | 2020-12-25T15:39:35.436980 | 2013-06-16T23:51:04 | 2013-06-16T23:51:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 852 | py | # -*- coding: utf-8 -*-
from django import template
register = template.Library()
@register.filter
def divide(value, num):
return int(value / num)
@register.filter
def app_title(value):
return value.replace('_', ' ')
@register.filter
def utfupper(value):
orig = [u'Ά', u'Έ', u'Ή', u'Ί', u'ΐ', u'Ό', u'Ύ', u'Ώ']
rep = [u'Α', u'Ε', u'Η', u'Ι', u'Ϊ', u'Ο', u'Υ', u'Ω']
return u''.join([rep[orig.index(x)] if x in orig else x
for x in value.upper()])
@register.filter
def istranslationinline(value):
"""
This filter is used if yawd-translations is installed.
"""
try:
from translations.admin import TranslationInline
except:
return False
if hasattr(value, 'opts') and isinstance(value.opts, TranslationInline):
return True
return False
| [
"ppetrid@yawd.eu"
] | ppetrid@yawd.eu |
295ed9b605d7211992ab60f67a12c5a2c80272bb | cf4f3c181dc04c4e698b53c3bb5dd5373b0cc1f4 | /meridian/tst/acupoints/test_naokong31.py | b2c780b58057f39f0191f3bff2a9a63e200d5087 | [
"Apache-2.0"
] | permissive | sinotradition/meridian | da3bba6fe42d3f91397bdf54520b3085f7c3bf1d | 8c6c1762b204b72346be4bbfb74dedd792ae3024 | refs/heads/master | 2021-01-10T03:20:18.367965 | 2015-12-14T14:58:35 | 2015-12-14T14:58:35 | 46,456,260 | 5 | 3 | null | 2015-11-29T15:00:20 | 2015-11-19T00:21:00 | Python | UTF-8 | Python | false | false | 299 | py | #!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
import unittest
from meridian.acupoints import naokong31
class TestNaokong31Functions(unittest.TestCase):
def setUp(self):
pass
def test_xxx(self):
pass
if __name__ == '__main__':
unittest.main()
| [
"sinotradition@gmail.com"
] | sinotradition@gmail.com |
a8a90dddece566a95f1cea1bd8f97993627ed893 | 3060b94f8770ae5047fd9797097d88dfa67c5077 | /py4seo/Код с занятий/lesson15/case6.py | c744fb9963ddbddaddfc8183397e72160010ec74 | [] | no_license | vvscode/py--notes | 8758d532266e8f39857eb8474ab05e9b8cab3841 | 75ddb6608084eb0983ec061f1fc9d3ba75781d46 | refs/heads/master | 2023-05-05T21:25:11.932183 | 2020-01-05T20:06:09 | 2020-01-05T20:06:09 | 117,715,579 | 1 | 0 | null | 2023-04-21T20:44:33 | 2018-01-16T17:18:13 | HTML | UTF-8 | Python | false | false | 693 | py | import random
from requests_html import HTMLSession
from threading import Lock
from concurrent.futures import ThreadPoolExecutor
locker = Lock()
URLS = []
def parser():
while True:
url = random.choice(URLS)
try:
with HTMLSession() as session:
response = session.get(url, timeout=0.2)
print(f'SUCCESS | {url}')
except Exception as e:
print(e, type(e))
del url, response, session
def main():
max_threads = 200
with ThreadPoolExecutor(max_workers=max_threads) as executor:
for _ in range(max_threads):
executor.submit(parser)
if __name__ == '__main__':
main()
| [
"v.vanchuk@tut.by"
] | v.vanchuk@tut.by |
57f2eca3ee693f0fc10c2da2934b0f5b21cdc3d8 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /Mm4BaYNPaXHJKWA7M_18.py | fc7bcf94df41e074c5efb7e1d46d575269b4b54f | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | """
Smash factor is a term in golf that relates to the amount of energy
transferred from the club head to the golf ball. The formula for calculating
smash factor is _ball speed divided by club speed_.
Create a function that takes ball speed `bs` and club speed `cs` as arguments
and returns the smash factor to the nearest hundredth.
### Examples
smash_factor(139.4, 93.8) ➞ 1.49
smash_factor(181.2, 124.5) ➞ 1.46
smash_factor(154.7, 104.3) ➞ 1.48
### Notes
* Remember to round to the nearest hundredth.
* All values will be valid (so no dividing by zero).
"""
def smash_factor(bs, cs):
return (round(bs/cs,2))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
855d518bdb0ef8347485b6647d85f6b62a18eb21 | 04a21caac09bc47f2291dbf80bc8114c09886fa0 | /cride/users/models/users.py | 05ed0b0ca2abb6d66ef6921d37a59f4207bda200 | [
"MIT"
] | permissive | omiguelperez/advanced-django-cride | 312fda3ddb9a856d206fd5e51afb943aced6d20c | 711954212bd16ce4b08bc376f06a35d31883725c | refs/heads/master | 2022-02-18T02:02:02.734740 | 2019-04-01T03:53:03 | 2019-04-01T03:53:03 | 194,743,356 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,474 | py | """User model."""
from django.contrib.auth.models import AbstractUser
from django.core.validators import RegexValidator
from django.db import models
from cride.utils.models import CRideModel
class User(CRideModel, AbstractUser):
"""User model.
Extend from Django's Abstract User, change the username field
to email and add some extra fields.
"""
email = models.EmailField(
'email address',
unique=True,
error_messages={
'unique': 'A user with that email already exists.'
}
)
phone_regex = RegexValidator(
regex=r'\+?1?\d{9,15}$',
message="Phone number must be entered in the format: +999999999. Up to 15 digits allowed."
)
phone_number = models.CharField(validators=[phone_regex], max_length=17, blank=True)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username', 'first_name', 'last_name']
is_client = models.BooleanField(
'client',
default=True,
help_text=(
'Help easily distinguish users and perform queries. '
'Clients are the main type of user.'
)
)
is_verified = models.BooleanField(
'verified',
default=True,
help_text='Set to true when the user have verified its email address.'
)
def __str__(self):
"""Return username."""
return self.username
def get_short_name(self):
"""Return username."""
return self.username
| [
"mr.omiguelperez@gmail.com"
] | mr.omiguelperez@gmail.com |
5a38dc3ff805c30370e2bf6a50996c0ae28797a4 | 8b4e456df9821aaba6dcafd34bea109506f97739 | /app/tasks.py | be73607b3d5fdfe3b6e4fed3f1d6b910e9a025b8 | [] | no_license | direyes71/recognizer | 54f777bd2ef096f26e4b4f017e6a15809898c6be | 12553fdee3690caa0ae8142f5f3427b9c0c2024c | refs/heads/master | 2021-04-09T16:46:32.422118 | 2015-06-04T15:37:36 | 2015-06-04T15:37:36 | 35,741,978 | 0 | 0 | null | 2015-06-04T15:37:36 | 2015-05-16T22:04:06 | Python | UTF-8 | Python | false | false | 1,748 | py | __author__ = 'diego'
from django.conf import settings
from face_client import FaceClient
from app.data import DEACTIVE_STATUS
from app.data import LEVEL_RECOGNIZE_HIGH
from app.data import LEVEL_RECOGNIZE_MEDIUM
from app.models import RequestRecognizer
def recognize_photo(request_id):
"""
This task execute the recognizer function
"""
request = RequestRecognizer.objects.get(id=request_id)
request.imagenByteArray = request.image_to_binary
request.save()
# Create one instance of library for connect to webservice
client = FaceClient(
'245c8bb50b2f42228a6a998863f5a1e0',
'c1800f96cf0647fb8412ae8d3dae1202',
)
# Call function web service
result = client.faces_recognize(
'all',
file=request.image,
aggressive=True,
namespace='CompareFaces',
)
# Validate if there are results
if result['photos'][0]['tags']:
recognize = None
level_recognize = ''
for item in result['photos'][0]['tags'][0]['uids']: # If exists coincidences
if item['confidence'] >= 80:
level_recognize = LEVEL_RECOGNIZE_HIGH
elif item['confidence'] >= 60 and item['confidence'] < 80:
level_recognize = LEVEL_RECOGNIZE_MEDIUM
if not recognize and item['confidence'] < 60:
request.access = False
request.status = DEACTIVE_STATUS
if not recognize or (recognize and item['confidence'] > recognize['confidence']):
recognize = item
recognize['uid'] = recognize['uid'].split('@')[0]
recognize['level'] = level_recognize
request.result_recognizer = recognize
request.save()
| [
"direyes71@hotmail.com"
] | direyes71@hotmail.com |
0cbd95477c3b901fce05ed8d66b190c37f77b985 | 039f2c747a9524daa1e45501ada5fb19bd5dd28f | /AGC002/AGC002a.py | 26bd610c9b311a3a572a138f8dae20f7e64b3e83 | [
"Unlicense"
] | permissive | yuto-moriizumi/AtCoder | 86dbb4f98fea627c68b5391bf0cc25bcce556b88 | 21acb489f1594bbb1cdc64fbf8421d876b5b476d | refs/heads/master | 2023-03-25T08:10:31.738457 | 2021-03-23T08:48:01 | 2021-03-23T08:48:01 | 242,283,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | #AGC002a
def main():
import sys
input=sys.stdin.readline
sys.setrecursionlimit(10**6)
# map(int, input().split())
if __name__ == '__main__':
main() | [
"kurvan1112@gmail.com"
] | kurvan1112@gmail.com |
b0aa05aefe0a791d080c4799c9689c885aaad80b | 71dc727f9056934cd51692f8a3d26cf0dda44ef0 | /code/Chapter-13/button_demo.py | 08ab772ee6b373325d3b9a57ebb47c41ce106607 | [
"MIT"
] | permissive | justinclark-dev/CSC110 | 9d255020a50bbfdb195465c3e742dd2fcd61e3a4 | d738ec33b757ba8fa9cf35b2214c184d532367a0 | refs/heads/master | 2022-12-08T08:08:30.667241 | 2020-09-04T01:05:34 | 2020-09-04T01:05:34 | 232,606,910 | 0 | 1 | MIT | 2020-09-04T02:05:47 | 2020-01-08T16:28:37 | Python | UTF-8 | Python | false | false | 1,134 | py | # This program demonstrates a Button widget.
# When the user clicks the Button, an
# info dialog box is displayed.
import tkinter
import tkinter.messagebox
class MyGUI:
def __init__(self):
# Create the main window widget.
self.main_window = tkinter.Tk()
# Create a Button widget. The text 'Click Me!'
# should appear on the face of the Button. The
# do_something method should be executed when
# the user clicks the Button.
self.my_button = tkinter.Button(self.main_window, \
text='Click Me!', \
command=self.do_something)
# Pack the Button.
self.my_button.pack()
# Enter the tkinter main loop.
tkinter.mainloop()
# The do_something method is a callback function
# for the Button widget.
def do_something(self):
# Display an info dialog box.
tkinter.messagebox.showinfo('Response', \
'Thanks for clicking the button.')
# Create an instance of the MyGUI class.
my_gui = MyGUI()
| [
"justinclark.dev@gmail.com"
] | justinclark.dev@gmail.com |
35a435579aa3279d7688256fb5111391ac808ef5 | 22292acc8236d76be9b46ead2cc3c93ff143aa43 | /autoasia/main/views.py | f9934794fa6c6697ba82e95bc9132329cf2fbcb5 | [] | no_license | Aitodev/autoasialastfinish | 1cde3991b531fede949d492461d5e65712d30ba7 | a3ed23a0475c5522603dc3ddb5a1c6782bc627b1 | refs/heads/main | 2023-01-29T00:42:03.874396 | 2020-12-06T09:30:07 | 2020-12-06T09:30:07 | 318,996,215 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,426 | py | from .models import Bestproduct, Brand, Automodel, Category, Product
from django.shortcuts import render, redirect
from cart.forms import CartAddProductForm
from django.core.mail import send_mail
from .forms import ApplicationsForm
from django.views import View
from cart.cart import Cart
import telebot
bot = telebot.TeleBot("1387522266:AAHTqKbJzHhhwqwsi7-q8oCD-cxKMwj4k04")
def index(request):
products = Bestproduct.objects.all()
cart_product_form = CartAddProductForm()
brands = Brand.objects.all()
cart = Cart(request)
context = {
'best': products,
'brands': brands,
'cart': cart,
'cart_product_form': cart_product_form,
}
return render(request, 'main/index.html', context)
def about(request):
return render(request, 'main/about.html')
def shop(request):
return render(request, 'main/product.html')
def contact(request):
return render(request, 'main/contact.html')
def automodels(request, brand_pk):
automodels = Automodel.objects.filter(brand_id=brand_pk)
context = {
'automodels': automodels,
'brand_pk': brand_pk,
}
return render(request, 'main/automodels.html', context)
def category(request, brand_pk, model_pk):
categories = Category.objects.all()
context = {
'categories': categories,
'brand_pk': brand_pk,
'model_pk': model_pk,
}
return render(request, 'main/categories.html', context)
def products(request, brand_pk, model_pk, category_pk):
category = Category.objects.filter(id=category_pk)
if category.exists():
if not category.first().depends_on_brands:
products = Product.objects.filter(category_id=category_pk)
else:
products = Product.objects.filter(brand_id=brand_pk, automodel_id=model_pk, category_id=category_pk)
context = {
'products': products,
'brand_pk': brand_pk,
'model_pk': model_pk,
'category_pk': category_pk,
}
return render(request, 'main/products.html', context)
def product_view(request, product_pk):
brands = Brand.objects.all()
product = Product.objects.get(id=product_pk)
cart_product_form = CartAddProductForm()
cart = Cart(request)
context = {
'cart_product_form': cart_product_form,
'product_pk': product_pk,
'product': product,
'brands': brands,
'cart': cart,
}
return render(request, 'main/product.html', context)
class ApplicationsView(View):
def post(self, request):
if request.method == 'POST':
form = ApplicationsForm(request.POST)
# print(request.POST)
if form.is_valid():
form.save()
mail = form.cleaned_data['mail']
name = form.cleaned_data['name']
phone = form.cleaned_data['phone']
subject = 'Новая заявка!'
from_email = 'assassinaltair@bk.ru'
to_email = ['aitofullstackdev@gmail.com', 'aitolivelive@gmail.com']
message = 'Новая заявка на обратный звонок!' + '\r\n' + '\r\n' + 'Почта: ' + mail + '\r\n' + '\r\n' + 'Имя:' + name + '\r\n' + '\r\n' + 'Номер телефона: ' + phone
# send_mail(subject, message, from_email, to_email, fail_silently=False)
bot.send_message(-387514692, message)
return redirect('main:contact')
| [
"guruitcompany@gmail.com"
] | guruitcompany@gmail.com |
6b76ca335015094d199ba7d6198a583bf8f246c5 | 8aa82cdccce91f9bef512444f03d829866b0fa7c | /load_tex.py | 2856fb54b5e134123216acd3c11d4121eb97a028 | [
"MIT"
] | permissive | RuthAngus/interwebz | f67c11f3ff35717df8b107ecf9cb7434d1f06ec5 | 115c8e6b589277d1c269092c1cf32b343e40cf18 | refs/heads/master | 2021-01-17T08:46:17.995418 | 2016-06-23T16:11:58 | 2016-06-23T16:11:58 | 61,044,977 | 0 | 3 | null | 2016-06-23T13:10:03 | 2016-06-13T14:54:17 | HTML | UTF-8 | Python | false | false | 1,539 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import re
import string
import tarfile
import fnmatch
import requests
import feedparser
__all__ = ["run", "process_file"]
URL = "http://arxiv.org/rss/astro-ph"
COMMENT_RE = re.compile(r"(?<!\\)%")
AMP_RE = re.compile(r"(?<!\\)&")
def run():
tree = feedparser.parse(URL)
for entry in tree.entries:
url = entry.id.replace("/abs/", "/e-print/")
print(url)
def process_file(fh):
with tarfile.open(fileobj=fh) as f:
for mem in f.getmembers():
if not fnmatch.fnmatch(mem.name, "*.tex"):
continue
with f.extractfile(mem) as txtf:
txt = txtf.read()
txt = txt.decode("utf-8")
for line in txt.splitlines():
groups = COMMENT_RE.findall(line)
if len(groups):
comment = "%".join(line.split("%")[1:]).strip(" \t%")
flag = (
len(comment) > 0 and
len(AMP_RE.findall(comment)) == 0 and
comment[0] not in string.punctuation
)
if flag:
print(comment)
return comment
def load_tex(arxiv_number):
with open("{0}".format(str(arxiv_number)), "rb") as f:
text = process_file(f)
return text
if __name__ == "__main__":
# load_tex("1605.08574v1")
with open("1605.08574v1", "rb") as f:
process_file(f)
| [
"ruth.angus@astro.ox.ac.uk"
] | ruth.angus@astro.ox.ac.uk |
e697db1a8a9a0813a52398bff58398e082e302be | 340f106a213c57d5621124187ca061690334364d | /models/learner_lstm.py | e449c68299db28bd180641ae6c78abddfe864ac9 | [] | no_license | sculd/financial-timeseries-prediction | c461bc7a7c8760ab090b8f53da50daa1f754da7f | c31784a63402580d0b04557e2ab29fc9a3126c9f | refs/heads/master | 2022-10-11T16:53:46.469168 | 2020-09-27T04:19:41 | 2020-09-27T04:19:41 | 209,095,902 | 0 | 0 | null | 2022-09-23T22:28:02 | 2019-09-17T15:52:12 | Python | UTF-8 | Python | false | false | 4,966 | py | import tensorflow as tf, math, pandas as pd, numpy as np
from data.read import get_data, look_back, TEST_SIZE_LSTM, TAIL_VALID_SIZE_LSTM, TRAIN_SZIE_LSTM, HEAD_VALID_SIZE_LSTM
from models.learner_common import batchmark_accuracy, accuracy, print_message
# part of the source code with the blog post at http://monik.in/a-noobs-guide-to-implementing-rnn-lstm-using-tensorflow/
##########################################################
test_data, test_labels, test_index, tail_valid_data, tail_valid_labels, tv_index, head_valid_data, head_valid_labels, \
hv_index, train_data, train_labels, train_index = get_data(TEST_SIZE_LSTM, TAIL_VALID_SIZE_LSTM, TRAIN_SZIE_LSTM, HEAD_VALID_SIZE_LSTM)
test_data, test_labels, tail_valid_data, tail_valid_labels, head_valid_data, head_valid_labels, train_data, train_labels = \
test_data.as_matrix(), test_labels.as_matrix(), tail_valid_data.as_matrix(), \
tail_valid_labels.as_matrix(), head_valid_data.as_matrix(), head_valid_labels.as_matrix(), train_data.as_matrix(), train_labels.as_matrix()
##########################################################
# With gradient descent training, even this much data is prohibitive.
# Subset the training data for faster turnaround.
batch_size = 500
reg_lambda = 0.01
num_steps = 10001
NODE_SIZE = 100
# up, down
num_labels = 2
device_name = "/gpu:0"
graph = tf.Graph()
with tf.device(device_name):
with graph.as_default():
data = tf.placeholder(tf.float32, [None, look_back + 0, 1])
target = tf.placeholder(tf.float32, [None, num_labels])
# lstm element
cell = tf.contrib.rnn.LSTMCell(NODE_SIZE, state_is_tuple=True)
val, _ = tf.nn.dynamic_rnn(cell, data, dtype = tf.float32)
val = tf.transpose(val, [1, 0, 2])
last = tf.gather(val, look_back - 1)
# lstm layout
weights = tf.Variable(tf.truncated_normal([NODE_SIZE, num_labels], stddev = 1.0 / math.sqrt(num_labels)))
biases = tf.Variable(tf.zeros([num_labels]))
prediction = tf.matmul(last, weights) + biases
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=target, logits=prediction))
loss += reg_lambda * (tf.nn.l2_loss(weights) + tf.nn.l2_loss(biases))
# optimize
optimizer = tf.train.AdamOptimizer(learning_rate = 0.01).minimize(loss)
#global_step = tf.Variable(0) # count the number of steps taken.
#learning_rate = tf.train.exponential_decay(0.05, global_step, 1, 0.99995, staircase=True)
#optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss, global_step=global_step)
########################################################################
def reshape(data):
return data.reshape(tuple(list(data.shape) + [1]))
with tf.Session(graph=graph) as session:
tf.global_variables_initializer().run()
for step in range(num_steps):
offset = (step * batch_size) % (train_data.shape[0] - batch_size)
# Generate a minibatch.
batch_data = train_data[offset : (offset + batch_size)]
batch_labels = train_labels[offset : (offset + batch_size)]
_, predictions = session.run([optimizer, prediction], feed_dict = {data: reshape(batch_data), target: batch_labels})
if (step % 500 == 0):
print("at step %d" % step)
pred_tail_valid = session.run(prediction, feed_dict = {data: reshape(tail_valid_data), target: tail_valid_labels})
pred_head_valid = session.run(prediction, feed_dict = {data: reshape(head_valid_data), target: head_valid_labels})
pred_test = session.run(prediction, feed_dict = {data: reshape(test_data), target: test_labels})
print_message('batch', accuracy(predictions, batch_labels), batch_labels)
print_message('validation (tail)', accuracy(pred_tail_valid, tail_valid_labels), tail_valid_labels)
print_message('validation (head)', accuracy(pred_head_valid, head_valid_labels), head_valid_labels)
print_message('test', accuracy(pred_test, test_labels), test_labels)
print()
def pred_save(mark, prices, labels, index):
pred = session.run(prediction, feed_dict = {data: prices, target: labels})
pred_df = pd.DataFrame(data=pred, index=index, columns = ['up', 'down'])
pred_df.to_csv('predictions/pred_' + mark + '.csv')
pred_save('train', train_data, train_labels, train_index)
pred_save('valid_tail', tail_valid_data, tail_valid_labels, tv_index)
pred_save('valid_head', head_valid_data, head_valid_labels, hv_index)
pred_save('test', test_data, test_labels, test_index)
session.close()
del session
| [
"hjunlim@google.com"
] | hjunlim@google.com |
6a126068629fd9548d89ce0d8d6dbe2aa994c817 | 1bdb0d897228225d9d44e303bd509258565f868e | /bankAccount.py | 16cb47771e7af7f50d1fb6e5a9417b36630f6155 | [] | no_license | jtclayt/python_oop | 3b43bce8862fb1259a3a1ac008f32879bef4bcf0 | 098cb06fee15199c7650eb76dbbf84dc5d2a7781 | refs/heads/master | 2022-11-11T05:01:06.744805 | 2020-07-01T16:14:51 | 2020-07-01T16:14:51 | 276,210,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,475 | py | import unittest
class BankAccount:
def __init__(self, int_rate, balance=0):
if (int_rate < 0):
raise ValueError('Can not have negative interest rate')
if (balance < 0):
raise ValueError('Can not have negative balance')
self.int_rate = int_rate / 100
self.balance = balance
def deposit(self, amount):
if (amount <= 0):
raise ValueError('Amount must be positive')
else:
self.balance += amount
return self
def withdraw(self, amount):
if (amount <= 0):
raise ValueError('Amount must be positive')
elif (amount > self.balance):
raise ValueError('Insufficient funds')
else:
self.balance -= amount
return self
def display_account_info(self):
print(f'\nBalance: ${round(self.balance, 2)}')
def yield_interest(self):
self.balance *= 1 + self.int_rate
return self
class TestBankAccount(unittest.TestCase):
def setUp(self):
self.account1 = BankAccount(1, 200)
self.account2 = BankAccount(3, 2000)
def testCreateAccount(self):
testAccount = BankAccount(5, 100)
self.assertEqual(0.05, testAccount.int_rate)
self.assertEqual(100, testAccount.balance)
def testBadInterest(self):
self.assertRaises(ValueError, BankAccount, -2, 100)
def testBadBalance(self):
self.assertRaises(ValueError, BankAccount, 2, -100)
def testWithdrawal(self):
self.account1.withdraw(100)
self.assertEqual(100, self.account1.balance)
def testOverdraw(self):
self.assertRaises(ValueError, self.account1.withdraw, 500)
def testNegWithdrawal(self):
self.assertRaises(ValueError, self.account1.withdraw, -100)
def testDeposit(self):
self.account1.deposit(100)
self.assertEqual(300, self.account1.balance)
def testNegDeposit(self):
self.assertRaises(ValueError, self.account1.deposit, -100)
def testYieldInterest(self):
self.account1.yield_interest()
self.assertEqual(200*1.01, self.account1.balance)
def testUse(self):
self.account1.deposit(100).deposit(100).deposit(100).withdraw(400)
self.account1.display_account_info()
self.assertEqual(100, self.account1.balance)
self.account2.deposit(100).deposit(100).withdraw(400).withdraw(400)
self.account2.withdraw(400).yield_interest().display_account_info()
self.assertEqual(1000*1.03, self.account2.balance)
if __name__ == '__main__':
unittest.main()
| [
"jt.clayton92@yahoo.com"
] | jt.clayton92@yahoo.com |
854a8b4fec36223fca1c7345598fef9b213aaff1 | f5317a4991edfc4b4da9a42816251b391c8421e0 | /anima/ui/__init__.py | 6c5a6fb65387216f6db09b01bec54cdfe8c741ed | [
"BSD-2-Clause"
] | permissive | initcard/anima | efc564f802f47e89f4f3a1b0a7d38219ac113c3b | 7a86c518165a1bd2df0410f21c6fb8fcb7ce8f83 | refs/heads/master | 2021-01-17T16:03:40.403878 | 2017-06-17T12:52:19 | 2017-06-17T12:52:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2012-2017, Anima Istanbul
#
# This module is part of anima-tools and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
import os
from anima import logger
# Choose between PyQt4 or PySide
PYSIDE = 'PySide'
PYSIDE2 = 'PySide2'
PYQT4 = 'PyQt4'
# set the default
qt_lib_key = "QT_LIB"
qt_lib = PYSIDE
if qt_lib_key in os.environ:
qt_lib = os.environ[qt_lib_key]
def IS_PYSIDE():
return qt_lib == PYSIDE
def IS_PYSIDE2():
return qt_lib == PYSIDE2
def IS_PYQT4():
return qt_lib == PYQT4
def SET_PYSIDE():
logger.debug('setting environment to PySide')
global qt_lib
qt_lib = PYSIDE
def SET_PYSIDE2():
logger.debug('setting environment to PySide2')
global qt_lib
qt_lib = PYSIDE2
def SET_PYQT4():
logger.debug('setting environment to PyQt4')
global qt_lib
qt_lib = PYQT4
| [
"eoyilmaz@gmail.com"
] | eoyilmaz@gmail.com |
34bfea2a785d4b36dcdc4135370dbcf76b7c4e06 | 4305c06bdfc8b66b7643909904716c52b03a7cc4 | /redash/handlers/__init__.py | 8de5cedaacbd5620a9388e9cd71d85cb6f501c12 | [
"BSD-2-Clause"
] | permissive | Jayson0626/redash | dff8c5d4492f272b7c6b1a513e5017818661f1e3 | 0aebb373170260fd3ea6242487c4ecca3f063eff | refs/heads/master | 2020-09-11T03:10:37.318956 | 2019-11-14T18:23:00 | 2019-11-14T18:23:00 | 221,921,987 | 1 | 0 | BSD-2-Clause | 2019-11-15T12:42:28 | 2019-11-15T12:42:27 | null | UTF-8 | Python | false | false | 687 | py | from flask import jsonify
from flask_login import login_required
from redash.handlers.api import api
from redash.handlers.base import routes
from redash.monitor import get_status
from redash.permissions import require_super_admin
from redash.security import talisman
@routes.route('/ping', methods=['GET'])
@talisman(force_https=False)
def ping():
return 'PONG.'
@routes.route('/status.json')
@login_required
@require_super_admin
def status_api():
status = get_status()
return jsonify(status)
def init_app(app):
from redash.handlers import embed, queries, static, authentication, admin, setup, organization
app.register_blueprint(routes)
api.init_app(app)
| [
"arik@arikfr.com"
] | arik@arikfr.com |
8286e6d79e267242a94e76622826b3a7840d8083 | cedd479d853d87e04b6b8c005a63e5e9120d9b98 | /chef/forms.py | 84a060492713d6804275aa3b6f79068f2949dfd6 | [] | no_license | CSCI-441-Fall-2020-Group-Project/QUICKBYTES | 53807a777217fb25e60eda6d3fe955a2a1e9cf83 | ace40c39338bfea412f3538fc08235b06889a414 | refs/heads/master | 2023-01-12T13:30:28.216943 | 2020-11-04T14:58:30 | 2020-11-04T14:58:30 | 309,734,273 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | from django.forms import ModelForm, Textarea
from manager.models import Worker_Complaint
from tickets.models import Orderstable
class SendBackForm(ModelForm):
class Meta:
model = Orderstable
fields = ('message',)
widgets = {'message': Textarea(attrs={'rows': 4}),}
class Worker_Complaint_Form(ModelForm):
class Meta:
model = Worker_Complaint
fields = ('complaint',)
widgets = {'complaint': Textarea(attrs={'rows': 4}),}
| [
"klronholt@mail.fhsu.edu"
] | klronholt@mail.fhsu.edu |
21bb286a335c009e4b243e8ca790163770effd79 | 523f8f5febbbfeb6d42183f2bbeebc36f98eadb5 | /76__.py | 0d10c6bf2bbba9b0c43fd3f5ee4fb959e013bf3f | [] | no_license | saleed/LeetCode | 655f82fdfcc3000400f49388e97fc0560f356af0 | 48b43999fb7e2ed82d922e1f64ac76f8fabe4baa | refs/heads/master | 2022-06-15T21:54:56.223204 | 2022-05-09T14:05:50 | 2022-05-09T14:05:50 | 209,430,056 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | def minWindow(s, t):
"""
:type s: str
:type t: str
:rtype: str
"""
if len(s)==0 or len(t)==0 or len(t)>len(s):
return 0
dict={}
for i in t:
if dict.has_key(i):
dict[i]=dict[i]+1
else:
dict[i]=1
i=0
j=0
si=0
sj=0
maxLen=float("inf")
need=len(t)
while i<len(s) or j<len(s):
print need,i,j,s[i:j+1]
if need>0 and j==len(s):
break
elif need>0 and j<len(s):
if dict.has_key(s[j]) and dict[s[j]]>0:
need=need-1
dict[s[j]]=dict[s[j]]-1
j=j+1
elif need==0:
if j-i<maxLen:
si=i
sj=j
maxLen=j-i
if i<len(s):
if dict.has_key(s[i]) and dict[s[i]]==0:
need=need+1
dict[s[i]]=dict[s[i]]+1
i=i+1
else:
break
return maxLen,si,sj
S = "ADOBECODEBANC"
T = "ABC"
print minWindow(S,T)
| [
"1533441387@qq.com"
] | 1533441387@qq.com |
685c9a255c172419b5f8d0d0b19a1583cb33070d | 70054615f56be28373b00c9df96544ec822be683 | /res/scripts/client/gui/scaleform/managers/cursor.py | a9febe503d475fc983cbf4676e9b4ce0bd9d68d2 | [] | no_license | wanyancan/WOTDecompiled | c646ad700f5ec3fb81fb4e87862639ce0bdf0000 | 9ffb09007a61d723cdb28549e15db39c34c0ea1e | refs/heads/master | 2020-04-17T23:13:15.649069 | 2013-11-15T16:37:10 | 2013-11-15T16:37:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,046 | py | from debug_utils import LOG_DEBUG, LOG_ERROR
from gui.Scaleform.daapi.view.meta.CursorMeta import CursorMeta
from gui.Scaleform.framework.entities.View import View
import GUI
import BigWorld
__author__ = 'd_trofimov'
class Cursor(CursorMeta, View):
    """Scaleform mouse-cursor view that routes the BigWorld cursor to Flash."""

    # Cursor shape identifiers understood by the Flash side.
    ARROW = 'arrow'
    AUTO = 'auto'
    BUTTON = 'button'
    HAND = 'hand'
    IBEAM = 'ibeam'
    ROTATE = 'rotate'
    RESIZE = 'resize'
    MOVE = 'move'
    DRAG_OPEN = 'dragopen'
    DRAG_CLOSE = 'dragclose'
    __DAAPI_ERROR = 'flashObject is Python Cursor class can`t be None!'
    # Class-wide flag: show the cursor automatically on the next _populate.
    __isAutoShow = False

    def __init__(self):
        super(Cursor, self).__init__()
        self.__isActivated = False

    @classmethod
    def setAutoShow(cls, flag):
        cls.__isAutoShow = flag

    @classmethod
    def getAutoShow(cls):
        return cls.__isAutoShow

    def _populate(self):
        super(Cursor, self)._populate()
        # Consume the one-shot auto-show flag set before this view existed.
        self.attachCursor(self.__isAutoShow)
        self.setAutoShow(False)

    def _dispose(self):
        super(Cursor, self)._dispose()

    def attachCursor(self, automaticallyShow):
        """Take over the BigWorld mouse cursor, optionally showing this view."""
        if automaticallyShow:
            self.show()
        if self.__isActivated:
            return
        mouse = GUI.mcursor()
        # Hide the native cursor; the Flash cursor is drawn instead.
        mouse.visible = False
        LOG_DEBUG('Cursor attach')
        BigWorld.setCursor(mouse)
        self.__isActivated = True

    def detachCursor(self, automaticallyHide):
        """Return cursor control to BigWorld, optionally hiding this view."""
        if self.__isActivated:
            LOG_DEBUG('Cursor detach')
            BigWorld.setCursor(None)
            self.__isActivated = False
        if automaticallyHide:
            self.hide()

    def show(self):
        if self.flashObject is None:
            LOG_ERROR(self.__DAAPI_ERROR)
            return
        self.flashObject.visible = True

    def hide(self):
        if self.flashObject is None:
            LOG_ERROR(self.__DAAPI_ERROR)
            return
        self.flashObject.visible = False

    def setCursorForced(self, cursor):
        # Forward the requested shape straight to the Flash side.
        self.as_setCursorS(cursor)
| [
"james.sweet88@googlemail.com"
] | james.sweet88@googlemail.com |
9f781ee0b038c6d31e9c33a177b949be99668343 | ffb05b145989e01da075e2a607fb291955251f46 | /pypers/europython05/Quixote-2.0/server/util.py | 69ed675e4d678a36b67a60da4c6b4615a4a1720a | [] | no_license | micheles/papers | a5e7f2fa0cf305cd3f8face7c7ecc0db70ce7cc7 | be9070f8b7e8192b84a102444b1238266bdc55a0 | refs/heads/master | 2023-06-07T16:46:46.306040 | 2018-07-14T04:17:51 | 2018-07-14T04:17:51 | 32,264,461 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,180 | py | """$URL: svn+ssh://svn.mems-exchange.org/repos/trunk/quixote/server/util.py $
$Id: util.py 26427 2005-03-30 18:03:32Z dbinger $
Miscellaneous utility functions shared by servers.
"""
from optparse import OptionParser
from quixote.util import import_object
def get_server_parser(doc):
    """Return an OptionParser preconfigured with --host/--port/--factory.

    *doc* becomes the parser description shown in --help output.
    """
    host_default = 'localhost'
    port_default = 8080
    factory_default = 'quixote.demo.create_publisher'
    parser = OptionParser()
    parser.set_description(doc)
    parser.add_option(
        '--host', dest="host", default=host_default, type="string",
        help="Host interface to listen on. (default=%s)" % host_default)
    parser.add_option(
        '--port', dest="port", default=port_default, type="int",
        help="Port to listen on. (default=%s)" % port_default)
    parser.add_option(
        '--factory', dest="factory", default=factory_default,
        help="Path to factory function to create the site Publisher. "
             "(default=%s)" % factory_default)
    return parser
def main(run):
    """Parse server options and invoke *run* with the resolved publisher factory."""
    options, _ = get_server_parser(run.__doc__).parse_args()
    run(import_object(options.factory), host=options.host, port=options.port)
| [
"michele.simionato@gmail.com"
] | michele.simionato@gmail.com |
ffd3a770eb4e8b19edcfe5296e9e5c2ef5e67616 | 893597d91fe6de25cdd3e8427c4ebba29d3cabe1 | /tests/sources/test_inspect.py | dbf85ac7e7d99192102d0d5fbefb4c2bee8983bc | [
"Apache-2.0"
] | permissive | AugustasV/ploomber | d51cefd529bdbf6c2bc82485ba77add6bb346f2b | b631a1b21da64bb7b9525db1c29c32ee3c0e48b4 | refs/heads/master | 2023-08-27T02:22:55.556200 | 2021-10-31T02:52:28 | 2021-10-31T02:52:28 | 423,189,549 | 0 | 0 | Apache-2.0 | 2021-10-31T15:44:17 | 2021-10-31T15:44:16 | null | UTF-8 | Python | false | false | 430 | py | from pathlib import Path
import pytest
from test_pkg.decorated.functions import (decorated_function, function,
double_decorated_function)
from ploomber.sources import inspect
# inspect.getfile must report the defining file even when the function is
# wrapped by one or more decorators.
@pytest.mark.parametrize('fn', [
    function,
    decorated_function,
    double_decorated_function,
])
def test_getfile_from_wrapped_function(fn):
    """getfile resolves through decorator wrappers to the original module file."""
    assert Path(inspect.getfile(fn)).name == 'functions.py'
| [
"github@blancas.io"
] | github@blancas.io |
f6023ce1e639a9795ea6c1a87d4b39fa0b1737d7 | e4af0a837ef7c26e68331cf7390279493d38ff8a | /LetsCook/core/utils.py | 39eeac327ba278a363bf106e638ceedacc4fd642 | [
"MIT"
] | permissive | ivo-bass/iCook | 0d42bcc34f1b53546940fab0779de74473d1c91f | c45f97ac3d8da0c52ccd85ecac0bab51bc4c8048 | refs/heads/main | 2023-07-14T16:49:07.181122 | 2021-08-22T20:30:54 | 2021-08-22T20:30:54 | 383,261,562 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,839 | py | import datetime
from cloudinary import uploader
from LetsCook.profiles.models import Choice
from LetsCook.recipes.models import Recipe
def get_recipes_for_day(request, day):
    """Return the recipes the current user chose for *day*.

    :param day: datetime.date instance
    :return: list of Recipe objects (empty if nothing was chosen)
    """
    day_choices = request.user.choice_set.filter(date=day)
    if not day_choices:
        return []
    return [choice.recipe for choice in day_choices]
def get_recipes_for_current_days(request):
    """Return the chosen recipes for today, yesterday and tomorrow, in that order."""
    today = datetime.date.today()
    one_day = datetime.timedelta(days=1)
    return (
        get_recipes_for_day(request, today),
        get_recipes_for_day(request, today - one_day),
        get_recipes_for_day(request, today + one_day),
    )
def get_top_recipes():
    """Return the public recipes with the most views, likes and comments.

    All three are None when there are no public recipes.
    """
    public_recipes = Recipe.objects.filter(public=True)
    if not public_recipes:
        return None, None, None
    most_viewed = Recipe.objects.filter(public=True).order_by('recipe_views').last()
    # likes_count / comments_count are Python-side properties, so the
    # selection happens in memory rather than in the database.
    most_liked = max(public_recipes, key=lambda recipe: recipe.likes_count)
    most_commented = max(public_recipes, key=lambda recipe: recipe.comments_count)
    return most_viewed, most_liked, most_commented
def get_search_results(request):
    """Case-insensitive keyword search over public recipes.

    Matches the term in the title, description or any ingredient name and
    returns a context dict with the term and the de-duplicated result set.
    """
    searched = request.POST['searched'].lower()
    by_title = Recipe.objects.filter(title__icontains=searched, public=True)
    by_description = Recipe.objects.filter(description__icontains=searched, public=True)
    by_ingredient = Recipe.objects.filter(ingredient__name__icontains=searched, public=True)
    return {
        'searched': searched,
        'recipes': set(by_title | by_description | by_ingredient),
    }
def delete_previous_image(self, model):
    """Best-effort removal of the replaced image from the Cloudinary store.

    Called from a form: when a new image was uploaded, the previously stored
    one is destroyed remotely.  Failures are logged and ignored so that a
    broken remote delete never blocks saving.
    """
    stored = model.objects.get(pk=self.instance.pk)
    if not self.files.get('image'):
        return
    try:
        uploader.destroy(stored.image.public_id)
    except Exception as exc:
        # Deliberately swallowed: remote cleanup is non-critical.
        print(exc)
def save_suggestion(request):
    """Persist the user's recipe choice (optionally dated) from POST data.

    Does nothing when no 'recipe-pk' was posted.
    """
    recipe_pk = request.POST.get('recipe-pk')
    if not recipe_pk:
        return
    choice = Choice(
        recipe=Recipe.objects.get(pk=recipe_pk),
        user=request.user,
    )
    chosen_date = request.POST.get('date')
    if chosen_date:
        choice.date = chosen_date
    choice.save()
def add_view_count(request, recipe):
    """Bump the recipe's view counter unless the viewer is its author."""
    if recipe.author.id == request.user.id:
        return
    recipe.recipe_views += 1
    recipe.save()
def check_image_in_cloudinary(recipe):
    """Verify the recipe image still exists in Cloudinary; clear it if not.

    Any failure from the remote lookup is treated as "image gone".
    """
    try:
        uploader.explicit(recipe.image.public_id, type='upload')
    except Exception as exc:
        print(exc)
        recipe.image = None
        recipe.save()
"ivailo.ignatoff@gmail.com"
] | ivailo.ignatoff@gmail.com |
99a80ce4d6c6af0dccffa137efce3eea8692bc3c | dbfd6af70ff5d50a3c64f83f0b14fb981f457ec5 | /Simulation/컨베이어 벨트.py | ddf00729d788b1109fd768bb741fa4c6c2a844db | [] | no_license | subinmun1997/AI_LeeBrosCode | 5b7119c8c538ae41219b6e10a2fc368a5b142236 | 478b75ac680d401c9c2100d83c6816e04a6794e2 | refs/heads/main | 2023-02-21T11:13:55.432042 | 2021-01-25T14:45:57 | 2021-01-25T14:45:57 | 331,019,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 970 | py | # 변수 선언 및 입력
def simulate(n, t, top, bottom):
    """Rotate the two-row conveyor belt *t* times and return the final rows.

    The belt is a ring: the top row runs left-to-right, the bottom row
    right-to-left.  One rotation shifts both rows right by one slot: the
    rightmost top value drops to the front of the bottom row and the
    rightmost bottom value climbs to the front of the top row.

    :param n: length of each row (rows are assumed to have length n)
    :param t: number of rotations to perform
    :param top: top-row values (not mutated)
    :param bottom: bottom-row values (not mutated)
    :return: (top, bottom) as new lists after t rotations
    """
    top = list(top)
    bottom = list(bottom)
    for _ in range(t):
        carried = top[-1]
        top = [bottom[-1]] + top[:-1]
        bottom = [carried] + bottom[:-1]
    return top, bottom


def main():
    """Read n, t and the two rows from stdin, then print the rotated belt."""
    # First line: n (row length) and t (rotation count).
    n, t = tuple(map(int, input().split()))
    u = list(map(int, input().split()))
    d = list(map(int, input().split()))
    u, d = simulate(n, t, u, d)
    # Output format matches the original script: space-terminated values,
    # newline only between the two rows.
    for elem in u:
        print(elem, end=" ")
    print()
    for elem in d:
        print(elem, end=" ")


if __name__ == "__main__":
    main()
"qzxy812@gmail.com"
] | qzxy812@gmail.com |
f132d43d5b32dff7529d6d5042a1b47f798f8035 | 55f074aa22510c2d3b56caad1aed958acab122cf | /python/comparatist/utils/c.py | 9d4532909c5048ddc1c747d72fe518a1744f3636 | [
"MIT"
] | permissive | tkf/comparatist | 7cddb0e15f392f85cf29c2b7284f6a46283ebf5a | 44f30077857fc96cb77539f3fe0a7e8112f86c82 | refs/heads/master | 2020-06-15T22:25:57.580483 | 2016-12-03T00:54:06 | 2016-12-03T00:54:06 | 75,262,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | import os
import numpy
# Absolute path of the project-level ``lib`` directory (three levels above
# this file), where the compiled shared libraries are expected to live.
libdir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                      os.path.pardir, os.path.pardir, os.path.pardir,
                      'lib')


def load_library(name):
    """Load the shared library *name* from ``libdir`` via numpy.ctypeslib."""
    return numpy.ctypeslib.load_library(name, libdir)
| [
"aka.tkf@gmail.com"
] | aka.tkf@gmail.com |
e81edc8fa4f2db2662be6a62f249442713160be7 | ffe606c85de9009d2c15356f82daa524c343b925 | /nn.py | fb1e03dfca0733519e41a0249f5c09647be117ad | [] | no_license | jbinkleyj/story_writer | d88ff7e3360fb8afd12445d1cb237788636b3083 | dc5106a35f5fbce72f8cf0801c0ad4cbc0c9f12f | refs/heads/master | 2020-07-09T15:54:02.492373 | 2017-12-16T07:26:59 | 2017-12-16T07:26:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,023 | py | import sys
import torch
import torchtext
from itertools import product
from torch import nn
from torch.autograd import Variable
from nltk.translate.bleu_score import SmoothingFunction, sentence_bleu
def calcbleu(gen, targets):
    """Smoothed sentence-level BLEU of *gen* against the reference *targets*.

    Uses NLTK's smoothing method 3 so short or low-overlap hypotheses do
    not collapse to zero.
    """
    smoothing = SmoothingFunction().method3
    return sentence_bleu(targets, gen, smoothing_function=smoothing)
class vecs:
    """Unit-norm GloVe word-vector lookup with per-word memoisation."""

    def __init__(self):
        # 300-d GloVe 6B vectors; unknown words get uniform random vectors.
        self.gl = torchtext.vocab.GloVe(name='6B', dim=300, unk_init=torch.FloatTensor.uniform_)
        self.cache = {}

    def get(self, w):
        """Return the normalised 300-d vector for *w*, caching the result."""
        if w in self.cache:
            return self.cache[w]
        vec = self.gl[w]
        normed = (vec / vec.norm()).squeeze()
        self.cache[w] = normed
        return normed
class load_data:
    """Title/story corpus plus max-pooled GloVe title embeddings.

    ``train`` and ``val`` are ``(titles, stories)`` pairs as returned by
    :meth:`ds`; ``titles`` holds one embedding per training title, used for
    nearest-neighbour lookup in :meth:`nn`.
    """
    def __init__(self,train="data/train.txt.ner",valid="data/valid.txt.ner"):
        self.train = self.ds(train)
        self.val = self.ds(valid)
        self.rawtitles = self.train[0]
        self.vecs = vecs()
        # One 300-d embedding per training title (element-wise max over
        # the title's word vectors).
        self.titles = self.mktitles([x for x in self.train[0]])
    def ds(self,fn):
        """Parse *fn* into (titles, stories).

        Each line is ``title<TAB>story[<TAB>story...]``; every story is
        further split into a list of space-separated tokens.
        """
        with open(fn) as f:
            sources, targs = zip(*[x.strip().split("\t",maxsplit=1) for x in f.readlines()])
        targets = []
        for t in targs:
            t = t.split('\t')
            tmp = []
            for x in t:
                tmp.append(x.split(" "))
            targets.append(tmp)
        return sources, targets
    def mktitles(self,data):
        """Embed each title as the element-wise max of its word vectors."""
        titles = []
        for x in data:
            tmp = torch.stack([self.vecs.get(w) for w in x.split(" ")])
            tmp,_ = torch.max(tmp,0)
            titles.append(tmp.squeeze())
        return torch.stack(titles)
    def nn(self,title,k=1):
        """Indices of the *k* training titles most similar to *title*.

        Similarity is the dot product between unit-norm max-pooled
        embeddings, i.e. cosine similarity.
        """
        v = torch.stack([self.vecs.get(w) for w in title.split(" ")])
        v,_ = torch.max(v,0)
        v = v.view(1,300)
        # Scores against every stored training-title embedding.
        mul = torch.mm(self.titles,v.t())
        _,best = torch.sort(mul,0,True)
        return best[:k]
# --- Nearest-neighbour baseline evaluation ---------------------------------
# For every validation title, retrieve the training story whose title is
# closest and score it with smoothed BLEU against the validation references.
DS = load_data()
valtitles = DS.val[0]
valstories = DS.val[1]
bleu = 0
for i,title in enumerate(valtitles):
    best = DS.nn(title)[0][0]
    # First story attached to the retrieved training title.
    story = DS.train[1][best][0]
    targets = valstories[i]
    bleu += calcbleu(story,targets)
    i+=1
# NOTE(review): enumerate rebinds i each iteration, so after the loop
# i == len(valtitles) and this prints the mean BLEU; it raises NameError
# when the validation set is empty -- confirm that is acceptable.
print(bleu/i)
print(i)
| [
"kedzior@uw.edu"
] | kedzior@uw.edu |
df3801b7049a0dac555f418c36f92cc8763165f4 | 62a212c3d7936c727e09b48d3c10495ea8db12fe | /src/backend/flask_interface/common.py | e1122b81e0d47e278f2af21def554a913f272778 | [] | no_license | antonpaquin/Homulili | 080a2398e9ee7f19566be3de8a30903ae03a3b9e | 3c56ee5c41d5bf3f86a3325c6117d6795e12cdf2 | refs/heads/master | 2021-09-06T15:19:53.166674 | 2018-02-08T00:21:20 | 2018-02-08T00:21:20 | 110,213,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,544 | py | import json
import requests
import logging
from . import config
from . import secret
# Every request to the internal API authenticates with the admin key taken
# from the local ``secret`` module, sent as a plain header.
auth_header = {
    'auth_token': secret.api_admin_key,
}
def standard_request(model: str, method: str, params: dict, logger: logging.Logger):
    """Proxy a CRUD-style *method* for *model* to the backend REST API.

    Builds the endpoint URL from ``config``, drops ``None``-valued entries
    from *params*, performs the HTTP call with the admin auth header and
    returns the ``data`` field of the JSON response.

    :raises RuntimeError: if the response body is not JSON, or the JSON
        reports a non-success ``status``.
    :raises KeyError: if *method* is not one of the supported verbs.
    """
    # Map the abstract CRUD verbs onto HTTP methods; 'index' uses the
    # non-standard VIEW verb the backend expects.
    method_map = {
        'create': requests.put,
        'read': requests.get,
        'update': requests.patch,
        'delete': requests.delete,
        'index': lambda **x: requests.request(method='VIEW', **x),
        'command': requests.post,
    }
    # Drop unset parameters so they are not serialized into the query string.
    pparams = {key: value for key, value in params.items() if value is not None}
    logger.info('{model}::{method} with params {params}'.format(
        method=method,
        model=model,
        params=str(pparams),
    ))
    url = 'http://{hostname}:{port}/{model}'.format(
        hostname=config.api_hostname,
        port=config.api_public_port,
        model=model,
    )
    response = method_map[method](
        url=url,
        headers=auth_header,
        params=pparams,
    )
    try:
        jsn = json.loads(response.text)
    except ValueError:
        # json.JSONDecodeError is a ValueError subclass.  Narrowed from a
        # blanket ``except Exception`` so unrelated programming errors are
        # no longer masked as "response was not json".
        logger.error('{method} failed -- response was not json -- {resp}'.format(
            method=method,
            resp=response.text,
        ))
        raise RuntimeError(response.text)
    if jsn['status'] == 'success':
        return jsn['data']
    logger.error('{method} failed -- err: {err_message}'.format(
        method=method,
        err_message=jsn['err_message'],
    ))
    raise RuntimeError(jsn)
| [
"antonpaquin@gmail.com"
] | antonpaquin@gmail.com |
406c35803742f4bc20ff3095e6fe1e009e263b66 | 303ae7311f1d97982e62c4f0b5b14fff28ae346f | /core/auth/local/user_management.py | bd403c7bd134f27dbb8897386f3a14b5bd74cd00 | [
"Apache-2.0"
] | permissive | phAlvex/yeti | bc807598308473a2d2d0690d1ed31ffa2d10e50b | 431c05a194ba081f2279d487ab5ed3a24a5e7bd7 | refs/heads/master | 2022-02-23T15:50:34.296463 | 2019-07-19T10:33:34 | 2019-07-19T10:33:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,137 | py | import os
import hmac
from core.logger import userLogger
from hashlib import sha512
from flask import current_app
from flask_login.mixins import AnonymousUserMixin
from werkzeug.security import check_password_hash, generate_password_hash
from core.user import User
from mongoengine import DoesNotExist
# Permission matrix granted to newly created users (see ``create_user``):
# one entry per object type, mapping action names to booleans, plus the
# global ``admin`` flag.  These defaults grant everything.
DEFAULT_PERMISSIONS = {
    "feed": {
        "read": True,
        "write": True,
        "toggle": True,
        "refresh": True
    },
    "observable": {
        "read": True,
        "write": True,
        "tag": True
    },
    "indicator": {
        "read": True,
        "write": True
    },
    "exporttemplate": {
        "read": True,
        "write": True
    },
    "entity": {
        "read": True,
        "write": True
    },
    "scheduledanalytics": {
        "read": True,
        "write": True,
        "toggle": True,
        "refresh": True
    },
    "oneshotanalytics": {
        "read": True,
        "write": True,
        "toggle": True,
        "run": True
    },
    "inlineanalytics": {
        "read": True,
        "write": True,
        "toggle": True
    },
    "tag": {
        "read": True,
        "write": True
    },
    "export": {
        "read": True,
        "write": True,
        "toggle": True,
        "refresh": True
    },
    "attachedfiles": {
        "read": True,
        "write": True
    },
    "file": {
        "read": True,
        "write": True
    },
    "link": {
        "read": True,
        "write": True
    },
    "neighbors": {
        "read": True,
        "write": True
    },
    "investigation": {
        "read": True,
        "write": True
    },
    "user": {
        "read": True,
        "write": True
    },
    "admin": True,
}
def get_default_user():
    """Return the account to use when no explicit login happened.

    With fewer than two accounts, authentication is effectively anonymous
    and the built-in ``yeti`` user is returned (created on first run);
    otherwise an anonymous mixin is handed back.
    """
    try:
        # Two or more accounts: anonymous access only.
        if User.objects.count() >= 2:
            return AnonymousUserMixin()
        userLogger.info("Default user logged in : yeti")
        return User.objects.get(username="yeti")
    except DoesNotExist:
        # First run: the built-in account does not exist yet.
        return create_user("yeti", "yeti")
def create_user(username, password, permissions=DEFAULT_PERMISSIONS):
    """Create, persist and return a new User with hashed credentials."""
    new_user = User(username=username, permissions=permissions)
    return set_password(new_user, password).save()
def authenticate(username, password):
    """Check *username*/*password*; return the User on success, False otherwise."""
    try:
        candidate = User.objects.get(username=username)
    except DoesNotExist:
        return False
    if not check_password_hash(candidate.password, password):
        userLogger.warn("Attempt to log in to : %s", username)
        return False
    userLogger.info("User logged in : %s", username)
    return candidate
def generate_session_token(user):
    """Derive an unguessable per-session token for *user*.

    HMAC-SHA512 over username + password hash + 12 random bytes, keyed
    with the Flask SECRET_KEY.  NOTE: Python 2 only -- ``.encode('hex')``
    and the str concatenation here do not run on Python 3.
    """
    key = current_app.config['SECRET_KEY']
    return hmac.new(
        key, (user.username + user.password + os.urandom(12).encode('hex')),
        sha512).hexdigest()
def set_password(user, password):
    """Hash *password* onto *user* and rotate its API key and session token.

    The user object is returned unsaved; callers persist it themselves.
    """
    user.password = generate_password_hash(
        password, method='pbkdf2:sha256:20000')
    # A password change invalidates existing credentials: issue a fresh
    # API key and session token.
    user.api_key = User.generate_api_key()
    user.session_token = generate_session_token(user)
    userLogger.info("User password changed : %s", user.username)
    return user
| [
"tomchop@gmail.com"
] | tomchop@gmail.com |
34b2e08e7714e4745e39947c427960299373d020 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02695/s972481944.py | 7868151bd0fa7570aab06c7c35a30fa00b6d703e | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | #!/usr/bin/env python3
# Brute-force search over all non-decreasing candidate sequences.
def dfs(seq):
    """Recursively extend the non-decreasing tuple *seq* and return the
    best achievable query score.

    Relies on module globals: N (target length), M (maximum value) and
    data (the query quadruples a, b, c, d).
    """
    if len(seq) == N:
        # Sequence complete: add the reward of every satisfied query.
        return sum(d for a, b, c, d in data if seq[b - 1] - seq[a - 1] == c)
    best = 0
    for nxt in range(seq[-1], M + 1):
        best = max(best, dfs(seq + (nxt,)))
    return best
# Read N (sequence length), M (maximum value) and Q (number of queries).
N, M, Q = map(int, input().split())
# Read the Q query quadruples (a, b, c, d).
data = [list(map(int, input().split())) for _ in range(Q)]
ans = -1
# Search every non-decreasing sequence starting from 1.
score = dfs((1,))
ans = max(ans, score)
print(ans)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
ec6393792cd31f7832708be203a21bf5ded25a23 | 0ca218c0f54dac33a2ade4accfdf8f5be3207588 | /test/ext/mypy/plugin_files/mixin_one.py | a471edf6c7e3997860faca348f7f8e35d214b860 | [
"MIT"
] | permissive | sqlalchemy/sqlalchemy | 9d949c67c9b5396b1f33e7ff0f3230c81babf5be | b382bff6e3464f039db0fd1f2ce1b79038675e48 | refs/heads/main | 2023-08-31T17:40:59.565421 | 2023-08-30T15:01:41 | 2023-08-30T15:01:41 | 159,271,175 | 8,083 | 1,489 | MIT | 2023-09-12T18:53:55 | 2018-11-27T03:35:03 | Python | UTF-8 | Python | false | false | 883 | py | from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import registry
# Two mapping styles are exercised below: the imperative registry
# decorator and a declarative base class.
reg: registry = registry()
# TODO: also reg.as_declarative_base()
Base = declarative_base()
class HasUpdatedAt:
    """Plain (unmapped) mixin supplying an ``updated_at`` column."""

    updated_at = Column(Integer)
# Mapped imperatively through the registry decorator; inherits the mixin
# column.
@reg.mapped
class Foo(HasUpdatedAt):
    __tablename__ = "foo"
    id: int = Column(Integer(), primary_key=True)
    name: str = Column(String)
# Mapped declaratively via the Base class; also inherits the mixin column.
class Bar(HasUpdatedAt, Base):
    __tablename__ = "bar"
    id = Column(Integer(), primary_key=True)
    num = Column(Integer)
# Expression-level access to the mixin column works on both mapped classes.
Foo.updated_at.in_([1, 2, 3])
Bar.updated_at.in_([1, 2, 3])
# Constructor access to the mixin column works on mapped classes only.
f1 = Foo(name="name", updated_at=5)
b1 = Bar(num=5, updated_at=6)
# test that we detected this as an unmapped mixin
# EXPECTED_MYPY: Unexpected keyword argument "updated_at" for "HasUpdatedAt"
HasUpdatedAt(updated_at=5)
| [
"mike_mp@zzzcomputing.com"
] | mike_mp@zzzcomputing.com |
0c0d07d4410786cfc16ba6af62bf0486ccfe2623 | 730f89724aca038c15191f01d48e995cb94648bc | /tasks/migrations/0029_partialtaskpay.py | c02b5fb006dacc19326560a8b0d32bd266982cf2 | [] | no_license | Happyandhappy/django_email | 14bc3f63376f2568754292708ec8ca7f2e2cf195 | ea858c9fac79112542551b7ba6e899e348f24de3 | refs/heads/master | 2020-03-22T14:22:08.431334 | 2018-07-21T13:41:23 | 2018-07-21T13:41:23 | 140,174,033 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the ``PartialTaskPay`` model: a dated partial payment tied to a Task."""

    dependencies = [
        ('tasks', '0028_auto_20150223_2049'),
    ]

    operations = [
        migrations.CreateModel(
            name='PartialTaskPay',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('price', models.DecimalField(verbose_name='Price', max_digits=8, decimal_places=2)),
                ('task', models.ForeignKey(to='tasks.Task')),
            ],
            options={
                'ordering': ('created_at',),
                'verbose_name': 'task partial pay',
                'verbose_name_plural': 'task partial pays',
            },
            bases=(models.Model,),
        ),
    ]
| [
"greyfrapp@gmail.com"
] | greyfrapp@gmail.com |
6045341af03e3670ab8772fa437eb69b3f8c2e05 | aad455a944d7feb4be56419b18f9080b53d693de | /3rdparty/wordextract.py | 0b32a1c832a3dd882f6d0c72c07e38021db5ede0 | [] | no_license | brock7/scripts | 8dec0469bc791576b4ccd00bff049e98dd4db344 | 356cac32de62e00241d423a754f8cead268971cd | refs/heads/master | 2020-06-01T07:34:34.568699 | 2018-08-31T23:10:38 | 2018-08-31T23:10:38 | 26,199,799 | 16 | 8 | null | null | null | null | UTF-8 | Python | false | false | 1,160 | py | #!/usr/bin/python
#Word Extractor from a site.
import sys, urllib2, re, sets
#Min length of word
MIN_LENGTH = 3
#Max length of word
MAX_LENGTH = 10
def StripTags(text):
    """Strip every well-formed ``<...>`` tag from *text*.

    A lone ``<`` with no following ``>`` is left untouched, matching the
    original behaviour.
    """
    while True:
        lt = text.find("<")
        if lt < 0:
            return text
        gt = text[lt:].find(">")
        if gt < 0:
            return text
        # Drop the tag and rescan from the start.
        text = text[:lt] + text[lt + gt + 1:]
if len(sys.argv) != 3:
print "\nUsage: ./wordextract.py <site> <file to save words>"
print "Ex: ./wordextract.py http://www.test.com wordlist.txt\n"
sys.exit(1)
site = sys.argv[1]
if site[:7] != "http://":
site = "http://"+site
print "\n[+] Retrieving Source:",site
source = StripTags(urllib2.urlopen(site).read())
words = re.findall("\w+",source)
words = list(sets.Set(words))
l = len(words)
print "[+] Found:",l,"words"
print "[+] Trimming words to length"
for word in words:
if not MIN_LENGTH <= len(word) <= MAX_LENGTH:
words.remove(word)
print "\n[+] Removed:",l-len(words),"words"
print "[+] Writing:",len(words),"words to",sys.argv[2]
file = open(sys.argv[2],"a")
for word in words:
file.writelines(word+"\n")
file.close()
print "\n[-] Complete\n" | [
"you@example.com"
] | you@example.com |
e7d016cdd0000ba7f19a90e22ad37b7456237f4e | 2dd560dc468af0af4ca44cb4cd37a0b807357063 | /Leetcode/479. Largest Palindrome Product/solution2.py | 162641f2ea1d3ac3b545653e2ce54b6591d2196e | [
"MIT"
] | permissive | hi0t/Outtalent | 460fe4a73788437ba6ce9ef1501291035c8ff1e8 | 8a10b23335d8e9f080e5c39715b38bcc2916ff00 | refs/heads/master | 2023-02-26T21:16:56.741589 | 2021-02-05T13:36:50 | 2021-02-05T13:36:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | class Solution:
def largestPalindrome(self, n: int) -> int:
if n == 1: return 9
if n == 2: return 987
for a in range(2, 9 * 10 ** (n - 1)):
hi = (10 ** n) - a
lo = int(str(hi)[::-1])
if a ** 2 - 4 * lo < 0: continue
if (a ** 2 - 4 * lo) ** .5 == int((a ** 2 - 4 * lo) ** .5):
return (lo + 10 ** n * (10 ** n - a)) % 1337
| [
"info@crazysquirrel.ru"
] | info@crazysquirrel.ru |
bf77c313214f47a0c51a6baa9a655483873e78d4 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/signal/extra/conscript.py | 1256a745cd8fbbfe988b7a8497e313ad8805ceda | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 876 | py | try:
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Enable local backups?\")")
element = driver.find_element_by_android_uiautomator("new UiSelector().resourceId(\"org.thoughtcrime.securesms:id/confirmation_check\")")
element.click()
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Enable backups\")")
element.click()
except NoSuchElementException:
time.sleep(0.1)
try:
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Delete backups?\")")
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Delete backups\")")
element.click()
except NoSuchElementException:
time.sleep(0.1)
try:
element = driver.find_element_by_android_uiautomator("new UiSelector().text(\"Cancel\")")
element.click()
time.sleep(1)
except NoSuchElementException:
time.sleep(0.1)
| [
"prefest2018@gmail.com"
] | prefest2018@gmail.com |
e9a8f1ba0feef0e1ee57af8e73ceae70281e6f82 | aaf4a46f6bde17bfcbb8334f83d4e5972c1ed9cc | /ZeeAnalyzer/python/zeeSkimVetoId2016.py | 2740ff6b35238e99d2d95e6f22c7aab001789b51 | [] | no_license | taroni/usercode | 72811057e54691edfd3eee1f5cd2eab163ff97c6 | 94888ed661f1c31f0fb2c8593add5efd8ecaafa4 | refs/heads/master | 2021-06-06T20:52:41.776442 | 2019-07-09T13:31:39 | 2019-07-09T13:31:39 | 11,950,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,495 | py | import FWCore.ParameterSet.Config as cms
# run on MIONAOD
RUN_ON_MINIAOD = True
print "ZEE SKIM. RUN ON MINIAOD = ",RUN_ON_MINIAOD
# cuts
ELECTRON_CUT=("pt > 10 && abs(eta)<2.5")
DIELECTRON_CUT=("mass > 70 && mass < 110 && daughter(0).pt>20 && daughter(1).pt()>10")
# single lepton selectors
if RUN_ON_MINIAOD:
goodZeeElectrons = cms.EDFilter("PATElectronRefSelector",
src = cms.InputTag("slimmedElectrons"),
cut = cms.string(ELECTRON_CUT)
)
else:
goodZeeElectrons = cms.EDFilter("GsfElectronRefSelector",
src = cms.InputTag("gedGsfElectrons"),
cut = cms.string(ELECTRON_CUT)
)
# electron ID (sync with the AlCaReco: https://raw.githubusercontent.com/cms-sw/cmssw/CMSSW_7_5_X/Calibration/EcalAlCaRecoProducers/python/WZElectronSkims_cff.py)
###RHO is wrong, I should use the effective area correction for the PU https://twiki.cern.ch/twiki/bin/view/CMS/EgammaPFBasedIsolationRun2#Rho_effective_area_corrections, not the SC rh. Rho to use fixedGridRhoFastjetAll
identifiedElectrons = goodZeeElectrons.clone(cut = cms.string(goodZeeElectrons.cut.value() +
" \
&& ( (isEB && (gsfTrack.hitPattern().numberOfLostHits(\'MISSING_INNER_HITS\')<=2) \
&& ( (abs(superCluster().position().eta()) <=1.) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1703 * superCluster().position().rho())) <0.175 )) \
|| ( ( 1.< abs(superCluster().position().eta()) <=1.479) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1715 *superCluster().position().rho())) <0.173 )) \
&& (full5x5_sigmaIetaIeta<0.0115) \
&& ( - 0.228<deltaPhiSuperClusterTrackAtVtx< 0.228 ) \
&& ( -0.00749<deltaEtaSuperClusterTrackAtVtx<0.00749 ) \
&& (hadronicOverEm<0.346) ) \
|| (isEE \
&& (gsfTrack.hitPattern().numberOfLostHits(\'MISSING_INNER_HITS\')<=3) \
&& ((( 1.479< abs(superCluster().position().eta()) <=2.0) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1213 * superCluster().position().rho())) <0.159))) \
|| ( ( 2.0 < abs(superCluster().position().eta()) <=2.2) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1230 * superCluster().position().rho())) < 0.159 )) \
|| (( 2.2 < abs(superCluster().position().eta()) <=2.3) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1635 * superCluster().position().rho())) < 0.159 )) \
|| ( ( 2.3 < abs(superCluster().position().eta()) <=2.4) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.1937 * superCluster().position().rho())) < 0.159 )) \
|| ( (2.4 < abs(superCluster().position().eta()) <=2.5) \
&& ( (pfIsolationVariables().sumChargedHadronPt + max(0.0,pfIsolationVariables().sumNeutralHadronEt + pfIsolationVariables().sumPhotonEt - 0.2393 * superCluster().position().rho())) < 0.159 )) \
&& (full5x5_sigmaIetaIeta<0.037)\
&& ( -0.213<deltaPhiSuperClusterTrackAtVtx<0.213 ) \
&& ( -0.00895<deltaEtaSuperClusterTrackAtVtx<0.00895 )\
&& (hadronicOverEm<0.211) \
))"
)
)
# dilepton selectors
diZeeElectrons = cms.EDProducer("CandViewShallowCloneCombiner",
decay = cms.string("identifiedElectrons identifiedElectrons"),
checkCharge = cms.bool(False),
cut = cms.string(DIELECTRON_CUT)
)
# dilepton counters
diZeeElectronsFilter = cms.EDFilter("CandViewCountFilter",
src = cms.InputTag("diZeeElectrons"),
minNumber = cms.uint32(1)
)
#sequences
zdiElectronSequence = cms.Sequence( goodZeeElectrons * identifiedElectrons * diZeeElectrons * diZeeElectronsFilter )
| [
"Silvia.Taroni@cern.ch"
] | Silvia.Taroni@cern.ch |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.