blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ce554e2695eb9840c6d0399b1f782c9eb8d9d10e
|
d30cb6a597f6a5fad9a01da77594a225daf9a211
|
/Lesson 4 - File Handling/project/attempt_1/suffix.py
|
34ed3d75c984c7bdfaaf0517676f8b2ac263c7dd
|
[] |
no_license
|
jmwoloso/Python_2
|
290ef8b0c7db8347fa25cf39da26f39e218d9c68
|
06c45545ed064d0e9c4fd15cc81cf454cb079c9d
|
refs/heads/master
| 2020-04-24T02:18:34.058148
| 2015-08-02T21:02:02
| 2015-08-02T21:02:02
| 37,082,608
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,197
|
py
|
#!/usr/bin/python3
# A Program Used for Assigning Suffixes
# suffix.py
#
# Created by: Jason Wolosonovich
# 02-24-2015
#
# Lesson 4 - Project 1, Attempt 1
"""
Contains a dict that houses the extensions of many common
file types.
"""
#global file_suffix_dict
# Menu-choice number -> file extension. Choice 27 deliberately maps to an
# empty string (no suffix). Keys run 1..27 in order, so the mapping can be
# generated from the ordered tuple of extensions.
file_suffix_dict = dict(
    enumerate(
        (
            ".txt", ".doc", ".docx", ".png", ".jpeg", ".py", ".pyc",
            ".rtf", ".log", ".csv", ".dat", ".ppt", ".tar", ".tar.gz",
            ".mpg", ".mpeg", ".mp4", ".wmv", ".svg", ".xls", ".xlsx",
            ".accdb", ".db", ".bat", ".sql", ".tar.bz2", "",
        ),
        start=1,
    )
)
|
[
"jmwoloso@asu.edu"
] |
jmwoloso@asu.edu
|
15ffd68d61b4a460ef95ddadae10b0d714791ef3
|
bc233c24523f05708dd1e091dca817f9095e6bb5
|
/bitmovin_api_sdk/models/dolby_digital_plus_downmixing_preferred_mode.py
|
dbe4b9349d8eabb5c12189d531b9a85ed63ac8e2
|
[
"MIT"
] |
permissive
|
bitmovin/bitmovin-api-sdk-python
|
e3d6cf8eb8bdad62cb83ec77c0fc4950b06b9cdd
|
b0860c0b1be7747cf22ad060985504da625255eb
|
refs/heads/main
| 2023-09-01T15:41:03.628720
| 2023-08-30T10:52:13
| 2023-08-30T10:52:13
| 175,209,828
| 13
| 14
|
MIT
| 2021-04-29T12:30:31
| 2019-03-12T12:47:18
|
Python
|
UTF-8
|
Python
| false
| false
| 268
|
py
|
# coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
class DolbyDigitalPlusDownmixingPreferredMode(Enum):
    """Preferred downmixing mode for Dolby Digital Plus encoding.

    Member values mirror the Bitmovin API's string constants exactly;
    presumably LO_RO/LT_RT are the Dolby left-only/right-only vs.
    left-total/right-total stereo downmix variants — confirm against the
    Bitmovin API docs before relying on semantics.
    """
    LO_RO = "LO_RO"
    LT_RT = "LT_RT"
    PRO_LOGIC_II = "PRO_LOGIC_II"
|
[
"openapi@bitmovin.com"
] |
openapi@bitmovin.com
|
db4947dd7f21941b4aac995c4fe2285f661d7466
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startQiskit_noisy1448.py
|
ae49b0570c58881f82ac7b3f628b829ccd29533b
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,294
|
py
|
# qubit number=5
# total number=51
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
    """Construct the phase oracle Zf for boolean function ``f`` on ``n`` qubits.

    For every n-bit string on which ``f`` returns "1": X gates conjugate the
    qubits whose bit is "0", a multi-controlled phase of pi (i.e. a
    multi-controlled Z) marks the state, and the X gates are undone.
    """
    qreg = QuantumRegister(n, "ofc")
    circ = QuantumCircuit(qreg, name="Zf")
    for index in range(2 ** n):
        bits = np.binary_repr(index, n)
        if f(bits) != "1":
            continue
        # Qubits to conjugate with X so this basis state becomes |1...1>.
        flipped = [qreg[pos] for pos in range(n) if bits[pos] == "0"]
        for qubit in flipped:
            circ.x(qubit)
        if n >= 2:
            # U1(pi) controlled on all other qubits == multi-controlled Z.
            circ.mcu1(pi, qreg[1:], qreg[0])
        for qubit in flipped:
            circ.x(qubit)
    return circ
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the benchmark circuit: Hadamard preparation, Grover-style
    repetitions of the oracle Zf, a fixed tail of extra gates, then a full
    measurement of all qubits.

    NOTE(review): the ``# number=k`` tags appear to be auto-generated edit
    ids from the benchmark generator — do not renumber or reorder by hand;
    the gate sequence is order-sensitive.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.h(input_qubit[0]) # number=3
    prog.h(input_qubit[1]) # number=4
    prog.h(input_qubit[1]) # number=26
    prog.cz(input_qubit[4],input_qubit[1]) # number=27
    prog.h(input_qubit[1]) # number=28
    prog.h(input_qubit[2]) # number=5
    prog.h(input_qubit[3]) # number=6
    prog.h(input_qubit[4]) # number=21
    prog.h(input_qubit[1]) # number=34
    prog.cz(input_qubit[4],input_qubit[1]) # number=35
    prog.z(input_qubit[4]) # number=46
    prog.rx(0.8011061266653969,input_qubit[2]) # number=37
    prog.h(input_qubit[1]) # number=36
    Zf = build_oracle(n, f)
    # Grover-like iteration count: floor(sqrt(2^n) * pi / 4).
    repeat = floor(sqrt(2 ** n) * pi / 4)
    for i in range(repeat):
        prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
    prog.h(input_qubit[0]) # number=1
    prog.h(input_qubit[1]) # number=2
    prog.h(input_qubit[2]) # number=7
    prog.h(input_qubit[3]) # number=8
    prog.h(input_qubit[0]) # number=48
    prog.cz(input_qubit[1],input_qubit[0]) # number=49
    prog.h(input_qubit[0]) # number=50
    prog.x(input_qubit[0]) # number=39
    prog.cx(input_qubit[1],input_qubit[0]) # number=40
    prog.cx(input_qubit[0],input_qubit[1]) # number=42
    prog.x(input_qubit[1]) # number=43
    prog.cx(input_qubit[0],input_qubit[1]) # number=44
    prog.x(input_qubit[2]) # number=11
    prog.y(input_qubit[1]) # number=45
    prog.x(input_qubit[3]) # number=12
    prog.h(input_qubit[2]) # number=41
    if n>=2:
        prog.mcu1(pi,input_qubit[1:],input_qubit[0])
    prog.cx(input_qubit[1],input_qubit[0]) # number=22
    prog.x(input_qubit[4]) # number=47
    prog.x(input_qubit[0]) # number=23
    prog.cx(input_qubit[1],input_qubit[0]) # number=24
    prog.cx(input_qubit[0],input_qubit[1]) # number=30
    prog.x(input_qubit[1]) # number=31
    prog.cx(input_qubit[0],input_qubit[1]) # number=32
    prog.x(input_qubit[2]) # number=15
    prog.h(input_qubit[4]) # number=29
    prog.x(input_qubit[3]) # number=16
    prog.h(input_qubit[0]) # number=17
    prog.h(input_qubit[1]) # number=18
    prog.h(input_qubit[2]) # number=19
    prog.h(input_qubit[3]) # number=20
    # circuit end
    # Read every qubit into the matching classical bit.
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])
    return prog
if __name__ == '__main__':
    # f(rep) == "1" only for the all-zeros 5-bit string.
    key = "00000"
    f = lambda rep: str(int(rep == key))
    prog = make_circuit(5,f)
    # Noisy simulation on a mocked 5-qubit backend.
    backend = FakeVigo()
    sample_shot =7924
    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)
    # Dump measurement counts, transpiled depth and the circuit drawing.
    writefile = open("../data/startQiskit_noisy1448.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
2646f88f0590dd62b8ba725e67e06c4e9c20406e
|
b0fce7b572c78ee67ea0e2bd27e2837fffe66891
|
/setup.py
|
87b23bf3cfa596e1163e4cf81c7292a8ba217f97
|
[
"MIT"
] |
permissive
|
ZizhouJia/pyson
|
300bd4c68cec3c0a42c5f3135e0447149ca86ebe
|
ba80336e6ec43456c0d1bf3e71109609b9489181
|
refs/heads/master
| 2020-08-15T07:55:24.704936
| 2019-11-29T08:46:35
| 2019-11-29T08:46:35
| 215,304,822
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 725
|
py
|
import setuptools
setuptools.setup(
    name="pypyson",  # distribution name as published; the package dir itself is 'pyson'
    version='0.01',
    description="A JSON like more powerful object notation for python",
    license="MIT License",
    author="ZizhouJia",
    author_email="jiazizhou@126.com",
    url="http://github.com/ZizhouJia/pyson",
    packages=setuptools.find_packages(),
    # Runtime dependency: ANTLR4 runtime, presumably used by a generated parser.
    install_requires=["antlr4-python3-runtime"],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Operating System :: OS Independent'
    ],
    # Ship the default checker scheme data file alongside the install.
    data_files=[('pyson/init', ['pyson/init/checker_scheme.pyson'])],
    python_requires='>=3.6'
)
|
[
"jiazizhou@126.com"
] |
jiazizhou@126.com
|
1d831cb9cfb9b9f7db96f0499fe3f0d02ab6c4ee
|
6302d46032f704aa2c8bb6e2810c19e3bb90c1c4
|
/server/netflix_backend/movies_api/migrations/0002_auto_20210219_1954.py
|
7ccbe630791c36378645ba0357d4a4f295324d1c
|
[] |
no_license
|
raghavendra-musubi/netflix-django-rest-react-redux
|
304d28f68e13e9962f31593441ae1b7b36743952
|
fe78061ccc1c27ff78697cb5f21d92a313b8a7c0
|
refs/heads/main
| 2023-03-09T21:32:30.409919
| 2021-02-24T19:03:32
| 2021-02-24T19:03:32
| 340,214,274
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 535
|
py
|
# Generated by Django 3.1.6 on 2021-02-19 19:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: redeclare Movie.category_id with the fixed genre choices."""
    dependencies = [
        ('movies_api', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='movie',
            name='category_id',
            # Choices map small ints to genre labels; stored values are the ints.
            field=models.PositiveSmallIntegerField(choices=[(1, 'Adventure'), (2, 'Action'), (3, 'Thriller'), (4, 'Horror'), (5, 'Comedy'), (6, 'Musical'), (7, 'Romance'), (8, 'Drama'), (9, 'Fantasy')]),
        ),
    ]
|
[
"raghavendra@techis.io"
] |
raghavendra@techis.io
|
0907267e98b96a3bfb69062100eb901fb42b8d3d
|
f7463bd0ab18b41611d5ac725f65d3db3a3a7a1d
|
/Generation Python - A Beginner's Course/13_Functions/13.5(return_v2)/7.py
|
05f6fc472054d62e8f0ac4d289c449cf867dab43
|
[] |
no_license
|
Sergey-Laznenko/Stepik
|
f81c5aeead3fbd20628129d60ccce92b34724b97
|
5e1a1a76c3f6ed487cf8fc847913c890c8eac840
|
refs/heads/master
| 2022-12-28T19:01:48.670540
| 2020-10-18T15:23:58
| 2020-10-18T15:23:58
| 279,022,462
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 561
|
py
|
def is_palindrome(a):
    """Return True when the string form of ``a`` reads the same reversed."""
    text = str(a)
    return text == text[::-1]
def is_prime(b):
    """Return True if integer ``b`` is prime, False otherwise.

    BUG FIXES vs. the original:
    - 2 was reported as non-prime (the even-number shortcut fired before a
      check for 2 itself).
    - negative inputs crashed: ``b ** (1 / 2)`` yields a complex number for
      b < 0 and ``round`` then raises TypeError. Anything below 2 is simply
      not prime.
    Trial division only needs odd candidates up to floor(sqrt(b)).
    """
    if b < 2:
        return False
    if b == 2:
        return True
    if b % 2 == 0:
        return False
    for i in range(3, int(b ** 0.5) + 1, 2):
        if b % i == 0:
            return False
    return True
def is_even(c):
    """Return True when ``c`` is an even integer."""
    return c % 2 == 0
# Input format: "<a>:<b>:<c>". a is tested as a palindrome (string-wise),
# b for primality, c for evenness; print 'True' iff all three results agree.
pws = input().split(':')
a = pws[0]
# BUG FIX: b and c arrived as strings, but is_prime/is_even apply ``% 2``
# to them, which raises TypeError for str operands — convert to int first.
b = int(pws[1])
c = int(pws[2])
if is_palindrome(a) == is_prime(b) == is_even(c):
    print('True')
else:
    print('False')
|
[
"Laznenko.Sergey@gmail.com"
] |
Laznenko.Sergey@gmail.com
|
70cb3d09402bd71b84303f0fe648479b8846a4b2
|
e93d1931789c99922a6b5ff3cf7e3bfe1c8bce3d
|
/blog/urls.py
|
feda7193e8da66168c2c798b1763fd13b33d3f73
|
[] |
no_license
|
nhutphong/djangoblog
|
2653fcc34285788e7b34048acc7a078c88536c5c
|
e4bf2a0d43727c248b2a2006910a68063f99f186
|
refs/heads/master
| 2023-03-16T10:59:51.700275
| 2022-10-18T03:40:39
| 2022-10-18T03:40:39
| 237,549,725
| 1
| 0
| null | 2022-03-12T01:04:49
| 2020-02-01T02:23:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,089
|
py
|
from django.urls import path
from django.contrib.auth.decorators import login_required
from .views import (
ArticleListView,
ArticleCreateView,
ArticleDetailView,
ArticleUpdateView,
ArticleDeleteView,
PaginationListView,
SearchResultsView,
)
from . import views_filter
app_name = 'articles'  # URL namespace: reverse routes as 'articles:<name>'
urlpatterns = [
    #/blog/
    # Function-based demo/filter views.
    path('demo/', views_filter.demo, name='demo-list'),
    path('filter/', views_filter.filter_test, name='filter-list'),
    # List-style class-based views.
    path('pagination/', PaginationListView.as_view(), name='pagination-list'),
    path('timkiem/', SearchResultsView.as_view(), name='search-results'),
    path('', ArticleListView.as_view(), name='article-list'),
    # 'create/' must stay above '<slug:slug>/' so it is not captured as a slug.
    path('create/', ArticleCreateView.as_view(), name='article-create'),
    path('<slug:slug>/', ArticleDetailView.as_view(), name='article-detail'),
    path(
        '<slug:slug>/update/',
        ArticleUpdateView.as_view(),
        name='article-update'
    ),
    path(
        '<slug:slug>/delete/',
        ArticleDeleteView.as_view(),
        name='article-delete'
    )
]
|
[
"nhutphong@outlook.com"
] |
nhutphong@outlook.com
|
71574601ac2b63d3341288b90ea931c5e3941b71
|
13a32b92b1ba8ffb07e810dcc8ccdf1b8b1671ab
|
/home--tommy--mypy/mypy/lib/python2.7/site-packages/scikits/statsmodels/tools/decorators.py
|
b67ab7f9182886af449a828b1f8d2348ab11ea16
|
[
"Unlicense"
] |
permissive
|
tommybutler/mlearnpy2
|
8ec52bcd03208c9771d8d02ede8eaa91a95bda30
|
9e5d377d0242ac5eb1e82a357e6701095a8ca1ff
|
refs/heads/master
| 2022-10-24T23:30:18.705329
| 2022-10-17T15:41:37
| 2022-10-17T15:41:37
| 118,529,175
| 0
| 2
|
Unlicense
| 2022-10-15T23:32:18
| 2018-01-22T23:27:10
|
Python
|
UTF-8
|
Python
| false
| false
| 7,973
|
py
|
from numpy.testing import *
import warnings
__all__ = ['resettable_cache','cache_readonly', 'cache_writable']
class CacheWriteWarning(UserWarning):
    """Issued when code attempts to overwrite a read-only cached attribute."""
class ResettableCache(dict):
    """
    A dict whose entries may depend on one another.

    ``reset`` maps a key to the keys that depend on it: assigning to key
    ``A`` sets every key in ``reset[A]`` to None (recursively, since the
    reset goes through ``__setitem__``), and deleting ``A`` deletes its
    dependents as well. The dependency map is kept in the private
    ``_resetdict`` attribute.

    Parameters
    ----------
    reset : dict, optional
        Maps each key to a sequence of dependent keys.
    **items
        Initial contents of the cache (inserted without triggering resets).

    Examples
    --------
    >>> cache = ResettableCache(reset=dict(a=('b',), b=('c',)), a=0, b=1, c=2)
    >>> cache['a'] = 1          # invalidates 'b', which invalidates 'c'
    >>> cache == dict(a=1, b=None, c=None)
    True
    >>> del cache['a']          # removes 'b' and 'c' too
    >>> cache == {}
    True
    """

    def __init__(self, reset=None, **items):
        # Any falsy `reset` (None, {}) becomes a fresh empty dependency map.
        self._resetdict = reset or {}
        dict.__init__(self, **items)

    def __setitem__(self, key, value):
        dict.__setitem__(self, key, value)
        # Invalidate dependents; assigning None recurses through __setitem__
        # so transitive dependents are reset as well.
        for dependent in self._resetdict.get(key, []):
            self[dependent] = None

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Deleting a key removes its dependents recursively.
        for dependent in self._resetdict.get(key, []):
            del self[dependent]


resettable_cache = ResettableCache
class CachedAttribute(object):
    """
    Descriptor that computes an attribute once per instance and stores the
    result in a resettable cache held on the instance.

    Parameters
    ----------
    func : callable
        Getter invoked (once per instance) to produce the value.
    cachename : str, optional
        Name of the cache attribute on the owning instance ('_cache' default).
    resetlist : sequence, optional
        Keys to invalidate in the cache whenever this attribute is recomputed.
    """
    def __init__(self, func, cachename=None, resetlist=None):
        self.fget = func
        self.name = func.__name__
        self.cachename = cachename or '_cache'
        self.resetlist = resetlist or ()

    def __get__(self, obj, type=None):
        # Class-level access returns the raw getter for introspection.
        if obj is None:
            return self.fget
        # Get the cache, creating a resettable_cache on first access.
        _cachename = self.cachename
        _cache = getattr(obj, _cachename, None)
        if _cache is None:
            setattr(obj, _cachename, resettable_cache())
            _cache = getattr(obj, _cachename)
        # None doubles as the "not computed yet" marker.
        name = self.name
        _cachedval = _cache.get(name, None)
        if _cachedval is None:
            # Call the "fget" function and store the result.
            _cachedval = self.fget(obj)
            try:
                _cache[name] = _cachedval
            except KeyError:
                # Cache without item assignment: fall back to attributes.
                setattr(_cache, name, _cachedval)
            # Register this attribute's dependents when the cache supports it.
            # BUG FIX: the original guard was ``if resetlist is not ():`` — an
            # identity comparison against a tuple literal, which relies on
            # CPython interning the empty tuple and raises a SyntaxWarning on
            # Python >= 3.8. A plain truthiness test is the intended,
            # equivalent check.
            if self.resetlist:
                try:
                    _cache._resetdict[name] = self.resetlist
                except AttributeError:
                    pass
        return _cachedval

    def __set__(self, obj, value):
        # Read-only semantics: warn instead of overwriting the cached value.
        errmsg = "The attribute '%s' cannot be overwritten" % self.name
        warnings.warn(errmsg, CacheWriteWarning)
class CachedWritableAttribute(CachedAttribute):
    """CachedAttribute variant whose cached value may be overwritten."""
    #
    def __set__(self, obj, value):
        # Write straight into the instance cache; fall back to attribute
        # assignment for cache objects that reject item assignment.
        cache = getattr(obj, self.cachename)
        key = self.name
        try:
            cache[key] = value
        except KeyError:
            setattr(cache, key, value)
class _cache_readonly(object):
"""
Decorator for CachedAttribute
"""
def __init__(self, cachename=None, resetlist=None):
self.func = None
self.cachename = cachename
self.resetlist = resetlist or None
def __call__(self, func):
return CachedAttribute(func,
cachename=self.cachename,
resetlist=self.resetlist)
cache_readonly = _cache_readonly()
class cache_writable(_cache_readonly):
    """
    Decorator factory producing writable cached-attribute descriptors.

    Identical configuration to _cache_readonly, but the decorated function
    is wrapped in a CachedWritableAttribute so its cached value may be
    overwritten by assignment.
    """
    def __call__(self, func):
        return CachedWritableAttribute(
            func, cachename=self.cachename, resetlist=self.resetlist)
#this has been copied from nitime a long time ago
#TODO: ceck whether class has change in nitime
class OneTimeProperty(object):
    """A descriptor to make special properties that become normal attributes.

    On first access through an instance, the wrapped function runs once and
    its result is stored on the instance under the same name. Because this
    is a non-data descriptor (no __set__), that instance attribute then
    shadows the descriptor for all later lookups.

    This is meant to be used mostly by the auto_attr decorator in this module.
    Author: Fernando Perez, copied from nitime
    """
    def __init__(self, func):
        """Create a OneTimeProperty instance.

        Parameters
        ----------
        func : method
            The method that will be called the first time to compute a value.
            Afterwards, the method's name will be a standard attribute holding
            the value of this computation.
        """
        self.getter = func
        # BUG FIX: the original read ``func.func_name``, which exists only on
        # Python 2; ``func.__name__`` is equivalent there and also works on
        # Python 3.
        self.name = func.__name__

    def __get__(self, obj, type=None):
        """This will be called on attribute access on the class or instance."""
        if obj is None:
            # Being called on the class: return the original function so
            # introspection works on the class.
            return self.getter
        val = self.getter(obj)
        # Cache the computed value as a plain instance attribute; it now
        # wins every subsequent lookup.
        setattr(obj, self.name, val)
        return val
if __name__ == "__main__":
### Tests resettable_cache ----------------------------------------------------
reset = dict(a=('b',), b=('c',))
cache = resettable_cache(a=0, b=1, c=2, reset=reset)
assert_equal(cache, dict(a=0, b=1, c=2))
#
print "Try resetting a"
cache['a'] = 1
assert_equal(cache, dict(a=1, b=None, c=None))
cache['c'] = 2
assert_equal(cache, dict(a=1, b=None, c=2))
cache['b'] = 0
assert_equal(cache, dict(a=1, b=0, c=None))
#
print "Try deleting b"
del(cache['a'])
assert_equal(cache, {})
### ---------------------------------------------------------------------------
class Example(object):
#
def __init__(self):
self._cache = resettable_cache()
self.a = 0
#
@cache_readonly
def b(self):
return 1
@cache_writable(resetlist='d')
def c(self):
return 2
@cache_writable(resetlist=('e', 'f'))
def d(self):
return self.c + 1
#
@cache_readonly
def e(self):
return 4
@cache_readonly
def f(self):
return self.e + 1
ex = Example()
print "(attrs : %s)" % str(ex.__dict__)
print "(cached : %s)" % str(ex._cache)
print "Try a :", ex.a
print "Try accessing/setting a readonly attribute"
assert_equal(ex.__dict__, dict(a=0, _cache={}))
print "Try b #1:", ex.b
b = ex.b
assert_equal(b, 1)
assert_equal(ex.__dict__, dict(a=0, _cache=dict(b=1,)))
# assert_equal(ex.__dict__, dict(a=0, b=1, _cache=dict(b=1)))
ex.b = -1
print "Try dict", ex.__dict__
assert_equal(ex._cache, dict(b=1,))
#
print "Try accessing/resetting a cachewritable attribute"
c = ex.c
assert_equal(c, 2)
assert_equal(ex._cache, dict(b=1, c=2))
d = ex.d
assert_equal(d, 3)
assert_equal(ex._cache, dict(b=1, c=2, d=3))
ex.c = 0
assert_equal(ex._cache, dict(b=1, c=0, d=None, e=None, f=None))
d = ex.d
assert_equal(ex._cache, dict(b=1, c=0, d=1, e=None, f=None))
ex.d = 5
assert_equal(ex._cache, dict(b=1, c=0, d=5, e=None, f=None))
|
[
"tbutler.github@internetalias.net"
] |
tbutler.github@internetalias.net
|
9da746164e40ff74bb887fd59775557656eb228e
|
21e87dc5abaf8c8dfe7adfb72c38648f415d038c
|
/16_developer_tools/11_compileall/example/subfolder2/c.py
|
4713d0f8c91464a958dcfae43283a515af70bba3
|
[] |
no_license
|
ariesduanmu/python3_standard_library
|
f2badbb6047b6003ddeccb77ba2892074510f0ff
|
905ae53d0970be442bcf3d2a9dc3eadbc58367e5
|
refs/heads/master
| 2022-04-23T21:05:52.862076
| 2020-04-23T16:44:14
| 2020-04-23T16:44:14
| 241,277,069
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 177
|
py
|
# -*- coding: utf-8 -*-
# @Author: Li Qin
# @Date: 2020-02-24 09:30:22
# @Last Modified by: Li Qin
# @Last Modified time: 2020-02-24 09:30:56
def minus(a, b):
    """Return the difference ``a - b``."""
    difference = a - b
    return difference
|
[
"aries.duanmu@gmail.com"
] |
aries.duanmu@gmail.com
|
d06b868fa88c5d499dd32895fd542a19fc18deb0
|
eed7b5aa4861086d34e539e7bbfeff4286506692
|
/src/Game/Effects/spend_power.py
|
2286d1f6e407736c8ea6bf6088203090a386bc5c
|
[] |
no_license
|
dfwarden/DeckBuilding
|
0be2ccb68fc9a69c8eaa1d8acedeaa7cebef1a31
|
0b5a7573a3cf33430fe61e4ff8a8a7a0ae20b258
|
refs/heads/master
| 2021-01-18T09:52:51.880892
| 2015-02-03T03:21:17
| 2015-02-03T03:21:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 337
|
py
|
class SpendPower:
    """ Game effect that spends a fixed amount of power this turn """

    def __init__(self, power):
        """ Remember how much power this effect spends """
        self.power = power

    def perform(self, context):
        """ Apply the effect: deduct the stored power from the context owner """
        context.owner.spendPower(self.power)
|
[
"cloew123@gmail.com"
] |
cloew123@gmail.com
|
5309fa1188f170b8efbe4b43b64fe524a1b8e1e9
|
db8ab70de135d8bddc2c6df865b98ed76c2b92ee
|
/model/toxic_comment_classifier.py
|
f0bf2a9509d6372736d134cd7b3551e2797e332d
|
[] |
no_license
|
boyuan12/ToxicBlockPlus
|
718af4970f27e9eba9c454268a75c53c007f7737
|
f90a46b9748a8d4dcdfc9e8c19279cc6aeed46c5
|
refs/heads/main
| 2023-02-26T21:20:56.878995
| 2021-02-09T01:15:34
| 2021-02-09T01:15:34
| 335,865,276
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,278
|
py
|
from typing import List
from bentoml import api, artifacts, env, BentoService
from bentoml.frameworks.keras import KerasModelArtifact
from bentoml.service.artifacts.common import PickleArtifact
from bentoml.adapters import DataframeInput, JsonOutput
from keras.preprocessing import text, sequence
import numpy as np
import pandas as pd
list_of_classes = ["toxic", "severe_toxic", "obscene", "threat", "insult", "identity_hate"]
max_text_length = 400
@env(pip_packages=['tensorflow==1.14.0', 'keras==2.3.1', 'pandas', 'numpy'])
@artifacts([PickleArtifact('x_tokenizer'), KerasModelArtifact('model')])
class ToxicCommentClassification(BentoService):
def tokenize_df(self, df):
comments = df['comment_text'].values
tokenized = self.artifacts.x_tokenizer.texts_to_sequences(comments)
input_data = sequence.pad_sequences(tokenized, maxlen=max_text_length)
return input_data
@api(input=DataframeInput(), output=JsonOutput(), batch=True)
def predict(self, df: pd.DataFrame) -> List[str]:
input_data = self.tokenize_df(df)
prediction = self.artifacts.model.predict(input_data)
result = []
for i in prediction:
result.append(list_of_classes[np.argmax(i)])
return result
|
[
"boyuanliu6@yahoo.com"
] |
boyuanliu6@yahoo.com
|
434bfb4f4cc27692073954c84c66e1218f428b56
|
af47e1dd1405ebd5267e7f8cf22f7b4429fcef00
|
/scattertext/termscoring/g2.py
|
7ba2d01a83690dfca816ad03c8a93d0365684bad
|
[
"MIT",
"CC-BY-NC-SA-4.0",
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
] |
permissive
|
JasonKessler/scattertext
|
72ce3b35d71af595f7797de845ba93b4bb0091b4
|
b41e3a875faf6dd886e49e524345202432db1b21
|
refs/heads/master
| 2023-05-11T06:42:51.108527
| 2023-05-06T19:23:59
| 2023-05-06T19:23:59
| 63,827,736
| 2,187
| 303
|
Apache-2.0
| 2023-05-06T19:24:00
| 2016-07-21T01:47:12
|
Python
|
UTF-8
|
Python
| false
| false
| 4,143
|
py
|
import numpy as np
import pandas as pd
from scipy.stats import chi2
from statsmodels.stats.multitest import fdrcorrection
from scattertext.termscoring.CorpusBasedTermScorer import CorpusBasedTermScorer
def g2_term(O, E):
    """Elementwise log-likelihood term O * (ln O - ln E), with 0 * ln 0 := 0.

    Parameters
    ----------
    O : np.array
        Observed counts.
    E : np.array
        Expected counts.

    Returns
    -------
    np.array of float64
    """
    # Suppress the divide-by-zero/invalid RuntimeWarnings that np.log(0)
    # would otherwise emit; entries with O == 0 are overwritten with 0 below,
    # so the -inf/nan intermediates never escape.
    with np.errstate(divide='ignore', invalid='ignore'):
        res = O.astype(np.float64) * (np.log(O) - np.log(E))
    res[O == 0] = 0
    return res
def sign(a: np.array) -> np.array:
    """Elementwise sign of ``a``: -1, 0 or 1, with 0 where a == 0.

    BUG FIX: the original called ``np.nan_to_num(x, 0)`` — but the second
    positional parameter of ``nan_to_num`` is ``copy``, not the fill value,
    so the 0 was silently interpreted as ``copy=False``. Pass ``nan=0``
    explicitly (the intended replacement for the 0/0 entries). The output
    is unchanged only because nan's default fill also happens to be 0.0.
    """
    # 0/0 produces nan (plus a warning we silence); nan_to_num maps it to 0.
    with np.errstate(divide='ignore', invalid='ignore'):
        return np.nan_to_num(a / np.abs(a), nan=0)
def qchisq(alpha: np.array, df: int) -> np.array:
    """Upper-tail chi-squared quantile, i.e. R's qchisq(alpha, df, lower.tail=FALSE)."""
    upper_tail = 1 - alpha
    return chi2.ppf(upper_tail, df=df)
class G2(CorpusBasedTermScorer):
    """
    G^2 (log likelihood ratio)s from (Rayson and Garside 2000)

    A direct translation of the R function from (Evert 2023)
    Stephanie Evert. 2023. Measuring Keyness. https://osf.io/x8z9n.

    G2.term <- function (O, E) {
      res <- O * log(O / E)
      res[O == 0] <- 0
      res
    }

    G2 <- function (f1, f2, N1, N2, alpha=NULL, correct=TRUE) {
      stopifnot(length(f1) == length(f2))
      ## observed and expected contingency tables
      N <- N1 + N2
      R1 <- f1 + f2
      O11 <- f1;      E11 <- R1 * N1 / N
      O12 <- f2;      E12 <- R1 * N2 / N
      O21 <- N1 - f1; E21 <- N1 - E11
      O22 <- N2 - f2; E22 <- N2 - E12
      ## log-likelihood statistic (simplest formula)
      G2 <- 2 * (G2.term(O11, E11) + G2.term(O12, E12) + G2.term(O21, E21) + G2.term(O22, E22))
      res <- sign(O11 - E11) * G2 # set sign to distinguish positive vs. negative keywords
      ## weed out non-significant items if alpha is specified
      if (!is.null(alpha)) {
        if (correct) alpha <- alpha / length(f1)
        theta <- qchisq(alpha, df=1, lower.tail=FALSE)
        res[G2 < theta] <- 0 # set to 0 if not significant at level alpha
      }
      res
    }
    """
    def _set_scorer_args(self, **kwargs):
        # alpha: optional significance cutoff; correct: Bonferroni-style
        # correction (alpha divided by the number of terms) when truthy.
        self.alpha_ = kwargs.get('alpha', None)
        self.correct_ = kwargs.get('correct', True)
    def get_score_df(self, label_append=''):
        # Per-term G2 statistic, signed score, raw p-value (chi2, df=1) and
        # a Benjamini-Hochberg corrected p-value.
        N1, N2, f1, f2 = self._get_ns_and_fs(())
        gsquare, res = self._get_g2_and_res(N1, N2, f1, f2)
        df = pd.DataFrame({
            'G2': gsquare,
            'Score': res,
            'P': chi2.sf(gsquare, df=1),
        })
        return df.assign(
            CorrectedP = lambda df: fdrcorrection(pvals=df.P.values, alpha=0.05, method='indep')[1]
        )
    def get_scores(self, *args) -> pd.Series:
        N1, N2, f1, f2 = self._get_ns_and_fs(args)
        gsquare, res = self._get_g2_and_res(N1, N2, f1, f2)
        ## weed out non-significant items if alpha is specified
        if self.alpha_ is not None:
            alpha = self.alpha_
            if self.correct_:
                # Divide alpha by the number of terms (multiple testing).
                alpha = alpha / len(f1)
            theta = qchisq(alpha, df=1)
            res[gsquare < theta] = 0  # set to 0 if not significant at level alpha
        return pd.Series(res, index=self._get_terms())
    def _get_g2_and_res(self, N1, N2, f1, f2):
        # Build the 2x2 observed/expected contingency table per term.
        N = N1 + N2
        R1 = f1 + f2
        E11, E12, E21, E22, O11, O12, O21, O22 = self.__get_contingency_table(N, N1, N2, R1, f1, f2)
        ## log-likelihood statistic (simplest formula)
        gsquare = 2 * (g2_term(O11, E11) + g2_term(O12, E12) + g2_term(O21, E21) + g2_term(O22, E22))
        res = sign(O11 - E11) * gsquare  # set sign to distinguish positive vs. negative keywords
        return gsquare, res
    def __get_contingency_table(self, N, N1, N2, R1, f1, f2):
        # Observed counts (O) and expected counts under independence (E),
        # mirroring the R reference implementation in the class docstring.
        O11 = f1
        E11 = R1 * N1 / N
        O12 = f2
        E12 = R1 * N2 / N
        O21 = N1 - f1
        E21 = N1 - E11
        O22 = N2 - f2
        E22 = N2 - E12
        return E11, E12, E21, E22, O11, O12, O21, O22
    def _get_ns_and_fs(self, args):
        # Category/non-category sizes and per-term frequency vectors.
        cat_X, ncat_X = self._get_cat_and_ncat(self._get_X())
        N1 = self._get_cat_size()
        N2 = self._get_ncat_size()
        if len(args) == 0:
            f1 = cat_X.sum(axis=0).A1
            f2 = ncat_X.sum(axis=0).A1
        else:
            # NOTE(review): ``__get_f1_f2_from_args`` is not defined in this
            # class; Python name-mangles it to _G2__get_f1_f2_from_args, so
            # this line will raise AttributeError even if a base class defines
            # a same-named private method — confirm against
            # CorpusBasedTermScorer.
            f1, f2 = self.__get_f1_f2_from_args(args)
        f1 = np.array(f1).astype(np.float64)
        f2 = np.array(f2).astype(np.float64)
        return N1, N2, f1, f2
    def get_name(self):
        return 'G2'
|
[
"JasonKessler@users.noreply.github.com"
] |
JasonKessler@users.noreply.github.com
|
2ef11f6cdbf8403c0d448a2d67022c40b83c6620
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-gsn-edf/gsn-edf_ut=3.5_rd=0.8_rw=0.06_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=60/sched.py
|
48c7bde40359bfb09d24223c9a5ccb1161b938c2
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 385
|
py
|
-X FMLP -Q 0 -L 3 104 400
-X FMLP -Q 0 -L 3 78 300
-X FMLP -Q 0 -L 3 69 300
-X FMLP -Q 0 -L 3 64 300
-X FMLP -Q 1 -L 2 62 250
-X FMLP -Q 1 -L 2 54 175
-X FMLP -Q 1 -L 2 49 300
-X FMLP -Q 2 -L 2 49 150
-X FMLP -Q 2 -L 2 43 150
-X FMLP -Q 2 -L 2 37 250
-X FMLP -Q 3 -L 1 35 250
-X FMLP -Q 3 -L 1 34 100
-X FMLP -Q 3 -L 1 30 150
22 100
21 200
8 175
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
0397c9f0d2e40acf497622b8b4cb2e5299202bba
|
471ea669e21abdb4e4915610b4b5eb43ea3cffe9
|
/剑指Offer/31.整数中1出现的次数.py
|
d663b71492aabfba5cd8ae82b899c772a9d0eb39
|
[] |
no_license
|
JiahuaLink/nowcoder-leetcode
|
26aed099e215cfc1d8e8afffc62fafa26b26b06f
|
0155fc33511cbe892f58550d561d3aa3efcd56b9
|
refs/heads/master
| 2023-07-09T03:05:31.227720
| 2021-08-03T06:50:36
| 2021-08-03T06:50:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 883
|
py
|
# 方法一:判断整数每个数字
class Solution:
    def NumberOf1Between1AndN_Solution(self, n):
        """Count occurrences of the digit 1 in all integers from 1 to n.

        BUG FIX: the original stripped digits with ``temp /= 10``, which is
        float (true) division on Python 3 — intermediate values like 1.3
        never equal 1 modulo 10 (so e.g. the tens digit of 13 was missed)
        and the loop spun through hundreds of float iterations before
        underflowing to 0. Floor division restores the Python 2 behavior.
        """
        count = 0
        for i in range(1, n+1):
            temp = i
            # Examine each decimal digit of i, least significant first.
            while temp:
                if temp % 10 == 1:
                    count += 1
                temp //= 10
        return count
# 方法二:将整数转为字符串逐位判断
class Solution:
    def NumberOf1Between1AndN_Solution(self, n):
        """Count the digit '1' across the decimal strings of 1..n inclusive."""
        return sum(str(value).count('1') for value in range(1, n + 1))
# 方法三:将整数转为字符串,组合含有‘1’的字符串,再统计‘1’的个数
def NumberOf1Between1AndN_Solution(self, n):
    """Count '1' digits in 0..n: keep the numbers containing a '1', join
    their string forms, and count the '1' characters in the result."""
    strings = map(str, range(n + 1))
    containing_one = [text for text in strings if '1' in text]
    return ''.join(containing_one).count('1')
|
[
"noreply@github.com"
] |
JiahuaLink.noreply@github.com
|
3220c6fedfbdef66c2d1bc9c9c4a39bc047ce8ae
|
40c2bce56832d97797c115f60d1e0459fd4ebf93
|
/Eclipse_Project_2/Section_1_5/database.py
|
3c56f2e590f4f519ae6e0c1a2f4d52010d0af71a
|
[] |
no_license
|
amanoj319319319/Eclipse_Python_LastSeleniumTest
|
0be2e7f615160248f329b4df0e9d109612b29560
|
4d0978e4c2dfe9c3a9d4b429f7ff6340278c0252
|
refs/heads/master
| 2023-04-27T09:14:38.726807
| 2021-05-19T08:18:40
| 2021-05-19T08:18:40
| 267,038,244
| 0
| 0
| null | 2021-05-19T08:17:45
| 2020-05-26T12:35:36
|
Python
|
UTF-8
|
Python
| false
| false
| 6,224
|
py
|
#connecting to the database using database credentials and finding version of the database
'''
import cx_Oracle
con=cx_Oracle.connect('system/Manoj319319319')
if con!=None:
print ("successfully connected")
print ("Version is:-",con.version)
else:
print ("connection failed")
'''
#creating a table name in the database
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj319319319')
query="create table employees(eno number,ename varchar2(10),esal number(10,2))"
cursor=con.cursor()
cursor.execute(query)
print ("Table created succesfully")
except Exception as e:
print (e)
finally:
if cursor:
cursor.close()
if con:
con.close()
'''
#deleted a particular table name in the database
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj319319319')
query="drop table employees"
cursor=con.cursor()
cursor.execute(query)
print ("Table dropped succesfully")
except Exception as e:
print (e)
finally:
if cursor:
cursor.close()
if con:
con.close()
'''
#creating a table in the database
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj319319319')
query="create table employees(eno number,ename varchar2(10),esal number(10,2))"
cursor=con.cursor()
cursor.execute(query)
print ("Table created succesfully")
except Exception as e:
print (e)
finally:
if cursor:
cursor.close()
if con:
con.close()
'''
#Inserting multiple values to the required paramters in the employees table
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
query = "insert into employees values(:eno,:ename,:esal)"
records=[(101,"manoj",10000),(102,"anki",20000),(103,"jyothi",30000)]
cursor.executemany(query,records)
con.commit()
print ("Record Inserted succesfully")
except Exception as e:
print (e)
finally:
if cursor:
cursor.close()
if con:
con.close()
'''
#Reading input from the console
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
while True:
eno=int(input("Enter employee number:-"))
ename =(input("Enter employee name:-"))
esal = float(input("Enter employee salary:-"))
query = "insert into employees values(%d,'%s',%f)"
cursor.execute(query %(eno,ename,esal))
con.commit()
print ("Records Inserted succesfully")
option=input("Do you want to insert one more record[yes/no]")
if option == "no":
break
except Exception as e:
print (e)
finally:
if cursor:
cursor.close()
if con:
con.close()
'''
#Updating records in the database using SQL query
#The employees whose salary was less than 5000,i i had to increment Rs 1000 to their existing salary
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
increment=float(input("Enter increment amount:-"))
salaryrange=float(input("Enter salary range:-"))
query="update employees set esal=esal+%f where esal<%f"
cursor.execute(query %(increment, salaryrange))
con.commit()
print ("Records are updated successfully")
except Exception as e:
print (e)
finally:
if cursor:
cursor.close()
if con:
con.close()
'''
#Deleting records from the employees table based on their salary ranges
#in the temployees table whose salary was greater than 5000 they were deleted from the table by me
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
cutoff=float(input("Enter cutoff amount:-"))
query="delete from employees where esal>%f"
cursor.execute(query %(cutoff))
con.commit()
print ("Records are deleted successfully")
except Exception as e:
print (e)
finally:
if cursor:#if cursor means if cursor is not equal to None
cursor.close()
if con:
con.close()
'''
'''
DDL coommands are ;;; table created , table dropped
DML Commnds are ;;;; insert operation , update operation , delete operation (for doing this ,
commit() method is must)
'''
#desc employees
#select * from employees;
#how to use fetchone() method to retrive data from the table
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
query="select * from employees"
cursor.execute(query)
row=cursor.fetchone()
while row is not None:
print(row)
row = cursor.fetchone()
except Exception as e:
if con:
con.rollback()
print ("There is a problem:-",e)
finally:
if cursor:#if cursor means if cursor is not equal to None
cursor.close()
if con:
con.close()
'''
#how to use fetchall() method to retrive data from the table
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
query="select * from employees"
cursor.execute(query)
rows=cursor.fetchall()
print (rows)
for row in rows:
print ("Employee number is:-",row[0])
print("Employee name is:-", row[1])
print("Employee salary is:-", row[2])
print ("***************")
except Exception as e:
if con:
con.rollback()
print ("There is a problem:-",e)
finally:
if cursor:#if cursor means if cursor is not equal to None
cursor.close()
if con:
con.close()
'''
#how to use fetchmany() method to retrive data from the table
'''
import cx_Oracle
try:
con=cx_Oracle.connect('system/Manoj320320320')
cursor=con.cursor()
query="select * from employees"
cursor.execute(query)
n=int(input("How many rows do you want:-"))
data = cursor.fetchmany(n)
for row in data:
print ("Employee number is:-",row[0])
print("Employee name is:-", row[1])
print("Employee salary is:-", row[2])
print ("***************")
except Exception as e:
if con:
con.rollback()
print ("There is a problem:-",e)
finally:
if cursor:#if cursor means if cursor is not equal to None
cursor.close()
if con:
con.close()
'''
|
[
"a.manoj16@gmail.com"
] |
a.manoj16@gmail.com
|
57c166495a5ba9c4d4d739bff152b1a67f6e3fea
|
5ceea4106e0df754ae581c1f5e2d16082d7b6386
|
/hackerRank/Algorithms/Implementation/bon-appetit.py
|
0da733b5b6475b0511073b0a9b33e4e31f2c3664
|
[] |
no_license
|
vikramlance/Python-Programming
|
b0d4bd70145bfaa7a66434656c5970fbc57e8bd3
|
4094961e3c613e33f2d8a6d30281c60ed09d8c80
|
refs/heads/master
| 2022-06-17T00:58:50.646615
| 2022-06-03T03:39:35
| 2022-06-03T03:39:35
| 53,989,511
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
py
|
'''
https://www.hackerrank.com/challenges/bon-appetit
'''
n,k=raw_input().split()
n=int(n)
k=int(k)
a=map(int, raw_input().split())
b=int(raw_input())
if (2*b== (sum(a) - a[k])):
print "Bon Appetit"
else:
print ( b - ((sum(a) - a[k])//2))
|
[
"noreply@github.com"
] |
vikramlance.noreply@github.com
|
76f8185eb90a42766f86ea066b38f022fd6156e5
|
131688c1006670be2bab5ce062521ce9b79b64af
|
/week2/design_hashset.py
|
ff2fc0f9e3e5b52cdb2f8f1875abad001dd4aa75
|
[
"MIT"
] |
permissive
|
ravichalla/wallbreaker
|
4e3dc98ff02fd8a7bace2466c071c65a37124426
|
0d587f12c60df5e4bca47f9183484a69d284d1f5
|
refs/heads/master
| 2020-06-08T05:44:35.510146
| 2020-01-29T02:25:19
| 2020-01-29T02:25:19
| 193,169,833
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 959
|
py
|
class MyHashSet:
def __init__(self):
self.capacity = 5000
self.arr = [None] * self.capacity
def add(self, key):
hash_val = hash(key) % self.capacity
if self.arr[hash_val] == None:
self.arr[hash_val] = [key]
else:
if key not in self.arr[hash_val]:
self.arr[hash_val].append(key)
def remove(self, key) -> None:
hash_val = hash(key) % self.capacity
if self.arr[hash_val] == None:
return
for ind in range(len(self.arr[hash_val])):
if self.arr[hash_val][ind] == key:
del self.arr[hash_val][ind]
return
def contains(self, key):
hash_val = hash(key) % self.capacity
if self.arr[hash_val] == None:
return False
else:
for h_key in self.arr[hash_val]:
if h_key == key:
return True
return False
|
[
"ravichalla95@gmail.com"
] |
ravichalla95@gmail.com
|
3c8d65c57a7bdbf95d8cdf533395ad17aa5f6a99
|
03e115c1937ec7bd1e249f82db0225828eaaa186
|
/2-GUI (tkinter)/3imagenes.py
|
5d2a2f4a214587d17e084764b7496fb9400deb31
|
[] |
no_license
|
mivargas/Master-python
|
236c04205637ddd44d1cc879de2b7c48418153f9
|
9d1c04a8d658aa0dd8620ed792fa2133adfa57e7
|
refs/heads/master
| 2023-03-06T13:35:58.177058
| 2021-02-16T00:06:00
| 2021-02-16T00:06:00
| 321,731,390
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 296
|
py
|
from tkinter import *
from PIL import Image, ImageTk
ventana = Tk()
ventana.geometry("700x500")
Label(ventana, text="HOLA SOY MIGUEL").pack(anchor=W)
imagen = Image.open("./imagenes/labsxd.png")
render = ImageTk.PhotoImage(imagen)
Label(ventana, image=render).pack(anchor=E)
ventana.mainloop()
|
[
"miguelvargas619@gmail.com"
] |
miguelvargas619@gmail.com
|
5c02e94311a37dbaf15d56d180884328cdaf081d
|
761a20a79420bc6da491c5a487f6cf218f598b66
|
/DemoTest/graphicsTkinter003.py
|
d8eb0e19c8eb84a4b963313d1955a8126b63903c
|
[] |
no_license
|
namexiaohuihui/linuxlogin
|
50b21e247d2e06c479907aa2f94f4b5979b4025d
|
ad8ffce5e87624f40f89eedc0229ba70cd66699b
|
refs/heads/master
| 2020-03-24T16:56:06.313441
| 2018-08-03T14:06:48
| 2018-08-03T14:06:48
| 142,843,487
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 541
|
py
|
# -*- coding: utf-8 -*-
"""
@__author__ :DingDong
@file: graphicsTkinter001.py
@time: 2018/5/17 21:37
@Entry Name:operating
"""
from tkinter import *
from tkinter.messagebox import showinfo
def reply(name):
showinfo(title='弹窗',message='你的名字: %s !' % name)
top = Tk()
top.title('账号登陆')
# top.iconbitma('1178420.gif')
Label(top,text='请输入你的名字 :').pack(side=TOP)
ent = Entry(top)
ent.pack(side=TOP)
btn = Button(top,text='登陆',command=lambda :reply(ent.get()))
btn.pack(side=TOP)
top.mainloop()
|
[
"704866169@qq.com"
] |
704866169@qq.com
|
aef37705f286e46903ffcd71491000c635addd56
|
2dc17d12ff6ea9794177c81aa4f385e4e09a4aa5
|
/archive/531LonelyPixelI.py
|
85d2ad88ff367da5b050933632ef6d2bb1308b12
|
[] |
no_license
|
doraemon1293/Leetcode
|
924b19f840085a80a9e8c0092d340b69aba7a764
|
48ba21799f63225c104f649c3871444a29ab978a
|
refs/heads/master
| 2022-10-01T16:20:07.588092
| 2022-09-08T02:44:56
| 2022-09-08T02:44:56
| 122,086,222
| 0
| 0
| null | null | null | null |
WINDOWS-1252
|
Python
| false
| false
| 913
|
py
|
# coding=utf-8
'''
Created on 2017�4�11�
@author: Administrator
'''
class Solution(object):
def findLonelyPixel(self, picture):
"""
:type picture: List[List[str]]
:rtype: int
"""
if picture:
rows = [0] * len(picture)
cols = [0] * len(picture[0])
for row in range(len(picture)):
for col in range(len(picture[row])):
if picture[row][col] == "B":
rows[row] += 1
cols[col] += 1
ans = 0
for row in range(len(picture)):
for col in range(len(picture[row])):
if rows[row] == 1 and cols[col] == 1 and picture[row][col] == "B":
ans += 1
return ans
else:
return 0
picture = ["BBB"]
print Solution().findLonelyPixel(picture)
|
[
"yanhuang1293@gmail.com"
] |
yanhuang1293@gmail.com
|
b09d2c84b36ef30c97f1bc81ce017ce57b4ec3d9
|
7f863ca7ed47981f69e42fc1add75ba4acad921a
|
/code-lab/DSA - Long Hike(Fractional Knapsack).py
|
f4d12e06067fcbb5aa404a68e6bcd836d7d613b3
|
[
"CC0-1.0"
] |
permissive
|
Nahid-Hassan/fullstack-software-development
|
e9f920be9a999c78f156e6102683b93a50c4e597
|
892ffb33e46795061ea63378279a6469de317b1a
|
refs/heads/main
| 2023-08-19T14:16:57.801056
| 2021-09-29T15:34:35
| 2021-09-29T15:34:35
| 376,595,866
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,049
|
py
|
weights = 1
weights_values = [(14, 2), (20, 4), (18, 3)]
def knapsack_slow(weights_values, weights):
values_by_weights = [(x / y, y) for x, y in weights_values]
values_by_weights.sort(reverse=True)
# print(values_by_weights)
print(values_by_weights)
bags = []
for i in range(len(values_by_weights)):
if sum(bags) == weights:
break
if values_by_weights[i][1] <= weights - sum(bags):
bags.append(values_by_weights[i][1])
# weights -= values_by_weights[i][1]
else:
# temp = values_by_weights[i][1]
bags.append(weights)
print(weights + '----------')
return bags
def knapsack_fast(weights_values, weights):
bags = []
volume = 0
temp_weights = weights
values_by_weights = [(x/y, y) for x, y in weights_values]
values_by_weights.sort()
for i in range(len(weights_values)):
if weights == 0:
return (bags, volume)
if values_by_weights[i][1]:
pass
|
[
"nahid.cseru@gmail.com"
] |
nahid.cseru@gmail.com
|
f1033a3a96ab29a179996c21324e2e9a90a9b91e
|
8ecd899a8558ad0a644ecefa28faf93e0710f6fb
|
/ABC163/ABC163_A.py
|
bb4bb6c963e85231718a3704fce4761be0b06a79
|
[] |
no_license
|
yut-inoue/AtCoder_ABC
|
b93885547049788d452e86b442a4a9f5ee191b0e
|
3d2c4b2b2f8871c75f86040ad07ccd7736ad3dbe
|
refs/heads/master
| 2021-07-03T09:09:20.478613
| 2021-02-21T13:20:31
| 2021-02-21T13:20:31
| 227,140,718
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 179
|
py
|
r=int(input())
#a,b=map(int,input().split())
#l=list(map(int,input().split()))
#l=[list(map(int,input().split())) for i in range(n)]
ans=2*r*3.14159
print('{:.5f}'.format(ans))
|
[
"yinoue.1996787@gmail.com"
] |
yinoue.1996787@gmail.com
|
abc19a89f586d28d24cd2468c387a49113282b1c
|
03520abb58a66aeed9a556d53e3a21006af02dde
|
/named_storms/migrations/0079_nsempsa_covered_data_snapshot.py
|
73c0e3338456b81ae6bf6fe88566507a8f794e9c
|
[] |
no_license
|
flackdl/cwwed
|
23ce1d1a5e48a57ee2cb3229860f1b97ccc81636
|
0a1454897d397cd5e1652643616abe883ccc853b
|
refs/heads/master
| 2023-07-21T20:05:35.093270
| 2023-06-29T15:29:26
| 2023-06-29T15:29:26
| 118,942,664
| 1
| 0
| null | 2023-09-12T21:56:59
| 2018-01-25T17:06:47
|
Python
|
UTF-8
|
Python
| false
| false
| 530
|
py
|
# Generated by Django 2.2.6 on 2019-10-17 17:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('named_storms', '0078_auto_20191017_1705'),
]
operations = [
migrations.AddField(
model_name='nsempsa',
name='covered_data_snapshot',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='named_storms.NamedStormCoveredDataSnapshot'),
),
]
|
[
"flackattack@gmail.com"
] |
flackattack@gmail.com
|
a5c3f6f93d1f5122a502c4bff3c84593cf568c96
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_201/2402.py
|
f7e5ebea73232aadb5a3d4e17099e09452301ea6
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,266
|
py
|
def increment_by_value(d, l, index, value):
if index in d:
d[index] += value
else:
d[index] = value
l.append(index)
l.sort()
def occupy(s):
num_stalls, num_people = [int(x) for x in s.split(" ")]
free, free_index = dict(), []
free[num_stalls] = 1
free_index.append(num_stalls)
count = 0
while 1:
#print("free ", free)
#print("free index ", free_index)
length = free_index[-1]
num_served = free[length]
free[length] = 0
free_index.remove(length)
#print("serving ", num_served, " people")
if length % 2 == 0:
increment_by_value(free, free_index, length // 2, num_served)
increment_by_value(free, free_index, length // 2 - 1, num_served)
max, min = length // 2, length // 2 - 1
else:
increment_by_value(free, free_index, length // 2, num_served * 2)
#free[length // 2] += 1
max, min = length // 2, length // 2
count += num_served
if count >= num_people:
return max, min
def main():
l = int(input())
for i in range(l):
max, min= occupy(input())
print("Case #{}: {} {}".format(i + 1, max, min))
main()
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
cfbf28112e456f0999b8c8dc64ea310f31fb5227
|
ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f
|
/Sourcem8/pirates/leveleditor/worldData/del_fuego_building_int_tattoo.py
|
615b94089f0e48bcfbf591d8fc665740418ee377
|
[] |
no_license
|
BrandonAlex/Pirates-Online-Retribution
|
7f881a64ec74e595aaf62e78a39375d2d51f4d2e
|
980b7448f798e255eecfb6bd2ebb67b299b27dd7
|
refs/heads/master
| 2020-04-02T14:22:28.626453
| 2018-10-24T15:33:17
| 2018-10-24T15:33:17
| 154,521,816
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,540
|
py
|
from pandac.PandaModules import Point3, VBase3, Vec4
objectStruct = {
'Objects': {
'1153434880.63dzlu0': {
'Type': 'Building Interior',
'Name': '',
'AdditionalData': [
'interior_spanish_store_tattoo'],
'Instanced': True,
'Objects': {
'1201136836.84dxschafe': {
'Type': 'Door Locator Node',
'Name': 'door_locator',
'Hpr': VBase3(-180.0, 0.0, 0.0),
'Pos': Point3(-7.141, -11.488, 0.0060000000000000001),
'Scale': VBase3(1.0, 1.0, 1.0) } },
'Visual': {
'Model': 'models/buildings/interior_spanish_npc' } } },
'Node Links': [],
'Layers': {
'Collisions': [
'1184008208.59kmuller',
'1184016064.62kmuller',
'1184013852.84kmuller',
'1185822696.06kmuller',
'1184006140.32kmuller',
'1184002350.98kmuller',
'1184007573.29kmuller',
'1184021176.59kmuller',
'1184005963.59kmuller',
'1188324241.31akelts',
'1184006537.34kmuller',
'1184006605.81kmuller',
'1187139568.33kmuller',
'1188324186.98akelts',
'1184006730.66kmuller',
'1184007538.51kmuller',
'1184006188.41kmuller',
'1184021084.27kmuller',
'1185824396.94kmuller',
'1185824250.16kmuller',
'1185823630.52kmuller',
'1185823760.23kmuller',
'1185824497.83kmuller',
'1185824751.45kmuller',
'1187739103.34akelts',
'1188323993.34akelts',
'1184016538.29kmuller',
'1185822200.97kmuller',
'1184016225.99kmuller',
'1195241421.34akelts',
'1195242796.08akelts',
'1184020642.13kmuller',
'1195237994.63akelts',
'1184020756.88kmuller',
'1184020833.4kmuller',
'1185820992.97kmuller',
'1185821053.83kmuller',
'1184015068.54kmuller',
'1184014935.82kmuller',
'1185821432.88kmuller',
'1185821701.86kmuller',
'1195240137.55akelts',
'1195241539.38akelts',
'1195238422.3akelts',
'1195238473.22akelts',
'1185821453.17kmuller',
'1184021269.96kmuller',
'1185821310.89kmuller',
'1185821165.59kmuller',
'1185821199.36kmuller',
'1185822035.98kmuller',
'1184015806.59kmuller',
'1185822059.48kmuller',
'1185920461.76kmuller',
'1194984449.66akelts',
'1185824206.22kmuller',
'1184003446.23kmuller',
'1184003254.85kmuller',
'1184003218.74kmuller',
'1184002700.44kmuller',
'1186705073.11kmuller',
'1187658531.86akelts',
'1186705214.3kmuller',
'1185824927.28kmuller',
'1184014204.54kmuller',
'1184014152.84kmuller'] },
'ObjectIds': {
'1153434880.63dzlu0': '["Objects"]["1153434880.63dzlu0"]',
'1201136836.84dxschafe': '["Objects"]["1153434880.63dzlu0"]["Objects"]["1201136836.84dxschafe"]' } }
extraInfo = {
'camPos': Point3(-1202.78, 260.68599999999998, 149.845),
'camHpr': VBase3(-98.880099999999999, -28.781600000000001, 0),
'focalLength': 1.3999999761599999 }
|
[
"brandoncarden12345@gmail.com"
] |
brandoncarden12345@gmail.com
|
cf38d83b92adeb028ec0a5e36ef6ed766d954ac0
|
0e4d09b2a1b93aaa6d623d16905854d993a934ae
|
/Python/Django/surprise_me/surprise_me/settings.py
|
f424efc942cbefeba7f250e60cd038b1cc08d43a
|
[] |
no_license
|
freefaller69/DojoAssignments
|
ee7f6308b02041be3244f795422e0e044d4a41b2
|
f40426ac448026c1172048665f36024ad22f0d81
|
refs/heads/master
| 2021-01-17T10:23:39.419514
| 2017-07-25T00:50:41
| 2017-07-25T00:50:41
| 84,012,790
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,253
|
py
|
"""
Django settings for surprise_me project.
Generated by 'django-admin startproject' using Django 1.11.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%*yk4s@mw!nm_8^^jkde_l^vdqldj2=v@dzqj&h6%z9l$t2b$='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'apps.surprise',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'surprise_me.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'surprise_me.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
|
[
"freefaller@gmail.com"
] |
freefaller@gmail.com
|
a8ba2d5c8302ea20dac6cf2c653d709d5b012a3b
|
c35d5713b9991efeb0f8a2665c91c74127138594
|
/bufferbloat.py
|
4e0431721722d6c7a93a6b3fc70e29396676f8fa
|
[] |
no_license
|
vs9390/bufferbloat
|
70849c13f24e0f7744a7852e8ed838a6235dbd0f
|
cc5341b5f0c0f835e6ec2e3d536abd2d80a5b096
|
refs/heads/master
| 2020-04-21T14:16:58.228215
| 2019-02-07T19:30:32
| 2019-02-07T19:30:32
| 169,629,667
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,038
|
py
|
#!/usr/bin/python
"CS144 In-class exercise: Buffer Bloat"
from mininet.topo import Topo
from mininet.node import CPULimitedHost
from mininet.link import TCLink
from mininet.net import Mininet
from mininet.node import RemoteController
from mininet.log import lg, info
from mininet.util import dumpNodeConnections
from mininet.cli import CLI
from monitor import monitor_qlen
from subprocess import Popen, PIPE
from time import sleep, time
from multiprocessing import Process
from argparse import ArgumentParser
import sys
import os
# Parse arguments
parser = ArgumentParser(description="BufferBloat tests")
parser.add_argument('--bw-host', '-B',
dest="bw_host",
type=float,
action="store",
help="Bandwidth of host links",
required=True)
parser.add_argument('--bw-net', '-b',
dest="bw_net",
type=float,
action="store",
help="Bandwidth of network link",
required=True)
parser.add_argument('--delay',
dest="delay",
type=float,
help="Delay in milliseconds of host links",
default=10)
parser.add_argument('--dir', '-d',
dest="dir",
action="store",
help="Directory to store outputs",
default="results",
required=True)
parser.add_argument('-n',
dest="n",
type=int,
action="store",
help="Number of nodes in star.",
required=True)
parser.add_argument('--nflows',
dest="nflows",
action="store",
type=int,
help="Number of flows per host (for TCP)",
required=True)
parser.add_argument('--maxq',
dest="maxq",
action="store",
help="Max buffer size of network interface in packets",
default=500)
parser.add_argument('--cong',
dest="cong",
help="Congestion control algorithm to use",
default="reno")
parser.add_argument('--diff',
help="Enabled differential service",
action='store_true',
dest="diff",
default=False)
# Expt parameters
args = parser.parse_args()
class StarTopo(Topo):
"Star topology for Buffer Bloat experiment"
def __init__(self, n=2, cpu=None, bw_host=1000, bw_net=1.5,
delay=10, maxq=None, diff=False):
# Add default members to class.
super(StarTopo, self ).__init__()
# Create switch and host nodes
for i in xrange(n):
self.addHost( 'h%d' % (i+1), cpu=cpu )
self.addSwitch('s0', fail_mode='open')
self.addLink('h1', 's0', bw=bw_host,
max_queue_size=int(maxq) )
for i in xrange(1, n):
self.addLink('h%d' % (i+1), 's0', bw=bw_host)
def ping_latency(net):
"(Incomplete) verify link latency"
h1 = net.getNodeByName('h1')
h1.sendCmd('ping -c 2 10.0.0.2')
result = h1.waitOutput()
print "Ping result:"
print result.strip()
def bbnet():
"Create network and run Buffer Bloat experiment"
print "starting mininet ...."
# Seconds to run iperf; keep this very high
seconds = 3600
start = time()
# Reset to known state
topo = StarTopo(n=args.n, bw_host=args.bw_host,
delay='%sms' % args.delay,
bw_net=args.bw_net, maxq=args.maxq, diff=args.diff)
net = Mininet(topo=topo, host=CPULimitedHost, link=TCLink,
autoPinCpus=True, controller=lambda name: RemoteController("c0",
ip="0.0.0.0",
port=6653))
# c0 = net.addController('c0', controller=RemoteController, ip="127.0.0.1", port=6653)
net.start()
dumpNodeConnections(net.hosts)
net.pingAll()
print args.diff
if args.diff:
print "Differentiate Traffic Between iperf and wget"
os.system("bash tc_cmd_diff.sh")
else:
print "exec tc_cmd.sh"
os.system("bash tc_cmd.sh %s" % args.maxq)
sleep(2)
ping_latency(net)
print "Initially, the delay between two hosts is around %dms" % (int(args.delay)*2)
h2 = net.getNodeByName('h2')
h1 = net.getNodeByName('h1')
h1.cmd('cd ./http/; nohup python2.7 ./webserver.py &')
h1.cmd('cd ../')
h2.cmd('iperf -s -w 16m -p 5001 -i 1 > iperf-recv.txt &')
CLI( net )
h1.cmd("sudo pkill -9 -f webserver.py")
h2.cmd("rm -f index.html*")
Popen("killall -9 cat", shell=True).wait()
if __name__ == '__main__':
bbnet()
|
[
"None"
] |
None
|
781728cd41d7b2d6039a59dec118afaea02aea57
|
df3e3e937e85ae03bc6714bf9aa487d9338d44fd
|
/mpmp/exceptions.py
|
feb76e4c5975c4adf3db8b9f293ccc2c91ce9877
|
[
"BSD-3-Clause"
] |
permissive
|
mayala1925/mpmp
|
9a6b4be43f9bc29874e9c0cdfa0866d70b61263c
|
7bd4d49e4acd745447dc0018ac121d1a45e8bfbc
|
refs/heads/master
| 2023-08-16T13:23:08.019630
| 2021-10-13T23:09:07
| 2021-10-13T23:09:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,675
|
py
|
"""
Exceptions specific to pan-cancer prediction experiments
"""
class ResultsFileExistsError(Exception):
"""
Custom exception to raise when the results file already exists for the
given gene and cancer type.
This allows calling scripts to choose how to handle this case (e.g. to
print an error message and continue, or to abort execution).
"""
pass
class NoTrainSamplesError(Exception):
"""
Custom exception to raise when there are no train samples in a
cross-validation fold for a given cancer type.
This allows calling scripts to choose how to handle this case (e.g. to
print an error message and continue, or to abort execution).
"""
pass
class NoTestSamplesError(Exception):
"""
Custom exception to raise when there are no test samples in a
cross-validation fold for a given cancer type.
This allows calling scripts to choose how to handle this case (e.g. to
print an error message and continue, or to abort execution).
"""
pass
class OneClassError(Exception):
"""
Custom exception to raise when there is only one class present in the
test set for the given cancer type.
This allows calling scripts to choose how to handle this case (e.g. to
print an error message and continue, or to abort execution).
"""
pass
class GenesNotFoundError(Exception):
"""
Custom exception to raise when genes provided for classification are not
part of existing datasets with oncogene/TSG info.
This allows calling scripts to choose how to handle this case (e.g. to
print an error message and continue, or to abort execution).
"""
pass
|
[
"jjc2718@gmail.com"
] |
jjc2718@gmail.com
|
2bc1432323a455395c7e8d97b4f3896a33278eb9
|
c1c00ced90d47b9425fa11b6e0e5148a26a70085
|
/tests/test_cli.py
|
d3438f75559b5b4993b1f8da97e7d6b0531eb024
|
[
"MIT"
] |
permissive
|
destos/Patterner
|
a8e90e30f0f2ca9411beb39e4cb8ef9e25fedc23
|
3e32468e843ec817b94da9df543c891ca69927fc
|
refs/heads/master
| 2020-04-25T14:44:23.872391
| 2019-02-27T05:50:01
| 2019-02-27T05:50:01
| 172,852,064
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 718
|
py
|
"""Sample integration test module using pytest-describe and expecter."""
# pylint: disable=redefined-outer-name,unused-variable,expression-not-assigned
import pytest
from click.testing import CliRunner
from expecter import expect
from patterner.cli import main
@pytest.fixture
def runner():
return CliRunner()
def describe_cli():
def describe_conversion():
def when_integer(runner):
result = runner.invoke(main, ['42'])
expect(result.exit_code) == 0
expect(result.output) == "12.80165\n"
def when_invalid(runner):
result = runner.invoke(main, ['foobar'])
expect(result.exit_code) == 0
expect(result.output) == ""
|
[
"patrick@forringer.com"
] |
patrick@forringer.com
|
7581ff553d9d2380b9a3fa8d04bc19aa2433dd6d
|
664c3ced94ab0e9a5bac547028db59a3ca1f2074
|
/10. Use classes to create active objects /EG10-07 Time Tracker with exception handler.py
|
6ab5ddc9eaa23b6a53954e815d434c237f056033
|
[
"MIT"
] |
permissive
|
nikcbg/Begin-to-Code-with-Python
|
2b1283a7818e26d3471677b51d1832cde52c4ddc
|
a72fdf18ca15f564be895c6394a91afc75fc3e2c
|
refs/heads/master
| 2021-06-23T23:09:36.009442
| 2021-06-23T11:17:24
| 2021-06-23T11:17:24
| 209,285,197
| 0
| 0
|
MIT
| 2021-03-17T07:48:09
| 2019-09-18T10:50:51
|
Python
|
UTF-8
|
Python
| false
| false
| 6,121
|
py
|
# EG10-07 Time Tracker with exception handler
import pickle
from BTCInput import *
# Create the contact class
class Contact:
min_session_length = 0.5
max_session_length = 3.5
@staticmethod
def validate_session_length(session_length):
'''
Validates a session length and returns
True if the session is valid or False if not
'''
if session_length < Contact.min_session_length:
return False
if session_length > Contact.max_session_length:
return False
return True
def __init__(self, name, address, telephone):
self.name = name
self.address = address
self.telephone = telephone
self.hours_worked = 0
def get_hours_worked(self):
'''
Gets the hours worked for this contact
'''
return self.hours_worked
def add_session(self, session_length):
'''
Adds the value of the parameter
onto the hours spent with this contact
Raises an exception if the session length is invalid
'''
if not Contact.validate_session_length(session_length):
raise Exception('Invalid session length')
self.hours_worked = self.hours_worked + session_length
return
def new_contact():
'''
Reads in a new contact and stores it
'''
print('Create new contact')
# add the data attributes
name=read_text('Enter the contact name: ')
address=read_text('Enter the contact address: ')
telephone=read_text('Enter the contact phone: ')
# create a new instance
new_contact=Contact(name=name,address=address,telephone=telephone)
# add the new contact to the contact list
contacts.append(new_contact)
def find_contact(search_name):
'''
Finds the contact with the matching name
Returns a contact instance or None if there is
no contact with the given name
'''
# remove any whitespace from around the search name
search_name = search_name.strip()
# convert the search name to lower case
search_name = search_name.lower()
for contact in contacts:
# get the name out of the contact
name=contact.name
# remove any whitespace from around the name
name=name.strip()
# convert the name to lower case
name = name.lower()
# see if the names match
if name.startswith(search_name):
# return the contact that was found
return contact
# if we get here no contact was found
# with the given name
return None
def display_contact():
'''
Reads in a name to search for and then displays
the content information for that name or a
message indicating that the name was not found
'''
print('Find contact')
search_name = read_text('Enter the contact name: ')
contact=find_contact(search_name)
if contact!=None:
# Found a contact
print('Name:', contact.name)
print('Address:', contact.address)
print('Telephone:', contact.telephone)
print('Hours on the case:', contact.get_hours_worked())
else:
print('This name was not found.')
def edit_contact():
'''
Reads in a name to search for and then allows
the user to edit the details of that contact
If there is no contact the funciton displays a
message indicating that the name was not found
'''
print('Edit contact')
search_name=read_text('Enter the contact name: ')
contact=find_contact(search_name)
if contact!=None:
# Found a contact
print('Name: ',contact.name)
new_name=read_text('Enter new name or . to leave unchanged: ')
if new_name!='.':
contact.name=new_name
new_address=read_text('Enter new address or . to leave unchanged: ')
if new_address!='.':
contact.address=new_address
new_phone=read_text('Enter new telephone or . to leave unchanged: ')
if new_phone!='.':
contact.telephone=new_phone
else:
print('This name was not found.')
def add_session_to_contact():
'''
Reads in a name to search for and then allows
the user to add a session spent working for
that contact
'''
print('add session')
search_name=read_text('Enter the contact name: ')
contact=find_contact(search_name)
if contact!=None:
# Found a contact
print('Name: ',contact.name)
print('Previous hours worked:',contact.get_hours_worked())
session_length=read_float(prompt='Session length: ')
try:
contact.add_session(session_length)
print('Updated hours worked:', contact.get_hours_worked())
except Exception as e:
print('Add hours failed:',e)
else:
print('This name was not found.')
def save_contacts(file_name):
'''
Saves the contacts to the given filename
Contacts are stored in binary as pickled file
Exceptions will be raised if the save fails
'''
print('save contacts')
with open(file_name,'wb') as out_file:
pickle.dump(contacts,out_file)
def load_contacts(file_name):
    '''
    Loads the contacts from the given filename
    Contacts are stored in binary as pickled file
    Exceptions will be raised if the load fails
    '''
    # Rebinds the module-level ``contacts`` list, so the global
    # declaration is required.
    global contacts
    print('Load contacts')
    with open(file_name,'rb') as input_file:
        contacts=pickle.load(input_file)
# Top-level script: load any previously saved contacts, then run the
# menu loop until the user chooses to save and exit.
menu='''Time Tracker
1. New Contact
2. Find Contact
3. Edit Contact
4. Add Session
5. Exit Program
Enter your command: '''

# File used to persist the contact list between runs.
filename='contacts.pickle'

try:
    load_contacts(filename)
except Exception:
    # FIX: was a bare ``except:``, which also swallows SystemExit and
    # KeyboardInterrupt.  ``Exception`` keeps the same best-effort
    # behaviour (start with an empty list when loading fails).
    print('Contacts file not found')
    contacts=[]

while True:
    command=read_int_ranged(prompt=menu,min_value=1,max_value=5)
    if command==1:
        new_contact()
    elif command==2:
        display_contact()
    elif command==3:
        edit_contact()
    elif command==4:
        add_session_to_contact()
    elif command==5:
        # Persist before leaving the loop.
        save_contacts(filename)
        break
|
[
"nkcbg@yahoo.com"
] |
nkcbg@yahoo.com
|
8fda8333924bdd0b3d4d4a1fc03469652dc5986d
|
df823d33423d37251c49b4be12ee022170138071
|
/python/mycurses.py
|
198e2605f0b0559ffdd2ed3200c896e81e5f5c89
|
[] |
no_license
|
von/sandbox
|
ca2a87870f0f5e3153cb33fd940f1b4cb9da7985
|
5e47e93c32bc85f986f39b1d4df8a384c7ff0019
|
refs/heads/main
| 2023-04-30T02:14:36.466490
| 2023-04-18T14:11:54
| 2023-04-18T14:11:54
| 331,739
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 722
|
py
|
#!/usr/bin/env python3
import curses
import os
def main(stdscr):
    """Curses demo: draw two bordered windows, redraw one, then stream `ls` output."""
    win1 = curses.newwin(3, 30, 2,0)
    win1.border()
    win2 = curses.newwin(10, 30, 10,0)
    win2.border()
    stdscr.addstr(0,0, "Testing...")
    win1.addstr(0,0, "Foobar")
    win2.addstr(0,0, "I win")
    stdscr.refresh()
    win1.refresh()
    win2.refresh()
    # Wait for a keypress before the next frame.
    stdscr.getch()
    win2.clear()
    win2.addstr(0,0, "2..3..")
    win2.refresh()
    stdscr.getch()
    # Write each line of `ls` output on its own row of win2.
    ls = os.popen("ls")
    for i,line in enumerate(ls):
        try:
            # NOTE(review): addstr is handed UTF-8 bytes here; passing the
            # str directly also works on Python 3 — confirm the encode is intentional.
            win2.addstr(i, 0, line.encode("utf-8"))
        except curses.error:
            # Assume we've hit the end of the window
            break
    win2.refresh()
    stdscr.getch()
# wrapper() handles terminal init/teardown and restores the terminal on error.
curses.wrapper(main)
|
[
"von@vwelch.com"
] |
von@vwelch.com
|
4d97fad9266a037d603b5a43d20dff72f6a5cdfc
|
ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f
|
/PORMain/pirates/shipparts/WheelInteractive.py
|
ac3fc5be3f7f39bac48476e6aa630f9acf8c2189
|
[] |
no_license
|
BrandonAlex/Pirates-Online-Retribution
|
7f881a64ec74e595aaf62e78a39375d2d51f4d2e
|
980b7448f798e255eecfb6bd2ebb67b299b27dd7
|
refs/heads/master
| 2020-04-02T14:22:28.626453
| 2018-10-24T15:33:17
| 2018-10-24T15:33:17
| 154,521,816
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 620
|
py
|
from pirates.interact.SimpleInteractive import SimpleInteractive
from pirates.piratesbase import PLocalizer
class WheelInteractive(SimpleInteractive):
    """Interactive hotspot attached to a ship's steering wheel locator."""
    def __init__(self, ship):
        # Keep the owning ship so permission/pilot requests can be delegated.
        self.ship = ship
        wheel = ship.model.locators.find('**/location_wheel')
        if not wheel:
            # Model has no wheel locator: attach an empty placeholder node.
            wheel = ship.model.root.attachNewNode('dummyWheel')
        SimpleInteractive.__init__(self, wheel, 'wheel-%s' % ship.doId, PLocalizer.InteractWheel)
    def interactionAllowed(self, avId):
        # The ship decides whether this avatar may take the wheel.
        return self.ship.canTakeWheel(avId)
    def requestInteraction(self, avId):
        # Forward the interaction as a pilot request on the ship.
        self.ship.requestPilot(avId)
|
[
"brandoncarden12345@gmail.com"
] |
brandoncarden12345@gmail.com
|
184b601a9277e7e6f8aa27a0c38c389b529ad172
|
59b3dce3c770e70b2406cc1dd623a2b1f68b8394
|
/python_3/lessons/Timing_Computations/src/ count_thirtyone_days.py
|
682d87d1e84181941930cc296f2428ddc1f00032
|
[] |
no_license
|
patrickbeeson/python-classes
|
04ed7b54fc4e1152a191eeb35d42adc214b08e39
|
b5041e71badd1ca2c013828e3b2910fb02e9728f
|
refs/heads/master
| 2020-05-20T07:17:36.693960
| 2015-01-23T14:41:46
| 2015-01-23T14:41:46
| 29,736,517
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 203
|
py
|
from datetime import datetime, timedelta
# Show today's day-of-month and the day-of-month 31 days from now.
current_time = datetime.now()
delivery = current_time + timedelta(days=31)
print("Today: %s" % current_time.strftime("%d"))
print("Delivery: %s" % delivery.strftime("%d"))
|
[
"patrickbeeson@gmail.com"
] |
patrickbeeson@gmail.com
|
b9c8c3198ea3b91ca79b7666122aeba124f8d46b
|
8be217fe977aa0bcd9e375c75b0fb522f5bf0101
|
/mergetwosortedlists21.py
|
0065e807191a8350423dd2e81ae12019e30106ab
|
[] |
no_license
|
blueones/LeetcodePractices
|
c63a5e773bebea17e988e8bb4962e012d7d402ba
|
194375ba0c07e420f420aafec98aede2f9f5d8fa
|
refs/heads/master
| 2021-07-14T14:21:55.389334
| 2021-01-24T22:13:21
| 2021-01-24T22:13:21
| 230,814,709
| 0
| 1
| null | 2020-02-25T02:58:04
| 2019-12-29T23:18:25
|
Python
|
UTF-8
|
Python
| false
| false
| 947
|
py
|
# Definition for singly-linked list.
class ListNode:
    """One node of a singly linked list."""
    def __init__(self, x):
        self.val = x      # payload value
        self.next = None  # following node, or None at the tail


class Solution:
    def mergeTwoLists(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Merge two ascending-sorted linked lists into one sorted list.

        Nodes are relinked in place (no value nodes are allocated beyond
        the sentinel); returns the merged head, or None when both inputs
        are None.  Ties keep the original tie-break of taking l2 first.
        """
        # Sentinel node removes the empty-head special case; the original's
        # explicit "both None" check and dead debug print were removed.
        dummy = ListNode(0)
        tail = dummy
        while l1 and l2:
            if l1.val < l2.val:
                tail.next, l1 = l1, l1.next
            else:
                # l2.val <= l1.val: take l2 (matches original tie behaviour)
                tail.next, l2 = l2, l2.next
            tail = tail.next
        # At most one list is non-empty now; append the remainder whole.
        tail.next = l1 or l2
        return dummy.next
# Smoke test: merging a one-node list with None returns that node
# (printed as its default repr, since ListNode defines no __str__).
sunnyNode = ListNode(1)
sunnyNode2 = None
print(Solution().mergeTwoLists(sunnyNode,sunnyNode2))
|
[
"yiq.shang@gmail.com"
] |
yiq.shang@gmail.com
|
d8d742854ec7842465b985ad93830852b7b6d3a1
|
8d14d526969d8e970254f08563ff2c6e6583dd35
|
/Python/2019/Hafta20191122/venv/Scripts/easy_install-script.py
|
530572ec9997ac842f173dfe02dcf73848586a38
|
[] |
no_license
|
osmanraifgunes/MedipolCodes
|
c29db62896162c4b1a2c8c274877fff63149f826
|
943b014269e9a7b529e74741ce14447dbd7d5df5
|
refs/heads/master
| 2023-01-09T10:31:02.907945
| 2020-06-09T18:05:04
| 2020-06-09T18:05:04
| 218,612,787
| 6
| 13
| null | 2023-01-07T18:58:55
| 2019-10-30T19:59:16
|
Python
|
UTF-8
|
Python
| false
| false
| 453
|
py
|
#!C:\code\MedipolCodes\Python\Hafta20191122\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
# NOTE: auto-generated setuptools entry-point wrapper for this virtualenv;
# regenerate via pip/setuptools rather than editing by hand.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py"/".exe" suffix so argv[0] matches the command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
    )
|
[
"osmanraifgunes@gmail.com"
] |
osmanraifgunes@gmail.com
|
699c75b97c7a8afdb70e4ce79f91ad7f94158668
|
95a2bb2ef56ca80ad7cb51d67a42242bf18fa337
|
/jump/models.py
|
b17ca1bff68351cf244316d1a03fec9d36836f23
|
[] |
no_license
|
zhangxianbo/soms
|
ac2183c0a285fe56456461101ecc78ca314c3929
|
0ba1802b0e2e9111e0f1855480723be8e2941bcd
|
refs/heads/master
| 2021-04-09T15:46:14.086425
| 2016-07-14T08:15:21
| 2016-07-14T08:15:21
| 62,615,511
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,671
|
py
|
#coding=utf-8
from django.db import models
from datetime import datetime
# Create your models here.
class Host(models.Model):
    """A server/host record (location, serial, IP, status, purpose)."""
    hostid = models.AutoField(primary_key=True)
    idc = models.CharField('机房',max_length=50)
    addr = models.CharField('机架等标识',max_length=50)
    sn = models.CharField('序列号',max_length=30,blank=True)
    ip = models.GenericIPAddressField('ip地址')
    port = models.IntegerField()
    online = models.CharField('在线状态',max_length=10)
    use = models.CharField('用途',max_length=50,blank=True)
    switch = models.CharField('交换机',max_length=50,blank=True)
    comment = models.CharField('备注',max_length=100, blank=True, null=True)
    def __unicode__(self):
        # Python 2-style display method; identifies the host by IP.
        return u' %s' % (self.ip)
class User(models.Model):
    """An account that can be granted access to hosts (see Userhost)."""
    userid = models.AutoField(primary_key=True)
    username = models.CharField('用户名',max_length=20)
    password = models.CharField('密码',max_length=100,blank=True)
    #ip = models.ManyToManyField(Host)
    name = models.CharField('姓名',max_length=50,blank=True)
    email = models.EmailField('邮箱',max_length=50,blank=True)
    update_time = models.DateTimeField('更新时间',default=datetime.now)
    def __unicode__(self):
        # Python 2-style display method; identifies the user by username.
        return u'%s' % (self.username)
    class Meta:
        # Default queryset ordering.
        ordering = ['username']
class Userhost(models.Model):
    """Join table linking a User to a Host with a permission bit string."""
    #uid = models.OneToOneField(User)
    #hid = models.ManyToManyField(Host)
    uid = models.ForeignKey(User)
    hid = models.ForeignKey(Host)
    permcode = models.CharField('权限位',max_length=10,blank=True)
    def __unicode__(self):
        return u'%s %s %s' % (self.uid,self.hid,self.permcode)
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
cffdbd0ab90b9b59ef7a69aff564ea1323fbe6b4
|
3181efe062a6745fc2e5d182260b8e94ce6c5701
|
/0MyProject_Quant/海龟反转策略/4_2.方向过滤参数自动选择及策略回测_并行.py
|
2c92a341e8f5ec2c95609c7db032528948d0fb42
|
[] |
no_license
|
LibreChou/PythonLearning
|
e240fddc559dc8614d4db95e79d047b18cc1be52
|
562ded21e84b68f43c539c65b91aed3a880162ce
|
refs/heads/master
| 2023-03-12T12:18:33.501881
| 2021-03-04T11:33:42
| 2021-03-04T11:33:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,317
|
py
|
# Author:Zhang Yuan
import warnings
warnings.filterwarnings('ignore')
from MyPackage import *
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import seaborn as sns
import statsmodels.api as sm
from scipy import stats
#------------------------------------------------------------
__mypath__ = MyPath.MyClass_Path("") # 路径类
mylogging = MyDefault.MyClass_Default_Logging(activate=True, filename=__mypath__.get_desktop_path()+"\\方向过滤策略回测.log") # 日志记录类,需要放在上面才行
myfile = MyFile.MyClass_File() # 文件操作类
myword = MyFile.MyClass_Word() # word生成类
myexcel = MyFile.MyClass_Excel() # excel生成类
mytime = MyTime.MyClass_Time() # 时间类
myplt = MyPlot.MyClass_Plot() # 直接绘图类(单个图窗)
mypltpro = MyPlot.MyClass_PlotPro() # Plot高级图系列
myfig = MyPlot.MyClass_Figure(AddFigure=False) # 对象式绘图类(可多个图窗)
myfigpro = MyPlot.MyClass_FigurePro(AddFigure=False) # Figure高级图系列
mynp = MyArray.MyClass_NumPy() # 多维数组类(整合Numpy)
mypd = MyArray.MyClass_Pandas() # 矩阵数组类(整合Pandas)
mypdpro = MyArray.MyClass_PandasPro() # 高级矩阵数组类
myDA = MyDataAnalysis.MyClass_DataAnalysis() # 数据分析类
myDefault = MyDefault.MyClass_Default_Matplotlib() # 画图恢复默认设置类
# myMql = MyMql.MyClass_MqlBackups() # Mql备份类
# myBaidu = MyWebCrawler.MyClass_BaiduPan() # Baidu网盘交互类
# myImage = MyImage.MyClass_ImageProcess() # 图片处理类
myBT = MyBackTest.MyClass_BackTestEvent() # 事件驱动型回测类
myBTV = MyBackTest.MyClass_BackTestVector() # 向量型回测类
myML = MyMachineLearning.MyClass_MachineLearning() # 机器学习综合类
mySQL = MyDataBase.MyClass_MySQL(connect=False) # MySQL类
mySQLAPP = MyDataBase.MyClass_SQL_APPIntegration() # 数据库应用整合
myWebQD = MyWebCrawler.MyClass_QuotesDownload(tushare=False) # 金融行情下载类
myWebR = MyWebCrawler.MyClass_Requests() # Requests爬虫类
myWebS = MyWebCrawler.MyClass_Selenium(openChrome=False) # Selenium模拟浏览器类
myWebAPP = MyWebCrawler.MyClass_Web_APPIntegration() # 爬虫整合应用类
myEmail = MyWebCrawler.MyClass_Email() # 邮箱交互类
myReportA = MyQuant.MyClass_ReportAnalysis() # 研报分析类
myFactorD = MyQuant.MyClass_Factor_Detection() # 因子检测类
myKeras = MyDeepLearning.MyClass_tfKeras() # tfKeras综合类
myTensor = MyDeepLearning.MyClass_TensorFlow() # Tensorflow综合类
myMT5 = MyMql.MyClass_ConnectMT5(connect=False) # Python链接MetaTrader5客户端类
myMT5Pro = MyMql.MyClass_ConnectMT5Pro(connect=False) # Python链接MT5高级类
myMT5Indi = MyMql.MyClass_MT5Indicator() # MT5指标Python版
myDefault.set_backend_default("Pycharm") # Pycharm下需要plt.show()才显示图
#------------------------------------------------------------
'''
# 1.根据前面 信号利润过滤测试 输出的文档,解析文档名称,读取参数,选择极值。
# 2.一个特定的策略参数作为一个目录,存放该下面所有指标的结果。
# 3.不同名称的指标会自动判断极值,且输出图片。最后会输出表格文档,整理这些极值。
# 4.由于不是大型计算,并行是一次性所有并行。
# 5.并行运算注意内存释放,并且不要一次性都算完,这样容易爆内存。分组进行并行。
'''
'''
# 说明
# 这里的策略回测是建立在前面已经对指标的范围过滤做了参数选择。
# 前面对每个具体策略都通过指标过滤方式,算出了各个指标过滤效果的极值。我们根据极值对应的指标值做回测。
# 画的图中,分别展示 过滤前训练集价格和指标、过滤前训练集策略、过滤后全集价格和指标、过滤后全集策略以及训练集策略。
# 方向过滤作用到整个样本。
# 并行以品种来并行,以时间框来分组。
# 由于指标较多,并行运算时间长,防止出错输出日志。
'''
#%%
from MyPackage.MyProjects.向量化策略测试.Direct_Filter import Auto_Choose_DFilter_Param
choo_para = Auto_Choose_DFilter_Param()
myDefault.set_backend_default("agg")
#%% ******需要修改******
choo_para.symbol_list = myMT5Pro.get_main_symbol_name_list()
choo_para.total_folder = "F:\\工作---策略研究\\公开的海龟策略\\_海龟反转研究"
choo_para.core_num = -1
#%%
from MyPackage.MyProjects.向量化策略测试.Direct_Filter import Direct_Filter_BackTest
rf_bt = Direct_Filter_BackTest()
myplt.set_backend("agg") # agg 后台输出图片,不占pycharm内存
#%%
rf_bt.symbol_list = choo_para.symbol_list
rf_bt.total_folder = choo_para.total_folder
rf_bt.core_num = -1
#%% ******修改函数******
# 策略的当期信号(不用平移):para_list策略参数,默认-1为lag_trade,-2为holding。
def stratgy_signal(dataframe, para_list=list or tuple):
    # Current-bar strategy signal (no shifting): delegates to the
    # turtle-reverse strategy with para_list[0] as its sole parameter.
    # NOTE(review): the default ``list or tuple`` evaluates to the builtin
    # ``list`` class (a type, not a sequence); callers appear to always
    # pass a real parameter list — confirm before relying on the default.
    return myBTV.stra.turtle_reverse(dataframe, para_list[0], price_arug= ["High", "Low", "Close"])
rf_bt.stratgy_signal = stratgy_signal
#%%
# ---多进程必须要在这里执行
if __name__ == '__main__':
    # Multiprocessing entry point: both steps spawn worker processes,
    # so they must run under the __main__ guard.
    print("开始方向过滤参数自动选择:")
    choo_para.main_func()
    print("开始方向过滤策略回测:")
    rf_bt.main_func()
|
[
"39754824+MuSaCN@users.noreply.github.com"
] |
39754824+MuSaCN@users.noreply.github.com
|
7c5b13fc736557163c95d289141ff4870117e2e0
|
b5a9469cb779031936bb613719397d7b3c279626
|
/backend/apps/privacy/sitemaps.py
|
2bc9aa6eae410f25322fcf965d670fd616158b73
|
[] |
no_license
|
arsavit/Cidsfg_copy
|
a34858d63749db0e821cb2f26b1eb31c4565c0f9
|
0145e9f1a397899b03a8d767fb96f1d238ec21f9
|
refs/heads/main
| 2023-07-08T11:18:10.042595
| 2021-08-11T08:09:27
| 2021-08-11T08:09:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 416
|
py
|
from django.contrib.sitemaps import Sitemap
from .models import Privacy
class PrivacySitemap(Sitemap):
    """Sitemap entry for the PRIVACY POLICY page."""
    changefreq = 'weekly'   # suggested re-crawl frequency
    priority = 0.9          # relative crawl priority
    location = '/privacy/'  # fixed URL used for every item
    def items(self):
        # Publish only the single newest Privacy record.
        return Privacy.objects.all().order_by('-id')[:1]
    def lastmod(self, obj):
        # Last-modified timestamp comes from the record's `updated` field.
        return obj.updated
|
[
"arsavit@gmail.com"
] |
arsavit@gmail.com
|
9d9a806f2ec508f3d202103ff17d592e98259b7b
|
26f23588e80acc2b28d4cc70a8fbcf78c5b33a20
|
/PythonSkills/decorator/basic02.py
|
ac7856a1b7b8a973b0e4280108fd34948670b37e
|
[] |
no_license
|
Timehsw/PythonCouldbeEverything
|
aa31b3e32bf68b49fe8e96b971637353a8ef644f
|
85d4f1a2c93c7b1edc34ceb9e8bb3c8d7beb30e9
|
refs/heads/master
| 2021-01-01T15:38:25.253094
| 2018-01-22T06:49:05
| 2018-01-22T06:49:05
| 97,661,530
| 5
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 698
|
py
|
# -*- coding: utf-8 -*-
'''
Created by hushiwei on 2018/1/7.
学习装饰器
闭包
函数里面可以定义函数
函数可以被传递赋值
函数可以被返回
那么装饰器就是,在函数之前额外做些事情
'''
'''
装饰器
有参函数
'''
def a_new_decorator(a_func):
def wrapTheFunction(*args,**kwargs):
print "I am doing some boring work before execution a_func()"
a_func(*args,**kwargs)
print "I am doing some boring work after execution a_func()"
return wrapTheFunction
@a_new_decorator
def a_function_requiring_decoration(name="hushiwei"):
print "I am %s"%name
a_function_requiring_decoration("Mike")
|
[
"hsw.time@gmail.com"
] |
hsw.time@gmail.com
|
627bcc579421c9e68946a4001c3726b2fc02b966
|
e7b665624c1134f7a6b3ab7c043cfa5ec83227bb
|
/CycleGAN/__init__.py
|
c73deef2fd0626ca2afa3252d2b902d7958b1f51
|
[] |
no_license
|
zhijie-ai/GAN
|
46f896909d1f5caedb7725cf44d328e24f4ad699
|
5e64b416209058721c582c3b71a1e9ca25cf169d
|
refs/heads/master
| 2022-10-26T10:28:08.279901
| 2019-08-26T14:09:15
| 2019-08-26T14:09:15
| 204,423,289
| 1
| 3
| null | 2022-10-07T00:52:36
| 2019-08-26T07:45:08
|
Python
|
UTF-8
|
Python
| false
| false
| 622
|
py
|
#----------------------------------------------
# -*- encoding=utf-8 -*- #
# __author__:'xiaojie' #
# CreateTime: #
# 2019/7/5 22:13 #
# #
# 天下风云出我辈, #
# 一入江湖岁月催。 #
# 皇图霸业谈笑中, #
# 不胜人生一场醉。 #
#----------------------------------------------
# Two implementations of CycleGAN: one using Keras and one using TensorFlow.
|
[
"15311484394@189.cn"
] |
15311484394@189.cn
|
dcb95199ae8b2d00c2e425403a3da419cc0d1c69
|
c8a41e7b2caa015903dc5aff2d8e34a5cbd34b8d
|
/python/itertools/compress-the-string.py
|
0208eec6cbcbd5eed48a9fa26c1a73150292dc0a
|
[] |
no_license
|
mauricioabreu/hacker-rank
|
bad197fec3582979df148a8212d330097191c2b7
|
3d2aaae53f438e4ef8d9382cc0c22003248c6787
|
refs/heads/master
| 2021-01-10T07:25:23.869714
| 2018-06-16T23:17:51
| 2018-06-16T23:17:51
| 46,177,986
| 6
| 1
| null | 2016-08-27T16:18:36
| 2015-11-14T14:38:13
|
Python
|
UTF-8
|
Python
| false
| false
| 212
|
py
|
from itertools import groupby
# Run-length encode the input digits: each maximal run of equal digits
# becomes a (run_length, digit) pair.  Python 2 script (raw_input /
# print statement).
chars = raw_input().strip()
groups = []
for key, value in groupby(chars):
    # groupby yields (digit, iterator-of-occurrences); len of the
    # materialised run is the count.
    groups.append((len(list(value)), int(key)))
print ' '.join(['(%s, %s)' % (k, v) for k, v in groups])
|
[
"mauricio.abreua@gmail.com"
] |
mauricio.abreua@gmail.com
|
ec3c3e9a1609b3241c9287dcf01219c6d607eeb7
|
d12b53101c289a1d752862e20ffc079e3ab4e057
|
/2.0/overturn.py
|
a15642ab7a89395e9a8230990f94277a71dc0b9f
|
[] |
no_license
|
UCAS007/adavanced-aritificial-intelligence
|
13708985b65fe0d27ed1fe93e05eb54ddef9949d
|
d88fcc8f5a59f290a866a04db6bcbe133bdc3ba3
|
refs/heads/master
| 2021-01-10T15:21:07.819354
| 2016-05-03T14:03:00
| 2016-05-03T14:03:00
| 45,598,387
| 7
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,038
|
py
|
"""
Fmax=0.995512 Perceptron
"""
import pickle
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.linear_model import SGDClassifier,Perceptron
from sklearn.grid_search import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn import metrics
import jieba
import mydataset
import os
import csv,codecs
import preprocessing as pp
import time
def train():
    """Train (or reuse) a CountVectorizer/TF-IDF/Perceptron text pipeline.

    Loads cached training data from train.pkl when present, otherwise
    builds it from mydataset and caches it.  Fits the pipeline, pickles
    it to pipeline.pkl, then scores the pipeline on its own training
    data and returns (F, pipeline) where F is the F1 score of class u'1'.

    NOTE(review): pickle files are opened in text mode ('r'/'w') and a
    Python 2 print statement appears in this file's __main__ block — this
    code targets Python 2; binary modes would be required on Python 3.
    """
    trainFileName='train.pkl'
    testFileName='test.pkl'
    pipelineFileName='pipeline.pkl'
    if(os.path.exists(trainFileName)):
        # Cached training data: two consecutive pickle loads (data, labels).
        fin=open(trainFileName,'r')
        trainData=pickle.load(fin)
        trainClass=pickle.load(fin)
        fin.close()
    else:
        # Build the cache from the raw dataset, reporting progress.
        trainText=mydataset.getAllTrainTextList()
        i=0;
        N=trainText.__len__()
        trainData=[]
        trainClass=[]
        for (tag,text) in trainText:
            i=i+1
            if(i%5000==0):
                print('i=%08d finished %5.5f%% using jieba to cut the text\n'%(i,i*100.0/N))
            trainData.append(text)
            trainClass.append(tag)
        fout=open(trainFileName,'w')
        pickle.dump(trainData,fout)
        pickle.dump(trainClass,fout)
        fout.close()
    # Pipeline reuse is disabled (if(False)); a fresh fit happens each call.
    #if(os.path.exists(pipelineFileName)):
    if(False):
        fin=open(pipelineFileName,'r')
        pipeline=pickle.load(fin)
        fin.close()
    else:
        pipeline = Pipeline([
            ('vect', CountVectorizer()),
            ('tfidf', TfidfTransformer()),
            ('clf', Perceptron()),
        ])
        #pipeline.set_params(vect__max_df=0.6,clf__alpha=1e-07,clf__penalty='l2',tfidf__norm='l1',tfidf__use_idf=True,vect__ngram_range=(1,2))
        pipeline.set_params(vect__max_df=0.6,tfidf__norm='l1',tfidf__use_idf=True,vect__ngram_range=(1,2))
        trainNum=trainData.__len__()
        pipeline.fit(trainData[0:trainNum],trainClass[0:trainNum])
        fout=open(pipelineFileName,'w')
        pickle.dump(pipeline,fout)
        fout.close()
    #################################### output train result
    # Score on the training set itself (training accuracy, not held-out).
    trainNum=trainData.__len__()
    #print 'train result '+"#"*30
    prec=pipeline.predict(trainData[0:trainNum])
    expected=trainClass[0:trainNum]
    #print("Classification report for classifier:\n%s\n"
    #% (metrics.classification_report(expected, prec)))
    # Confusion counts treating label u'1' as the positive class.
    TP=0.0
    TN=0.0
    FP=0.0
    FN=0.0
    N=trainData.__len__()
    for i in range(0,trainNum):
        if(prec[i]==expected[i]):
            if(prec[i]==u'1'):
                TP=TP+1
            else:
                TN=TN+1
        else:
            if(prec[i]==u'1'):
                FP=FP+1
            else:
                FN=FN+1
    # Precision / recall / F1 for the positive class.
    P=TP/(TP+FP)
    R=TP/(TP+FN)
    F=2*P*R/(P+R)
    #print('train result: P=%f,R=%f,F=%f\n'%(P,R,F))
    return F,pipeline
############################################# output test result
if __name__ == '__main__' :
    # Train several pipelines, keep the one with the best training-set F1,
    # then predict the test set and write a HackerRank-style upload CSV.
    trainFileName='train.pkl'
    testFileName='test.pkl'
    pipelineFileName='pipeline.pkl'
    bestPipelineFileName='bestPipeline.pkl'
    Fmax=0
    for i in range(1,10):
        print ('i=%d \n'%(i))
        t1=time.time()
        F,pipeline=train()
        t2=time.time()
        # NOTE(review): Python 2 print statement — this file targets Python 2.
        print (t2-t1).__str__()+'s'
        if(F>Fmax):
            # New best model: remember it and pickle it immediately.
            Fmax=F
            bestPipeline=pipeline
            print('Fmax=%f \n'%(Fmax))
            fout=open(bestPipelineFileName,'w')
            pickle.dump(bestPipeline,fout)
            fout.close()
    print('Fmax=%f \n' % (Fmax))
    # Load (or build and cache) the test set.
    if(os.path.exists(testFileName)):
        fin=open(testFileName,'r')
        testText=pickle.load(fin)
        fin.close()
    else:
        testText=mydataset.getTestTextList()
        fout=open(testFileName,'w')
        pickle.dump(testText,fout)
        fout.close()
    # Write "id,label" rows, ids starting at 800001.
    # NOTE(review): predictions come from the *last* trained pipeline,
    # not bestPipeline — confirm this is intended.
    outputFileName='../output/upload.csv'
    fileOutput=codecs.open(outputFileName,'w','utf-8')
    prec=pipeline.predict(testText)
    N=800001
    for i in prec:
        fileOutput.write(N.__str__()+','+i+'\n')
        N=N+1
    fileOutput.close()
    os.system("mplayer ~/music/alert.mp3")
|
[
"youdaoyzbx@163.com"
] |
youdaoyzbx@163.com
|
069240fe041da4600557e9ba6ab166a4c5a27da8
|
0c6bd6305cbd128fe7426f66ec9bf4d01fb9b40c
|
/backend_apps_web_based/flask/RESTful_api_part3/test.py
|
fe6c099a291d13fe3bef662c606846c898e25092
|
[] |
no_license
|
yennanliu/web_development
|
08dbffc549214952f7b02dc29837474b0ad6e980
|
8dc6b224040b3953999d5e8d4a7f26d8e92ca931
|
refs/heads/master
| 2021-05-24T03:12:24.344381
| 2020-12-30T09:31:15
| 2020-12-30T09:31:15
| 72,651,027
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,281
|
py
|
import sys, os, json, requests
import pytest, unittest
from flask_sqlalchemy import SQLAlchemy
# main flask app
from app import app
db = SQLAlchemy(app)
def TestHelloworld():
    """Smoke-test the root endpoint of a Flask server already running on port 5000."""
    response = requests.get('http://0.0.0.0:5000/')
    assert response.status_code == 200
def TestApi():
    """Check that the products REST endpoint of the running server returns HTTP 200."""
    response = requests.get('http://0.0.0.0:5000/product/api/v1.0/products')
    print (response)
    assert response.status_code == 200
class TestDB(unittest.TestCase):
    """App/database tests using Flask's test client against a local SQLite file."""
    # setup and tear down
    # executed prior to each test
    def setUp(self):
        # Configure the app for testing and rebuild the schema from scratch.
        app.config['TESTING'] = True
        app.config['WTF_CSRF_ENABLED'] = False
        app.config['DEBUG'] = False
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + 'database.db'
        self.app = app.test_client()
        db.drop_all()
        db.create_all()
        self.assertEqual(app.debug, False)
    # executed after each test
    def tearDown(self):
        # Drop the schema so each test starts clean.
        db.session.remove()
        db.drop_all()
    # tests
    def test_main_page(self):
        response = self.app.get('/', follow_redirects=True)
        self.assertEqual(response.status_code, 200)
    # test models
    # TODO, will update this when creating DB via db model
if __name__ == "__main__":
    # Run the ad-hoc live-server checks first, then hand over to
    # unittest for the TestDB cases.
    TestHelloworld()
    TestApi()
    unittest.main()
|
[
"f339339@gmail.com"
] |
f339339@gmail.com
|
e35602b4e63050a98f36b2620a2b840278beb790
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02972/s165580953.py
|
33b7efe88d0ed61cd2af5b945b54fc7bd16ee28d
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 590
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Created: Jul, 13, 2020 08:25:55 by Nobody
# $Author$
# $Date$
# $URL$
__giturl__ = "$URL$"
from sys import stdin
input = stdin.readline
def main():
    """Choose a set of indices so each i's multiples have parity A[i].

    Reads N and parities A[1..N] from stdin; decides each index from N
    down to 1, then prints the count of chosen indices followed by the
    indices themselves (competitive-programming format).
    """
    N = int(input())
    # A is padded with a dummy at index 0 so it is 1-indexed.
    A = [-1]+list(map(int, input().split()))
    D = [-1]*(N+1)
    # Work from the largest index down: D[i] depends only on multiples
    # of i greater than i, which are already fixed.
    for i in range(N, 0, -1):
        if i > int(N/2):
            # i has no multiple <= N besides itself.
            D[i] = A[i]
        else:
            # Sum the decisions at the strict multiples 2i, 3i, ...
            temp_sum = 0
            for j in range(N//i, 1, -1):
                temp_sum += D[i*j]
            # XOR the multiples' parity against the required parity.
            D[i] = (temp_sum % 2) ^ A[i]
    print(sum(D[1:]))
    for i in range(1, N+1):
        if D[i]:
            print(i)
if(__name__ == '__main__'):
main()
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
57ba84aabc962427d8bb568812dcabaa61ca840a
|
e705de3a44a7cc922e93c76c3aa6e6108222e538
|
/problems/0128_longest_consecutive_sequence.py
|
5e3a863543e3dfb214407f3bf1547862272121e1
|
[] |
no_license
|
sokazaki/leetcode_solutions
|
34d4877dc7d13dc80ef067211a316c48c6269eca
|
42cf52eeef537806c9e3ec7a6e5113c53d0f18a3
|
refs/heads/master
| 2021-06-21T22:23:25.403545
| 2021-02-21T16:47:19
| 2021-02-21T16:47:19
| 193,951,202
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 826
|
py
|
# O(N) Solution with Hashmap
import unittest
def longestConsecutive(nums):
    """Return the length of the longest run of consecutive integers in nums.

    O(n): every value is removed from the working set exactly once.  From
    an arbitrary seed we expand downwards and upwards while neighbouring
    values exist, consuming them as we go.
    """
    pool = set(nums)
    best = 0
    while pool:
        low = high = pool.pop()
        # Grow the run downwards.
        while low - 1 in pool:
            low -= 1
            pool.discard(low)
        # Grow the run upwards.
        while high + 1 in pool:
            high += 1
            pool.discard(high)
        best = max(best, high - low + 1)
    return best
class Test(unittest.TestCase):
    """Pins longestConsecutive on mixed, duplicate, empty and singleton-run input."""
    def test_longestConsecutive(self):
        self.assertEqual(longestConsecutive([100,4,200,1,3,2]), 4)
        self.assertEqual(longestConsecutive([100,4,200,1,33,2]), 2)
        self.assertEqual(longestConsecutive([1,44,200,1,3,2]), 3)
        self.assertEqual(longestConsecutive([]), 0)
        self.assertEqual(longestConsecutive([100,44,200,11,33,2]), 1)
if __name__ == "__main__":
unittest.main()
|
[
"noreply@github.com"
] |
sokazaki.noreply@github.com
|
06a59e43096e806dd20c21c29f851772da55e59a
|
e2a0d262b5a3c26a30ed02c78cb905363df9241c
|
/com/11_class2.py
|
2d657a02b7bdf2f9b0f1bb5a9fc78a3329a1a38c
|
[] |
no_license
|
Kyeongrok/python_selinium
|
75b158f0c46aa5d2b7b627dd4a6775c3c6ab66ef
|
233c90b3294949813cc910a8b0b2f5fed7df80a9
|
refs/heads/master
| 2020-04-01T03:35:06.925347
| 2018-10-27T05:03:51
| 2018-10-27T05:03:51
| 152,827,717
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 389
|
py
|
class people():
    """Toy class demonstrating class attributes and instance methods."""
    # Class-level default; setName() shadows it with an instance attribute.
    name = "kyeongrok"
    def sayHello(self):
        print("hello")
    def leftHand(self):
        print("i'm left hand")
    def rightHand(self):
        print("i'm right hand")
    def setName(self, name):
        # Stores on the instance, leaving the class default untouched.
        self.name = name
# Demo: rename the instance, show the new name, then call each method.
kyeongrok = people()
kyeongrok.setName("iu")
print(kyeongrok.name)
kyeongrok.sayHello()
kyeongrok.leftHand()
kyeongrok.rightHand()
|
[
"kyeongrok.kim@okiconcession.com"
] |
kyeongrok.kim@okiconcession.com
|
042eb90c4d3065ab75fc8c59d35336f3d37f6d12
|
cc08f8eb47ef92839ba1cc0d04a7f6be6c06bd45
|
/Personal/JaipurCity/smart_city/models.py
|
4edef2ae9219cc2bf12785fae1060189c82a680f
|
[] |
no_license
|
ProsenjitKumar/PycharmProjects
|
d90d0e7c2f4adc84e861c12a3fcb9174f15cde17
|
285692394581441ce7b706afa3b7af9e995f1c55
|
refs/heads/master
| 2022-12-13T01:09:55.408985
| 2019-05-08T02:21:47
| 2019-05-08T02:21:47
| 181,052,978
| 1
| 1
| null | 2022-12-08T02:31:17
| 2019-04-12T17:21:59
| null |
UTF-8
|
Python
| false
| false
| 2,081
|
py
|
from django.contrib.gis.db import models
from django.contrib.gis.geos import Point
class SmartRestaurant(models.Model):
    """Restaurant point of interest with rating, cost band and GIS location."""
    restaurant = models.CharField(max_length=254)
    rating = models.FloatField()
    type = models.CharField(max_length=254)
    cuisines = models.CharField(max_length=254)
    cost = models.CharField(max_length=254)
    address = models.CharField(max_length=254)
    features = models.CharField(max_length=254)
    latitude = models.FloatField()
    longitude = models.FloatField()
    # Geometry duplicate of latitude/longitude for spatial queries.
    point = models.PointField()
    created_at = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        return self.restaurant
class Fort(models.Model):
    """Fort/heritage point of interest with rating and GIS location."""
    title = models.CharField(max_length=254)
    rating = models.FloatField()
    category = models.CharField(max_length=254)
    # NOTE(review): field name appears truncated ("descriptio") — likely
    # imported from a shapefile with 10-char column names; confirm.
    descriptio = models.CharField(max_length=254)
    latitude = models.FloatField()
    longitude = models.FloatField()
    point = models.PointField()
    def __str__(self):
        return self.title
class Hospital55(models.Model):
    """Hospital point of interest with rating, contact and GIS location."""
    hospital_n = models.CharField(max_length=255)
    hospital_r = models.FloatField()
    contact_nu = models.CharField(max_length=255)
    address = models.CharField(max_length=255)
    latitude = models.FloatField()
    longitude = models.FloatField()
    point = models.PointField()
    def __str__(self):
        return self.hospital_n
class Market(models.Model):
    """Market point of interest with rating and GIS location."""
    market_nam = models.CharField(max_length=255)
    rating = models.FloatField()
    location = models.CharField(max_length=255)
    latitude = models.FloatField()
    longitude = models.FloatField()
    point = models.PointField()
    def __str__(self):
        return self.market_nam
class PoliceStation(models.Model):
    """Police station point of interest with rating, contact and GIS location."""
    police_sta = models.CharField(max_length=255)
    rating = models.FloatField()
    contact_nu = models.CharField(max_length=255)
    address = models.CharField(max_length=255)
    latitude = models.FloatField()
    longitude = models.FloatField()
    point = models.PointField()
    def __str__(self):
        return self.police_sta
|
[
"prosenjitearnkuar@gmail.com"
] |
prosenjitearnkuar@gmail.com
|
4b208c47d8b238082b1f0e0926b8ca03994e7acb
|
50a20e25c1cb7ac05b0d7eb05bf174973a866a4b
|
/Day20/Day20.py
|
a80a573319bf5001f8c8df8dc3bb1248856c81d2
|
[] |
no_license
|
bakkerjangert/AoC_2016
|
1733b15bbb762d9fff0c986e33d404c5b7148591
|
3ccafab3f6d8b8efb4bf7de0549e22a4bd4de527
|
refs/heads/master
| 2023-02-04T06:09:32.862621
| 2020-12-18T14:39:00
| 2020-12-18T14:39:00
| 322,620,456
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 956
|
py
|
import numpy as np
import pylab as plt  # imported by the original script; not used below

# AoC 2016 Day 20: merge blocked IP ranges, then find the lowest free IP
# (part 1) and the total count of free IPs between ranges (part 2).
with open('input.txt') as f:
    lines = f.read().splitlines()

# Parse each "low-high" blocked range into parallel lists.
start = []
end = []
for line in lines:
    start.append(int(line.split('-')[0]))
    end.append(int(line.split('-')[1]))

ips = np.array([start, end]).transpose()
# FIX: the original used np.sort(ips, axis=0), which sorts the start and
# end columns independently and decouples each range's start from its end.
# Sort whole rows by their start value so every row stays a real range.
ips = ips[ips[:, 0].argsort()]

# Merge overlapping *or adjacent* ranges in place.  (FIX: adjacency —
# next start == current end + 1 — also forms one contiguous blocked span;
# the original's overlap-only test could report a blocked IP for part 1.)
i = 0
while i + 1 < len(ips):
    if ips[i + 1, 0] <= ips[i, 1] + 1:
        ips[i, 1] = max(ips[i, 1], ips[i + 1, 1])
        ips = np.delete(ips, i + 1, 0)
    else:
        i += 1

print(ips)
print(len(ips[:, 0]))
# Assumes the first merged range starts at 0, as in the puzzle input.
print(f'the answer to part 1 is {ips[0, 1] + 1}')

# part b: sum the gaps between consecutive merged ranges.
# NOTE(review): addresses above the last range (up to 2**32 - 1) are not
# counted — confirm the input's final range reaches the maximum address.
count = 0
for i in range(len(ips[:, 0]) - 1):
    count += (ips[i + 1, 0] - ips[i, 1] - 1)
print(f'the answer to part 2 is {count}')
|
[
"gert-jan.bakker@rhdhv.com"
] |
gert-jan.bakker@rhdhv.com
|
316c6f696121e8eb21ad87bd9966d1689f929134
|
37cfcdfa3b8f1499f5899d2dfa2a48504a690abd
|
/test/functional/p2p_disconnect_ban.py
|
1886e64fb2499ff15b887e636597f96dd7018069
|
[
"MIT"
] |
permissive
|
CJwon-98/Pyeongtaekcoin
|
28acc53280be34b69c986198021724181eeb7d4d
|
45a81933a98a7487f11e57e6e9315efe740a297e
|
refs/heads/master
| 2023-08-17T11:18:24.401724
| 2021-10-14T04:32:55
| 2021-10-14T04:32:55
| 411,525,736
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,354
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Pyeongtaekcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node disconnect and ban behavior"""
import time
from test_framework.test_framework import PyeongtaekcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes_bi,
wait_until,
)
class DisconnectBanTest(PyeongtaekcoinTestFramework):
    def set_test_params(self):
        """Use two nodes so one peer can ban/disconnect the other (see run_test)."""
        self.num_nodes = 2
def run_test(self):
self.log.info("Test setban and listbanned RPCs")
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
self.nodes[1].setban(subnet="127.0.0.1", command="add")
wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("clearbanned: successfully clear ban list")
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].setban("127.0.0.0/24", "add")
self.log.info("setban: fail to ban an already banned subnet")
assert_equal(len(self.nodes[1].listbanned()), 1)
assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
self.log.info("setban: fail to ban an invalid subnet")
assert_raises_rpc_error(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
self.log.info("setban remove: fail to unban a non-banned subnet")
assert_raises_rpc_error(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: successfully unban subnet")
self.nodes[1].setban("127.0.0.0/24", "remove")
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.log.info("setban: test persistence across node restart")
self.nodes[1].setban("127.0.0.0/32", "add")
self.nodes[1].setban("127.0.0.0/24", "add")
# Set the mocktime so we can control when bans expire
old_time = int(time.time())
self.nodes[1].setmocktime(old_time)
self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds
self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds
listBeforeShutdown = self.nodes[1].listbanned()
assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
# Move time forward by 3 seconds so the third ban has expired
self.nodes[1].setmocktime(old_time + 3)
assert_equal(len(self.nodes[1].listbanned()), 3)
self.stop_node(1)
self.start_node(1)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
assert_equal("/19" in listAfterShutdown[2]['address'], True)
# Clear ban lists
self.nodes[1].clearbanned()
connect_nodes_bi(self.nodes, 0, 1)
self.log.info("Test disconnectnode RPCs")
self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
node1 = self.nodes[0].getpeerinfo()[0]['addr']
assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1)
self.log.info("disconnectnode: fail to disconnect when calling with junk address")
assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street")
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address=address1)
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
connect_nodes_bi(self.nodes, 0, 1) # reconnect the node
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully disconnect node by node id")
id1 = self.nodes[0].getpeerinfo()[0]['id']
self.nodes[0].disconnectnode(nodeid=id1)
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
# Standard entry point: run the functional test when invoked directly.
if __name__ == '__main__':
    DisconnectBanTest().main()
|
[
"cjone98692996@gmail.com"
] |
cjone98692996@gmail.com
|
6f136cc6daa5a8670845de0e72b5aa253d75137b
|
de5dc978e0a5b9fc4ecbbdd00c1cebe57c465775
|
/wso2_apim_storeclient/models/__init__.py
|
5c4b27271173e651942c4c622035cbca741cb8fe
|
[] |
no_license
|
junetigerlee/python-wso2-apim-storeclient
|
8c3502dfd039eca0093c218cb6ac1183c050edb5
|
60c84988a2417a0104aaa53ed082902012d6247d
|
refs/heads/master
| 2021-01-01T16:12:12.197633
| 2017-07-25T06:21:21
| 2017-07-25T06:21:21
| 97,787,392
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,500
|
py
|
# coding: utf-8
"""
WSO2 API Manager - Store
This specifies a **RESTful API** for WSO2 **API Manager** - Store. Please see [full swagger definition](https://raw.githubusercontent.com/wso2/carbon-apimgt/v6.0.4/components/apimgt/org.wso2.carbon.apimgt.rest.api.store/src/main/resources/store-api.yaml) of the API which is written using [swagger 2.0](http://swagger.io/) specification.
OpenAPI spec version: 0.11.0
Contact: architecture@wso2.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import models into model package
from .api import API
from .api_info import APIInfo
from .api_info_object_with_basic_api_details_ import APIInfoObjectWithBasicAPIDetails_
from .api_list import APIList
from .api_object import APIObject
from .api_object_business_information import APIObjectBusinessInformation
from .api_object_endpoint_ur_ls import APIObjectEndpointURLs
from .api_object_environment_ur_ls import APIObjectEnvironmentURLs
from .application import Application
from .application_1 import Application1
from .application_2 import Application2
from .application_3 import Application3
from .application_info import ApplicationInfo
from .application_info_object_with_basic_application_details import ApplicationInfoObjectWithBasicApplicationDetails
from .application_key import ApplicationKey
from .application_key_details import ApplicationKeyDetails
from .application_key_generate_request import ApplicationKeyGenerateRequest
from .application_key_generation_request_object import ApplicationKeyGenerationRequestObject
from .application_list import ApplicationList
from .description_of_individual_errors_that_may_have_occurred_during_a_request_ import DescriptionOfIndividualErrorsThatMayHaveOccurredDuringARequest_
from .document import Document
from .document_1 import Document1
from .document_list import DocumentList
from .error import Error
from .error_list_item import ErrorListItem
from .error_object_returned_with_4_xx_http_status import ErrorObjectReturnedWith4XXHTTPStatus
from .subscription import Subscription
from .subscription_1 import Subscription1
from .subscription_2 import Subscription2
from .subscription_list import SubscriptionList
from .tag import Tag
from .tag_1 import Tag1
from .tag_list import TagList
from .tier import Tier
from .tier_1 import Tier1
from .tier_list import TierList
from .token import Token
from .token_details_for_invoking_ap_is import TokenDetailsForInvokingAPIs
|
[
"junetigerlee@gmail.com"
] |
junetigerlee@gmail.com
|
570bc08e2531a3c60bc6778ff86427b795d3d936
|
9e610e88158b973a2129cb794176dc1a9b0b6bfd
|
/juicer/util/jinja2_custom.py
|
535940968e98e01641d04d94dbc96fd93dc925e3
|
[] |
no_license
|
eubr-bigsea/juicer
|
8735b3aefcf66a5207364270e7ee9ec809b94ad4
|
4714187a6cb8ca7d1e09d8eae4cf4898ae7dcc58
|
refs/heads/master
| 2023-08-31T07:01:52.091443
| 2023-08-14T21:45:03
| 2023-08-14T21:56:48
| 68,124,762
| 6
| 8
| null | 2023-08-01T01:21:18
| 2016-09-13T16:09:11
|
Python
|
UTF-8
|
Python
| false
| false
| 903
|
py
|
# -*- coding: utf-8 -*-
from jinja2 import nodes
from jinja2.ext import Extension
import autopep8
class AutoPep8Extension(Extension):
    """Jinja2 extension providing an ``{% autopep8 %} ... {% endautopep8 %}``
    block whose rendered output is reformatted with autopep8."""

    # Template tag names that activate this extension.
    tags = {'autopep8'}

    def __init__(self, environment):
        super(AutoPep8Extension, self).__init__(environment)
        # add the defaults to the environment
        environment.extend()

    def parse(self, parser):
        """Consume the tag, collect the block body up to ``endautopep8`` and
        wrap it in a CallBlock that pipes the rendered text through
        :meth:`_format_support`."""
        lineno = next(parser.stream).lineno
        body = parser.parse_statements(['name:endautopep8'], drop_needle=True)
        formatter_call = self.call_method('_format_support', [])
        return nodes.CallBlock(formatter_call, [], [], body).set_lineno(lineno)

    @staticmethod
    def _format_support(caller):
        """Render the inner block via *caller* and return it PEP8-formatted."""
        pep8_options = autopep8.parse_args(['--max-line-length', '81', '-'])
        return autopep8.fix_code(caller(), options=pep8_options)
|
[
"waltersf@gmail.com"
] |
waltersf@gmail.com
|
7d6112f173be2b434f9779490f1979f1d893a056
|
01d92ca39cd4836aaef67e2efcf88a44671c7213
|
/code_pack_19/basic_logger_2.py
|
360836d78049880cad49c8acca8c494b507ccf7d
|
[] |
no_license
|
manuelpereira292/py3_bootcamp
|
247f411b80f09c46aeeba90a96e6a5d3fd329f2c
|
1988553394cb993db82c39993ed397e497bd5ae8
|
refs/heads/master
| 2022-08-20T02:25:51.265204
| 2020-05-15T22:26:27
| 2020-05-15T22:26:27
| 263,367,513
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
import logging
# Append INFO-and-above records from the "ex" logger to a sample log file.
logging.basicConfig(filename="code_pack_19/sample1.log", level=logging.INFO)
log = logging.getLogger("ex")

# Provoke an exception and record it: Logger.exception() logs at ERROR level
# and automatically appends the current traceback to the message.
try:
    raise RuntimeError
except RuntimeError:
    log.exception("Error!")

# Echo the log file back to stdout, one line at a time.
with open("code_pack_19/sample1.log") as log_file:
    for record_line in log_file:
        print(record_line)
|
[
"manuelpereira292@gmail.com"
] |
manuelpereira292@gmail.com
|
98697225e835037618221274549d17e44739d9f0
|
b31e7898aa5131125f243eaff973049b17e08512
|
/.venv/lib/python3.10/site-packages/anyio/_core/_signals.py
|
8ea54af86c4be12340de02dc2a6f7eba387e0d98
|
[] |
no_license
|
ramsred/MyProjects
|
f2978eeda3d73421daf0da9f2d012caef6c3ccda
|
a7f90ef1ecfbc7517be61e71286bd14405985de5
|
refs/heads/master
| 2023-07-09T03:19:17.683705
| 2023-07-02T19:30:19
| 2023-07-02T19:30:19
| 71,980,729
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 863
|
py
|
from __future__ import annotations
from typing import AsyncIterator
from ._compat import DeprecatedAsyncContextManager
from ._eventloop import get_asynclib
def open_signal_receiver(
    *signals: int,
) -> DeprecatedAsyncContextManager[AsyncIterator[int]]:
    """
    Start receiving operating system signals.

    :param signals: signals to receive (e.g. ``signal.SIGINT``)
    :return: an asynchronous context manager for an asynchronous iterator which yields signal
        numbers

    .. warning:: Windows does not support signals natively so it is best to avoid relying on this
        in cross-platform applications.

    .. warning:: On asyncio, this permanently replaces any previous signal handler for the given
        signals, as set via :meth:`~asyncio.loop.add_signal_handler`.
    """
    # Delegate to whichever async backend is currently running.
    asynclib = get_asynclib()
    return asynclib.open_signal_receiver(*signals)
|
[
"venkataramireddy534@gmail.com"
] |
venkataramireddy534@gmail.com
|
5a58a787ab85afbf656093287cdf31bb5fd3798e
|
a6e4a6f0a73d24a6ba957277899adbd9b84bd594
|
/sdk/python/pulumi_azure_native/network/get_hub_virtual_network_connection.py
|
a68415eeb18718c1b8b9c127daf3badcdf55b420
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
MisinformedDNA/pulumi-azure-native
|
9cbd75306e9c8f92abc25be3f73c113cb93865e9
|
de974fd984f7e98649951dbe80b4fc0603d03356
|
refs/heads/master
| 2023-03-24T22:02:03.842935
| 2021-03-08T21:16:19
| 2021-03-08T21:16:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,803
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'GetHubVirtualNetworkConnectionResult',
'AwaitableGetHubVirtualNetworkConnectionResult',
'get_hub_virtual_network_connection',
]
@pulumi.output_type
class GetHubVirtualNetworkConnectionResult:
    """
    HubVirtualNetworkConnection Resource.
    """
    # NOTE: generated by the Pulumi SDK Generator (see file header) -- the
    # __init__ validates and stores each field via pulumi.set, and each
    # @property reads it back via pulumi.get.
    def __init__(__self__, allow_hub_to_remote_vnet_transit=None, allow_remote_vnet_to_use_hub_vnet_gateways=None, enable_internet_security=None, etag=None, id=None, name=None, provisioning_state=None, remote_virtual_network=None, routing_configuration=None):
        """Validate argument types and store them on the result object."""
        if allow_hub_to_remote_vnet_transit and not isinstance(allow_hub_to_remote_vnet_transit, bool):
            raise TypeError("Expected argument 'allow_hub_to_remote_vnet_transit' to be a bool")
        pulumi.set(__self__, "allow_hub_to_remote_vnet_transit", allow_hub_to_remote_vnet_transit)
        if allow_remote_vnet_to_use_hub_vnet_gateways and not isinstance(allow_remote_vnet_to_use_hub_vnet_gateways, bool):
            raise TypeError("Expected argument 'allow_remote_vnet_to_use_hub_vnet_gateways' to be a bool")
        pulumi.set(__self__, "allow_remote_vnet_to_use_hub_vnet_gateways", allow_remote_vnet_to_use_hub_vnet_gateways)
        if enable_internet_security and not isinstance(enable_internet_security, bool):
            raise TypeError("Expected argument 'enable_internet_security' to be a bool")
        pulumi.set(__self__, "enable_internet_security", enable_internet_security)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if remote_virtual_network and not isinstance(remote_virtual_network, dict):
            raise TypeError("Expected argument 'remote_virtual_network' to be a dict")
        pulumi.set(__self__, "remote_virtual_network", remote_virtual_network)
        if routing_configuration and not isinstance(routing_configuration, dict):
            raise TypeError("Expected argument 'routing_configuration' to be a dict")
        pulumi.set(__self__, "routing_configuration", routing_configuration)
    @property
    @pulumi.getter(name="allowHubToRemoteVnetTransit")
    def allow_hub_to_remote_vnet_transit(self) -> Optional[bool]:
        """
        Deprecated: VirtualHub to RemoteVnet transit to enabled or not.
        """
        return pulumi.get(self, "allow_hub_to_remote_vnet_transit")
    @property
    @pulumi.getter(name="allowRemoteVnetToUseHubVnetGateways")
    def allow_remote_vnet_to_use_hub_vnet_gateways(self) -> Optional[bool]:
        """
        Deprecated: Allow RemoteVnet to use Virtual Hub's gateways.
        """
        return pulumi.get(self, "allow_remote_vnet_to_use_hub_vnet_gateways")
    @property
    @pulumi.getter(name="enableInternetSecurity")
    def enable_internet_security(self) -> Optional[bool]:
        """
        Enable internet security.
        """
        return pulumi.get(self, "enable_internet_security")
    @property
    @pulumi.getter
    def etag(self) -> str:
        """
        A unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        The name of the resource that is unique within a resource group. This name can be used to access the resource.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The provisioning state of the hub virtual network connection resource.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(name="remoteVirtualNetwork")
    def remote_virtual_network(self) -> Optional['outputs.SubResourceResponse']:
        """
        Reference to the remote virtual network.
        """
        return pulumi.get(self, "remote_virtual_network")
    @property
    @pulumi.getter(name="routingConfiguration")
    def routing_configuration(self) -> Optional['outputs.RoutingConfigurationResponse']:
        """
        The Routing Configuration indicating the associated and propagated route tables on this connection.
        """
        return pulumi.get(self, "routing_configuration")
class AwaitableGetHubVirtualNetworkConnectionResult(GetHubVirtualNetworkConnectionResult):
    """Awaitable wrapper: allows the result to be used with ``await`` while
    resolving immediately to a plain GetHubVirtualNetworkConnectionResult."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this a generator function, which is
        # what __await__ must return; no actual suspension happens.
        if False:
            yield self
        return GetHubVirtualNetworkConnectionResult(
            allow_hub_to_remote_vnet_transit=self.allow_hub_to_remote_vnet_transit,
            allow_remote_vnet_to_use_hub_vnet_gateways=self.allow_remote_vnet_to_use_hub_vnet_gateways,
            enable_internet_security=self.enable_internet_security,
            etag=self.etag,
            id=self.id,
            name=self.name,
            provisioning_state=self.provisioning_state,
            remote_virtual_network=self.remote_virtual_network,
            routing_configuration=self.routing_configuration)
def get_hub_virtual_network_connection(connection_name: Optional[str] = None,
                                       resource_group_name: Optional[str] = None,
                                       virtual_hub_name: Optional[str] = None,
                                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetHubVirtualNetworkConnectionResult:
    """
    HubVirtualNetworkConnection Resource.
    API Version: 2020-11-01.

    :param str connection_name: The name of the vpn connection.
    :param str resource_group_name: The resource group name of the VirtualHub.
    :param str virtual_hub_name: The name of the VirtualHub.
    :param opts: options controlling the invoke (a default-version copy is
        created when omitted).
    :return: an awaitable result describing the hub virtual network connection.
    """
    # Build the provider-facing argument dict (camelCase keys).
    __args__ = dict()
    __args__['connectionName'] = connection_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['virtualHubName'] = virtual_hub_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronously invoke the provider function and unwrap its value.
    __ret__ = pulumi.runtime.invoke('azure-native:network:getHubVirtualNetworkConnection', __args__, opts=opts, typ=GetHubVirtualNetworkConnectionResult).value
    return AwaitableGetHubVirtualNetworkConnectionResult(
        allow_hub_to_remote_vnet_transit=__ret__.allow_hub_to_remote_vnet_transit,
        allow_remote_vnet_to_use_hub_vnet_gateways=__ret__.allow_remote_vnet_to_use_hub_vnet_gateways,
        enable_internet_security=__ret__.enable_internet_security,
        etag=__ret__.etag,
        id=__ret__.id,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        remote_virtual_network=__ret__.remote_virtual_network,
        routing_configuration=__ret__.routing_configuration)
|
[
"noreply@github.com"
] |
MisinformedDNA.noreply@github.com
|
e78649e5c08756d7f0bd2c588219d7302dd0f4e2
|
df716b2868b289a7e264f8d2b0ded52fff38d7fc
|
/plaso/parsers/sqlite_plugins/safari.py
|
22bb6098cb41d93825e091cb3aafa5f5caee31fd
|
[
"Apache-2.0"
] |
permissive
|
ir4n6/plaso
|
7dd3cebb92de53cc4866ae650d41c255027cf80a
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
refs/heads/master
| 2021-04-25T05:50:45.963652
| 2018-03-08T15:11:58
| 2018-03-08T15:11:58
| 122,255,666
| 0
| 0
|
Apache-2.0
| 2018-02-20T21:00:50
| 2018-02-20T21:00:50
| null |
UTF-8
|
Python
| false
| false
| 5,409
|
py
|
# -*- coding: utf-8 -*-
"""Parser for the Safari History files.
The Safari History is stored in SQLite database files named History.db
"""
from __future__ import unicode_literals
from dfdatetime import cocoa_time as dfdatetime_cocoa_time
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import sqlite
from plaso.parsers.sqlite_plugins import interface
class SafariHistoryPageVisitedEventData(events.EventData):
  """Safari history event data.
  Attributes:
    title (str): title of the webpage visited.
    url (str): URL visited.
    host(str): hostname of the server.
    visit_count (int): number of times the website was visited.
    was_http_non_get (bool): True if the webpage was visited using a
        non-GET HTTP request.
    visit_redirect_source: source of the redirect, if any (not populated by
        the parser in this file -- presumably set elsewhere; verify).
  """
  DATA_TYPE = 'safari:history:visit_sqlite'
  def __init__(self):
    """Initializes event data."""
    super(SafariHistoryPageVisitedEventData,
          self).__init__(data_type=self.DATA_TYPE)
    self.title = None
    self.url = None
    self.visit_count = None
    self.host = None
    self.was_http_non_get = None
    self.visit_redirect_source = None
class SafariHistoryPluginSqlite(interface.SQLitePlugin):
  """Parse Safari History Files.
  Safari history file is stored in a SQLite database file named History.db
  """
  NAME = 'safari_history'
  DESCRIPTION = 'Parser for Safari history SQLite database files.'
  # Joins history_items (one row per URL) with history_visits (one row per
  # visit), ordered chronologically; each result row is handed to
  # ParsePageVisitRow.
  QUERIES = [
      (('SELECT history_items.id, history_items.url, history_items.visit'
        '_count, history_visits.id AS visit_id, history_visits.history_item,'
        'history_visits.visit_time, history_visits.redirect_destination, '
        'history_visits.title, history_visits.http_non_get, '
        'history_visits.redirect_source '
        'FROM history_items, history_visits '
        'WHERE history_items.id = history_visits.history_item '
        'ORDER BY history_visits.visit_time'), 'ParsePageVisitRow')
  ]
  REQUIRED_TABLES = frozenset(['history_items', 'history_visits'])
  # Known History.db schema, used by the SQLite plugin interface to match
  # the database against this plugin.
  SCHEMAS = [{
      'history_client_versions': (
          'CREATE TABLE history_client_versions (client_version INTEGER '
          'PRIMARY KEY,last_seen REAL NOT NULL)'),
      'history_event_listeners': (
          'CREATE TABLE history_event_listeners (listener_name TEXT PRIMARY '
          'KEY NOT NULL UNIQUE,last_seen REAL NOT NULL)'),
      'history_events': (
          'CREATE TABLE history_events (id INTEGER PRIMARY KEY '
          'AUTOINCREMENT,event_type TEXT NOT NULL,event_time REAL NOT '
          'NULL,pending_listeners TEXT NOT NULL,value BLOB)'),
      'history_items': (
          'CREATE TABLE history_items (id INTEGER PRIMARY KEY '
          'AUTOINCREMENT,url TEXT NOT NULL UNIQUE,domain_expansion TEXT '
          'NULL,visit_count INTEGER NOT NULL,daily_visit_counts BLOB NOT '
          'NULL,weekly_visit_counts BLOB NULL,autocomplete_triggers BLOB '
          'NULL,should_recompute_derived_visit_counts INTEGER NOT '
          'NULL,visit_count_score INTEGER NOT NULL)'),
      'history_tombstones': (
          'CREATE TABLE history_tombstones (id INTEGER PRIMARY KEY '
          'AUTOINCREMENT,start_time REAL NOT NULL,end_time REAL NOT NULL,url '
          'TEXT,generation INTEGER NOT NULL DEFAULT 0)'),
      'history_visits': (
          'CREATE TABLE history_visits (id INTEGER PRIMARY KEY '
          'AUTOINCREMENT,history_item INTEGER NOT NULL REFERENCES '
          'history_items(id) ON DELETE CASCADE,visit_time REAL NOT NULL,title '
          'TEXT NULL,load_successful BOOLEAN NOT NULL DEFAULT 1,http_non_get '
          'BOOLEAN NOT NULL DEFAULT 0,synthesized BOOLEAN NOT NULL DEFAULT '
          '0,redirect_source INTEGER NULL UNIQUE REFERENCES '
          'history_visits(id) ON DELETE CASCADE,redirect_destination INTEGER '
          'NULL UNIQUE REFERENCES history_visits(id) ON DELETE CASCADE,origin '
          'INTEGER NOT NULL DEFAULT 0,generation INTEGER NOT NULL DEFAULT '
          '0,attributes INTEGER NOT NULL DEFAULT 0,score INTEGER NOT NULL '
          'DEFAULT 0)'),
      'metadata': (
          'CREATE TABLE metadata (key TEXT NOT NULL UNIQUE, value)')}]
  def ParsePageVisitRow(self, parser_mediator, query, row, **unused_kwargs):
    """Parses a visited row.
    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row.
    """
    query_hash = hash(query)
    was_http_non_get = self._GetRowValue(query_hash, row, 'http_non_get')
    event_data = SafariHistoryPageVisitedEventData()
    event_data.offset = self._GetRowValue(query_hash, row, 'id')
    event_data.query = query
    event_data.title = self._GetRowValue(query_hash, row, 'title')
    event_data.url = self._GetRowValue(query_hash, row, 'url')
    event_data.visit_count = self._GetRowValue(query_hash, row, 'visit_count')
    event_data.was_http_non_get = bool(was_http_non_get)
    # visit_time is stored as a Cocoa timestamp (seconds since 2001-01-01).
    timestamp = self._GetRowValue(query_hash, row, 'visit_time')
    date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)
|
[
"onager@deerpie.com"
] |
onager@deerpie.com
|
04c13e2c86dc8ea3c26a9f4c6c9286fc9ec47a0e
|
7eb3009e95a15a992c0c21afe0884008ba10544d
|
/game/src/leveleditor/objectproperties/ColorEditor.py
|
6262d9c64458825c0e02be2bb60267cca144a4fc
|
[] |
no_license
|
tsp-team/ttsp-src
|
be391ebc44f01463ff2e802ab039438e07a645f3
|
9bf1869adbc4f0c1dff69095c04f4604a515c4e4
|
refs/heads/master
| 2022-12-04T09:50:36.944988
| 2020-08-23T21:01:32
| 2020-08-23T21:01:32
| 263,228,539
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,649
|
py
|
from .BaseEditor import BaseEditor
from src.leveleditor import LEUtils
from PyQt5 import QtWidgets, QtCore
class ColorEditor(BaseEditor):
    """Property editor for "R G B A" color strings: a line edit for manual
    entry, a swatch label previewing the color, and a color-picker button."""
    def __init__(self, parent, item, model):
        BaseEditor.__init__(self, parent, item, model)
        self.lineEdit = QtWidgets.QLineEdit("", self)
        self.lineEdit.returnPressed.connect(self.__confirmColorText)
        self.layout().addWidget(self.lineEdit)
        self.colorLbl = QtWidgets.QLabel("", self)
        self.colorLbl.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
        self.layout().addWidget(self.colorLbl)
        self.editButton = QtWidgets.QPushButton("Pick Color", self)
        self.editButton.clicked.connect(self.__pickColor)
        self.layout().addWidget(self.editButton)
        self.colorDlg = None
        # Initialize the swatch and text from the item's current value.
        self.adjustToColor(LEUtils.strToQColor(self.getItemData()))
    def __confirmColorText(self):
        # Enter pressed in the line edit: push text to the model, then
        # refresh the swatch from the re-parsed text.
        self.setModelData(self.model, self.item.index())
        self.adjustToColor(LEUtils.strToQColor(self.lineEdit.text()))
    def __pickColor(self):
        # Remember the current color so Cancel can revert the live preview.
        self.origColor = LEUtils.strToQColor(self.getItemData())
        color = LEUtils.strToQColor(self.getItemData())
        colorDlg = QtWidgets.QColorDialog(color, self)
        colorDlg.setOptions(QtWidgets.QColorDialog.DontUseNativeDialog)
        colorDlg.setModal(True)
        colorDlg.currentColorChanged.connect(self.adjustToColorAndSetData)
        colorDlg.finished.connect(self.__colorDlgFinished)
        colorDlg.open()
        # Re-apply the starting color with signals blocked so opening the
        # dialog does not immediately fire a currentColorChanged update.
        colorDlg.blockSignals(True)
        colorDlg.setCurrentColor(color)
        colorDlg.blockSignals(False)
        self.colorDlg = colorDlg
    def __colorDlgFinished(self, ret):
        # Dialog closed: commit the chosen color on OK, revert on Cancel.
        if ret:
            color = self.colorDlg.currentColor()
            self.adjustToColorAndSetData(color)
        else:
            self.adjustToColorAndSetData(self.origColor)
        self.colorDlg = None
    def adjustToColorAndSetData(self, color):
        # Live-preview helper: update the widgets and write through to the
        # model; invalid colors (e.g. from a half-typed value) are ignored.
        if not color.isValid():
            return
        self.adjustToColor(color)
        self.setModelData(self.model, self.item.index())
    def adjustToColor(self, color):
        # Update the swatch and the text, preserving the existing alpha
        # component (the QColorDialog only edits RGB here).
        self.colorLbl.setStyleSheet("border: 1px solid black; background-color: rgb(%i, %i, %i);" % (color.red(), color.green(), color.blue()))
        vals = self.getItemData().split(' ')
        alpha = vals[3]
        self.lineEdit.setText("%i %i %i %s" % (color.red(), color.green(), color.blue(), alpha))
    def setEditorData(self, index):
        self.lineEdit.setText(self.getItemData())
    def setModelData(self, model, index):
        model.setData(index, self.lineEdit.text(), QtCore.Qt.EditRole)
|
[
"brianlach72@gmail.com"
] |
brianlach72@gmail.com
|
2ba8af9a37d1b2976828cea090385da648a31a6a
|
45cb74f15ebf96b431e5689e554fcdc42062ee08
|
/4-magnet_particles/solution.py
|
2c864f0e918fe8e2abfb2ca11901b3067432a85e
|
[] |
no_license
|
acu192/codewars
|
d296bc95fd067f0059045494fc445f62f95c060a
|
905c5397461976335dbcf6a5bb0ffb6b359a29c0
|
refs/heads/master
| 2021-01-23T03:52:58.009582
| 2017-08-04T05:03:06
| 2017-08-04T05:03:06
| 86,128,943
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 746
|
py
|
"""
https://www.codewars.com/kata/magnet-particules-in-boxes
"""
from math import pow
def doubles(maxk, maxn):
    """Return sum over k=1..maxk of (1/k) * sum over n=1..maxn of 1/(n+1)^(2k)."""
    total = 0.0
    for k in range(1, maxk + 1):
        inner = sum(pow(n + 1, -2 * k) for n in range(1, maxn + 1))
        total += inner / k
    return total
def assertFuzzyEquals(actual, expected, msg=""):
    """Compare two floats with an absolute tolerance of 1e-6.

    Returns True when *actual* is within 1e-6 of *expected*; otherwise
    returns a formatted error-message string. The *msg* parameter is
    ignored (always overwritten) but kept for backward compatibility.
    """
    merr = 1e-6
    # Idiomatic guard instead of the old `if (inrange == False):` test.
    if abs(actual - expected) > merr:
        msg = "At 1e-6: Expected value must be {:0.6f} but got {:0.6f}"
        return msg.format(expected, actual)
    return True
# print(x) with a single argument is valid in both Python 2 (parenthesized
# expression) and Python 3 (function call), so these checks now run
# unchanged under either interpreter instead of being Python-2 only.
print(assertFuzzyEquals(doubles(1, 10), 0.5580321939764581))
print(assertFuzzyEquals(doubles(10, 1000), 0.6921486500921933))
print(assertFuzzyEquals(doubles(10, 10000), 0.6930471674194457))
print(assertFuzzyEquals(doubles(20, 10000), 0.6930471955575918))
|
[
"ryan@rhobota.com"
] |
ryan@rhobota.com
|
057418d9203386866b6b7fbc6ffe76f306489dcc
|
bddc40a97f92fafb8cbbbfdbdfe6774996578bb0
|
/exercicioLista_funcoes/ex12.py
|
ee26987f60bcef8d32b6fc9a3cf3d93898187be6
|
[] |
no_license
|
andrehmiguel/treinamento
|
8f83041bd51387dd3e5cafed09c4bb0a08d0e375
|
ed18e6a8cfba0baaa68757c12893c62a0938a67e
|
refs/heads/main
| 2023-01-31T13:15:58.113392
| 2020-12-16T02:47:44
| 2020-12-16T02:47:44
| 317,631,214
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 680
|
py
|
# 12. Embaralha palavra . Construa uma função que receba uma string como parâmetro e
# devolva outra string com os carateres embaralhados. Por exemplo: se função
# receber a palavra python , pode retornar npthyo , ophtyn ou qualquer outra
# combinação possível, de forma aleatória. Padronize em sua função que todos os
# caracteres serão devolvidos em caixa alta ou caixa baixa, independentemente de
# como foram digitados.
from random import shuffle
def embaralha(palavra):
    """Shuffle the characters of *palavra* and print the result in upper case.

    The exercise statement asks the function to *return* the shuffled
    string; it now does so (in addition to printing, which is kept for
    backward compatibility) so callers can use the value directly.
    """
    letras = list(palavra)
    shuffle(letras)
    resultado = ''.join(letras).upper()
    print(resultado)
    return resultado
# Interactive driver: read a word/phrase from stdin and show it shuffled.
palavra = input('Insira uma palavra ou frase para embaralhar: ').strip()
embaralha(palavra)
|
[
"andrehmiguel@outlook.com"
] |
andrehmiguel@outlook.com
|
f44bca1b4446469132b9c580b3d500987df2806b
|
0902ddd4a455c10c2c7dedac872069b8223e7250
|
/ppomppu_scraper/__init__.py
|
0701f16db84fd59186ef86e4a4e6ae60f5c5b01e
|
[] |
no_license
|
kyuhwas/ppomppu_scraper
|
f77f8561b6eb4a4aab18f3e0da32da230ebdc7b9
|
a4e24b969898c164d7ef33f034495b50c097e94d
|
refs/heads/master
| 2021-10-19T00:33:14.382761
| 2019-02-16T00:04:49
| 2019-02-16T00:04:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 260
|
py
|
from .utils import now
from .utils import get_soup
from .utils import normalize_text
from .utils import strf_to_datetime
from .utils import news_dateformat
from .utils import user_dateformat
from .parser import parse_page
from .scraper import yield_parsed_page
|
[
"soy.lovit@gmail.com"
] |
soy.lovit@gmail.com
|
fe8c1da06cb5220b0e5ee515224cc1101de51d57
|
6be8aa517e679b33b47d35f100e6590902a8a1db
|
/DP/Problem54.py
|
72cbb8c1c999c705d1e1d21afdf23d8dfda03060
|
[] |
no_license
|
LeeJuhae/Algorithm-Python
|
7ca4762712e5e84d1e277abecb3bf39c9cbd4e56
|
729947b4428205adfbac194a5527b0eeafe1c525
|
refs/heads/master
| 2023-04-24T01:02:36.430970
| 2021-05-23T07:17:25
| 2021-05-23T07:17:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 540
|
py
|
# https://www.acmicpc.net/problem/17182
import sys
from itertools import permutations
read = sys.stdin.readline
# n = number of cities, st = start city; dp holds the n x n distance matrix.
n, st = map(int, read().strip().split())
dp = [list(map(int, read().strip().split())) for _ in range(n)]
# Floyd-Warshall all-pairs shortest paths (k as the outermost loop).
for k in range(n):
    for i in range(n):
        for j in range(n):
            dp[i][j] = min(dp[i][j], dp[i][k] + dp[k][j])
ans = float('inf')
# Brute force: try every visiting order of the n cities, travelling along
# shortest paths between consecutive stops, starting from st.
for cites in permutations(range(n), n):
    prev = st
    tmp = 0
    for city in cites:
        tmp += dp[prev][city]
        prev = city
    ans = min(ans, tmp)
print(ans)
|
[
"gusdn0657@gmail.com"
] |
gusdn0657@gmail.com
|
9d7fa1949f2329fb360cf30a14031fc756ee8814
|
83f0cdbc9e1f7261dcd1ff5fc0c8ef4280e84fbb
|
/ADaM/python/cdisc_library.py
|
8437f8e380df5cc47b45fd6272dc69f18a942760
|
[
"MIT"
] |
permissive
|
mihir-shinde/CSS2020-Hackathon
|
0c39d59ddb1503f0c4170b230f789b8f29fee9ae
|
f9538ee425fe7eb0573757cdd2346d1f8c7998c1
|
refs/heads/master
| 2023-03-16T05:06:26.518324
| 2020-09-25T16:20:12
| 2020-09-25T16:20:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,958
|
py
|
import requests
class CDISCConnector:
BASE_URL = "https://library.cdisc.org/api/"
def __init__(self, username, password):
self._client = None
self._username = username
self._password = password
self._cache = {}
@property
def client(self):
if self._client is None:
session = requests.Session()
session.auth = (self._username, self._password)
session.headers
self._client = session
return self._client
    def flush(self):
        """Discard every cached response; subsequent _get() calls refetch."""
        self._cache = {}
def _get(self, path):
url = self.BASE_URL + path
if url not in self._cache:
response = self.client.get(url)
if response.status_code == 200:
self._cache[url] = response.json()
return self._cache.get(url, {})
@property
def products(self):
return self.get_products()
def get_products(self):
contents = self._get("mdr/products")
specs = {}
if contents:
for aspect, asp_def in contents.get("_links").items():
if aspect == "self":
continue
for spec, spec_def in asp_def.get("_links").items():
if spec == "self":
continue
# Assumption
href = spec_def[0].get('href')
specs[spec] = href
return specs
def adam(self, version="1-1"):
"""
Get the ADaM Specifications
"""
path = f"mdr/adam/adamig-{version}"
response = self._get(path)
if not response.status_code == 200:
if response.status_code == 401:
print("Authentication not recognised")
return {}
elif response.status_code == 404:
print("Standard or Dataset not found")
return {}
return response.json()
def adam_dataset(self, dataset, version="1-1"):
"""
Get the ADaM Dataset Specifications
"""
path = f"mdr/adam/adamig-{version}/{dataset}"
response = self._get(path)
if not response.status_code == 200:
if response.status_code == 401:
print("Authentication not recognised")
return {}
elif response.status_code == 404:
print("Standard or Dataset not found")
return {}
return response.json()
def adam_var(self, dataset, variable, version="1-1"):
"""
Get the ADaM Dataset variable Specifications
"""
path = f"mdr/adam/adamig-{version}/datastructures/{dataset}/variables/{variable}"
response = self._get(path)
if not response.status_code == 200:
if response.status_code == 401:
print("Authentication not recognised")
return {}
elif response.status_code == 404:
print("Standard or Dataset not found")
return {}
return response.json()
def sdtm(self, version="3-3"):
"""
Get the SDTM Specifications
"""
response = self._get(f"mdr/sdtmig/{version}")
if not response.status_code == 200:
if response.status_code == 401:
print("Authentication not recognised")
return {}
elif response.status_code == 404:
print("Standard or Dataset not found")
return {}
return response.json()
def sdtm_dataset(self, dataset, version="3-3"):
"""
Get the SDTM Dataset Specifications
"""
response = self._get(f"mdr/sdtmig/{version}/datasets/{dataset}")
if not response.status_code == 200:
if response.status_code == 401:
print("Authentication not recognised")
return {}
elif response.status_code == 404:
print("Standard or Dataset not found")
return {}
return response.json()
def sdtm_variable(self, dataset, variable, version="3-3"):
"""
Get the SDTM Specifications
"""
response = self._get(f"mdr/sdtmig/{version}/datasets/{dataset}/variables/{variable}")
if not response.status_code == 200:
if response.status_code == 401:
print("Authentication not recognised")
return {}
elif response.status_code == 404:
print("Standard or Dataset not found")
return {}
return response.json()
def get_terminology_by_name(self, name, parent):
"""
Given the username for the Codelist find the
"""
pass
def terminology_set(self, name, parent="sdtm"):
"""
Get the codelist
"""
|
[
"glow@mdsol.com"
] |
glow@mdsol.com
|
cc2d67c10951e85ac38fb33a2a8857e71a6610fd
|
1c67732a24042a991cc9f7e764d4640522391972
|
/back/gamedata/admin.py
|
d0e2b17e7528b7c6c839144c30b720f95932f249
|
[] |
no_license
|
sungguenja/bsgg
|
1061ccc6f5f08ed9ad14d3a332af020ec7a5df22
|
447283378ac3bb8f489e2a4662bfb6513bc37be2
|
refs/heads/master
| 2023-04-01T14:15:05.491775
| 2021-04-06T09:46:25
| 2021-04-06T09:46:25
| 318,800,558
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 308
|
py
|
from django.contrib import admin
from .models import Area, Animal, Item, AreaItem, AreaAnimal, AnimalItem
# Register your models here.
admin.site.register(Area)
admin.site.register(Animal)
admin.site.register(Item)
admin.site.register(AreaItem)
admin.site.register(AreaAnimal)
admin.site.register(AnimalItem)
|
[
"59605197+sungguenja@users.noreply.github.com"
] |
59605197+sungguenja@users.noreply.github.com
|
0315172cd8f2f418b8753f197edeb6c03507474d
|
ac0b9c85542e6d1ef59c5e9df4618ddf22223ae0
|
/kratos/applications/FluidDynamicsApplication/python_scripts/apply_custom_velocity_constraints.py
|
22b0262260595debdf02adca990f94e5f573eb8c
|
[] |
no_license
|
UPC-EnricBonet/trunk
|
30cb6fbd717c1e78d95ec66bc0f6df1a041b2b72
|
1cecfe201c8c9a1b87b2d87faf8e505b7b1f772d
|
refs/heads/master
| 2021-06-04T05:10:06.060945
| 2016-07-15T15:29:00
| 2016-07-15T15:29:00
| 33,677,051
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,124
|
py
|
from KratosMultiphysics import *
from FluidDynamicsApplication import *
def Factory(settings, Model):
if(type(settings) != Parameters):
raise Exception("expected input shall be a Parameters object, encapsulating a json string")
return ApplyCustomVelocityConstraintProcess(Model, settings["Parameters"])
##all the processes python processes should be derived from "python_process"
class ApplyCustomVelocityConstraintProcess(Process):
def __init__(self, Model, settings ):
Process.__init__(self)
model_part = Model[settings["model_part_name"].GetString()]
if settings["is_fixed_x"].GetBool() == True:
# Auxiliar x-component parameters creation
x_params = Parameters("{}")
x_params.AddValue("model_part_name",settings["model_part_name"])
x_params.AddValue("mesh_id",settings["mesh_id"])
x_params.AddValue("is_fixed",settings["is_fixed_x"])
x_params.AddValue("value",settings["value"][0])
x_params.AddEmptyValue("variable_name").SetString("VELOCITY_X")
self.x_component_process = ApplyConstantScalarValueProcess(model_part, x_params)
if settings["is_fixed_y"].GetBool() == True:
# Auxiliar y-component parameters creation
y_params = Parameters("{}")
y_params.AddValue("model_part_name",settings["model_part_name"])
y_params.AddValue("mesh_id",settings["mesh_id"])
y_params.AddValue("is_fixed",settings["is_fixed_y"])
y_params.AddValue("value",settings["value"][1])
y_params.AddEmptyValue("variable_name").SetString("VELOCITY_Y")
self.y_component_process = ApplyConstantScalarValueProcess(model_part, y_params)
if settings["is_fixed_z"].GetBool() == True:
# Auxiliar x-component parameters creation
z_params = Parameters("{}")
z_params.AddValue("model_part_name",settings["model_part_name"])
z_params.AddValue("mesh_id",settings["mesh_id"])
z_params.AddValue("is_fixed",settings["is_fixed_z"])
z_params.AddValue("value",settings["value"][2])
z_params.AddEmptyValue("variable_name").SetString("VELOCITY_Z")
self.z_component_process = ApplyConstantScalarValueProcess(model_part, z_params)
# Auxiliar vector with the fixicity settings
self.fixicity_vec = [settings["is_fixed_x"].GetBool(),
settings["is_fixed_y"].GetBool(),
settings["is_fixed_z"].GetBool()]
def ExecuteInitialize(self):
if self.fixicity_vec[0] == True:
self.x_component_process.ExecuteInitialize()
if self.fixicity_vec[1] == True:
self.y_component_process.ExecuteInitialize()
if self.fixicity_vec[2] == True:
self.z_component_process.ExecuteInitialize()
|
[
"enriquebonetgil@hotmail.com"
] |
enriquebonetgil@hotmail.com
|
8251ffe046d39813fb96ab3eda7aaf564efa9dde
|
21155deb4419380b995c09946a680a261c524b5b
|
/meraki/models/subnet_model.py
|
f08d566a104c927c12dbea3f8f178de10ea8c155
|
[
"MIT"
] |
permissive
|
dexterlabora/meraki-python-sdk
|
620efab5e6b6eb32ca52308be1cb740748fc0f30
|
f6e6d61bd8694548169cd872b0642def69115bcb
|
refs/heads/master
| 2023-05-25T06:50:21.845198
| 2019-06-13T12:22:34
| 2019-06-13T12:22:34
| 182,791,973
| 0
| 1
|
NOASSERTION
| 2023-05-22T21:37:22
| 2019-04-22T13:22:08
|
Python
|
UTF-8
|
Python
| false
| false
| 1,690
|
py
|
# -*- coding: utf-8 -*-
"""
meraki
This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
"""
class SubnetModel(object):
"""Implementation of the 'Subnet' model.
TODO: type model description here.
Attributes:
local_subnet (string): The CIDR notation subnet used within the VPN
use_vpn (bool): Indicates the presence of the subnet in the VPN
"""
# Create a mapping from Model property names to API property names
_names = {
"local_subnet":'localSubnet',
"use_vpn":'useVpn'
}
def __init__(self,
local_subnet=None,
use_vpn=None):
"""Constructor for the SubnetModel class"""
# Initialize members of the class
self.local_subnet = local_subnet
self.use_vpn = use_vpn
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
local_subnet = dictionary.get('localSubnet')
use_vpn = dictionary.get('useVpn')
# Return an object of this model
return cls(local_subnet,
use_vpn)
|
[
"git@apimatic.io"
] |
git@apimatic.io
|
7eede2990f6e638af015bc568bd54608b7a9581e
|
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
|
/events_write_1/event-bu_delete.py
|
1d62d387f9b2e00234829669c850e7bdd2a0f3aa
|
[] |
no_license
|
lxtxl/aws_cli
|
c31fc994c9a4296d6bac851e680d5adbf7e93481
|
aaf35df1b7509abf5601d3f09ff1fece482facda
|
refs/heads/master
| 2023-02-06T09:00:33.088379
| 2020-12-27T13:38:45
| 2020-12-27T13:38:45
| 318,686,394
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,042
|
py
|
#!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_one_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/events/delete-event-bus.html
if __name__ == '__main__':
"""
create-event-bus : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/events/create-event-bus.html
describe-event-bus : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/events/describe-event-bus.html
"""
parameter_display_string = """
# name : The name of the event bus to delete.
"""
add_option_dict = {}
#######################################################################
# parameter display string
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_one_parameter("events", "delete-event-bus", "name", add_option_dict)
|
[
"hcseo77@gmail.com"
] |
hcseo77@gmail.com
|
67a4431f2cf41a56085422a65fa040772f0312e1
|
5edbc16216806de0c32634fae1ae67c4773fbf65
|
/wiki/migrations/0002_auto_20160820_2351.py
|
8878c2f9679bb51843d6d084ebf7537e0c527bb0
|
[] |
no_license
|
MilesWilliams/klaritywiki
|
431d9139309c2997aeaeeb02afce9b4da43cff8d
|
197c0f9c4094a64e437eb2a51b531747c262290b
|
refs/heads/master
| 2020-12-02T20:44:30.703329
| 2016-08-22T12:10:48
| 2016-08-22T12:10:48
| 66,269,030
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 385
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-20 21:51
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wiki', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='Category',
new_name='Categories',
),
]
|
[
"miles@klarity.co.za"
] |
miles@klarity.co.za
|
ebec3629eb42d836bab2a456034eb71b975018dd
|
bad62c2b0dfad33197db55b44efeec0bab405634
|
/sdk/workloads/azure-mgmt-workloads/setup.py
|
98da49aa95a0f0e5fcc66f523e70cc2345923cf2
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
test-repo-billy/azure-sdk-for-python
|
20c5a2486456e02456de17515704cb064ff19833
|
cece86a8548cb5f575e5419864d631673be0a244
|
refs/heads/master
| 2022-10-25T02:28:39.022559
| 2022-10-18T06:05:46
| 2022-10-18T06:05:46
| 182,325,031
| 0
| 0
|
MIT
| 2019-07-25T22:28:52
| 2019-04-19T20:59:15
|
Python
|
UTF-8
|
Python
| false
| false
| 2,764
|
py
|
#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-workloads"
PACKAGE_PPRINT_NAME = "Workloads Management"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + changelog,
long_description_content_type='text/markdown',
license='MIT License',
author='Microsoft Corporation',
author_email='azpysdkhelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
keywords="azure, azure sdk", # update with search keywords relevant to the azure service / product
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.mgmt',
]),
include_package_data=True,
package_data={
'pytyped': ['py.typed'],
},
install_requires=[
'msrest>=0.6.21',
'azure-common~=1.1',
'azure-mgmt-core>=1.3.0,<2.0.0',
],
python_requires=">=3.6"
)
|
[
"noreply@github.com"
] |
test-repo-billy.noreply@github.com
|
8aa95b8aee556ee8fa7fb2ff5c965d5021d95fbd
|
60561fd3efd5ecd8f984c4767c8e1017f66dbfd0
|
/apps/unsubscribes/migrations/0002_unsubscribeemail_user.py
|
a5468036448b33392ed58db1295c00f26159ef47
|
[] |
no_license
|
kaushalaman97/react
|
fd3b691340ba877ace3b9feec0a93103b30f466f
|
4b34ace3357fbba0aa6616d761da2f501993bcc4
|
refs/heads/main
| 2023-03-08T16:33:48.675925
| 2021-02-26T14:23:38
| 2021-02-26T14:23:38
| 342,596,858
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 622
|
py
|
# Generated by Django 3.1.4 on 2021-02-24 08:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('unsubscribes', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='unsubscribeemail',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
[
"mohit.kaushal@techstriker.com"
] |
mohit.kaushal@techstriker.com
|
a60bfa7980001c986bed8b71d56e75e0c5b2a66e
|
1730f8cea72838a677b52fe82e72d91aa8f68f75
|
/003_queues/003_solutionCourseProvided.py
|
37326ef9b6a6f674a399d5971a030bad629104f7
|
[
"MIT"
] |
permissive
|
remichartier/026_UdacityTechnicalInterviewPrep
|
354097e25972a7214b8d1f84fcd3e80b69e79333
|
fa52b5f57bdd4e79751059971bb9f73fa0ca8004
|
refs/heads/main
| 2023-04-07T15:25:16.499791
| 2021-04-18T05:15:23
| 2021-04-18T05:15:23
| 354,467,066
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 327
|
py
|
# I managed to write all of the methods in one line!
class Queue(object):
def __init__(self, head=None):
self.storage = [head]
def enqueue(self, new_element):
self.storage.append(new_element)
def peek(self):
return self.storage[0]
def dequeue(self):
return self.storage.pop(0)
|
[
"remipr.chartier@gmail.com"
] |
remipr.chartier@gmail.com
|
f165e00d444f850aee54fecab36cf98b9209d337
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startQiskit_noisy2453.py
|
5211053a79a7324e34dd64e88d63b85985dd3c0e
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,238
|
py
|
# qubit number=4
# total number=42
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[3]) # number=39
prog.cz(input_qubit[0],input_qubit[3]) # number=40
prog.h(input_qubit[3]) # number=41
prog.cx(input_qubit[0],input_qubit[3]) # number=23
prog.cx(input_qubit[0],input_qubit[3]) # number=33
prog.x(input_qubit[3]) # number=34
prog.cx(input_qubit[0],input_qubit[3]) # number=35
prog.cx(input_qubit[0],input_qubit[3]) # number=25
prog.cx(input_qubit[0],input_qubit[3]) # number=12
prog.h(input_qubit[2]) # number=30
prog.cz(input_qubit[0],input_qubit[2]) # number=31
prog.h(input_qubit[2]) # number=32
prog.x(input_qubit[2]) # number=21
prog.h(input_qubit[2]) # number=36
prog.cz(input_qubit[0],input_qubit[2]) # number=37
prog.h(input_qubit[2]) # number=38
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.h(input_qubit[0]) # number=5
prog.h(input_qubit[3]) # number=16
prog.cz(input_qubit[1],input_qubit[3]) # number=17
prog.h(input_qubit[3]) # number=18
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.h(input_qubit[0]) # number=26
prog.cz(input_qubit[3],input_qubit[0]) # number=27
prog.h(input_qubit[0]) # number=28
prog.cx(input_qubit[3],input_qubit[0]) # number=14
prog.y(input_qubit[2]) # number=29
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = FakeVigo()
sample_shot =8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy2453.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
56369aec96a4cef7cf632a602fd07ffec540ec5f
|
ee3e8773f86da51e39fe1b1a57237ad558c0f991
|
/plotting/easy_plotting.py
|
ef5f64774999291358476bfc58818463ad0dfdd9
|
[] |
no_license
|
qyx268/plato
|
72cd9ca2a6d5e28cd1618433ebc6af21fd2161e7
|
b7c84c021bc26d63c768e9d08e28bbaf77d79a87
|
refs/heads/master
| 2021-01-15T21:07:56.182831
| 2016-04-15T12:33:21
| 2016-04-15T12:33:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,316
|
py
|
from collections import OrderedDict
from general.nested_structures import flatten_struct
from plotting.data_conversion import vector_length_to_tile_dims
import plotting.matplotlib_backend as eplt
import numpy as np
__author__ = 'peter'
def ezplot(anything, plots = None, hang = True, **plot_preference_kwargs):
"""
Make a plot of anything. Anything at all.
:param anything: Anything.
"""
data_dict = flatten_struct(anything)
figure, plots = plot_data_dict(data_dict, plots, mode = 'static', hang = hang, **plot_preference_kwargs)
return figure, plots
def plot_data_dict(data_dict, plots = None, mode = 'static', hang = True, figure = None, size = None, **plot_preference_kwargs):
"""
Make a plot of data in the format defined in data_dict
:param data_dict: dict<str: plottable_data>
:param plots: Optionally, a dict of <key: IPlot> identifying the plot objects to use (keys should
be the same as those in data_dict).
:return: The plots (same ones you provided if you provided them)
"""
assert mode in ('live', 'static')
if isinstance(data_dict, list):
assert all(len(d) == 2 for d in data_dict), "You can provide data as a list of 2 tuples of (plot_name, plot_data)"
data_dict = OrderedDict(data_dict)
if plots is None:
plots = {k: eplt.get_plot_from_data(v, mode = mode, **plot_preference_kwargs) for k, v in data_dict.iteritems()}
if figure is None:
if size is not None:
from pylab import rcParams
rcParams['figure.figsize'] = size
figure = eplt.figure()
n_rows, n_cols = vector_length_to_tile_dims(len(data_dict))
for i, (k, v) in enumerate(data_dict.iteritems()):
eplt.subplot(n_rows, n_cols, i+1)
plots[k].update(v)
eplt.title(k, fontdict = {'fontsize': 8})
oldhang = eplt.isinteractive()
eplt.interactive(not hang)
eplt.show()
eplt.interactive(oldhang)
return figure, plots
def funplot(func, xlims = None, n_points = 100):
"""
Plot a function
:param func:
:param xlims:
:param n_points:
:return:
"""
if xlims is None:
xlims = eplt.gca().get_xbound()
xs, xe = xlims
x = np.linspace(xs, xe, n_points)
eplt.plot(x, func(x))
eplt.gca().set_xbound(*xlims)
|
[
"peter.ed.oconnor@gmail.com"
] |
peter.ed.oconnor@gmail.com
|
29cc73d94435bfd91f4071297e290173c3e70a6f
|
86cc876d2b7cbc29d5c13a73d4d985079c73ed68
|
/thingflow/adapters/mqtt.py
|
fe0b20c00a3689ab9dac8f62fb3d9c69fce6d0b5
|
[
"Apache-2.0"
] |
permissive
|
masayoshi-louis/thingflow-python
|
74fe6f90a37803a27bd69eff9163f7fb668836b4
|
c191a8fedac6a962994945830c872e957f929e29
|
refs/heads/master
| 2020-03-26T08:13:58.334964
| 2017-08-08T03:59:09
| 2017-08-08T03:59:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,643
|
py
|
# Copyright 2016 by MPI-SWS and Data-Ken Research.
# Licensed under the Apache 2.0 License.
import time
from collections import namedtuple
try:
import paho.mqtt.client as paho
except ImportError:
print("could not import paho.mqtt.client")
import ssl
from thingflow.base import InputThing, OutputThing, EventLoopOutputThingMixin
MQTTEvent = namedtuple('MQTTEvent', ['timestamp', 'state', 'mid', 'topic', 'payload', 'qos', 'dup', 'retain' ])
import random
random.seed()
import datetime
class MockMQTTClient(object):
def __init__(self, client_id=""):
self.userdata = None
self.client_id = client_id
self.on_message = None
self.on_connect = None
self.on_publish = None
def connect(self, host, port=1883):
if self.on_connect:
self.on_connect(self, self.userdata, None, 0)
return 0
def subscribe(self, topics):
pass
def publish(self, topic, payload, qos, retain=False):
if self.on_publish:
self.on_publish(self, self.userdata, 0)
def username_pw_set(self, username, password=""):
pass
def loop(self, timeout=1.0, max_packets=1):
s = random.randint(1, max_packets)
for i in range(0, s):
msg = MQTTEvent(datetime.datetime.now(), 0, i, 'bogus/bogus', 'xxx', 0, False, False)
if self.on_message:
self.on_message(self, self.userdata, msg)
time.sleep(timeout)
return 0
def disconnect(self):
pass
class MQTTWriter(InputThing):
"""Subscribes to internal events and pushes them out to MQTT.
The topics parameter is a list of (topic, qos) pairs.
Events should be serialized before passing them to the writer.
"""
def __init__(self, host, port=1883, client_id="", client_username="", client_password=None, server_tls=False, server_cert=None, topics=[], mock_class=None):
self.host = host
self.port = port
self.client_id = client_id
self.client_username = client_id
self.client_password = client_password
self.topics = topics
self.server_tls = server_tls
self.server_cert = server_cert
if mock_class:
self.client = MockMQTTClient(self.client_id)
else:
self.client = paho.Client(self.client_id)
if self.client_username:
self.client.username_pw_set(self.client_username, password=self.client_password)
self._connect()
def _connect(self):
if self.server_tls:
raise Exception("TBD")
print(self.client.tls_set(self.server_tls.server_cert, cert_reqs=ssl.CERT_OPTIONAL))
print(self.client.connect(self.host, self.port))
else:
self.client.connect(self.host, self.port)
self.client.subscribe(self.topics)
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
self.client.on_connect = on_connect
def on_publish(client, userdata, mid):
print("Successfully published mid %d" % mid)
self.client.on_publish = on_publish
def on_next(self, msg):
"""Note that the message is passed directly to paho.mqtt.client. As such,
it must be a string, a bytearray, an int, a float or None. Usually, you would
use something like to_json (in thingflow.filters.json) to do the
serialization of events.
"""
# publish the message to the topics
retain = msg.retain if hasattr(msg, 'retain') else False
for (topic, qos) in self.topics:
self.client.publish(topic, msg, qos, retain)
def on_error(self, e):
self.client.disconnect()
def on_completed(self):
self.client.disconnect()
def __str__(self):
return 'MQTTWriter(%s)' % ', '.join([topic for (topic,qos) in self.topics])
class MQTTReader(OutputThing, EventLoopOutputThingMixin):
"""An reader that creates a stream from an MQTT broker. Initialize the
reader with a list of topics to subscribe to. The topics parameter
is a list of (topic, qos) pairs.
Pre-requisites: An MQTT broker (on host:port) --- tested with mosquitto
The paho.mqtt python client for mqtt (pip install paho-mqtt)
"""
def __init__(self, host, port=1883, client_id="", client_username="", client_password=None, server_tls=False, server_cert=None, topics=[], mock_class=None):
super().__init__()
self.stop_requested = False
self.host = host
self.port = port
self.client_id = client_id
self.client_username = client_id
self.client_password = client_password
self.topics = topics
self.server_tls = server_tls
self.server_cert = server_cert
if mock_class:
self.client = MockMQTTClient(self.client_id)
else:
self.client = paho.Client(self.client_id)
if self.client_username:
self.client.username_pw_set(self.client_username, password=self.client_password)
self._connect()
def on_message(client, userdata, msg):
m = MQTTEvent(msg.timestamp, msg.state, msg.mid, msg.topic, msg.payload, msg.qos, msg.dup, msg.retain)
self._dispatch_next(m)
self.client.on_message = on_message
def _connect(self):
if self.server_tls:
raise Exception("TBD")
print(self.client.tls_set(self.server_tls.server_cert, cert_reqs=ssl.CERT_OPTIONAL))
print(self.client.connect(self.host, self.port))
else:
self.client.connect(self.host, self.port)
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe(self.topics)
self.client.on_connect = on_connect
def _observe_event_loop(self):
print("starting event loop")
while True:
if self.stop_requested:
break
result = self.client.loop(1)
if result != 0:
self._connect()
self.stop_requested = False
self.client.disconnect()
print("Stopped private event loop")
def _stop_loop(self):
self.stop_requested = True
print("requesting stop")
def __str__(self):
return 'MQTTReader(%s)' % ', '.join([topic for (topic,qos) in self.topics])
|
[
"jeff@data-ken.org"
] |
jeff@data-ken.org
|
e06d2b176396a29ae9f62cab21aeb06a0c165897
|
e0980f704a573894350e285f66f4cf390837238e
|
/.history/news/models_20201124125405.py
|
ad4367cc96dbe5bc9bd177dc7020584d0a479ff6
|
[] |
no_license
|
rucpata/WagtailWebsite
|
28008474ec779d12ef43bceb61827168274a8b61
|
5aa44f51592f49c9a708fc5515ad877c6a29dfd9
|
refs/heads/main
| 2023-02-09T15:30:02.133415
| 2021-01-05T14:55:45
| 2021-01-05T14:55:45
| 303,961,094
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 221
|
py
|
from django.db import models
from wagtail.contrib.forms.models import AbstractEmailForm
# Create your models here.
class NewsPage(AbstractEmailForm):
tempalte ='news/news_page.html'
leanding_page_template = ''
|
[
"rucinska.patrycja@gmail.com"
] |
rucinska.patrycja@gmail.com
|
e7f9eb6f18a705e2446062b9a7609948f8193c95
|
46349356d4812a6bf04a1dff4ee3311864f8b7ff
|
/ma_py/mic_utils/estimate_gg_pdf_nm_fast/estimate_gg_pdf.py
|
494d1dd50b6c30a772ba4dcee8a2594e1c295ed2
|
[] |
no_license
|
alexdoberman/ma
|
1ca9d20f64d0e8c87feff9f7bb04d09d3088aeb3
|
219e5e87b80c6a795c0d4161b3ad22b9973ed745
|
refs/heads/master
| 2022-07-17T13:15:21.672335
| 2020-05-12T15:10:40
| 2020-05-12T15:10:40
| 263,365,873
| 12
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,807
|
py
|
# -*- coding: utf-8 -*-
import numpy as np
import soundfile as sf
import matplotlib.pyplot as plt
import glob
import math
from scipy import optimize
import scipy.stats as stats
def fun_ML_c(f, *args):
"""
Calc log likelihood for complex data
:param f: - shape
:param args:
:return:
"""
(scale, y) = args
K = y.shape[0]
B = math.gamma(1.0/f) / math.gamma(2.0/f)
p1 = K*(np.log(f) - np.log(np.pi * math.gamma(1.0/f) *B *scale))
p2 = np.sum(np.power( (np.abs(y)**2)/(B*scale), f))
R = p1 - p2
return - R
def estimate_shape_factor_c_(y, scale):
"""
Estimate shape factor for complex data
:param y: - complex array
:param scale:
:return:
"""
args = (scale, y)
minimum = optimize.brent(fun_ML_c, args=args, brack=(0.02, .3))
return minimum
def estimate_scale_c(y, shape_factor):
"""
Estimate scale for complex data
:param y:
:param shape_factor:
:return:
"""
K = y.shape[0]
B = math.gamma(1.0/shape_factor) / math.gamma(2.0/shape_factor)
scale = np.power( np.sum(np.power(np.abs(y), 2*shape_factor))*shape_factor/K, 1.0/shape_factor) / B
return scale
def estimate_gg_pdf_param_c(y, tol = 0.0000001):
"""
Estim GG pdf params for complex data
:param y:
:param tol:
:return:
"""
shape_factor_prev = 1
scale_prev = np.mean(np.power(np.abs(y), 2))
max_iter = 200
print ('scale_prev = {}'.format(scale_prev))
for _iter in range(max_iter):
shape_factor = estimate_shape_factor_c(y, scale_prev)
scale = estimate_scale_c(y, shape_factor)
print (" iter = {} shape = {} scale = {}".format(_iter, shape_factor, scale))
if (np.abs(scale - scale_prev) < tol and np.abs(shape_factor - shape_factor_prev) < tol):
return shape_factor, scale
scale_prev = scale
shape_factor_prev = shape_factor
print("Warning: estimate_gg_pdf_param_c - not convergent!")
return None, None
def main():
n_fft = 512
gg_params = []
for freq_bin in range(1, int(n_fft / 2)):
print('Process freq_ind = {}'.format(freq_bin))
path = "./out_bin/bin_{}.npy".format(freq_bin)
y = np.load(path)
f, scale = estimate_gg_pdf_param_c(y)
gg_params.append([freq_bin, f, scale])
np.save("gg_params_freq_f_scale", np.array(gg_params))
np.save("gg_params_freq_f_scale", np.array(gg_params))
def estimate_shape_factor_c(y, scale):
    """Estimate the shape parameter for complex data.

    Scans a coarse grid of candidate shapes to locate the rough minimum of
    the negative log-likelihood, then refines it with a Brent search
    bracketed around the grid minimum.

    :param y: complex sample array
    :param scale: current scale estimate
    :return: shape parameter minimizing :func:`fun_ML_c`
    """
    args = (scale, y)
    ff = np.linspace(0.02, 0.9, 200)
    L = np.array([fun_ML_c(f, *args) for f in ff])
    min_index = int(np.argmin(L))
    # BUG FIX: the original used np.min(min_index - 5, 0), which passes 0 as
    # the *axis* argument (a no-op on a scalar) rather than clamping — so the
    # left bracket index could go negative and wrap to the top of the grid,
    # and the right index could run past the end of the array.  Clamp both
    # into valid range instead.
    l_min = max(min_index - 5, 0)
    r_min = min(min_index + 5, len(ff) - 1)
    a = ff[l_min]
    b = ff[r_min]
    minimum = optimize.brent(fun_ML_c, args=args, brack=(a, b))
    return minimum
def debug_run():
    """Plot the negative log-likelihood over a grid of shape values for a
    single frequency bin, to eyeball where the minimum lies.

    Reads ``./out_bin/bin_1.npy`` and shows the curve interactively; the
    hard-coded scale value is a previously estimated one for that bin.
    """
    freq_bin = 1
    print('Process freq_ind = {}'.format(freq_bin))
    path = "./out_bin/bin_{}.npy".format(freq_bin)
    y = np.load(path)
    ff = np.linspace(0.02, 0.9, 200)
    args = (0.04692564477433535, y)
    L = np.array([fun_ML_c(i, *args) for i in ff])
    min_index = int(np.argmin(L))
    # BUG FIX: as in estimate_shape_factor_c, np.min(min_index - 5, 0)
    # passed 0 as the axis argument and never clamped; clamp the bracket
    # indices into the valid range instead.
    l_min = max(min_index - 5, 0)
    r_min = min(min_index + 5, len(ff) - 1)
    a = ff[l_min]
    b = ff[r_min]
    c = ff[min_index]
    print(l_min, min_index, r_min)
    print(a, c, b)
    plt.plot(ff, L, label="L")
    plt.legend(loc='best')
    plt.show()
if __name__ == '__main__':
    # Switch to debug_run() to inspect the likelihood curve for one bin.
    #debug_run()
    main()
|
[
"lavrentyev@speechpro.com"
] |
lavrentyev@speechpro.com
|
c70d686b8a66449aa75277ec024a414043f77dab
|
8b00e2b136636841b38eb182196e56f4721a1e4c
|
/trio/_util.py
|
121513b20e80d517c58bc5e6fb5c7f2255ca441a
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
xyicheng/trio
|
77c8c1e08e3aa4effe8cf04e879720ccfcdb7d33
|
fa091e2e91d196c2a57b122589a166949ea03103
|
refs/heads/master
| 2021-01-23T00:05:59.618483
| 2017-03-16T04:25:05
| 2017-03-16T04:25:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,494
|
py
|
import sys
from functools import wraps
import async_generator
# BUG FIX: the module defines aiter_compat (with an underscore), so the
# exported name must match or `from trio._util import *` raises.
__all__ = ["aiter_compat", "acontextmanager"]
# Decorator to handle the change to __aiter__ in 3.5.2
def aiter_compat(aiter_impl):
    """Adapt an ``__aiter__`` implementation to the pre-3.5.2 protocol.

    Before CPython 3.5.2, ``__aiter__`` was awaited by the interpreter and
    therefore had to be a coroutine returning the iterator; from 3.5.2 on
    it must return the iterator directly.  On modern interpreters the
    implementation is returned unchanged.
    """
    if sys.version_info >= (3, 5, 2):
        return aiter_impl

    @wraps(aiter_impl)
    async def __aiter__(*args, **kwargs):
        return aiter_impl(*args, **kwargs)
    return __aiter__
# Very much derived from the one in contextlib, by copy/pasting and then
# asyncifying everything.
# So this is a derivative work licensed under the PSF License, which requires
# the following notice:
#
# Copyright © 2001-2017 Python Software Foundation; All Rights Reserved
class _AsyncGeneratorContextManager:
    """Async context manager wrapping a single-yield async generator.

    ``__aenter__`` advances the generator to its (only) ``yield`` and
    returns the yielded value; ``__aexit__`` resumes it, either normally
    or by throwing the in-flight exception into it.  The exception
    bookkeeping mirrors contextlib._GeneratorContextManager.
    """

    def __init__(self, func, args, kwds):
        # Store the generator's iterator; on <3.5.2 __aiter__ is itself a
        # coroutine and still needs awaiting (done lazily in __aenter__).
        self._agen = func(*args, **kwds).__aiter__()

    async def __aenter__(self):
        if sys.version_info < (3, 5, 2):
            self._agen = await self._agen
        try:
            # Run up to the first yield and hand its value to the caller.
            return await self._agen.asend(None)
        except StopAsyncIteration:
            raise RuntimeError("async generator didn't yield") from None

    async def __aexit__(self, type, value, traceback):
        if type is None:
            # Normal exit: the generator must stop after its single yield.
            try:
                await self._agen.asend(None)
            except StopAsyncIteration:
                return
            else:
                raise RuntimeError("async generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                await self._agen.athrow(type, value, traceback)
                raise RuntimeError("async generator didn't stop after athrow()")
            except StopAsyncIteration as exc:
                # Suppress StopIteration *unless* it's the same exception that
                # was passed to throw(). This prevents a StopIteration
                # raised inside the "with" statement from being suppressed.
                return (exc is not value)
            except RuntimeError as exc:
                # Don't re-raise the passed in exception. (issue27112)
                if exc is value:
                    return False
                # Likewise, avoid suppressing if a StopIteration exception
                # was passed to throw() and later wrapped into a RuntimeError
                # (see PEP 479).
                if exc.__cause__ is value:
                    return False
                raise
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed. But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def acontextmanager(func):
    """Asynchronous counterpart of :func:`contextlib.contextmanager`."""
    if async_generator.isasyncgenfunction(func):
        @wraps(func)
        def wrapper(*args, **kwds):
            return _AsyncGeneratorContextManager(func, args, kwds)
        return wrapper
    raise TypeError(
        "must be an async generator (native or from async_generator; "
        "if using @async_generator then @acontextmanager must be on top.")
|
[
"njs@pobox.com"
] |
njs@pobox.com
|
5a813bd10a9a6555bcb7a31df0d331852598cdba
|
5088fffefcbb3458ee2c8fca6d822487e13c4169
|
/04-zanke/monte_carlo.py
|
92515bfcf694126c02f491519d94ea6ab3eda678
|
[] |
no_license
|
matijapretnar/uvod-v-programiranje
|
95de86fb63d6d06558984c05a40690f78d15aa5f
|
464a9c566ed3564a6baba60e7c79f9e25399d45e
|
refs/heads/master
| 2023-04-06T00:28:57.011142
| 2023-04-04T10:49:56
| 2023-04-04T10:49:56
| 52,275,510
| 5
| 34
| null | 2022-03-16T10:12:55
| 2016-02-22T13:32:48
|
Python
|
UTF-8
|
Python
| false
| false
| 853
|
py
|
import random
def oceni_pi(n):
    """Monte-Carlo estimate of pi from ``n`` uniform points in [-1, 1]^2.

    Prints the running estimate after every sample and returns the final
    one: four times the fraction of points inside the unit circle.
    """
    hits = 0
    for sample in range(1, n + 1):
        px = random.uniform(-1, 1)
        py = random.uniform(-1, 1)
        if px ** 2 + py ** 2 <= 1:
            hits += 1
        print(4 * hits / sample)
    return 4 * (hits / n)
def nakljucna_tocka_v_krogu(x0=0, y0=0, r=1):
    """Return a uniformly random point inside the circle with centre
    ``(x0, y0)`` and radius ``r``.

    Uses rejection sampling: draw from the bounding square until a draw
    lands inside the circle.
    """
    while True:
        px = random.uniform(x0 - r, x0 + r)
        py = random.uniform(y0 - r, y0 + r)
        if (px - x0) ** 2 + (py - y0) ** 2 <= r ** 2:
            return px, py
def nesmiselna_naloga(st_poskusov):
    """Estimate the probability that two random points in the unit circle
    lie within distance 1/2 of each other, using ``st_poskusov`` trials."""
    blizu = 0
    for _ in range(st_poskusov):
        ax, ay = nakljucna_tocka_v_krogu()
        bx, by = nakljucna_tocka_v_krogu()
        if (bx - ax) ** 2 + (by - ay) ** 2 <= (1 / 2) ** 2:
            blizu += 1
    return blizu / st_poskusov
|
[
"matija@pretnar.info"
] |
matija@pretnar.info
|
9a0ceb1f8a9e8cca78d4939bcf31c244e4acd324
|
e1abd868bfad11bf93c50eee1dc9976674de2358
|
/scaffold/suite/mass_flux_spatial_scales_plot.py
|
e0c9cd702c8515ce963bc91851a1de04cd43b566
|
[] |
no_license
|
markmuetz/scaffold_analysis
|
5c7e9d04b24abe3462c8946381f4cab264bf09e0
|
c02d32536c801b23ac8a71e36d25fa922e7cfd94
|
refs/heads/master
| 2022-06-03T16:13:54.775718
| 2022-05-31T13:22:24
| 2022-05-31T13:22:24
| 92,677,664
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,255
|
py
|
from itertools import groupby
import matplotlib
import numpy as np
matplotlib.use('Agg')
import pylab as plt
from omnium import Analyser
from scaffold.utils import cm_to_inch
class MassFluxSpatialScalesPlotter(Analyser):
    """Plots histograms of mass flux for each power of 2 (n), and expt."""
    analysis_name = 'mass_flux_spatial_scales_plot'
    multi_expt = True

    input_dir = 'omnium_output/{version_dir}/{expt}'
    input_filename = '{input_dir}/atmos.mass_flux_spatial_scales_combined.nc'
    output_dir = 'omnium_output/{version_dir}/suite_{expts}'
    output_filenames = ['{output_dir}/atmos.mass_flux_spatial_scales_plot.dummy']

    def load(self):
        # Load all input cubes via the Analyser base class.
        self.load_cubes()

    def run(self):
        # No computation: this analyser only plots in display_results().
        pass

    def save(self, state, suite):
        # Write a sentinel file so the suite sees the task as complete.
        with open(self.task.output_filenames[0], 'w') as f:
            f.write('done')

    def display_results(self):
        # Plot configuration: None/0 means "use matplotlib defaults".
        self.nbins = None
        self.x_cutoff = 0
        self.xlim = None
        self.ylim = None
        self._plot_mass_flux_spatial()
        plt.close('all')

    def _plot_mass_flux_spatial(self):
        """Build per-expt, per-height, per-n histograms plus combined and
        'poster' figures, and save each as a .png."""
        self.append_log('plotting mass_flux_spatial')
        heights = []
        ns = []
        for expt in self.task.expts:
            cubes = self.expt_cubes[expt]
            sorted_cubes = []
            for cube in cubes:
                # Key is (height_level_index, thresh_index, n).
                (height_level_index, thresh_index, n) = cube.attributes['mass_flux_spatial_key']
                mf_key = (height_level_index, thresh_index, n)
                sorted_cubes.append((mf_key, cube))
            # Each element is a tuple like: ((1, 2, 32), cube)
            # Sorting will put in correct order, sorting on initial tuple.
            sorted_cubes.sort()
            # Group on first element of tuple, i.e. on 1 for ((1, 2, 32), cube)
            for height_index, key_cubes in groupby(sorted_cubes, lambda x: x[0][0]):
                if height_index not in heights:
                    heights.append(height_index)
                hist_data = []
                # NOTE(review): dmax is accumulated but never used afterwards.
                dmax = 0
                for i, key_cube in enumerate(key_cubes):
                    # middle cube is the one with the middle thresh_index.
                    mf_key = key_cube[0]
                    cube = key_cube[1]
                    # Pick out middle element, i.e. thresh_index == 1.
                    if mf_key[1] == 1:
                        hist_data.append((mf_key, cube))
                        dmax = max(cube.data.max(), dmax)
                # assert len(hist_data) == 3
                for mf_key, hist_datum in hist_data:
                    (height_index, thresh_index, n) = mf_key
                    if n not in ns:
                        ns.append(n)
                    # Single-(expt, height, n) histogram figure.
                    name = '{}.z{}.n{}.hist'.format(expt, height_index, n)
                    plt.figure(name)
                    plt.clf()
                    plt.title('{} z{} n{} mass_flux_spatial_hist'.format(expt, height_index, n))

                    hist_kwargs = {}
                    if self.xlim:
                        hist_kwargs['range'] = self.xlim
                    else:
                        #hist_kwargs['range'] = (0, 0.1)
                        pass
                    if self.nbins:
                        hist_kwargs['bins'] = self.nbins
                    # Drop values below the configured cutoff before binning.
                    filtered_data = hist_datum.data[hist_datum.data >= self.x_cutoff]
                    y, bin_edges = np.histogram(filtered_data, **hist_kwargs)
                    bin_centers = 0.5 * (bin_edges[1:] + bin_edges[:-1])
                    # N.B. full width bins.
                    width = bin_edges[1:] - bin_edges[:-1]
                    # Rescale counts by n^2 so different coarse-graining
                    # factors are comparable.
                    plt.bar(bin_centers, y / n**2, width=width)
                    if self.xlim:
                        plt.xlim(self.xlim)
                    if self.ylim:
                        plt.ylim(self.ylim)
                    plt.savefig(self.file_path(name + '.png'))

                    # Per-expt figure overlaying all n values.
                    name = '{}.z{}.all_n.hist'.format(expt, height_index)
                    plt.figure(name)
                    plt.plot(bin_centers, y / n**2, label=n)

                    # Per-(height, n) figure overlaying all expts.
                    plt.figure('combined_expt_z{}_n{}'.format(height_index, n))
                    plt.plot(bin_centers, y / n**2, label=expt)

                    # Two-panel 'both' figure (and a side-by-side 'poster'
                    # variant), created lazily on first use per height.
                    both_name = 'both_z{}'.format(height_index)
                    if plt.fignum_exists(both_name):
                        f = plt.figure(both_name)
                        ax1, ax2 = f.axes
                        # f_poster
                        f_p = plt.figure('poster_' + both_name)
                        ax1_p, ax2_p = f_p.axes
                    else:
                        f, (ax1, ax2) = plt.subplots(2, 1, sharex=True, num=both_name)
                        ax1.set_ylabel('Frequency (rescaled)')
                        ax2.set_ylabel('Frequency (rescaled)')
                        ax2.set_xlabel('Mass flux (kg s$^{-1}$ m$^{-2}$)')
                        if self.xlim:
                            ax1.set_xlim(self.xlim)
                        f_p, (ax1_p, ax2_p) = plt.subplots(1, 2, sharex=True, num='poster_' + both_name)
                        ax1_p.set_ylabel('Frequency (rescaled)')
                        ax1_p.set_xlabel('Mass flux (kg s$^{-1}$ m$^{-2}$)')
                        ax2_p.set_xlabel('Mass flux (kg s$^{-1}$ m$^{-2}$)')
                        if self.xlim:
                            ax1_p.set_xlim(self.xlim)
                            ax2_p.set_xlim(self.xlim)
                    # Top panels: the S0 expt at small n; bottom panels: all
                    # expts at n == 1.
                    styles = {1: 'b-',
                              2: 'b--',
                              4: 'b-.'}
                    if expt == 'S0' and n <= 4:
                        style = styles[n]
                        ax1.plot(bin_centers, y / n**2, style, label=n)
                        ax1_p.plot(bin_centers, y / n**2, style, label=n)
                    if n == 1:
                        ax2.plot(bin_centers, y / n**2, label=expt)
                        ax2_p.plot(bin_centers, y / n**2, label=expt)

        # Finalize (legend, title, save) every accumulated figure.
        for height_index in heights:
            f = plt.figure('both_z{}'.format(height_index))
            ax1, ax2 = f.axes
            ax1.legend(loc='upper right')
            ax2.legend(loc='upper right')
            plt.savefig(self.file_path('both_z{}.png'.format(height_index)))

            f_p = plt.figure('poster_both_z{}'.format(height_index))
            f_p.set_size_inches(*cm_to_inch(25, 9))
            ax1_p, ax2_p = f_p.axes
            ax1_p.legend(loc='upper right')
            ax2_p.legend(loc='upper right')
            plt.tight_layout()
            plt.savefig(self.file_path('poster_both_z{}.png'.format(height_index)))

            for expt in self.task.expts:
                name = '{}.z{}.all_n.hist'.format(expt, height_index)
                plt.figure(name)
                plt.title(name)
                plt.legend()
                plt.savefig(self.file_path(name + '.png'))
            for n in ns:
                plt.figure('combined_expt_z{}_n{}'.format(height_index, n))
                plt.title('combined_expt_z{}_n{}'.format(height_index, n))
                plt.legend()
                if self.xlim:
                    plt.xlim(self.xlim)
                plt.savefig(self.file_path('z{}_n{}_combined.png'.format(height_index, n)))
|
[
"markmuetz@gmail.com"
] |
markmuetz@gmail.com
|
236a43ce48ae7a3dc607333f6288c4fc335cd1aa
|
99feebd7e64a1961bd3f3c3b152c013b35bc9bad
|
/testCase/accounts_login_password_test.py
|
9c57ce1499bc222b00590c7440c7932a340c9b86
|
[] |
no_license
|
andy-29/AutomatedTest
|
a551fb8d2d608c5191a9f1d71a30188f9a19bba5
|
1c3d2b5295f4b6df4e9321f6a75740a3970df3e4
|
refs/heads/master
| 2020-06-16T15:24:24.418593
| 2019-06-05T06:26:19
| 2019-06-05T06:26:19
| 195,621,212
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,518
|
py
|
import os, sys
# Make the project root importable when this file is run directly.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)
# Module name without the trailing '_test.py'; used to look up test data.
func = os.path.basename(__file__).split('_test.py')[0]
from common.gmpackage import *
@ddt
class Accounts_Login_Password(unittest.TestCase):
    '''
    Login API tests (登入接口).
    '''

    def setUp(self):
        # Build the request URL from the global config and this module's name.
        self.host = g.host
        self.api_name = g.api_name(func)
        self.url = self.host + self.api_name

    @data(*(get_values(func, "test_accounts_login_password")))
    def test_accounts_login_password(self, value):
        # Successful login: the API reports error == 0.
        self._testMethodDoc = '登入接口'
        r = gmhttp.login()
        self.assertEqual(0,r.get('error'))

    @data(*(get_values(func, "test_accounts_login_password_errorPwd")))
    def test_accounts_login_password_errorPwd(self, value):
        # Correct account, wrong password: response must match 'assertdata'.
        self._testMethodDoc = '账号正确,密码错误'
        user = value.get('requestdata').get('phone')
        pwd = value.get('requestdata').get('password')
        r = gmhttp.login(user,pwd)
        self.assertEqual(r, value.get('assertdata'))

    @data(*(get_values(func, "test_accounts_login_password_errorTel")))
    def test_accounts_login_password_errorTel(self, value):
        # Wrong account: response must match 'assertdata'.
        self._testMethodDoc = '账号错误'
        user = value.get('requestdata').get('phone')
        pwd = value.get('requestdata').get('password')
        r = gmhttp.login(user,pwd)
        self.assertEqual(r, value.get('assertdata'))

    def tearDown(self):
        pass
if __name__ == "__main__":
    # BUG FIX: the original called Accounts_Login_Password.run() on the
    # *class* — TestCase.run needs an instance, so this raised TypeError and
    # never executed the tests.  Use the standard unittest entry point.
    import unittest
    unittest.main()
|
[
"dayuezaichunji@163.com"
] |
dayuezaichunji@163.com
|
8f9ef0086d4ee19c301005731bf09b20b0fc8a5c
|
9c21e49150c99751231ad399bdba1850bb60c88c
|
/keepers/migrations/0012_auto_20180619_0056.py
|
359b76f9d01a20e6c2e0917a4540eb44a4c47177
|
[
"MIT"
] |
permissive
|
netvigator/auctions
|
3ab4086cb0bfbc736b17ede4e928f3ead2b08a4c
|
fc3766226cc65ac8694dffc74e893ecff8e7d07c
|
refs/heads/main
| 2023-05-25T15:55:01.249670
| 2023-05-06T14:51:12
| 2023-05-06T14:51:12
| 92,816,101
| 0
| 0
|
MIT
| 2023-02-16T05:24:34
| 2017-05-30T09:14:39
|
Python
|
UTF-8
|
Python
| false
| false
| 669
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-19 00:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine two fields on the ``item`` model
    (cSite max length / verbose name, tCreate auto timestamp + index)."""

    dependencies = [
        ('keepers', '0011_auto_20180615_1818'),
    ]

    operations = [
        migrations.AlterField(
            model_name='item',
            name='cSite',
            field=models.CharField(max_length=14, verbose_name='Site'),
        ),
        migrations.AlterField(
            model_name='item',
            name='tCreate',
            field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='created on'),
        ),
    ]
|
[
"gravesricharde@yahoo.com"
] |
gravesricharde@yahoo.com
|
3118055357e21e818369addcd8052d38382bdada
|
060ce17de7b5cdbd5f7064d1fceb4ded17a23649
|
/fn_soar_utils/fn_soar_utils/components/funct_soar_utils_artifact_hash.py
|
fa0c9212fa4a7c4ee6fd5991f38a41c0ca9545f1
|
[
"MIT"
] |
permissive
|
ibmresilient/resilient-community-apps
|
74bbd770062a22801cef585d4415c29cbb4d34e2
|
6878c78b94eeca407998a41ce8db2cc00f2b6758
|
refs/heads/main
| 2023-06-26T20:47:15.059297
| 2023-06-23T16:33:58
| 2023-06-23T16:33:58
| 101,410,006
| 81
| 107
|
MIT
| 2023-03-29T20:40:31
| 2017-08-25T14:07:33
|
Python
|
UTF-8
|
Python
| false
| false
| 2,521
|
py
|
# -*- coding: utf-8 -*-
# (c) Copyright IBM Corp. 2018, 2022. All Rights Reserved.
# pragma pylint: disable=unused-argument, no-self-use
"""Function implementation"""
from json import dumps
from logging import getLogger
from hashlib import algorithms_guaranteed, new
from resilient_lib import get_file_attachment, get_file_attachment_metadata, validate_fields
from resilient_circuits import ResilientComponent, function, StatusMessage, FunctionResult, FunctionError
LOG = getLogger(__name__)
class FunctionComponent(ResilientComponent):
    """Component that implements SOAR function 'artifact_hash'"""

    @function("soar_utils_artifact_hash")
    def _artifact_hash_function(self, event, *args, **kwargs):
        """Function: Calculate hashes for a file artifact.

        Yields StatusMessage updates and finally a FunctionResult holding
        the artifact's metadata plus one hex digest per guaranteed hashlib
        algorithm; on any error yields FunctionError instead.
        """
        try:
            # Validate required inputs
            validate_fields(["incident_id", "artifact_id"], kwargs)

            # Get the function parameters:
            incident_id = kwargs.get("incident_id")  # number
            artifact_id = kwargs.get("artifact_id")  # number

            LOG.info("incident_id: %s", incident_id)
            LOG.info("artifact_id: %s", artifact_id)

            yield StatusMessage("Reading artifact...")

            client = self.rest_client()
            # Fetch both the artifact's file metadata and its raw bytes.
            metadata = get_file_attachment_metadata(client, incident_id, artifact_id=artifact_id)
            data = get_file_attachment(client, incident_id, artifact_id=artifact_id)

            results = {
                "filename": metadata["name"],
                "content_type": metadata["content_type"],
                "size": metadata["size"],
                "created": metadata["created"]
            }

            # Hashlib provides a list of all "algorithms_available", but there's duplication, so
            # use the standard list: ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
            for algo in algorithms_guaranteed:
                impl = new(algo)
                impl.update(data)
                # shake algorithms require a 'length' parameter
                if algo.startswith("shake_"):
                    # Use the variant's bit count (e.g. 128) as the length.
                    results[algo] = impl.hexdigest(int(algo.split('_')[-1]))
                else:
                    results[algo] = impl.hexdigest()

            LOG.info("%s sha1=%s", metadata["name"], results["sha1"])

            # Produce a FunctionResult with the return value
            LOG.debug(dumps(results))
            yield FunctionResult(results)
        except Exception:
            yield FunctionError()
|
[
"travis@example.org"
] |
travis@example.org
|
a694e62f4c790eab767286b4be22a9c5f5e4a41e
|
8b20fdc16253b2b4e07ce28f4fd3120db4566783
|
/pythainlp/__init__.py
|
47bffa93eda32cec984d87f336d8c648c66c28bf
|
[
"Apache-2.0",
"Swift-exception"
] |
permissive
|
johnnyduo/pythainlp
|
d8a850fa7b6d9dfed5eb23f84264caea1703f5fb
|
dbefc4c88ee8051a14e3be1a10a57670f861cd37
|
refs/heads/master
| 2021-06-19T23:49:43.564140
| 2017-07-06T10:36:58
| 2017-07-06T10:36:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 898
|
py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
if six.PY3:
"""
ไว้ใส่ความสามารถที่รองรับเฉพาะ Python 3.4+ เท่านั้น
"""
from pythainlp.sentiment import sentiment
from pythainlp.spell import hunspell,spell
from pythainlp.romanization import romanization,pyicu,royin
from pythainlp.tokenize import word_tokenize,tcc,etcc
from pythainlp.rank import rank
from pythainlp.change import texttothai,texttoeng
from pythainlp.number import nttn,nttt,ntnt,ntt,ttn,ttnt,number_format,numtowords,ReadNumber
from pythainlp.date import now
from pythainlp.tag import old,pos_tag
from pythainlp.collation import collation
from pythainlp.test import TestUM
from pythainlp.Text import Text
from pythainlp.MetaSound import MetaSound
from pythainlp.soundex import LK82,Udom83
from pythainlp.util import ngrams
|
[
"wannaphong@yahoo.com"
] |
wannaphong@yahoo.com
|
11af023167cde8c35bb2c4b22b1dd4d44852c42d
|
e89164093c99b2be87b201804718aa73a2ffdae3
|
/leetcode/783. Minimum Distance Between BST Nodes.py
|
df5419cd15909bd4d9943cca22830c3f802cb3ea
|
[] |
no_license
|
gsrr/leetcode
|
748d585d0219ad1a1386794910c7410b50ce3c93
|
992bb618b605c3345318a0eeb2d2df4d11f6a2d5
|
refs/heads/master
| 2021-07-06T12:40:03.052470
| 2021-05-28T17:28:43
| 2021-05-28T17:28:43
| 76,116,620
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 704
|
py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
def ldr(node, arr):
    """Append the values of the tree rooted at ``node`` to ``arr`` in
    in-order (left, root, right) — ascending order for a BST."""
    if node.left is not None:
        ldr(node.left, arr)
    arr.append(node.val)
    if node.right is not None:
        ldr(node.right, arr)
class Solution(object):
    """LeetCode 783: minimum difference between values of any two nodes
    of a BST."""

    def minDiffInBST(self, root):
        """
        :type root: TreeNode
        :rtype: int

        In-order traversal visits BST values in ascending order, so the
        answer is the minimum gap between consecutive visited values.
        Fixes from the original: ``xrange`` is Python-2-only (NameError on
        Python 3), and trees with fewer than two nodes crashed with
        IndexError; both cases now return 0.  The iterative traversal also
        avoids materializing the whole value list.
        """
        if root is None:
            return 0
        best = float('inf')
        prev = None
        stack = []
        node = root
        while stack or node is not None:
            # Descend to the leftmost unvisited node.
            while node is not None:
                stack.append(node)
                node = node.left
            node = stack.pop()
            if prev is not None:
                best = min(best, node.val - prev)
            prev = node.val
            node = node.right
        # Fewer than two nodes: no pair exists, keep the original's
        # root-is-None convention and return 0.
        return 0 if best == float('inf') else best
|
[
"jerrycheng1128@gmail.com"
] |
jerrycheng1128@gmail.com
|
069534b71755db5b1b403c9d65cf61f1b0a9f491
|
6b6e20004b46165595f35b5789e7426d5289ea48
|
/workers/test/test_exportactionlogsworker.py
|
0e4a728b421dd60c2b029851e5dfe04326ee7a91
|
[
"Apache-2.0"
] |
permissive
|
anwarchk/quay
|
2a83d0ab65aff6a1120fbf3a45dd72f42211633b
|
23c5120790c619174e7d36784ca5aab7f4eece5c
|
refs/heads/master
| 2020-09-12T18:53:21.093606
| 2019-11-15T19:29:02
| 2019-11-15T19:29:02
| 222,517,145
| 0
| 0
|
Apache-2.0
| 2019-11-18T18:32:35
| 2019-11-18T18:32:35
| null |
UTF-8
|
Python
| false
| false
| 4,628
|
py
|
import json
import os
from datetime import datetime, timedelta
import boto
from httmock import urlmatch, HTTMock
from moto import mock_s3_deprecated as mock_s3
from app import storage as test_storage
from data import model, database
from data.logs_model import logs_model
from storage import S3Storage, StorageContext, DistributedStorage
from workers.exportactionlogsworker import ExportActionLogsWorker, POLL_PERIOD_SECONDS
from test.fixtures import *
# Fixture constants for the mocked S3 storage backend.
_TEST_CONTENT = os.urandom(1024)
_TEST_BUCKET = 'some_bucket'
_TEST_USER = 'someuser'
_TEST_PASSWORD = 'somepassword'
_TEST_PATH = 'some/cool/path'
# Storage context for the mocked S3 engine ('nyc' location, no credentials).
_TEST_CONTEXT = StorageContext('nyc', None, None, None, None)
@pytest.fixture(params=['test', 'mock_s3'])
def storage_engine(request):
    """Yield a storage engine: either the app's test storage or a
    moto-mocked S3-backed DistributedStorage, so each test using this
    fixture runs against both backends."""
    if request.param == 'test':
        yield test_storage
    else:
        with mock_s3():
            # Create a test bucket and put some test content.
            boto.connect_s3().create_bucket(_TEST_BUCKET)
            engine = DistributedStorage(
                {'foo': S3Storage(_TEST_CONTEXT, 'some/path', _TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)},
                ['foo'])
            yield engine
def test_export_logs_failure(initialized_db):
    """When the storage upload raises, the worker must re-raise IOError and
    POST a 'failed' status to the callback URL."""
    # Make all uploads fail.
    test_storage.put_content('local_us', 'except_upload', 'true')

    repo = model.repository.get_repository('devtable', 'simple')
    user = model.user.get_user('devtable')

    worker = ExportActionLogsWorker(None)
    called = [{}]

    @urlmatch(netloc=r'testcallback')
    def handle_request(url, request):
        # Record the JSON body the worker posts back to the callback.
        called[0] = json.loads(request.body)
        return {'status_code': 200, 'content': '{}'}

    def format_date(datetime):
        # NOTE(review): the parameter shadows the imported datetime class.
        return datetime.strftime("%m/%d/%Y")

    now = datetime.now()
    with HTTMock(handle_request):
        with pytest.raises(IOError):
            worker._process_queue_item({
                'export_id': 'someid',
                'repository_id': repo.id,
                'namespace_id': repo.namespace_user.id,
                'namespace_name': 'devtable',
                'repository_name': 'simple',
                'start_time': format_date(now + timedelta(days=-10)),
                'end_time': format_date(now + timedelta(days=10)),
                'callback_url': 'http://testcallback/',
                'callback_email': None,
            }, test_storage)

    # Undo the upload-failure flag so other tests are unaffected.
    test_storage.remove('local_us', 'except_upload')

    assert called[0]
    assert called[0][u'export_id'] == 'someid'
    assert called[0][u'status'] == 'failed'
@pytest.mark.parametrize('has_logs', [
    True,
    False,
])
def test_export_logs(initialized_db, storage_engine, has_logs):
    """Happy path: the worker exports logs (or an empty export), posts a
    'success' callback, and the stored JSON contains the expected rows."""
    # Delete all existing logs.
    database.LogEntry3.delete().execute()

    repo = model.repository.get_repository('devtable', 'simple')
    user = model.user.get_user('devtable')

    now = datetime.now()
    if has_logs:
        # Add new logs over a multi-day period.
        for index in range(-10, 10):
            logs_model.log_action('push_repo', 'devtable', user, '0.0.0.0', {'index': index},
                                  repo, timestamp=now + timedelta(days=index))

    worker = ExportActionLogsWorker(None)
    called = [{}]

    @urlmatch(netloc=r'testcallback')
    def handle_request(url, request):
        # Record the JSON body the worker posts back to the callback.
        called[0] = json.loads(request.body)
        return {'status_code': 200, 'content': '{}'}

    def format_date(datetime):
        # NOTE(review): the parameter shadows the imported datetime class.
        return datetime.strftime("%m/%d/%Y")

    with HTTMock(handle_request):
        worker._process_queue_item({
            'export_id': 'someid',
            'repository_id': repo.id,
            'namespace_id': repo.namespace_user.id,
            'namespace_name': 'devtable',
            'repository_name': 'simple',
            'start_time': format_date(now + timedelta(days=-10)),
            'end_time': format_date(now + timedelta(days=10)),
            'callback_url': 'http://testcallback/',
            'callback_email': None,
        }, storage_engine)

    assert called[0]
    assert called[0][u'export_id'] == 'someid'
    assert called[0][u'status'] == 'success'

    # The exported-data URL differs between the local test storage and the
    # mocked S3 engine; extract the storage id either way.
    url = called[0][u'exported_data_url']
    if url.find('http://localhost:5000/exportedlogs/') == 0:
        storage_id = url[len('http://localhost:5000/exportedlogs/'):]
    else:
        assert url.find('https://some_bucket.s3.amazonaws.com/some/path/exportedactionlogs/') == 0
        storage_id, _ = url[len('https://some_bucket.s3.amazonaws.com/some/path/exportedactionlogs/'):].split('?')

    created = storage_engine.get_content(storage_engine.preferred_locations,
                                         'exportedactionlogs/' + storage_id)
    created_json = json.loads(created)

    if has_logs:
        # Every index logged above must appear (terminator rows excluded).
        found = set()
        for log in created_json['logs']:
            if log.get('terminator'):
                continue
            found.add(log['metadata']['index'])
        for index in range(-10, 10):
            assert index in found
    else:
        assert created_json['logs'] == [{'terminator': True}]
|
[
"jimmy.zelinskie+git@gmail.com"
] |
jimmy.zelinskie+git@gmail.com
|
ca87e2d4a6d85f9a84b735aec448de0ffb39330a
|
8ac156c3bfeb4ce28836a1820cb88959424dab14
|
/test/test_ocr_page_result_with_lines_with_location.py
|
db398b1f5b831b331b45a635bf3ed2b22f00da5b
|
[
"Apache-2.0"
] |
permissive
|
Cloudmersive/Cloudmersive.APIClient.Python.OCR
|
7b593464d31d3038663bedca3c085a161e356f20
|
90acf41a9b307213ef79f63ea4c749469ef61006
|
refs/heads/master
| 2023-04-03T06:03:41.917713
| 2023-03-27T05:30:38
| 2023-03-27T05:30:38
| 138,450,272
| 6
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,161
|
py
|
# coding: utf-8
"""
ocrapi
The powerful Optical Character Recognition (OCR) APIs let you convert scanned images of pages into recognized text. # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import cloudmersive_ocr_api_client
from cloudmersive_ocr_api_client.models.ocr_page_result_with_lines_with_location import OcrPageResultWithLinesWithLocation # noqa: E501
from cloudmersive_ocr_api_client.rest import ApiException
class TestOcrPageResultWithLinesWithLocation(unittest.TestCase):
    """OcrPageResultWithLinesWithLocation unit test stubs (swagger-codegen
    generated; the actual assertions are still to be written)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testOcrPageResultWithLinesWithLocation(self):
        """Test OcrPageResultWithLinesWithLocation"""
        # FIXME: construct object with mandatory attributes with example values
        # model = cloudmersive_ocr_api_client.models.ocr_page_result_with_lines_with_location.OcrPageResultWithLinesWithLocation()  # noqa: E501
        pass
if __name__ == '__main__':
    # Standard unittest entry point when run as a script.
    unittest.main()
|
[
"35204726+Cloudmersive@users.noreply.github.com"
] |
35204726+Cloudmersive@users.noreply.github.com
|
816c45d294921e6362d0eaa5cc2305ba0fb01d7f
|
a2fd604a8ef45b4e08cf832348d20b65e4468a79
|
/phoenix/tests/test_caches.py
|
a4d7e9263d733aae95b47899c92b2a290f0313d0
|
[] |
no_license
|
darraes/data_structures
|
8ca76a3fc3e961860861cd43f5b866b8e7e50427
|
4ff2c60e05d9275b163db59ed37b9f46ba50f3c0
|
refs/heads/master
| 2020-04-17T10:19:59.357548
| 2019-02-28T21:42:44
| 2019-02-28T21:42:44
| 166,497,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,679
|
py
|
import unittest
from phoenix.cache import *
class TestFunctions(unittest.TestCase):
    """Unit tests for the phoenix.cache LRU and LFU cache implementations."""

    def test_lru_new_insertions(self):
        # Filling past capacity evicts the least-recently-used key (k1).
        cache = LRUCache(3)
        cache.put("k1", "v1")
        self.assertEqual("v1", cache.get("k1"))
        cache.put("k2", "v2")
        self.assertEqual("v2", cache.get("k2"))
        cache.put("k3", "v3")
        self.assertEqual("v3", cache.get("k3"))
        cache.put("k4", "v4")
        self.assertEqual("v4", cache.get("k4"))
        self.assertEqual("v3", cache.get("k3"))
        self.assertEqual("v2", cache.get("k2"))
        self.assertEqual(None, cache.get("k1"))

    def test_lru_tail_to_head(self):
        # Re-putting the LRU entry (k1) moves it to MRU, so k2 is evicted.
        cache = LRUCache(3)
        cache.put("k1", "v1")
        cache.put("k2", "v2")
        cache.put("k3", "v3")
        cache.put("k1", "v11")
        cache.put("k4", "v4")
        self.assertEqual(None, cache.get("k2"))
        self.assertEqual("v3", cache.get("k3"))
        cache.put("k5", "v5")
        self.assertEqual(None, cache.get("k1"))

    def test_lru_middle_to_head(self):
        # Re-putting a middle entry (k2) promotes it, so k1 is evicted.
        cache = LRUCache(3)
        cache.put("k1", "v1")
        cache.put("k2", "v2")
        cache.put("k3", "v3")
        cache.put("k2", "v22")
        cache.put("k4", "v4")
        self.assertEqual(None, cache.get("k1"))
        self.assertEqual("v22", cache.get("k2"))
        cache.put("k5", "v5")
        self.assertEqual(None, cache.get("k3"))

    def test_lru_head_to_head(self):
        # Re-putting the MRU entry keeps order; k1 is still evicted first.
        cache = LRUCache(3)
        cache.put("k1", "v1")
        cache.put("k2", "v2")
        cache.put("k3", "v3")
        cache.put("k3", "v4")
        cache.put("k4", "v4")
        self.assertEqual(None, cache.get("k1"))
        self.assertEqual("v4", cache.get("k4"))
        cache.put("k5", "v5")
        self.assertEqual(None, cache.get("k2"))

    def test_lfu_4(self):
        # Zero-capacity cache stores nothing.
        cache = LFUCache(0)
        cache.put(0, 0)
        self.assertEqual(None, cache.get(0))

    def test_lfu_3(self):
        # Least-frequently-used entries are evicted first.
        cache = LFUCache(2)
        cache.put(1, 1)
        cache.put(2, 2)
        self.assertEqual(1, cache.get(1))
        cache.put(3, 3)
        self.assertEqual(None, cache.get(2))
        self.assertEqual(3, cache.get(3))
        cache.put(4, 4)
        self.assertEqual(None, cache.get(1))
        self.assertEqual(3, cache.get(3))
        self.assertEqual(4, cache.get(4))

    def test_lfu_2(self):
        # NOTE(review): this test performs puts only and asserts nothing —
        # it can only catch exceptions raised by LFUCache.put.
        cache = LFUCache(5)
        cache.put("k1", "v1")
        cache.put("k2", "v2")
        cache.put("k3", "v3")
        cache.put("k4", "v4")
        cache.put("k5", "v5")
        cache.put("k2", "v2")
        cache.put("k3", "v3")
        cache.put("k2", "v2")
        cache.put("k6", "v6")
        cache.put("k3", "v3")
|
[
"daniel.arraes@gmail.com"
] |
daniel.arraes@gmail.com
|
34177aaf3d8e4472f51189bd33d2c6658fe3cd66
|
9b422078f4ae22fe16610f2ebc54b8c7d905ccad
|
/xlsxwriter/test/comparison/test_image_bytes01.py
|
02dba5d0f8a119b040fad480338e187a1031b18b
|
[
"BSD-2-Clause-Views"
] |
permissive
|
projectsmahendra/XlsxWriter
|
73d8c73ea648a911deea63cb46b9069fb4116b60
|
9b9d6fb283c89af8b6c89ad20f72b8208c2aeb45
|
refs/heads/master
| 2023-07-21T19:40:41.103336
| 2023-07-08T16:54:37
| 2023-07-08T16:54:37
| 353,636,960
| 0
| 0
|
NOASSERTION
| 2021-04-01T08:57:21
| 2021-04-01T08:57:20
| null |
UTF-8
|
Python
| false
| false
| 1,466
|
py
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2021, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
from io import BytesIO
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        self.set_filename('image01.xlsx')

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file with image(s)."""
        workbook = Workbook(self.got_filename)

        worksheet = workbook.add_worksheet()

        # Wrap the image in BytesIO so insertion exercises the image_data
        # code path rather than the filename path.
        image_file = open(self.image_dir + 'red.png', 'rb')
        image_data = BytesIO(image_file.read())
        image_file.close()

        worksheet.insert_image('E9', 'red.png', {'image_data': image_data})

        workbook.close()

        self.assertExcelEqual()

    def test_create_file_in_memory(self):
        """Test the creation of a simple XlsxWriter file with image(s)."""
        # Same as above, but the workbook is assembled entirely in memory.
        workbook = Workbook(self.got_filename, {'in_memory': True})

        worksheet = workbook.add_worksheet()

        image_file = open(self.image_dir + 'red.png', 'rb')
        image_data = BytesIO(image_file.read())
        image_file.close()

        worksheet.insert_image('E9', 'red.png', {'image_data': image_data})

        workbook.close()

        self.assertExcelEqual()
|
[
"jmcnamara@cpan.org"
] |
jmcnamara@cpan.org
|
3c51dcc2e73e3f43318e71887d695fe2532c06b9
|
a4ea525e226d6c401fdb87a6e9adfdc5d07e6020
|
/src/azure-cli/azure/cli/command_modules/network/aaz/latest/network/virtual_appliance/site/_delete.py
|
f453c8731d6e69b3932912be786b732d7da64fb3
|
[
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] |
permissive
|
Azure/azure-cli
|
13340eeca2e288e66e84d393fa1c8a93d46c8686
|
a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca
|
refs/heads/dev
| 2023-08-17T06:25:37.431463
| 2023-08-17T06:00:10
| 2023-08-17T06:00:10
| 51,040,886
| 4,018
| 3,310
|
MIT
| 2023-09-14T11:11:05
| 2016-02-04T00:21:51
|
Python
|
UTF-8
|
Python
| false
| false
| 5,731
|
py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network virtual-appliance site delete",
is_preview=True,
confirmation="Are you sure you want to perform this operation?",
)
class Delete(AAZCommand):
    """Delete an Azure network virtual appliance site.

    :example: Delete an Azure network virtual appliance site.
        az network virtual-appliance site delete -n MySite -g MyRG --appliance-name MyName -y
    """

    # Maps this command to the ARM resource path and API version it targets.
    _aaz_info = {
        "version": "2021-08-01",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/networkvirtualappliances/{}/virtualappliancesites/{}", "2021-08-01"],
        ]
    }

    # Enables the --no-wait flag for this long-running operation.
    AZ_SUPPORT_NO_WAIT = True

    def _handler(self, command_args):
        # DELETE returns no body, hence the None result callback on the poller.
        super()._handler(command_args)
        return self.build_lro_poller(self._execute_operations, None)

    _args_schema = None

    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Build the argument schema once and cache it on the class.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)

        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.appliance_name = AAZStrArg(
            options=["--appliance-name"],
            help="The name of Network Virtual Appliance.",
            required=True,
            id_part="name",
        )
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.name = AAZStrArg(
            options=["-n", "--name"],
            help="The name of Network Virtual Appliance Site.",
            required=True,
            id_part="child_name_1",
        )
        return cls._args_schema

    def _execute_operations(self):
        # Generator: the yield lets the LRO poller drive the HTTP operation.
        self.pre_operations()
        yield self.VirtualApplianceSitesDelete(ctx=self.ctx)()
        self.post_operations()

    @register_callback
    def pre_operations(self):
        # Extension point: runs before the HTTP operation.
        pass

    @register_callback
    def post_operations(self):
        # Extension point: runs after the HTTP operation.
        pass

    class VirtualApplianceSitesDelete(AAZHttpOperation):
        """Issues the DELETE request and wires up long-running-operation polling."""

        CLIENT_TYPE = "MgmtClient"

        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            # 202: deletion accepted and in progress -> poll to completion.
            if session.http_response.status_code in [202]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_200,
                    self.on_error,
                    lro_options={"final-state-via": "location"},
                    path_format_arguments=self.url_parameters,
                )
            # 200: deletion completed synchronously.
            if session.http_response.status_code in [200]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_200,
                    self.on_error,
                    lro_options={"final-state-via": "location"},
                    path_format_arguments=self.url_parameters,
                )
            # 204: the resource was already gone.
            if session.http_response.status_code in [204]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_204,
                    self.on_error,
                    lro_options={"final-state-via": "location"},
                    path_format_arguments=self.url_parameters,
                )
            # Any other status code is treated as an error response.
            return self.on_error(session.http_response)

        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkVirtualAppliances/{networkVirtualApplianceName}/virtualApplianceSites/{siteName}",
                **self.url_parameters
            )

        @property
        def method(self):
            return "DELETE"

        @property
        def error_format(self):
            return "ODataV4Format"

        @property
        def url_parameters(self):
            # Fills the placeholders in `url` from the parsed command arguments.
            parameters = {
                **self.serialize_url_param(
                    "networkVirtualApplianceName", self.ctx.args.appliance_name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "siteName", self.ctx.args.name,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
            }
            return parameters

        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2021-08-01",
                    required=True,
                ),
            }
            return parameters

        def on_200(self, session):
            # Success with no response body to deserialize.
            pass

        def on_204(self, session):
            # No content: nothing to do.
            pass
class _DeleteHelper:
    """Helper class for Delete"""


# Explicit public API of this generated module.
__all__ = ["Delete"]
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
60570467f232d79d8b785162fa8abe654121701e
|
b9dda07897d552466695c735c14d624cf89315bc
|
/triggerflow/service/eventsources/model.py
|
220393130c315f170e96204d7db7a6ce32a801ff
|
[
"Apache-2.0"
] |
permissive
|
JosepSampe/triggerflow
|
02792ba96059f27c2d163ca88d50a10e030026ae
|
66d8adcd6b31692663ee861c334608b74fecf884
|
refs/heads/master
| 2023-01-12T12:12:33.007616
| 2020-10-20T13:14:18
| 2020-10-20T13:14:18
| 264,998,376
| 0
| 0
|
Apache-2.0
| 2020-05-18T16:32:06
| 2020-05-18T16:32:05
| null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
from multiprocessing import Process
from threading import Thread
class EventSourceHook(Thread):
    """Abstract event-source consumer that runs in its own thread.

    Subclasses must override run(), commit() and stop().
    """

    def __init__(self, name: str, *args, **kwargs):
        # Extra *args/**kwargs are accepted for subclass signatures but ignored here.
        super().__init__()
        self.name = name  # also overrides the Thread's auto-generated name

    def run(self):
        # Main consume loop -- subclass responsibility.
        raise NotImplementedError()

    def commit(self, records):
        # Acknowledge a batch of processed records -- subclass responsibility.
        raise NotImplementedError()

    def stop(self):
        # Request shutdown of the consumer -- subclass responsibility.
        raise NotImplementedError()
|
[
"aitor.a98@gmail.com"
] |
aitor.a98@gmail.com
|
e378342db455f9d7483d9f6cf7982882e5d2ca99
|
b72596aa97a724f9f2cc6947b86a9b972846277f
|
/setup.py
|
8cba9868cc12580e64d54561b344cf8fca1cdca5
|
[
"MIT"
] |
permissive
|
dumpmemory/hourglass-transformer-pytorch
|
698cfcbc6a1b572efef37b5926d45dd598ff457b
|
4be33bb41adfedf1b739cd24bec9481bc83a93e2
|
refs/heads/main
| 2023-09-03T01:45:41.994192
| 2021-11-10T15:49:06
| 2021-11-10T15:49:06
| 426,081,172
| 0
| 0
|
MIT
| 2021-11-10T15:55:51
| 2021-11-09T03:41:56
|
Python
|
UTF-8
|
Python
| false
| false
| 750
|
py
|
from setuptools import setup, find_packages
# Packaging metadata for the hourglass-transformer-pytorch distribution;
# install with `pip install .` or build/publish via setuptools.
setup(
    name = 'hourglass-transformer-pytorch',
    packages = find_packages(),
    version = '0.0.6',
    license='MIT',
    description = 'Hourglass Transformer',
    author = 'Phil Wang',
    author_email = 'lucidrains@gmail.com',
    url = 'https://github.com/lucidrains/hourglass-transformer-pytorch',
    keywords = [
        'artificial intelligence',
        'attention mechanism',
        'transformers'
    ],
    # Runtime dependencies installed alongside the package.
    install_requires=[
        'einops',
        'torch>=1.6'
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.6',
    ],
)
|
[
"lucidrains@gmail.com"
] |
lucidrains@gmail.com
|
523b42f752bced31bc63bb710b3b4fded293c9cf
|
20e3010608e40a6ec5ea56f69d122a62182e4bdb
|
/1 - Python-2/4 - strings functions/HW4/3. Make an IP adress unclickable.py
|
f6b7f30215f124961d64f2ec6f1ae189675582a4
|
[] |
no_license
|
LarisaOvchinnikova/Python
|
ee65eac221cd03563d60110118175692564c5b2d
|
9cc86a260828662995dec59a6d69528f96d37e79
|
refs/heads/master
| 2021-08-22T21:41:02.351589
| 2021-05-25T18:37:09
| 2021-05-25T18:37:09
| 253,842,826
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 149
|
py
|
# Input: address = "1.1.1.1"
# Output: "1[.]1[.]1[.]1"
def ip_address(address):
    """Return *address* with every '.' defanged as '[.]' (unclickable form)."""
    return "[.]".join(address.split("."))


print(ip_address("1.1.1.1"))
|
[
"larisaplantation@gmail.com"
] |
larisaplantation@gmail.com
|
caa9cb15bb5cd49e3cb59f5ace978e207c998922
|
db37e5eab7b60057bbc1ae153df8693f0159b02c
|
/examples/decoupledibpm/flapping2dRe75/run/scripts/plot_vorticity_compare_li_et_al_2015.py
|
63ee97afbb13882c1575b1ae99fc77dbdad3f383
|
[
"BSD-3-Clause"
] |
permissive
|
stjordanis/petibm-examples
|
83f7212eadbc1bbfb2071d550969b252cbcfcd89
|
794de3613967c14750c750aed386602c988cff05
|
refs/heads/master
| 2022-04-12T20:29:33.566464
| 2020-02-29T22:45:39
| 2020-02-29T22:45:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,078
|
py
|
"""Plot the vorticity at saved time steps."""
from matplotlib import pyplot, image
import numpy
import pathlib
import petibmpy
simudir = pathlib.Path(__file__).absolute().parents[1]  # simulation directory
datadir = simudir / 'output'  # directory with field solution files
name = 'wz'  # name of the variable to load and plot
save_figure = True  # save Matplotlib figure as PNG
show_figure = True  # show Matplotlib figure

# Load the grid from file.
filepath = datadir / 'grid.h5'
grid = petibmpy.read_grid_hdf5(filepath, name)

# Saved time-step indices to plot (five snapshots, every 200 steps).
states = [2400, 2600, 2800, 3000, 3200]

pyplot.rc('font', family='serif', size=14)
# Top row (ax1): computed contours; bottom row (ax2): reference images.
fig, (ax1, ax2) = pyplot.subplots(nrows=2, ncols=5, figsize=(10.0, 5.0))
levels = numpy.linspace(-20.0, 20.0, num=40)  # contour levels

for i, state in enumerate(states):
    print(f'[time step {state}] Load and plot contours of {name}')
    # Load data from file.
    filepath = datadir / f'{state:0>7}.h5'
    data = petibmpy.read_field_hdf5(filepath, name)
    # Load body coordinates from file.
    filepath = datadir / f'ellipse_{state:0>7}.2D'
    body = petibmpy.read_body(filepath)
    # Plot the contour of the field variable.
    ax1[i].contour(*grid, data, levels=levels, linewidths=0.5, extend='both')
    ax1[i].plot(*body, color='black', linewidth=0.5)
    ax1[i].axis('scaled', adjustable='box')
    ax1[i].set_xlim(-3.5, 2.5)
    ax1[i].set_ylim(-5.0, 1.0)
    ax1[i].axis('off')

# Add images from Li et al. (2015) to the figure.
# NOTE(review): `datadir` is re-bound here and no longer points at the
# simulation output folder for the rest of the script.
datadir = simudir.parent / 'data'
times = [3.0, 3.25, 3.5, 3.75, 4.0]
for i, time in enumerate(times):
    print(f'[time {time}] Display image from Li et al. (2015)')
    filepath = datadir / f'li_et_al_2015_flapping_wz_{time:.2f}.png'
    im = image.imread(str(filepath))
    ax2[i].imshow(im)
    ax2[i].axis('off')

fig.tight_layout()

if save_figure:
    figdir = simudir / 'figures'  # folder to contain PNG files
    figdir.mkdir(parents=True, exist_ok=True)
    # NOTE(review): f-string below has no placeholders; plain string would do.
    filepath = figdir / f'wz_compare_li_et_al_2015.png'
    fig.savefig(filepath, dpi=300, bbox_inches='tight')
if show_figure:
    pyplot.show()
|
[
"mesnardo@gwu.edu"
] |
mesnardo@gwu.edu
|
c212488374a2e7a4dcf011707fabc37464e8b920
|
f79102231c83674a4c01e56e3953b2a65cb14da2
|
/leetcode/base/list/环形链表.py
|
31d0d694e9e23ee41583a99337ef25a65410b65f
|
[] |
no_license
|
Activity00/Python
|
4971b177beaf72df0de97f7e78f400d48104dce1
|
166d97f36bbeea74c84ec57466bd0a65b608ed09
|
refs/heads/master
| 2020-12-24T07:53:06.782982
| 2020-09-29T10:55:43
| 2020-09-29T10:55:43
| 73,362,001
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,237
|
py
|
# coding: utf-8
"""
@author: 武明辉
@time: 19-3-20 下午9:35
"""
"""
给定一个链表,判断链表中是否有环。
为了表示给定链表中的环,我们使用整数 pos 来表示链表尾连接到链表中的位置(索引从 0 开始)。 如果 pos 是 -1,则在该链表中没有环。
示例 1:
输入:head = [3,2,0,-4], pos = 1
输出:true
解释:链表中有一个环,其尾部连接到第二个节点。
示例 2:
输入:head = [1,2], pos = 0
输出:true
解释:链表中有一个环,其尾部连接到第一个节点。
示例 3:
输入:head = [1], pos = -1
输出:false
解释:链表中没有环。
进阶:
你能用 O(1)(即,常量)内存解决此问题吗?
"""
# Definition for singly-linked list.
class ListNode(object):
    """A node in a singly linked list."""

    def __init__(self, x):
        self.val = x  # node payload
        self.next = None  # successor node; None marks the tail
class Solution(object):
    """Cycle detection via Floyd's tortoise-and-hare algorithm."""

    def hasCycle(self, head):
        """Return True iff the list starting at *head* contains a cycle.

        O(n) time, O(1) extra memory: the hare advances two nodes per
        step, the tortoise one; they can only meet inside a cycle.
        """
        if head is None:
            return False
        tortoise = hare = head
        while hare.next is not None and hare.next.next is not None:
            hare = hare.next.next
            tortoise = tortoise.next
            if hare is tortoise:
                return True
        # The hare reached the tail, so the list is acyclic.
        return False
if __name__ == '__main__':
pass
|
[
"1032662429@qq.com"
] |
1032662429@qq.com
|
d294ee636acb84148e16ac385f849a18ab6a1d2d
|
e63f11c621ffa2c54a8bc4714c6fb0f868f902d6
|
/LianJia_Scrapy/item_url.py
|
964827b681f15e340e7a2dee5981496f848a2108
|
[] |
no_license
|
aquablue1/LianJia_Scrapy
|
5821fd93eca796d319f408d351cc30d860a0edb4
|
580ced19204d5eb9614c6a8b362b2cb9eba88388
|
refs/heads/master
| 2021-05-05T22:07:14.261137
| 2018-01-06T05:01:43
| 2018-01-06T05:01:43
| 116,090,808
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 308
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class LianjiaScrapyItem(scrapy.Item):
    """Scrapy item carrying a single scraped listing URL."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    url = scrapy.Field()  # URL of the scraped page
|
[
"94apieceofcake@gmail.com"
] |
94apieceofcake@gmail.com
|
94c044bdea784aa5da43326d563b722a3d5c4fc6
|
29da2ca6def1270be13a3096685a8e5d82828dff
|
/CIM14/CDPSM/GIS_Connectivity/IEC61970/Core/SubGeographicalRegion.py
|
0030c2438ce680b5ea6c4d046032e16e4f3f5353
|
[
"MIT"
] |
permissive
|
rimbendhaou/PyCIM
|
75eb3bcd3729b2410c03f3d5c66d6f1e05e21df3
|
d578bb0bf1af344342bd23344385ed9c06c2d0ee
|
refs/heads/master
| 2022-04-28T01:16:12.673867
| 2020-04-16T02:19:09
| 2020-04-16T02:19:09
| 256,085,381
| 0
| 0
|
MIT
| 2020-04-16T02:15:20
| 2020-04-16T02:08:14
| null |
UTF-8
|
Python
| false
| false
| 3,823
|
py
|
# Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CDPSM.GIS_Connectivity.IEC61970.Core.IdentifiedObject import IdentifiedObject
class SubGeographicalRegion(IdentifiedObject):
    """A subset of a geographical region of a power system network model.

    Maintains bidirectional associations with its parent Region and its
    contained Lines and Substations; the setters below keep both sides
    of each association consistent.
    """

    def __init__(self, Region=None, Lines=None, Substations=None, *args, **kw_args):
        """Initialises a new 'SubGeographicalRegion' instance.

        @param Region: The association is used in the naming hierarchy.
        @param Lines: A Line can be contained by a SubGeographical Region.
        @param Substations: The association is used in the naming hierarchy.
        """
        self._Region = None
        self.Region = Region  # assign via the property so back-references are wired

        self._Lines = []
        self.Lines = [] if Lines is None else Lines

        self._Substations = []
        self.Substations = [] if Substations is None else Substations

        super(SubGeographicalRegion, self).__init__(*args, **kw_args)

    # CIM reflection metadata consumed by the PyCIM serializers.
    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = ["Region", "Lines", "Substations"]
    _many_refs = ["Lines", "Substations"]

    def getRegion(self):
        """The association is used in the naming hierarchy.
        """
        return self._Region

    def setRegion(self, value):
        # Detach from the previous region's collection, then attach to the new one.
        if self._Region is not None:
            filtered = [x for x in self.Region.Regions if x != self]
            self._Region._Regions = filtered

        self._Region = value
        if self._Region is not None:
            if self not in self._Region._Regions:
                self._Region._Regions.append(self)

    Region = property(getRegion, setRegion)

    def getLines(self):
        """A Line can be contained by a SubGeographical Region.
        """
        return self._Lines

    def setLines(self, value):
        # Clear the back-reference on removed lines, set it on the new ones.
        for x in self._Lines:
            x.Region = None
        for y in value:
            y._Region = self
        self._Lines = value

    Lines = property(getLines, setLines)

    def addLines(self, *Lines):
        for obj in Lines:
            obj.Region = self

    def removeLines(self, *Lines):
        for obj in Lines:
            obj.Region = None

    def getSubstations(self):
        """The association is used in the naming hierarchy.
        """
        return self._Substations

    def setSubstations(self, value):
        # Same back-reference maintenance as setLines, for substations.
        for x in self._Substations:
            x.Region = None
        for y in value:
            y._Region = self
        self._Substations = value

    Substations = property(getSubstations, setSubstations)

    def addSubstations(self, *Substations):
        for obj in Substations:
            obj.Region = self

    def removeSubstations(self, *Substations):
        for obj in Substations:
            obj.Region = None
|
[
"rwl@thinker.cable.virginmedia.net"
] |
rwl@thinker.cable.virginmedia.net
|
d53b1fc1e1689725994bab778b7f669f9af08d11
|
bd1362c60313784c90013dfc9f0169e64389bf27
|
/scripts/feature/min_Xhour.py
|
0f41041b31ea7c176d8d0c2e6714c2969c296d22
|
[] |
no_license
|
ForceCry/iem
|
391aa9daf796591909cb9d4e60e27375adfb0eab
|
4b0390d89e6570b99ca83a5fa9b042226e17c1ad
|
refs/heads/master
| 2020-12-24T19:04:55.517409
| 2013-04-09T14:25:36
| 2013-04-09T14:25:36
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 869
|
py
|
# Generate some comparison data between ASOS sites, tricky, me thinks
import iemdb
import datetime
import numpy
import mx.DateTime
# NOTE(review): this is Python 2 code (print statement below).
ASOS = iemdb.connect('asos', bypass=True)
acursor = ASOS.cursor()
acursor.execute("SET TIME ZONE 'GMT'")

# Running record: highest 4-observation minimum wind speed (knots) seen so far.
maxv = 0


def get_data(year, station):
    # Scan hourly observations for `station` in table t<year> and print each
    # time the minimum over a sliding window of 4 values sets a new record.
    global maxv
    data = {}  # unused; left as-is
    # Table name is built by concatenation; `year` comes from the trusted loop
    # below, and the station value is properly parameterized.
    acursor.execute("""SELECT valid, sknt from t"""+year+""" where station = %s
    and (extract(minute from valid) between 50 and 59 or
    extract(minute from valid) = 0)
    and sknt >= 0 ORDER by valid ASC""", (station,
    ))
    vals = [0,0,0,0]
    for row in acursor:
        # Push the newest speed onto the front of the 4-slot window, drop oldest.
        vals.insert(0, row[1] )
        vals.pop()
        if min(vals) >= maxv:
            print vals, min(vals), row[0]
            maxv = min(vals)


station1 = 'DSM'
for year in range(1973,2011):
    get_data(str(year), station1)
|
[
"akrherz@95f8c243-6001-0410-b151-932e6a9ed213"
] |
akrherz@95f8c243-6001-0410-b151-932e6a9ed213
|
48c2c3dca0b6a2b6c85044a00f274533db952693
|
60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24
|
/IronPythonStubs/release/stubs.min/System/Windows/Controls/__init___parts/ContextMenuEventArgs.py
|
ddb3667cf2693b5c400b6c59a3043b012c6b0300
|
[
"MIT"
] |
permissive
|
shnlmn/Rhino-Grasshopper-Scripts
|
a9411098c5d1bbc55feb782def565d535b27b709
|
0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823
|
refs/heads/master
| 2020-04-10T18:59:43.518140
| 2020-04-08T02:49:07
| 2020-04-08T02:49:07
| 161,219,695
| 11
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 479
|
py
|
class ContextMenuEventArgs(RoutedEventArgs):
    """ Provides data for the context menu event. """
    # Auto-generated IronPython stub: the property lambdas are placeholders
    # that only describe the .NET API surface, not real behavior.
    CursorLeft=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the horizontal position of the mouse.

    Get: CursorLeft(self: ContextMenuEventArgs) -> float
    """

    CursorTop=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Gets the vertical position of the mouse.

    Get: CursorTop(self: ContextMenuEventArgs) -> float
    """
|
[
"magnetscoil@gmail.com"
] |
magnetscoil@gmail.com
|
e821be69dbcc904309be14ca117f4bbb2b7155e6
|
45e376ae66b78b17788b1d3575b334b2cb1d0b1c
|
/tests/cloudformation/checks/resource/aws/test_ECRImmutableTags.py
|
2bafbbce0a26573bbd0e9e83dbbd29b4d6be0c56
|
[
"Apache-2.0"
] |
permissive
|
bridgecrewio/checkov
|
aeb8febed2ed90e61d5755f8f9d80b125362644d
|
e64cbd27ffb6f09c2c9f081b45b7a821a3aa1a4d
|
refs/heads/main
| 2023-08-31T06:57:21.990147
| 2023-08-30T23:01:47
| 2023-08-30T23:01:47
| 224,386,599
| 5,929
| 1,056
|
Apache-2.0
| 2023-09-14T20:10:23
| 2019-11-27T08:55:14
|
Python
|
UTF-8
|
Python
| false
| false
| 827
|
py
|
import os
import unittest
from checkov.cloudformation.checks.resource.aws.ECRImmutableTags import check
from checkov.cloudformation.runner import Runner
from checkov.runner_filter import RunnerFilter
class TestECRImmutableTags(unittest.TestCase):
    """Runs the ECR immutable-tags check against the example templates."""

    def test_summary(self):
        # Scan only the example_ECRImmutableTags templates, restricted to this check.
        runner = Runner()
        current_dir = os.path.dirname(os.path.realpath(__file__))
        test_files_dir = current_dir + "/example_ECRImmutableTags"
        report = runner.run(root_folder=test_files_dir,runner_filter=RunnerFilter(checks=[check.id]))
        summary = report.get_summary()
        # Fixture expectations: 1 compliant and 2 non-compliant resources.
        self.assertEqual(summary['passed'], 1)
        self.assertEqual(summary['failed'], 2)
        self.assertEqual(summary['skipped'], 0)
        self.assertEqual(summary['parsing_errors'], 0)


if __name__ == '__main__':
    unittest.main()
|
[
"noreply@github.com"
] |
bridgecrewio.noreply@github.com
|
1152ab09724194cae4e2fab10d422c80f3789189
|
57265c1c743f5da6778d5c065e03be93d4f0c93f
|
/djkombu/tests/testproj/manage.py
|
b9066fff599f1c1260d7622099fa544098000b78
|
[
"BSD-3-Clause"
] |
permissive
|
barseghyanartur/django-kombu
|
fb63dab46cce7048f50c5131a8edde98f0734c5e
|
0f7dbdbd153e7a6d9971dfbb030433a6a85dd984
|
refs/heads/master
| 2021-01-23T04:59:18.617326
| 2017-06-02T11:51:07
| 2017-06-02T11:51:07
| 92,947,716
| 0
| 0
| null | 2017-05-31T13:21:10
| 2017-05-31T13:21:10
| null |
UTF-8
|
Python
| false
| false
| 320
|
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default to the test project's settings unless the caller overrides them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproj.settings")
    # Make the in-repo package importable without installing it.
    sys.path.insert(0, os.path.join(os.getcwd(), '..', '..', '..'))
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
[
"artur.barseghyan@gmail.com"
] |
artur.barseghyan@gmail.com
|
4530e7da967992e4e873d204c25802ea30dd670f
|
9afb5742e08add8800ad2086ecddd74f017ac9a5
|
/tests/test_errors.py
|
2c27177c209173f9920701ae351953c2f5064ff8
|
[
"BSD-2-Clause"
] |
permissive
|
blockdiag/sphinxcontrib-actdiag
|
e7fac2739b7aef862f6b0dbea69548ec51960df9
|
8b7ec29b310e718c4510a99fd22c624adc5b19bf
|
refs/heads/master
| 2023-04-10T07:36:45.862708
| 2021-12-05T14:37:35
| 2021-12-05T14:37:35
| 34,159,673
| 1
| 2
|
NOASSERTION
| 2023-03-18T23:32:50
| 2015-04-18T09:11:37
|
Python
|
UTF-8
|
Python
| false
| false
| 1,992
|
py
|
# -*- coding: utf-8 -*-
from mock import patch
from sphinx_testing import with_app
import sys
import unittest
class TestSphinxcontribActdiagErrors(unittest.TestCase):
    """Error-path tests: each Sphinx build should emit the expected warning."""

    @with_app(srcdir='tests/docs/basic', write_docstring=True)
    def test_parse_error(self, app, status, warning):
        """
        .. actdiag::

           { A -> B;
        """
        # write_docstring=True injects the (deliberately malformed) docstring
        # above into the test document; building it must report a parse error.
        app.builder.build_all()
        self.assertIn('got unexpected token:', warning.getvalue())

    @with_app(srcdir='tests/docs/basic', confoverrides=dict(actdiag_html_image_format='JPG'))
    def test_unknown_format_error(self, app, status, warning):
        # JPG is not a supported output format for actdiag images.
        app.builder.build_all()
        self.assertIn('unknown format: JPG', warning.getvalue())

    @with_app(srcdir='tests/docs/basic', confoverrides=dict(actdiag_html_image_format='PDF'))
    def test_reportlab_not_found_error(self, app, status, warning):
        try:
            # unload reportlab and make loading it impossible
            sys.modules.pop('reportlab', None)
            path = sys.path
            sys.path = []
            app.builder.build_all()
            self.assertIn('Could not output PDF format. Install reportlab.',
                          warning.getvalue())
        finally:
            # Always restore sys.path, even if the assertion fails.
            sys.path = path

    @with_app(srcdir='tests/docs/basic')
    @patch("actdiag.utils.rst.nodes.actdiag.processor.drawer.DiagramDraw")
    def test_rendering_error(self, app, status, warning, DiagramDraw):
        # Any exception raised while drawing must surface as a build warning.
        DiagramDraw.side_effect = RuntimeError("UNKNOWN ERROR!")
        app.builder.build_all()
        self.assertIn('UNKNOWN ERROR!', warning.getvalue())

    @with_app(srcdir='tests/docs/basic')
    @patch("sphinxcontrib.actdiag.actdiag.drawer.DiagramDraw.draw")
    def test_font_settings_error(self, app, status, warning, draw):
        # Encoding failures during draw are reported as font-setting problems.
        draw.side_effect = UnicodeEncodeError("", "", 0, 0, "")
        app.builder.build_all()
        self.assertIn('UnicodeEncodeError caught (check your font settings)',
                      warning.getvalue())
|
[
"i.tkomiya@gmail.com"
] |
i.tkomiya@gmail.com
|
da144278f9b5122abe6a2ada6e8b937379d84335
|
9e643d565e38de1728eabf31304e7dcbdf3ebfdd
|
/Python/Django/manyToMany/apps/manyToManyApp/migrations/0001_initial.py
|
522b5d14fb92bd5b6297d49a27747de163be6a68
|
[] |
no_license
|
joeyzoland/DojoAssignments
|
88dca37ad1d5b585a4af1dabc49935ef34adf6a0
|
0cae15aa448c490af931b41939638456456cef63
|
refs/heads/master
| 2021-01-11T17:55:13.775179
| 2018-09-17T07:32:12
| 2018-09-17T07:32:12
| 79,875,553
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,308
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-23 16:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: Interest and User tables plus their M2M join table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Interest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        # Added after both models exist so the M2M reference can resolve.
        migrations.AddField(
            model_name='interest',
            name='users',
            field=models.ManyToManyField(related_name='interests', to='manyToManyApp.User'),
        ),
    ]
|
[
"joeyzoland@gmail.com"
] |
joeyzoland@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.